Fix for a never-ending data retrieval in large full inband cases

This commit is contained in:
Miroslav Stampar 2011-07-29 10:45:09 +00:00
parent 4ce93221d1
commit e522263640
2 changed files with 4 additions and 1 deletion

View File

@ -51,6 +51,7 @@ from lib.core.data import logger
from lib.core.data import paths
from lib.core.data import queries
from lib.core.convert import htmlunescape
from lib.core.convert import safecharencode
from lib.core.convert import urldecode
from lib.core.convert import urlencode
from lib.core.enums import DBMS
@ -1361,6 +1362,8 @@ def parseUnionPage(output, expression, partial=False, condition=None, sort=True)
output = dict_.values()
for entry in output:
entry = safecharencode(entry) if kb.safeCharEncode else entry
info = []
if DUMP_DEL_MARKER in entry:

View File

@ -97,7 +97,7 @@ def __oneShotUnionUse(expression, unpack=True):
warnMsg += "issues)"
singleTimeWarnMessage(warnMsg)
return safecharencode(output) if kb.safeCharEncode else output
def configUnion(char=None, columns=None):
def __configUnionChar(char):