Another patch regarding #4530

Author: Miroslav Stampar
Date:   2021-01-07 14:20:03 +01:00
Commit: 1f39dbd06d (parent ccf9e7de54)

3 changed files with 9 additions and 6 deletions

lib/core/settings.py

@@ -18,7 +18,7 @@ from lib.core.enums import OS
 from thirdparty.six import unichr as _unichr
 
 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.5.1.16"
+VERSION = "1.5.1.17"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
@@ -757,8 +757,8 @@ MAX_CONNECTION_TOTAL_SIZE = 100 * 1024 * 1024
 # For preventing MemoryError exceptions (caused when using large sequences in difflib.SequenceMatcher)
 MAX_DIFFLIB_SEQUENCE_LENGTH = 10 * 1024 * 1024
 
-# Maximum size used per page content in getHeuristicCharEncoding() and identYwaf
-MAX_CHAR_HEURISTICS_SIZE = 10000
+# Page size threshold used in heuristic checks (e.g. getHeuristicCharEncoding(), identYwaf, htmlParser, etc.)
+HEURISTIC_PAGE_SIZE_THRESHOLD = 64 * 1024
 
 # Maximum (multi-threaded) length of entry in bisection algorithm
 MAX_BISECTION_LENGTH = 50 * 1024 * 1024
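
For readers skimming the diff, a minimal sketch of what the renamed setting is for (not sqlmap code; run_heuristics() is a hypothetical stand-in for consumers such as getHeuristicCharEncoding(), identYwaf and htmlParser): page content is sliced to at most HEURISTIC_PAGE_SIZE_THRESHOLD bytes before any heuristic work, so the old 10,000-character cap becomes a uniform 64 KiB cap.

# Illustrative sketch only -- run_heuristics() is a hypothetical helper, not part of this commit.
HEURISTIC_PAGE_SIZE_THRESHOLD = 64 * 1024  # 64 KiB

def run_heuristics(page):
    # Slice once, up front, so every downstream check sees a bounded input
    sample = page[:HEURISTIC_PAGE_SIZE_THRESHOLD]
    return len(sample)

print(run_heuristics(b"A" * (1024 * 1024)))  # prints 65536 for a 1 MiB page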

lib/parse/html.py

@@ -13,6 +13,7 @@ from lib.core.common import urldecode
 from lib.core.common import parseXmlFile
 from lib.core.data import kb
 from lib.core.data import paths
+from lib.core.settings import HEURISTIC_PAGE_SIZE_THRESHOLD
 from lib.core.threads import getCurrentThreadData
 
 class HTMLHandler(ContentHandler):
@@ -69,6 +70,8 @@ def htmlParser(page):
     >>> threadData.lastErrorPage = None
     """
 
+    page = page[:HEURISTIC_PAGE_SIZE_THRESHOLD]
+
     xmlfile = paths.ERRORS_XML
     handler = HTMLHandler(page)
     key = hash(page)

lib/request/basic.py

@@ -43,8 +43,8 @@ from lib.core.exception import SqlmapCompressionException
 from lib.core.settings import BLOCKED_IP_REGEX
 from lib.core.settings import DEFAULT_COOKIE_DELIMITER
 from lib.core.settings import EVENTVALIDATION_REGEX
+from lib.core.settings import HEURISTIC_PAGE_SIZE_THRESHOLD
 from lib.core.settings import IDENTYWAF_PARSE_LIMIT
-from lib.core.settings import MAX_CHAR_HEURISTICS_SIZE
 from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
 from lib.core.settings import META_CHARSET_REGEX
 from lib.core.settings import PARSE_HEADERS_LIMIT
@@ -259,7 +259,7 @@ def getHeuristicCharEncoding(page):
     """
 
     key = hash(page)
-    retVal = kb.cache.encoding.get(key) or detect(page[:MAX_CHAR_HEURISTICS_SIZE])["encoding"]
+    retVal = kb.cache.encoding.get(key) or detect(page[:HEURISTIC_PAGE_SIZE_THRESHOLD])["encoding"]
     kb.cache.encoding[key] = retVal
 
     if retVal and retVal.lower().replace('-', "") == UNICODE_ENCODING.lower().replace('-', ""):
@@ -396,7 +396,7 @@ def processResponse(page, responseHeaders, code=None, status=None):
             logger.warning("parsed DBMS error message: '%s'" % msg.rstrip('.'))
 
     if not conf.skipWaf and kb.processResponseCounter < IDENTYWAF_PARSE_LIMIT:
-        rawResponse = "%s %s %s\n%s\n%s" % (_http_client.HTTPConnection._http_vsn_str, code or "", status or "", "".join(getUnicode(responseHeaders.headers if responseHeaders else [])), page[:MAX_CHAR_HEURISTICS_SIZE])
+        rawResponse = "%s %s %s\n%s\n%s" % (_http_client.HTTPConnection._http_vsn_str, code or "", status or "", "".join(getUnicode(responseHeaders.headers if responseHeaders else [])), page[:HEURISTIC_PAGE_SIZE_THRESHOLD])
 
         identYwaf.non_blind.clear()
         if identYwaf.non_blind_check(rawResponse, silent=True):
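
As a rough standalone illustration of the getHeuristicCharEncoding() pattern above (assuming the PyPI chardet package; sqlmap itself imports detect() from its bundled copy under thirdparty), encoding detection now runs over at most the first 64 KiB of the response body:

# Standalone sketch, assuming "pip install chardet"; function name is illustrative.
from chardet import detect

HEURISTIC_PAGE_SIZE_THRESHOLD = 64 * 1024

def heuristic_char_encoding(page):
    # Only the first 64 KiB is inspected, keeping detection cheap on large pages
    return detect(page[:HEURISTIC_PAGE_SIZE_THRESHOLD])["encoding"]

print(heuristic_char_encoding("<html>žluťoučký kůň</html>".encode("utf-8")))  # e.g. 'utf-8'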