From 468eed8532a87d1e925c688b60c132c6b0a4f2d6 Mon Sep 17 00:00:00 2001
From: Miroslav Stampar
Date: Thu, 13 Jun 2019 10:58:21 +0200
Subject: [PATCH] Fixes #3753

---
 lib/core/settings.py           |  8 ++++----
 lib/request/connect.py         | 23 +++++++++++++----------
 lib/request/redirecthandler.py |  4 ++--
 3 files changed, 19 insertions(+), 16 deletions(-)

diff --git a/lib/core/settings.py b/lib/core/settings.py
index dba74647d..78cb4d00f 100644
--- a/lib/core/settings.py
+++ b/lib/core/settings.py
@@ -18,7 +18,7 @@ from lib.core.enums import OS
 from thirdparty.six import unichr as _unichr
 
 # sqlmap version (...)
-VERSION = "1.3.6.38"
+VERSION = "1.3.6.39"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
@@ -678,8 +678,8 @@ FI_ERROR_REGEX = r"(?i)[^\n]{0,100}(no such file|failed (to )?open)[^\n]{0,100}"
 # Length of prefix and suffix used in non-SQLI heuristic checks
 NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH = 6
 
-# Connection chunk size (processing large responses in chunks to avoid MemoryError crashes - e.g. large table dump in full UNION injections)
-MAX_CONNECTION_CHUNK_SIZE = 10 * 1024 * 1024
+# Connection read size (processing large responses in parts to avoid MemoryError crashes - e.g. large table dump in full UNION injections)
+MAX_CONNECTION_READ_SIZE = 10 * 1024 * 1024
 
 # Maximum response total page size (trimmed if larger)
 MAX_CONNECTION_TOTAL_SIZE = 100 * 1024 * 1024
@@ -690,7 +690,7 @@ MAX_DIFFLIB_SEQUENCE_LENGTH = 10 * 1024 * 1024
 # Maximum (multi-threaded) length of entry in bisection algorithm
 MAX_BISECTION_LENGTH = 50 * 1024 * 1024
 
-# Mark used for trimming unnecessary content in large chunks
+# Mark used for trimming unnecessary content in large connection reads
 LARGE_CHUNK_TRIM_MARKER = "__TRIMMED_CONTENT__"
 
 # Generic SQL comment formation
diff --git a/lib/request/connect.py b/lib/request/connect.py
index 481599c34..289c931d8 100644
--- a/lib/request/connect.py
+++ b/lib/request/connect.py
@@ -97,7 +97,7 @@ from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE
 from lib.core.settings import IPS_WAF_CHECK_PAYLOAD
 from lib.core.settings import IS_WIN
 from lib.core.settings import LARGE_CHUNK_TRIM_MARKER
-from lib.core.settings import MAX_CONNECTION_CHUNK_SIZE
+from lib.core.settings import MAX_CONNECTION_READ_SIZE
 from lib.core.settings import MAX_CONNECTIONS_REGEX
 from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
 from lib.core.settings import MAX_CONSECUTIVE_CONNECTION_ERRORS
@@ -211,15 +211,18 @@ class Connect(object):
                 if not conn:
                     break
                 else:
-                    _ = conn.read(MAX_CONNECTION_CHUNK_SIZE)
+                    try:
+                        part = conn.read(MAX_CONNECTION_READ_SIZE)
+                    except AssertionError:
+                        part = ""
 
-                    if len(_) == MAX_CONNECTION_CHUNK_SIZE:
+                    if len(part) == MAX_CONNECTION_READ_SIZE:
                         warnMsg = "large response detected. This could take a while"
                         singleTimeWarnMessage(warnMsg)
-                        _ = re.sub(r"(?si)%s.+?%s" % (kb.chars.stop, kb.chars.start), "%s%s%s" % (kb.chars.stop, LARGE_CHUNK_TRIM_MARKER, kb.chars.start), _)
-                        retVal += _
+                        part = re.sub(r"(?si)%s.+?%s" % (kb.chars.stop, kb.chars.start), "%s%s%s" % (kb.chars.stop, LARGE_CHUNK_TRIM_MARKER, kb.chars.start), part)
+                        retVal += part
                     else:
-                        retVal += _
+                        retVal += part
                         break
 
                 if len(retVal) > MAX_CONNECTION_TOTAL_SIZE:
@@ -631,14 +634,14 @@ class Connect(object):
             if responseHeaders:
                 logHeaders = getUnicode("".join(responseHeaders.headers).strip())
 
-            logHTTPTraffic(requestMsg, "%s%s\r\n\r\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]), start, time.time())
+            logHTTPTraffic(requestMsg, "%s%s\r\n\r\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE]), start, time.time())
 
             skipLogTraffic = True
 
             if conf.verbose <= 5:
                 responseMsg += getUnicode(logHeaders)
             elif conf.verbose > 5:
-                responseMsg += "%s\r\n\r\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])
+                responseMsg += "%s\r\n\r\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE])
 
             if not multipart:
                 logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
@@ -815,12 +818,12 @@ class Connect(object):
         if responseHeaders:
             logHeaders = getUnicode("".join(responseHeaders.headers).strip())
 
-        logHTTPTraffic(requestMsg, "%s%s\r\n\r\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]), start, time.time())
+        logHTTPTraffic(requestMsg, "%s%s\r\n\r\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE]), start, time.time())
 
         if conf.verbose <= 5:
             responseMsg += getUnicode(logHeaders)
         elif conf.verbose > 5:
-            responseMsg += "%s\r\n\r\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])
+            responseMsg += "%s\r\n\r\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE])
 
         if not multipart:
             logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
diff --git a/lib/request/redirecthandler.py b/lib/request/redirecthandler.py
index c85771d64..9eae57afc 100644
--- a/lib/request/redirecthandler.py
+++ b/lib/request/redirecthandler.py
@@ -23,7 +23,7 @@ from lib.core.enums import HTTPMETHOD
 from lib.core.enums import REDIRECTION
 from lib.core.exception import SqlmapConnectionException
 from lib.core.settings import DEFAULT_COOKIE_DELIMITER
-from lib.core.settings import MAX_CONNECTION_CHUNK_SIZE
+from lib.core.settings import MAX_CONNECTION_READ_SIZE
 from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
 from lib.core.settings import MAX_SINGLE_URL_REDIRECTIONS
 from lib.core.settings import MAX_TOTAL_REDIRECTIONS
@@ -101,7 +101,7 @@ class SmartRedirectHandler(_urllib.request.HTTPRedirectHandler):
             redirectMsg += logHeaders
 
             if content:
-                redirectMsg += "\r\n\r\n%s" % getUnicode(content[:MAX_CONNECTION_CHUNK_SIZE])
+                redirectMsg += "\r\n\r\n%s" % getUnicode(content[:MAX_CONNECTION_READ_SIZE])
 
             logHTTPTraffic(threadData.lastRequestMsg, redirectMsg, start, time.time())
             logger.log(CUSTOM_LOGGING.TRAFFIC_IN, redirectMsg)
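
For context, a minimal standalone sketch (not sqlmap's actual helper) of the read loop this patch changes: the response body is consumed in parts of MAX_CONNECTION_READ_SIZE bytes, an AssertionError raised from inside conn.read() is handled instead of propagating, and content between the injection markers is trimmed so that very large responses do not exhaust memory. The function name read_response() and the STOP_MARKER/START_MARKER placeholders (standing in for kb.chars.stop/kb.chars.start) are illustrative assumptions, and the read/total sizes are made parameters only so the demo below can exercise the trimming branch.

    import io
    import re

    MAX_CONNECTION_READ_SIZE = 10 * 1024 * 1024     # read the body in parts of this size
    MAX_CONNECTION_TOTAL_SIZE = 100 * 1024 * 1024   # hard cap on the retained page size
    LARGE_CHUNK_TRIM_MARKER = b"__TRIMMED_CONTENT__"

    STOP_MARKER = b"__STOP__"    # stands in for kb.chars.stop (hypothetical value)
    START_MARKER = b"__START__"  # stands in for kb.chars.start (hypothetical value)

    def read_response(conn, read_size=MAX_CONNECTION_READ_SIZE, total_size=MAX_CONNECTION_TOTAL_SIZE):
        """Read a file-like response object in parts, mirroring the patched loop."""
        retVal = b""

        while True:
            try:
                part = conn.read(read_size)
            except AssertionError:
                # Truncated or malformed responses can make the underlying reader
                # assert; treat that the same as end of data and keep whatever was
                # already accumulated in retVal
                part = b""

            if len(part) == read_size:
                # A full-size read means more data is likely pending, so trim the
                # uninteresting content between the injection markers
                part = re.sub(b"(?si)" + re.escape(STOP_MARKER) + b".+?" + re.escape(START_MARKER),
                              STOP_MARKER + LARGE_CHUNK_TRIM_MARKER + START_MARKER, part)
                retVal += part
            else:
                retVal += part
                break

            if len(retVal) > total_size:
                break

        return retVal

    if __name__ == "__main__":
        body = b"head __STOP__" + b"x" * 10 + b"__START__ tail"
        # read_size=32 makes the first read exactly full-size, so the marker span gets trimmed
        print(read_response(io.BytesIO(body), read_size=32))

Because only the conn.read() call is wrapped, data gathered in earlier iterations is kept; the short (empty) part simply ends the loop the same way a normal final read would.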