mirror of https://github.com/sqlmapproject/sqlmap.git
reading page responses in chunks, trimming unnecessary content (especially for large table dumps in full inband cases)
commit b2afa87e48
parent 2223c884e5
lib/core/settings.py:

@@ -475,3 +475,9 @@ MAX_TOTAL_REDIRECTIONS = 10
 
 # Reference: http://www.tcpipguide.com/free/t_DNSLabelsNamesandSyntaxRules.htm
 MAX_DNS_LABEL = 63
+
+# Connection chunk size (processing large responses in chunks to avoid MemoryError crashes - e.g. large table dump in full UNION/inband injections)
+MAX_CONNECTION_CHUNK_SIZE = 10 * 1024 * 1024
+
+# Mark used for trimming unnecessary content in large chunks
+LARGE_CHUNK_TRIM_MARKER = "__TRIMMED_CONTENT__"
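A quick illustration of what the new marker is for (a minimal sketch, not sqlmap code: START, STOP and TRIM_MARKER are stand-ins for kb.chars.start, kb.chars.stop and LARGE_CHUNK_TRIM_MARKER). In a full UNION/inband dump every extracted value arrives wrapped in randomized start/stop markers, so the page content between one value's stop marker and the next value's start marker can be collapsed without losing anything:

import re

START, STOP = "sTaRt", "sToP"        # stand-ins for kb.chars.start/stop
TRIM_MARKER = "__TRIMMED_CONTENT__"  # stand-in for LARGE_CHUNK_TRIM_MARKER

chunk = "<td>%sfoo%s</td><td>%sbar%s</td>" % (START, STOP, START, STOP)

# same pattern as in the commit: collapse every "stop ... start" span,
# keeping the extracted values ("foo", "bar") untouched
trimmed = re.sub(r"(?si)%s.+?%s" % (STOP, START),
                 "%s%s%s" % (STOP, TRIM_MARKER, START), chunk)

print(trimmed)  # <td>sTaRtfoosToP__TRIMMED_CONTENT__sTaRtbarsToP</td>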
lib/request/connect.py:

@@ -52,12 +52,14 @@ from lib.core.exception import sqlmapConnectionException
 from lib.core.exception import sqlmapSyntaxException
 from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE
 from lib.core.settings import HTTP_SILENT_TIMEOUT
+from lib.core.settings import MAX_CONNECTION_CHUNK_SIZE
 from lib.core.settings import META_REFRESH_REGEX
-from lib.core.settings import IS_WIN
 from lib.core.settings import MIN_TIME_RESPONSES
-from lib.core.settings import WARN_TIME_STDEV
+from lib.core.settings import IS_WIN
+from lib.core.settings import LARGE_CHUNK_TRIM_MARKER
 from lib.core.settings import UNENCODED_ORIGINAL_VALUE
 from lib.core.settings import URI_HTTP_HEADER
+from lib.core.settings import WARN_TIME_STDEV
 from lib.request.basic import decodePage
 from lib.request.basic import forgeHeaders
 from lib.request.basic import processResponse
@@ -117,6 +119,21 @@ class Connect:
         kwargs['retrying'] = True
         return Connect.__getPageProxy(**kwargs)
 
+    @staticmethod
+    def __connReadProxy(conn):
+        retVal = ""
+        while True:
+            _ = conn.read(MAX_CONNECTION_CHUNK_SIZE)
+            if len(_) == MAX_CONNECTION_CHUNK_SIZE:
+                warnMsg = "large response detected. This could take a while"
+                singleTimeWarnMessage(warnMsg)
+                _ = re.sub(r"(?si)%s.+?%s" % (kb.chars.stop, kb.chars.start), "%s%s%s" % (kb.chars.stop, LARGE_CHUNK_TRIM_MARKER, kb.chars.start), _)
+                retVal += _
+            else:
+                retVal += _
+                break
+        return retVal
+
     @staticmethod
     def getPage(**kwargs):
         """
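For reference, a standalone, runnable sketch of the loop above (hypothetical read_in_chunks name; the chunk size is shrunk from the real 10 MiB so the chunked path is easy to exercise on toy input):

import io

CHUNK_SIZE = 8  # stand-in for MAX_CONNECTION_CHUNK_SIZE (10 * 1024 * 1024)

def read_in_chunks(conn):
    ret_val = ""
    while True:
        chunk = conn.read(CHUNK_SIZE)
        if len(chunk) == CHUNK_SIZE:
            # full-sized chunk: more data is likely pending; this is where
            # the real code warns once and trims "stop ... start" spans
            ret_val += chunk
        else:
            # short (or empty) read: the response is exhausted
            ret_val += chunk
            break
    return ret_val

assert read_in_chunks(io.StringIO(u"x" * 20)) == u"x" * 20

The short read is what ends the loop. Note also that because trimming happens per chunk, a stop/start span that straddles a chunk boundary is left untrimmed, an accepted trade-off for keeping peak memory bounded.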
@@ -205,7 +222,7 @@ class Connect:
 
             multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
             conn = multipartOpener.open(unicodeencode(url), multipart)
-            page = conn.read()
+            page = Connect.__connReadProxy(conn)
             responseHeaders = conn.info()
             responseHeaders[URI_HTTP_HEADER] = conn.geturl()
             page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
@@ -306,11 +323,11 @@ class Connect:
             # Get HTTP response
             if hasattr(conn, 'redurl'):
                 page = threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\
-                        else conn.read()
+                        else Connect.__connReadProxy(conn)
                 skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
                 code = conn.redcode
             else:
-                page = conn.read()
+                page = Connect.__connReadProxy(conn)
 
             code = code or conn.code
             responseHeaders = conn.info()
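The remaining call sites are mechanical swaps of conn.read() for the proxy. The redirect branch, in miniature (hypothetical names, reusing read_in_chunks from the sketch above):

def response_body(conn, follow_redirect, cached_redirect_page):
    # a redirect was seen (conn.redurl) but the user chose not to follow
    # it (REDIRECTION.NO): reuse the body captured earlier
    # (threadData.lastRedirectMsg[1]) instead of reading the wire again
    if hasattr(conn, "redurl") and not follow_redirect:
        return cached_redirect_page
    # every other path now reads in bounded-size chunks
    return read_in_chunks(conn)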