Minor patch for crawling

Miroslav Stampar 2019-11-12 22:51:11 +01:00
parent e58d68c203
commit f382443ddd
4 changed files with 8 additions and 7 deletions

lib/core/common.py

@@ -2762,7 +2762,7 @@ def findMultipartPostBoundary(post):
     return retVal
 
-def urldecode(value, encoding=None, unsafe="%%&=;+%s" % CUSTOM_INJECTION_MARK_CHAR, convall=False, spaceplus=True):
+def urldecode(value, encoding=None, unsafe="%%?&=;+%s" % CUSTOM_INJECTION_MARK_CHAR, convall=False, spaceplus=True):
     """
     URL decodes given value

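Context for the change: `urldecode` keeps characters from the `unsafe` set percent-encoded while decoding everything else. Adding `?` to that default set means a `%3F` inside a crawled path no longer turns into a literal `?` that would be misparsed as the start of a query string. A minimal runnable sketch of that idea (hypothetical helper, not sqlmap's full implementation):

import re
import string

def urldecode_sketch(value, unsafe="%?&=;+"):
    # Decode %XX only when the resulting character is NOT in `unsafe`,
    # so characters that would change URL semantics stay percent-encoded.
    safe = set(string.printable) - set(unsafe)
    def _decode(match):
        char = chr(int(match.group(1), 16))
        return char if char in safe else match.group(0)
    return re.sub(r"%([0-9a-fA-F]{2})", _decode, value)

# "?" is now in the unsafe set, so a crawled link keeps its encoded
# question mark instead of growing a bogus query-string separator:
print(urldecode_sketch("/download%2Ephp%3Fid%3D1"))  # /download.php%3Fid%3D1
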
lib/core/settings.py

@@ -18,7 +18,7 @@ from lib.core.enums import OS
 from thirdparty.six import unichr as _unichr
 
 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.3.11.32"
+VERSION = "1.3.11.33"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)

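For reference, a worked example of how the bumped value flows through the context lines above (the alternate version value is illustrative):

VERSION = "1.3.11.33"
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
print(TYPE, VERSION_STRING)  # dev sqlmap/1.3.11.33#dev
# A "1.3.11.0" value would instead yield: stable sqlmap/1.3.11#stable
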
lib/request/basic.py

@@ -267,7 +267,7 @@ def getHeuristicCharEncoding(page):
     return retVal
 
-def decodePage(page, contentEncoding, contentType):
+def decodePage(page, contentEncoding, contentType, percentDecode=True):
     """
     Decode compressed/charset HTTP response
@@ -340,8 +340,9 @@ def decodePage(page, contentEncoding, contentType):
         page = re.sub(b"&#(\\d{1,3});", lambda _: six.int2byte(int(_.group(1))) if int(_.group(1)) < 256 else _.group(0), page)
 
         # e.g. %20%28%29
-        if b"%" in page:
-            page = re.sub(b"%([0-9a-fA-F]{2})", lambda _: decodeHex(_.group(1)), page)
+        if percentDecode:
+            if b"%" in page:
+                page = re.sub(b"%([0-9a-fA-F]{2})", lambda _: decodeHex(_.group(1)), page)
 
         # e.g. &amp;
         page = re.sub(b"&([^;]+);", lambda _: six.int2byte(HTML_ENTITIES[getText(_.group(1))]) if HTML_ENTITIES.get(getText(_.group(1)), 256) < 256 else _.group(0), page)

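Why the gate matters: percent-decoding a raw response body rewrites the very %XX sequences that crawled links need to keep verbatim, e.g. %26 inside a parameter value becomes a literal & and splits one parameter into two on the next request. A runnable sketch of the gated block (the inline byte conversion stands in for sqlmap's decodeHex helper):

import re

def percent_decode(page, percentDecode=True):
    # Mirrors the gated block above: rewrite %XX pairs to raw bytes only
    # when percentDecode is enabled (i.e. when we are not crawling).
    if percentDecode:
        if b"%" in page:
            page = re.sub(b"%([0-9a-fA-F]{2})", lambda _: bytes([int(_.group(1), 16)]), page)
    return page

html = b'<a href="/get.php?f=a%26b">x</a>'
print(percent_decode(html, percentDecode=True))   # b'...f=a&b...'   - link corrupted for re-requesting
print(percent_decode(html, percentDecode=False))  # b'...f=a%26b...' - crawler sees the exact URL
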
lib/request/connect.py

@@ -550,7 +550,7 @@ class Connect(object):
                 code = None
                 responseHeaders = {}
 
-            page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
+            page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE), percentDecode=not crawling)
 
             status = getUnicode(conn.msg) if conn and getattr(conn, "msg", None) else None
             kb.connErrorCounter = 0
@@ -628,7 +628,7 @@ class Connect(object):
                 responseHeaders = ex.info()
                 responseHeaders[URI_HTTP_HEADER] = ex.geturl()
                 patchHeaders(responseHeaders)
-                page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
+                page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE), percentDecode=not crawling)
             except socket.timeout:
                 warnMsg = "connection timed out while trying "
                 warnMsg += "to get error page information (%d)" % ex.code