From c154e64a1956f33a68d1664230f528e60d9a66fb Mon Sep 17 00:00:00 2001
From: Miroslav Stampar
Date: Tue, 4 Jun 2019 10:55:07 +0200
Subject: [PATCH] Fixes #3737

---
 lib/core/settings.py |  2 +-
 lib/utils/search.py  | 16 ++++++++++------
 2 files changed, 11 insertions(+), 7 deletions(-)

diff --git a/lib/core/settings.py b/lib/core/settings.py
index 93fff3647..1c4737330 100644
--- a/lib/core/settings.py
+++ b/lib/core/settings.py
@@ -18,7 +18,7 @@ from lib.core.enums import OS
 from thirdparty.six import unichr as _unichr
 
 # sqlmap version (...)
-VERSION = "1.3.6.8"
+VERSION = "1.3.6.9"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
diff --git a/lib/utils/search.py b/lib/utils/search.py
index 3a20be03a..e14540c21 100644
--- a/lib/utils/search.py
+++ b/lib/utils/search.py
@@ -44,14 +44,16 @@ def _search(dork):
     if not dork:
         return None
 
+    page = None
     data = None
-    headers = {}
+    requestHeaders = {}
+    responseHeaders = {}
 
-    headers[HTTP_HEADER.USER_AGENT] = dict(conf.httpHeaders).get(HTTP_HEADER.USER_AGENT, DUMMY_SEARCH_USER_AGENT)
-    headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE
+    requestHeaders[HTTP_HEADER.USER_AGENT] = dict(conf.httpHeaders).get(HTTP_HEADER.USER_AGENT, DUMMY_SEARCH_USER_AGENT)
+    requestHeaders[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE
 
     try:
-        req = _urllib.request.Request("https://www.google.com/ncr", headers=headers)
+        req = _urllib.request.Request("https://www.google.com/ncr", headers=requestHeaders)
         conn = _urllib.request.urlopen(req)
     except Exception as ex:
         errMsg = "unable to connect to Google ('%s')" % getSafeExString(ex)
@@ -66,7 +68,7 @@ def _search(dork):
         url += "&start=%d" % ((gpage - 1) * 100)
 
     try:
-        req = _urllib.request.Request(url, headers=headers)
+        req = _urllib.request.Request(url, headers=requestHeaders)
         conn = _urllib.request.urlopen(req)
 
         requestMsg = "HTTP request:\nGET %s" % url
@@ -77,7 +79,6 @@ def _search(dork):
         code = conn.code
         status = conn.msg
         responseHeaders = conn.info()
-        page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))
 
         responseMsg = "HTTP response (%s - %d):\n" % (status, code)
 
@@ -90,6 +91,7 @@ def _search(dork):
     except _urllib.error.HTTPError as ex:
         try:
             page = ex.read()
+            responseHeaders = ex.info()
         except Exception as _:
             warnMsg = "problem occurred while trying to get "
             warnMsg += "an error page information (%s)" % getSafeExString(_)
@@ -99,6 +101,8 @@ def _search(dork):
         errMsg = "unable to connect to Google"
         raise SqlmapConnectionException(errMsg)
 
+    page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
+
     retVal = [_urllib.parse.unquote(match.group(1) or match.group(2)) for match in re.finditer(GOOGLE_REGEX, page, re.I)]
 
     if not retVal and "detected unusual traffic" in page:
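
Note on the pattern this patch applies: the decodePage() call is moved out of the try block and the response headers are also captured from the HTTPError object, so that error pages returned by Google (for example a gzip-compressed "unusual traffic" page) are decompressed before the dork regex is applied. Below is a minimal standalone sketch of the same pattern using plain urllib; the helper names fetch_page/decode_page are illustrative only and are not sqlmap's API, and decode_page is a simplified stand-in that handles only gzip.

    import gzip
    import io
    import urllib.error
    import urllib.request

    def decode_page(page, content_encoding, content_type):
        # Simplified stand-in for sqlmap's decodePage(): gunzip if needed,
        # then decode using the charset advertised in Content-Type
        if page and content_encoding and "gzip" in content_encoding.lower():
            page = gzip.GzipFile(fileobj=io.BytesIO(page)).read()
        charset = "utf-8"
        if content_type and "charset=" in content_type:
            charset = content_type.split("charset=")[-1].strip()
        return page.decode(charset, errors="replace") if isinstance(page, bytes) else page

    def fetch_page(url, request_headers):
        page = None
        response_headers = {}

        try:
            req = urllib.request.Request(url, headers=request_headers)
            conn = urllib.request.urlopen(req)
            page = conn.read()
            response_headers = conn.info()
        except urllib.error.HTTPError as ex:
            # An error response (e.g. HTTP 429) still carries a body and headers
            page = ex.read()
            response_headers = ex.info()

        # Decoding happens after the try/except, so success pages and error
        # pages alike are decompressed/decoded with the headers actually received
        return decode_page(page,
                           response_headers.get("Content-Encoding"),
                           response_headers.get("Content-Type"))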