diff --git a/lib/controller/checks.py b/lib/controller/checks.py
index db8e97ba0..55b00bd74 100644
--- a/lib/controller/checks.py
+++ b/lib/controller/checks.py
@@ -1131,6 +1131,14 @@ def checkNullConnection():
                 infoMsg = "NULL connection is supported with GET header "
                 infoMsg += "'%s'" % kb.nullConnection
                 logger.info(infoMsg)
+            else:
+                _, headers, _ = Request.getPage(skipRead = True)
+
+                if HTTP_HEADER.CONTENT_LENGTH in (headers or {}):
+                    kb.nullConnection = NULLCONNECTION.SKIP_READ
+
+                    infoMsg = "NULL connection is supported with 'skip-read' method"
+                    logger.info(infoMsg)
 
     except SqlmapConnectionException, errMsg:
         errMsg = getUnicode(errMsg)
diff --git a/lib/core/enums.py b/lib/core/enums.py
index 7b4d9949c..8d9a201ee 100644
--- a/lib/core/enums.py
+++ b/lib/core/enums.py
@@ -86,6 +86,7 @@ class HTTPMETHOD:
 class NULLCONNECTION:
     HEAD = "HEAD"
     RANGE = "Range"
+    SKIP_READ = "skip-read"
 
 class REFLECTIVE_COUNTER:
     MISS = "MISS"
diff --git a/lib/request/connect.py b/lib/request/connect.py
index 4ab8f03e6..5cb77ebfa 100644
--- a/lib/request/connect.py
+++ b/lib/request/connect.py
@@ -211,6 +211,7 @@ class Connect(object):
         refreshing = kwargs.get("refreshing", False)
         retrying = kwargs.get("retrying", False)
         crawling = kwargs.get("crawling", False)
+        skipRead = kwargs.get("skipRead", False)
 
         if not urlparse.urlsplit(url).netloc:
             url = urlparse.urljoin(conf.url, url)
@@ -266,7 +267,7 @@
 
                 multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
                 conn = multipartOpener.open(unicodeencode(url), multipart)
-                page = Connect._connReadProxy(conn)
+                page = Connect._connReadProxy(conn) if not skipRead else None
                 responseHeaders = conn.info()
                 responseHeaders[URI_HTTP_HEADER] = conn.geturl()
                 page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
@@ -380,12 +381,12 @@
 
             # Get HTTP response
             if hasattr(conn, 'redurl'):
-                page = threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\
-                        else Connect._connReadProxy(conn)
+                page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\
+                        else Connect._connReadProxy(conn)) if not skipRead else None
                 skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
                 code = conn.redcode
             else:
-                page = Connect._connReadProxy(conn)
+                page = Connect._connReadProxy(conn) if not skipRead else None
                 code = code or conn.code
 
             responseHeaders = conn.info()
@@ -439,7 +440,7 @@
             responseHeaders = None
 
             try:
-                page = e.read()
+                page = e.read() if not skipRead else None
                 responseHeaders = e.info()
                 responseHeaders[URI_HTTP_HEADER] = e.geturl()
                 page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
@@ -820,10 +821,10 @@
 
             auxHeaders[HTTP_HEADER.RANGE] = "bytes=-1"
 
-        _, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404)
+        _, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ))
 
         if headers:
-            if kb.nullConnection == NULLCONNECTION.HEAD and HTTP_HEADER.CONTENT_LENGTH in headers:
+            if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and HTTP_HEADER.CONTENT_LENGTH in headers:
                 pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH])
             elif kb.nullConnection == NULLCONNECTION.RANGE and HTTP_HEADER.CONTENT_RANGE in headers:
                 pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:])