mirror of https://github.com/sqlmapproject/sqlmap.git
Minor bug fix and adjustment to deal with Keep-Alive also against Google (-g)
This commit is contained in:
parent 6f03a9ab5c
commit 9bce22683b
@@ -265,18 +265,26 @@ def __setGoogleDorking():
     the results and save the testable hosts into the knowledge base.
     """
 
-    global proxyHandler
-
     if not conf.googleDork:
         return
 
+    global keepAliveHandler
+    global proxyHandler
+
     debugMsg = "initializing Google dorking requests"
     logger.debug(debugMsg)
 
     logMsg = "first request to Google to get the session cookie"
     logger.info(logMsg)
 
-    googleObj = Google(proxyHandler)
+    handlers = [ proxyHandler ]
+
+    # Use Keep-Alive (persistent HTTP connection) only if a proxy is not set
+    # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
+    if conf.keepAlive and not conf.proxy:
+        handlers.append(keepAliveHandler)
+
+    googleObj = Google(handlers)
     googleObj.getCookie()
 
     matches = googleObj.search(conf.googleDork)
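The hunk above collects the urllib2 handlers into a list before building the opener, appending the keep-alive handler only when no proxy is configured. A minimal standalone sketch of the same pattern, with plain stand-ins for sqlmap's conf.keepAlive, conf.proxy and keepAliveHandler (all assumptions, not the real objects):

    import urllib2

    # Stand-ins for sqlmap's configuration (assumed values for illustration)
    keepAlive = True
    proxy = None  # e.g. "http://127.0.0.1:8080"

    handlers = []

    if proxy:
        handlers.append(urllib2.ProxyHandler({"http": proxy}))

    # Keep-Alive (persistent connections) is skipped when going through a
    # proxy, per the caveats in RFC 2616, section 8
    if keepAlive and not proxy:
        handlers.append(urllib2.HTTPHandler())  # stand-in for the keep-alive handler

    opener = urllib2.build_opener(*handlers)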
@@ -88,7 +88,6 @@ def parseResponse(page, headers):
         if absFilePath not in kb.absFilePaths:
             kb.absFilePaths.add(absFilePath)
 
-
 def decodePage(page, contentEncoding, contentType):
     """
     Decode compressed/charset HTTP response
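decodePage takes the Content-Type header alongside Content-Encoding, so it can handle both decompression and charset decoding. A rough sketch of what such a helper can look like, under that assumption; this is an illustration, not sqlmap's actual implementation:

    import gzip
    import re
    import zlib
    from StringIO import StringIO

    def decodePage(page, contentEncoding, contentType):
        # Decompress according to the Content-Encoding header
        if contentEncoding and contentEncoding.lower() == "gzip":
            page = gzip.GzipFile(fileobj=StringIO(page)).read()
        elif contentEncoding and contentEncoding.lower() == "deflate":
            page = zlib.decompress(page, -zlib.MAX_WBITS)  # raw deflate stream

        # Decode according to the charset advertised in Content-Type
        if contentType:
            match = re.search(r"charset=([\w-]+)", contentType, re.I)
            if match:
                page = page.decode(match.group(1), "replace")

        return page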
@@ -41,10 +41,13 @@ class Google:
     line option '-g <google dork>'
     """
 
-    def __init__(self, proxy):
+    def __init__(self, handlers):
         self.__matches = []
         self.__cj = cookielib.LWPCookieJar()
-        self.opener = urllib2.build_opener(proxy, urllib2.HTTPCookieProcessor(self.__cj))
+
+        handlers.append(urllib2.HTTPCookieProcessor(self.__cj))
+
+        self.opener = urllib2.build_opener(*handlers)
         self.opener.addheaders = conf.httpHeaders
 
     def __parsePage(self, page):
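Google.__init__ now accepts a list of handlers instead of a single proxy handler; the cookie processor is appended and the whole list is unpacked into urllib2.build_opener, which takes a variable number of handlers. A self-contained version of the same idea (the handler choice and headers here are illustrative):

    import cookielib
    import urllib2

    cj = cookielib.LWPCookieJar()
    handlers = [urllib2.ProxyHandler({})]  # whatever the caller configured

    handlers.append(urllib2.HTTPCookieProcessor(cj))

    # build_opener takes *handlers, so the list is simply unpacked
    opener = urllib2.build_opener(*handlers)
    opener.addheaders = [("User-agent", "sqlmap")]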
@@ -83,7 +86,7 @@ class Google:
             _ = conn.info()
         except urllib2.HTTPError, e:
             _ = e.info()
-        except urllib2.URLError, e:
+        except urllib2.URLError, _:
            errMsg = "unable to connect to Google"
            raise sqlmapConnectionException, errMsg
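The surrounding try/except works because in Python 2 urllib2.HTTPError is itself a response object: e.info() returns the headers of the error page, so the session cookie can still be collected on a non-200 reply, while URLError (no connection at all) is fatal. A quick sketch of the pattern (the URL is a placeholder):

    import urllib2

    try:
        conn = urllib2.urlopen("http://www.example.com/nonexistent")
        headers = conn.info()
    except urllib2.HTTPError, e:
        headers = e.info()  # HTTPError doubles as a response; headers still usable
    except urllib2.URLError, _:
        raise Exception("unable to connect")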
@@ -107,9 +110,8 @@ class Google:
         try:
             conn = self.opener.open(url)
 
-            requestMsg = "HTTP request:\nGET %s HTTP/1.1" % url
-            #requestHeaders = "\n".join(["%s: %s" % (header, value) for header, value in conn.headers.items()])
-            #requestMsg += "\n%s" % requestHeaders
+            requestMsg = "HTTP request:\nGET %s HTTP/1.1\n" % url
+            requestMsg += "\n".join(["%s: %s" % (header, value) for header, value in conn.headers.items()])
             requestMsg += "\n"
             logger.log(9, requestMsg)
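logger.log(9, ...) logs at a numeric level just below logging.DEBUG (10), presumably a dedicated traffic level, so raw requests only appear at the highest verbosity. A minimal stdlib sketch of logging with such a custom level:

    import logging

    TRAFFIC = 9  # custom level below DEBUG (10), matching the call above
    logging.addLevelName(TRAFFIC, "TRAFFIC")

    logger = logging.getLogger("example")
    logger.setLevel(TRAFFIC)
    logger.addHandler(logging.StreamHandler())

    logger.log(TRAFFIC, "HTTP request:\nGET /search?q=test HTTP/1.1")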
@@ -117,9 +119,7 @@ class Google:
             page = conn.read()
             code = conn.code
             status = conn.msg
             responseHeaders = conn.info()
-
-            encoding = responseHeaders.get("Content-Encoding")
-            page = decodePage(page, encoding)
+            page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))
 
             responseMsg = "HTTP response (%s - %d):\n" % (status, code)
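conn.info() returns a mimetools.Message-style object in Python 2, whose get() looks headers up case-insensitively and returns None when a header is absent, which is why both values can be passed straight into decodePage without presence checks. For illustration (placeholder URL):

    import urllib2

    conn = urllib2.urlopen("http://www.example.com/")
    responseHeaders = conn.info()

    # get() is case-insensitive and yields None for missing headers
    print responseHeaders.get("Content-Encoding")
    print responseHeaders.get("Content-Type")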
@@ -137,7 +137,7 @@ class Google:
                 warnMsg += "to get error page information (%d)" % e.code
                 logger.warn(warnMsg)
                 return None
-        except (urllib2.URLError, socket.error, socket.timeout), e:
+        except (urllib2.URLError, socket.error, socket.timeout), _:
             errMsg = "unable to connect to Google"
             raise sqlmapConnectionException, errMsg