mirror of https://github.com/sqlmapproject/sqlmap.git

commit 9bce22683b
parent 6f03a9ab5c

Minor bug fix and adjustment to deal with Keep-Alive also against Google (-g)
@@ -265,18 +265,26 @@ def __setGoogleDorking():
     the results and save the testable hosts into the knowledge base.
     """
 
-    global proxyHandler
-
     if not conf.googleDork:
         return
 
+    global keepAliveHandler
+    global proxyHandler
+
     debugMsg = "initializing Google dorking requests"
     logger.debug(debugMsg)
 
     logMsg = "first request to Google to get the session cookie"
     logger.info(logMsg)
 
-    googleObj = Google(proxyHandler)
+    handlers = [ proxyHandler ]
+
+    # Use Keep-Alive (persistent HTTP connection) only if a proxy is not set
+    # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
+    if conf.keepAlive and not conf.proxy:
+        handlers.append(keepAliveHandler)
+
+    googleObj = Google(handlers)
     googleObj.getCookie()
 
     matches = googleObj.search(conf.googleDork)
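For context, here is a minimal Python 2 sketch of the handler-list pattern this hunk introduces: start from the proxy handler, append the keep-alive handler only when no proxy is configured, and hand the list to whatever builds the opener. The function and parameter names below are illustrative, not sqlmap's; only the keep-alive-versus-proxy rule comes from the hunk itself.

# Illustrative Python 2 sketch (names are hypothetical, not sqlmap's)
import urllib2

def build_google_handlers(proxy_url=None, keep_alive=False, keep_alive_handler=None):
    # An empty mapping simply disables proxying (including environment proxies)
    proxy_handler = urllib2.ProxyHandler({"http": proxy_url} if proxy_url else {})
    handlers = [proxy_handler]

    # Use Keep-Alive (persistent HTTP connection) only if a proxy is not set
    # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
    if keep_alive and not proxy_url and keep_alive_handler is not None:
        handlers.append(keep_alive_handler)

    return handlers

The resulting list is what Google(handlers) receives below, so the opener built inside the class ends up with exactly the handlers the caller decided on.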
@@ -88,7 +88,6 @@ def parseResponse(page, headers):
         if absFilePath not in kb.absFilePaths:
             kb.absFilePaths.add(absFilePath)
 
-
 def decodePage(page, contentEncoding, contentType):
     """
     Decode compressed/charset HTTP response
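decodePage() itself is only context in this hunk, but as a rough, hypothetical sketch (not sqlmap's implementation), undoing the compression advertised in the Content-Encoding response header typically looks like this in Python 2:

# Hypothetical sketch of Content-Encoding handling; not sqlmap's code
import gzip
import StringIO
import zlib

def decode_body(page, content_encoding):
    if not content_encoding:
        return page

    encoding = content_encoding.lower()

    if encoding == "gzip":
        return gzip.GzipFile(fileobj=StringIO.StringIO(page)).read()
    elif encoding == "deflate":
        try:
            # zlib-wrapped deflate stream
            return zlib.decompress(page)
        except zlib.error:
            # raw deflate streams need the negative window-bits form
            return zlib.decompress(page, -zlib.MAX_WBITS)

    return page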
@@ -41,10 +41,13 @@ class Google:
     line option '-g <google dork>'
     """
 
-    def __init__(self, proxy):
+    def __init__(self, handlers):
         self.__matches = []
         self.__cj = cookielib.LWPCookieJar()
-        self.opener = urllib2.build_opener(proxy, urllib2.HTTPCookieProcessor(self.__cj))
+
+        handlers.append(urllib2.HTTPCookieProcessor(self.__cj))
+
+        self.opener = urllib2.build_opener(*handlers)
         self.opener.addheaders = conf.httpHeaders
 
     def __parsePage(self, page):
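A minimal Python 2 sketch of the constructor change: the caller supplies the handler list, the class appends its own cookie processor, and build_opener() receives the handlers via list unpacking. The class name and header values here are placeholders.

# Illustrative Python 2 sketch; class name and headers are placeholders
import cookielib
import urllib2

class CookieAwareOpenerOwner(object):
    def __init__(self, handlers):
        self.__cj = cookielib.LWPCookieJar()
        handlers.append(urllib2.HTTPCookieProcessor(self.__cj))

        # build_opener() takes a variable number of handler instances,
        # hence the * unpacking of the list assembled by the caller
        self.opener = urllib2.build_opener(*handlers)
        self.opener.addheaders = [("User-Agent", "Mozilla/5.0")]

owner = CookieAwareOpenerOwner([urllib2.ProxyHandler({})])

Note that appending to handlers mutates the caller's list; copying it first would avoid that side effect at the cost of an extra line.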
@@ -83,7 +86,7 @@ class Google:
             _ = conn.info()
         except urllib2.HTTPError, e:
             _ = e.info()
-        except urllib2.URLError, e:
+        except urllib2.URLError, _:
             errMsg = "unable to connect to Google"
             raise sqlmapConnectionException, errMsg
 
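The surrounding context relies on a urllib2 detail worth spelling out: an HTTPError is itself a response object, so its headers (including any Set-Cookie) remain readable even when Google answers with an error status. A hedged Python 2 sketch of that pattern, with a generic exception standing in for sqlmapConnectionException:

# Hedged sketch; RuntimeError stands in for sqlmapConnectionException
import urllib2

def fetch_headers(opener, url):
    try:
        conn = opener.open(url)
        return conn.info()
    except urllib2.HTTPError, e:
        # error page, but headers (e.g. cookies) are still available
        return e.info()
    except urllib2.URLError:
        raise RuntimeError("unable to connect")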
@@ -107,9 +110,8 @@ class Google:
         try:
             conn = self.opener.open(url)
 
-            requestMsg = "HTTP request:\nGET %s HTTP/1.1" % url
-            #requestHeaders = "\n".join(["%s: %s" % (header, value) for header, value in conn.headers.items()])
-            #requestMsg += "\n%s" % requestHeaders
+            requestMsg = "HTTP request:\nGET %s HTTP/1.1\n" % url
+            requestMsg += "\n".join(["%s: %s" % (header, value) for header, value in conn.headers.items()])
             requestMsg += "\n"
             logger.log(9, requestMsg)
 
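The previously commented-out logging now emits a full request-style trace: one request line, the header pairs joined with newlines, then a blank line, matching the wire format of an HTTP/1.1 message. A small self-contained sketch of that formatting (the URL and headers are made-up sample values):

# Standalone sketch of the traffic-log formatting; sample values only
def format_request_log(url, headers):
    msg = "HTTP request:\nGET %s HTTP/1.1\n" % url
    msg += "\n".join("%s: %s" % (name, value) for name, value in headers.items())
    msg += "\n"
    return msg

print(format_request_log("http://www.google.com/search?q=test",
                         {"Host": "www.google.com", "Accept-Encoding": "gzip"}))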
@@ -117,9 +119,7 @@ class Google:
             code = conn.code
             status = conn.msg
             responseHeaders = conn.info()
-            encoding = responseHeaders.get("Content-Encoding")
-
-            page = decodePage(page, encoding)
+            page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))
 
             responseMsg = "HTTP response (%s - %d):\n" % (status, code)
 
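Passing Content-Type alongside Content-Encoding lets the decoder see the declared charset (e.g. "text/html; charset=UTF-8") as well as the compression. A hypothetical helper showing the kind of charset extraction that extra header enables (again, not sqlmap's actual code):

# Hypothetical helper; not sqlmap's implementation
import re

def charset_from_content_type(content_type):
    if content_type:
        match = re.search(r"charset=([\w-]+)", content_type, re.I)
        if match:
            return match.group(1)
    return None

assert charset_from_content_type("text/html; charset=UTF-8") == "UTF-8"
assert charset_from_content_type("application/octet-stream") is None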
@@ -137,7 +137,7 @@ class Google:
                 warnMsg += "to get error page information (%d)" % e.code
                 logger.warn(warnMsg)
                 return None
-        except (urllib2.URLError, socket.error, socket.timeout), e:
+        except (urllib2.URLError, socket.error, socket.timeout), _:
            errMsg = "unable to connect to Google"
            raise sqlmapConnectionException, errMsg
 