#!/usr/bin/env python

"""
Copyright (c) 2006-2012 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

import cookielib
import httplib
import re
import socket
import urllib
import urllib2

from lib.core.common import getUnicode
from lib.core.common import readInput
from lib.core.common import urldecode
from lib.core.common import urlencode
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.exception import sqlmapConnectionException
from lib.core.exception import sqlmapGenericException
from lib.core.settings import GOOGLE_REGEX
from lib.core.settings import UNICODE_ENCODING
from lib.core.settings import URI_INJECTABLE_REGEX
from lib.request.basic import decodePage

class Google:
    """
    This class defines methods used to perform Google dorking (command
    line option '-g <google dork>')
    """

    def __init__(self, handlers):
        self._matches = []
        self._cj = cookielib.CookieJar()

        handlers.append(urllib2.HTTPCookieProcessor(self._cj))
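        # The shared cookie jar lets the session cookie fetched in
        # getCookie() be replayed automatically by search().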

        self.opener = urllib2.build_opener(*handlers)
        self.opener.addheaders = conf.httpHeaders

    def _parsePage(self, page):
        """
        Parse Google dork search results page to get the list of
        HTTP addresses
        """
        retVal = [urllib.unquote(match.group(1)) for match in re.finditer(GOOGLE_REGEX, page, re.I | re.S)]

        return retVal

    def getTargetUrls(self):
        """
        This method gathers the list of hosts with parameters out of
        your Google dork search results and adds them to kb.targetUrls
        """
        for _ in self._matches:
            _ = urldecode(_)

            if re.search(r"(.*?)\?(.+)", _):
                kb.targetUrls.add((_, None, None, None))
            elif re.search(URI_INJECTABLE_REGEX, _, re.I):
                if kb.scanOnlyGoogleGETs is None:
                    message = "do you want to scan only results containing GET parameters? [Y/n] "
                    test = readInput(message, default="Y")
                    kb.scanOnlyGoogleGETs = test.lower() != 'n'

                if not kb.scanOnlyGoogleGETs:
                    kb.targetUrls.add((_, None, None, None))

    def getCookie(self):
        """
        This method is the first to be called when initializing a
        Google dorking object through this library. It is used to
        retrieve the Google session cookie needed to perform
        further searches
        """
        try:
            conn = self.opener.open("http://www.google.com/ncr")
            _ = conn.info()
        except urllib2.HTTPError, e:
            _ = e.info()
        except urllib2.URLError:
            errMsg = "unable to connect to Google"
            raise sqlmapConnectionException, errMsg

    def search(self, googleDork):
        """
        This method performs the actual search on Google, providing
        the Google dork and the Google session cookie
        """

        gpage = conf.googlePage if conf.googlePage > 1 else 1

        logger.info("using Google result page #%d" % gpage)

        if not googleDork:
            return None

        url = "http://www.google.com/search?"
        url += "q=%s&" % urlencode(googleDork, convall=True)
        url += "num=100&hl=en&complete=0&safe=off&filter=0&btnG=Search"
        url += "&start=%d" % ((gpage-1) * 100)

        try:
            conn = self.opener.open(url)

            requestMsg = "HTTP request:\nGET %s" % url
            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
            logger.log(8, requestMsg)
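            # (log levels 8 and 7 are assumed to be sqlmap's custom
            # TRAFFIC_OUT/TRAFFIC_IN levels for raw HTTP traffic)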

            page = conn.read()
            code = conn.code
            status = conn.msg
            responseHeaders = conn.info()
            page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))

            responseMsg = "HTTP response (%s - %d):\n" % (status, code)

            if conf.verbose <= 4:
                responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
            elif conf.verbose > 4:
                responseMsg += "%s\n%s\n" % (responseHeaders, page)

            logger.log(7, responseMsg)
        except urllib2.HTTPError, e:
            try:
                page = e.read()
            except socket.timeout:
                warnMsg = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % e.code
                logger.critical(warnMsg)
                return None
        except (urllib2.URLError, socket.error, socket.timeout):
            errMsg = "unable to connect to Google"
            raise sqlmapConnectionException, errMsg

        self._matches = self._parsePage(page)

        if not self._matches and "detected unusual traffic" in page:
            warnMsg = "Google has detected 'unusual' traffic from "
            warnMsg += "this computer, disabling further searches"
            raise sqlmapGenericException, warnMsg

        return self._matches