#!/usr/bin/env python

"""
Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
|
|
|
|
|
|
|
|
import cookielib
|
2010-10-31 18:41:28 +03:00
|
|
|
import httplib
|
2008-10-15 19:38:22 +04:00
|
|
|
import re
|
2010-03-26 14:51:23 +03:00
|
|
|
import socket
|
2012-07-05 14:34:27 +04:00
|
|
|
import urllib
|
2008-10-15 19:38:22 +04:00
|
|
|
import urllib2
|
|
|
|
|
2010-06-02 16:45:40 +04:00
|
|
|
from lib.core.common import getUnicode
|
2013-11-25 23:57:07 +04:00
|
|
|
from lib.core.common import readInput
|
2012-07-31 13:03:44 +04:00
|
|
|
from lib.core.common import urlencode
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.data import conf
|
2010-01-02 05:02:12 +03:00
|
|
|
from lib.core.data import logger
|
2012-12-07 14:54:34 +04:00
|
|
|
from lib.core.enums import CUSTOM_LOGGING
|
2013-11-25 23:57:07 +04:00
|
|
|
from lib.core.enums import HTTP_HEADER
|
2012-12-06 17:14:19 +04:00
|
|
|
from lib.core.exception import SqlmapConnectionException
|
|
|
|
from lib.core.exception import SqlmapGenericException
|
2012-02-20 14:02:19 +04:00
|
|
|
from lib.core.settings import GOOGLE_REGEX
|
2013-11-25 23:57:07 +04:00
|
|
|
from lib.core.settings import DUCKDUCKGO_REGEX
|
2013-12-17 12:30:04 +04:00
|
|
|
from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
|
2011-01-30 14:36:03 +03:00
|
|
|
from lib.core.settings import UNICODE_ENCODING
|
2010-01-02 05:02:12 +03:00
|
|
|
from lib.request.basic import decodePage
|
2013-12-17 12:30:04 +04:00
|
|
|
from lib.request.httpshandler import HTTPSHandler
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2012-12-06 13:42:53 +04:00
|
|
|
class Google(object):
|
2008-10-15 19:38:22 +04:00
|
|
|
"""
|
|
|
|
This class defines methods used to perform Google dorking (command
|
|
|
|
line option '-g <google dork>'
|
|
|
|
"""
|
|
|
|
|
2010-06-11 14:08:19 +04:00
|
|
|
def __init__(self, handlers):
|
2012-03-07 18:48:45 +04:00
|
|
|
self._cj = cookielib.CookieJar()
|
2010-06-11 14:08:19 +04:00
|
|
|
|
2012-02-20 14:02:19 +04:00
|
|
|
handlers.append(urllib2.HTTPCookieProcessor(self._cj))
|
2013-12-17 12:30:04 +04:00
|
|
|
handlers.append(HTTPSHandler())
|
2010-06-11 14:08:19 +04:00
|
|
|
|
|
|
|
self.opener = urllib2.build_opener(*handlers)
|
2008-10-15 19:38:22 +04:00
|
|
|
self.opener.addheaders = conf.httpHeaders
|
|
|
|
|
|
|
|
try:
|
|
|
|
conn = self.opener.open("http://www.google.com/ncr")
|
2012-12-06 14:15:05 +04:00
|
|
|
conn.info() # retrieve session cookie
|
2014-10-28 17:34:53 +03:00
|
|
|
except Exception, ex:
|
|
|
|
errMsg = "unable to connect to Google ('%s')" % ex
|
2013-01-04 02:20:55 +04:00
|
|
|
raise SqlmapConnectionException(errMsg)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2012-10-30 21:38:10 +04:00
|
|
|
def search(self, dork):
|
2008-10-15 19:38:22 +04:00
|
|
|
"""
|
|
|
|
This method performs the effective search on Google providing
|
|
|
|
the google dork and the Google session cookie
|
|
|
|
"""
|
|
|
|
|
2010-01-02 05:02:12 +03:00
|
|
|
gpage = conf.googlePage if conf.googlePage > 1 else 1
|
2010-03-05 17:59:33 +03:00
|
|
|
logger.info("using Google result page #%d" % gpage)
|
2010-09-27 17:41:18 +04:00
|
|
|
|
2012-10-30 21:38:10 +04:00
|
|
|
if not dork:
|
2008-10-15 19:38:22 +04:00
|
|
|
return None
|
|
|
|
|
2011-04-30 17:20:05 +04:00
|
|
|
url = "http://www.google.com/search?"
|
2012-10-30 21:38:10 +04:00
|
|
|
url += "q=%s&" % urlencode(dork, convall=True)
|
2011-11-06 15:42:02 +04:00
|
|
|
url += "num=100&hl=en&complete=0&safe=off&filter=0&btnG=Search"
|
2013-01-10 16:18:44 +04:00
|
|
|
url += "&start=%d" % ((gpage - 1) * 100)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
try:
|
|
|
|
conn = self.opener.open(url)
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2010-10-31 18:41:28 +03:00
|
|
|
requestMsg = "HTTP request:\nGET %s" % url
|
|
|
|
requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
|
2012-12-07 14:54:34 +04:00
|
|
|
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2010-06-11 14:08:19 +04:00
|
|
|
page = conn.read()
|
|
|
|
code = conn.code
|
|
|
|
status = conn.msg
|
2010-01-02 05:02:12 +03:00
|
|
|
responseHeaders = conn.info()
|
2010-06-11 14:08:19 +04:00
|
|
|
page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))
|
2010-01-02 05:02:12 +03:00
|
|
|
|
|
|
|
responseMsg = "HTTP response (%s - %d):\n" % (status, code)
|
2010-09-27 17:41:18 +04:00
|
|
|
|
2010-01-02 05:02:12 +03:00
|
|
|
if conf.verbose <= 4:
|
2011-01-30 14:36:03 +03:00
|
|
|
responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
|
2010-01-02 05:02:12 +03:00
|
|
|
elif conf.verbose > 4:
|
|
|
|
responseMsg += "%s\n%s\n" % (responseHeaders, page)
|
2010-09-27 17:41:18 +04:00
|
|
|
|
2012-12-07 14:54:34 +04:00
|
|
|
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
|
2008-10-15 19:38:22 +04:00
|
|
|
except urllib2.HTTPError, e:
|
2010-05-16 00:44:08 +04:00
|
|
|
try:
|
|
|
|
page = e.read()
|
|
|
|
except socket.timeout:
|
2011-04-30 17:20:05 +04:00
|
|
|
warnMsg = "connection timed out while trying "
|
2010-05-16 00:44:08 +04:00
|
|
|
warnMsg += "to get error page information (%d)" % e.code
|
2010-09-27 17:41:18 +04:00
|
|
|
logger.critical(warnMsg)
|
2010-05-16 00:44:08 +04:00
|
|
|
return None
|
2012-02-22 14:40:11 +04:00
|
|
|
except (urllib2.URLError, socket.error, socket.timeout):
|
2008-10-15 19:38:22 +04:00
|
|
|
errMsg = "unable to connect to Google"
|
2013-01-04 02:20:55 +04:00
|
|
|
raise SqlmapConnectionException(errMsg)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2012-10-30 21:38:10 +04:00
|
|
|
retVal = [urllib.unquote(match.group(1)) for match in re.finditer(GOOGLE_REGEX, page, re.I | re.S)]
|
2011-02-08 03:02:54 +03:00
|
|
|
|
2012-10-30 21:38:10 +04:00
|
|
|
if not retVal and "detected unusual traffic" in page:
|
2011-04-30 17:20:05 +04:00
|
|
|
warnMsg = "Google has detected 'unusual' traffic from "
|
2013-03-18 14:41:15 +04:00
|
|
|
warnMsg += "used IP address disabling further searches"
|
2013-01-04 02:20:55 +04:00
|
|
|
raise SqlmapGenericException(warnMsg)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2013-11-25 23:57:07 +04:00
|
|
|
if not retVal:
|
|
|
|
message = "no usable links found. "
|
|
|
|
message += "do you want to (re)try with DuckDuckGo? [Y/n] "
|
|
|
|
output = readInput(message, default="Y")
|
|
|
|
|
|
|
|
if output.strip().lower() != 'n':
|
|
|
|
url = "https://duckduckgo.com/d.js?"
|
|
|
|
url += "q=%s&p=%d&s=100" % (urlencode(dork, convall=True), gpage)
|
|
|
|
|
|
|
|
if not conf.randomAgent:
|
2013-12-17 12:30:04 +04:00
|
|
|
self.opener.addheaders = [_ for _ in self.opener.addheaders if _[0].lower() != HTTP_HEADER.USER_AGENT.lower()]
|
|
|
|
self.opener.addheaders.append((HTTP_HEADER.USER_AGENT, "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:24.0) Gecko/20100101 Firefox/24.0"))
|
|
|
|
|
|
|
|
self.opener.addheaders = [_ for _ in self.opener.addheaders if _[0].lower() != HTTP_HEADER.ACCEPT_ENCODING.lower()]
|
|
|
|
self.opener.addheaders.append((HTTP_HEADER.ACCEPT_ENCODING, HTTP_ACCEPT_ENCODING_HEADER_VALUE))
|
2013-11-25 23:57:07 +04:00
|
|
|
|
|
|
|
try:
|
|
|
|
conn = self.opener.open(url)
|
|
|
|
|
|
|
|
requestMsg = "HTTP request:\nGET %s" % url
|
|
|
|
requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
|
|
|
|
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
|
|
|
|
|
|
|
|
page = conn.read()
|
|
|
|
code = conn.code
|
|
|
|
status = conn.msg
|
|
|
|
responseHeaders = conn.info()
|
|
|
|
page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))
|
|
|
|
|
|
|
|
responseMsg = "HTTP response (%s - %d):\n" % (status, code)
|
|
|
|
|
|
|
|
if conf.verbose <= 4:
|
|
|
|
responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
|
|
|
|
elif conf.verbose > 4:
|
|
|
|
responseMsg += "%s\n%s\n" % (responseHeaders, page)
|
|
|
|
|
|
|
|
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
|
|
|
|
except urllib2.HTTPError, e:
|
|
|
|
try:
|
|
|
|
page = e.read()
|
|
|
|
except socket.timeout:
|
|
|
|
warnMsg = "connection timed out while trying "
|
|
|
|
warnMsg += "to get error page information (%d)" % e.code
|
|
|
|
logger.critical(warnMsg)
|
|
|
|
return None
|
2014-11-14 02:21:04 +03:00
|
|
|
except:
|
2013-11-25 23:57:07 +04:00
|
|
|
errMsg = "unable to connect to DuckDuckGo"
|
|
|
|
raise SqlmapConnectionException(errMsg)
|
|
|
|
|
|
|
|
retVal = [urllib.unquote(match.group(1)) for match in re.finditer(DUCKDUCKGO_REGEX, page, re.I | re.S)]
|
|
|
|
|
2012-10-30 21:38:10 +04:00
|
|
|
return retVal
|