2019-05-08 13:47:52 +03:00
|
|
|
#!/usr/bin/env python
|
2015-11-08 18:37:46 +03:00
|
|
|
|
|
|
|
"""
|
2019-01-05 23:38:52 +03:00
|
|
|
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
2017-10-11 15:50:46 +03:00
|
|
|
See the file 'LICENSE' for copying permission
|
2015-11-08 18:37:46 +03:00
|
|
|
"""
|
|
|
|
|
|
|
|
import re
|
|
|
|
import socket
|
|
|
|
|
|
|
|
from lib.core.common import getSafeExString
|
|
|
|
from lib.core.common import popValue
|
|
|
|
from lib.core.common import pushValue
|
|
|
|
from lib.core.common import readInput
|
|
|
|
from lib.core.common import urlencode
|
2019-05-06 01:54:21 +03:00
|
|
|
from lib.core.convert import getUnicode
|
2015-11-08 18:37:46 +03:00
|
|
|
from lib.core.data import conf
|
|
|
|
from lib.core.data import kb
|
|
|
|
from lib.core.data import logger
|
2018-04-01 13:45:47 +03:00
|
|
|
from lib.core.decorators import stackedmethod
|
2015-11-08 18:37:46 +03:00
|
|
|
from lib.core.enums import CUSTOM_LOGGING
|
|
|
|
from lib.core.enums import HTTP_HEADER
|
|
|
|
from lib.core.enums import REDIRECTION
|
|
|
|
from lib.core.exception import SqlmapBaseException
|
|
|
|
from lib.core.exception import SqlmapConnectionException
|
|
|
|
from lib.core.exception import SqlmapUserQuitException
|
2017-10-09 15:25:08 +03:00
|
|
|
from lib.core.settings import BING_REGEX
|
2015-11-08 18:37:46 +03:00
|
|
|
from lib.core.settings import DUMMY_SEARCH_USER_AGENT
|
|
|
|
from lib.core.settings import DUCKDUCKGO_REGEX
|
|
|
|
from lib.core.settings import GOOGLE_REGEX
|
|
|
|
from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
|
|
|
|
from lib.core.settings import UNICODE_ENCODING
|
|
|
|
from lib.request.basic import decodePage
|
2019-03-27 15:33:46 +03:00
|
|
|
from thirdparty.six.moves import http_client as _http_client
|
|
|
|
from thirdparty.six.moves import urllib as _urllib
|
2015-11-08 18:37:46 +03:00
|
|
|
from thirdparty.socks import socks
|
|
|
|
|
|
|
|
def _search(dork):
|
|
|
|
"""
|
|
|
|
This method performs the effective search on Google providing
|
|
|
|
the google dork and the Google session cookie
|
|
|
|
"""
|
|
|
|
|
|
|
|
if not dork:
|
|
|
|
return None
|
|
|
|
|
2019-01-17 17:06:00 +03:00
|
|
|
data = None
|
2015-11-08 18:37:46 +03:00
|
|
|
headers = {}
|
|
|
|
|
|
|
|
headers[HTTP_HEADER.USER_AGENT] = dict(conf.httpHeaders).get(HTTP_HEADER.USER_AGENT, DUMMY_SEARCH_USER_AGENT)
|
|
|
|
headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE
|
|
|
|
|
|
|
|
try:
|
2019-03-27 15:33:46 +03:00
|
|
|
req = _urllib.request.Request("https://www.google.com/ncr", headers=headers)
|
|
|
|
conn = _urllib.request.urlopen(req)
|
2019-01-22 02:40:48 +03:00
|
|
|
except Exception as ex:
|
2015-11-08 18:37:46 +03:00
|
|
|
errMsg = "unable to connect to Google ('%s')" % getSafeExString(ex)
|
|
|
|
raise SqlmapConnectionException(errMsg)
|
|
|
|
|
|
|
|
gpage = conf.googlePage if conf.googlePage > 1 else 1
|
|
|
|
logger.info("using search result page #%d" % gpage)
|
|
|
|
|
|
|
|
url = "https://www.google.com/search?"
|
|
|
|
url += "q=%s&" % urlencode(dork, convall=True)
|
|
|
|
url += "num=100&hl=en&complete=0&safe=off&filter=0&btnG=Search"
|
|
|
|
url += "&start=%d" % ((gpage - 1) * 100)
|
|
|
|
|
|
|
|
try:
|
2019-03-27 15:33:46 +03:00
|
|
|
req = _urllib.request.Request(url, headers=headers)
|
|
|
|
conn = _urllib.request.urlopen(req)
|
2015-11-08 18:37:46 +03:00
|
|
|
|
|
|
|
requestMsg = "HTTP request:\nGET %s" % url
|
2019-03-28 13:22:38 +03:00
|
|
|
requestMsg += " %s" % _http_client.HTTPConnection._http_vsn_str
|
2015-11-08 18:37:46 +03:00
|
|
|
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
|
|
|
|
|
|
|
|
page = conn.read()
|
|
|
|
code = conn.code
|
|
|
|
status = conn.msg
|
|
|
|
responseHeaders = conn.info()
|
|
|
|
page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))
|
|
|
|
|
|
|
|
responseMsg = "HTTP response (%s - %d):\n" % (status, code)
|
|
|
|
|
|
|
|
if conf.verbose <= 4:
|
|
|
|
responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
|
|
|
|
elif conf.verbose > 4:
|
|
|
|
responseMsg += "%s\n%s\n" % (responseHeaders, page)
|
|
|
|
|
|
|
|
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
|
2019-03-27 15:33:46 +03:00
|
|
|
except _urllib.error.HTTPError as ex:
|
2015-11-08 18:37:46 +03:00
|
|
|
try:
|
2019-01-22 04:08:02 +03:00
|
|
|
page = ex.read()
|
|
|
|
except Exception as _:
|
2015-11-08 18:37:46 +03:00
|
|
|
warnMsg = "problem occurred while trying to get "
|
2019-01-22 04:08:02 +03:00
|
|
|
warnMsg += "an error page information (%s)" % getSafeExString(_)
|
2015-11-08 18:37:46 +03:00
|
|
|
logger.critical(warnMsg)
|
|
|
|
return None
|
2019-03-27 15:33:46 +03:00
|
|
|
except (_urllib.error.URLError, _http_client.error, socket.error, socket.timeout, socks.ProxyError):
|
2015-11-08 18:37:46 +03:00
|
|
|
errMsg = "unable to connect to Google"
|
|
|
|
raise SqlmapConnectionException(errMsg)
|
|
|
|
|
2019-03-27 15:33:46 +03:00
|
|
|
retVal = [_urllib.parse.unquote(match.group(1) or match.group(2)) for match in re.finditer(GOOGLE_REGEX, page, re.I)]
|
2015-11-08 18:37:46 +03:00
|
|
|
|
|
|
|
if not retVal and "detected unusual traffic" in page:
|
|
|
|
warnMsg = "Google has detected 'unusual' traffic from "
|
|
|
|
warnMsg += "used IP address disabling further searches"
|
2017-11-01 12:35:01 +03:00
|
|
|
|
|
|
|
if conf.proxyList:
|
|
|
|
raise SqlmapBaseException(warnMsg)
|
|
|
|
else:
|
|
|
|
logger.critical(warnMsg)
|
2015-11-08 18:37:46 +03:00
|
|
|
|
|
|
|
if not retVal:
|
|
|
|
message = "no usable links found. What do you want to do?"
|
|
|
|
message += "\n[1] (re)try with DuckDuckGo (default)"
|
2017-10-09 15:25:08 +03:00
|
|
|
message += "\n[2] (re)try with Bing"
|
|
|
|
message += "\n[3] quit"
|
2017-04-19 15:46:27 +03:00
|
|
|
choice = readInput(message, default='1')
|
2015-11-08 18:37:46 +03:00
|
|
|
|
2017-10-09 15:25:08 +03:00
|
|
|
if choice == '3':
|
2015-11-08 18:37:46 +03:00
|
|
|
raise SqlmapUserQuitException
|
2017-10-09 15:25:08 +03:00
|
|
|
elif choice == '2':
|
|
|
|
url = "https://www.bing.com/search?q=%s&first=%d" % (urlencode(dork, convall=True), (gpage - 1) * 10 + 1)
|
|
|
|
regex = BING_REGEX
|
2017-11-06 12:00:29 +03:00
|
|
|
else:
|
2019-01-17 17:06:00 +03:00
|
|
|
url = "https://duckduckgo.com/html/"
|
|
|
|
data = "q=%s&s=%d" % (urlencode(dork, convall=True), (gpage - 1) * 30)
|
2015-11-08 18:37:46 +03:00
|
|
|
regex = DUCKDUCKGO_REGEX
|
|
|
|
|
|
|
|
try:
|
2019-03-27 15:33:46 +03:00
|
|
|
req = _urllib.request.Request(url, data=data, headers=headers)
|
|
|
|
conn = _urllib.request.urlopen(req)
|
2015-11-08 18:37:46 +03:00
|
|
|
|
|
|
|
requestMsg = "HTTP request:\nGET %s" % url
|
2019-03-28 13:22:38 +03:00
|
|
|
requestMsg += " %s" % _http_client.HTTPConnection._http_vsn_str
|
2015-11-08 18:37:46 +03:00
|
|
|
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
|
|
|
|
|
|
|
|
page = conn.read()
|
|
|
|
code = conn.code
|
|
|
|
status = conn.msg
|
|
|
|
responseHeaders = conn.info()
|
|
|
|
page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))
|
|
|
|
|
|
|
|
responseMsg = "HTTP response (%s - %d):\n" % (status, code)
|
|
|
|
|
|
|
|
if conf.verbose <= 4:
|
|
|
|
responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
|
|
|
|
elif conf.verbose > 4:
|
|
|
|
responseMsg += "%s\n%s\n" % (responseHeaders, page)
|
|
|
|
|
|
|
|
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
|
2019-03-27 15:33:46 +03:00
|
|
|
except _urllib.error.HTTPError as ex:
|
2015-11-08 18:37:46 +03:00
|
|
|
try:
|
2019-01-22 04:08:02 +03:00
|
|
|
page = ex.read()
|
|
|
|
page = decodePage(page, ex.headers.get("Content-Encoding"), ex.headers.get("Content-Type"))
|
2015-11-08 18:37:46 +03:00
|
|
|
except socket.timeout:
|
|
|
|
warnMsg = "connection timed out while trying "
|
2019-01-22 04:08:02 +03:00
|
|
|
warnMsg += "to get error page information (%d)" % ex.code
|
2015-11-08 18:37:46 +03:00
|
|
|
logger.critical(warnMsg)
|
|
|
|
return None
|
|
|
|
except:
|
|
|
|
errMsg = "unable to connect"
|
|
|
|
raise SqlmapConnectionException(errMsg)
|
|
|
|
|
2019-03-27 15:33:46 +03:00
|
|
|
retVal = [_urllib.parse.unquote(match.group(1).replace("&", "&")) for match in re.finditer(regex, page, re.I | re.S)]
|
2015-11-08 18:37:46 +03:00
|
|
|
|
2019-01-17 17:06:00 +03:00
|
|
|
if not retVal and "issue with the Tor Exit Node you are currently using" in page:
|
|
|
|
warnMsg = "DuckDuckGo has detected 'unusual' traffic from "
|
|
|
|
warnMsg += "used (Tor) IP address"
|
|
|
|
|
|
|
|
if conf.proxyList:
|
|
|
|
raise SqlmapBaseException(warnMsg)
|
|
|
|
else:
|
|
|
|
logger.critical(warnMsg)
|
|
|
|
|
2015-11-08 18:37:46 +03:00
|
|
|
return retVal
|
|
|
|
|
2018-04-01 13:45:47 +03:00
|
|
|
@stackedmethod
def search(dork):
    """
    Public entry point for dork-based searching. Temporarily forces
    automatic following of redirections, delegates to _search() and,
    when a proxy list is configured, rotates to a fresh proxy and
    retries after a search-engine ban
    """

    # Remember the current redirect choice so it can be restored afterwards
    pushValue(kb.redirectChoice)
    kb.redirectChoice = REDIRECTION.YES

    try:
        return _search(dork)
    except SqlmapBaseException as ex:
        # Without alternative proxies there is nothing left to try
        if not conf.proxyList:
            raise

        logger.critical(getSafeExString(ex))

        warnMsg = "changing proxy"
        logger.warn(warnMsg)

        conf.proxy = None

        # Reinstall HTTP handlers so the next attempt picks a new proxy
        setHTTPHandlers()
        return search(dork)
    finally:
        kb.redirectChoice = popValue()
|
|
|
|
|
2018-03-21 16:29:54 +03:00
|
|
|
def setHTTPHandlers():  # Cross-referenced function
    """
    Placeholder stub; the real implementation is bound here at runtime
    from elsewhere in the project (calling the raw stub is an error)
    """
    raise NotImplementedError
|