sqlmap/lib/request/connect.py

781 lines
34 KiB
Python
Raw Normal View History

2008-10-15 19:38:22 +04:00
#!/usr/bin/env python
"""
2012-07-12 21:38:03 +04:00
Copyright (c) 2006-2012 sqlmap developers (http://sqlmap.org/)
2010-10-15 03:18:29 +04:00
See the file 'doc/COPYING' for copying permission
2008-10-15 19:38:22 +04:00
"""
import httplib
2012-10-04 13:25:44 +04:00
import json
2008-10-15 19:38:22 +04:00
import re
import socket
import string
import time
2008-10-15 19:38:22 +04:00
import urllib2
import urlparse
import traceback
2008-10-15 19:38:22 +04:00
from extra.safe2bin.safe2bin import safecharencode
from lib.core.agent import agent
from lib.core.common import asciifyUrl
from lib.core.common import calculateDeltaSeconds
2010-12-21 04:09:39 +03:00
from lib.core.common import clearConsoleLine
2011-02-22 15:54:22 +03:00
from lib.core.common import cpuThrottle
2012-02-16 18:42:28 +04:00
from lib.core.common import evaluateCode
from lib.core.common import extractRegexResult
2010-12-21 01:45:01 +03:00
from lib.core.common import getCurrentThreadData
2011-11-11 15:28:27 +04:00
from lib.core.common import getHostHeader
from lib.core.common import getRequestHeader
2010-06-10 15:34:17 +04:00
from lib.core.common import getUnicode
2010-11-08 14:22:47 +03:00
from lib.core.common import logHTTPTraffic
from lib.core.common import randomizeParameterValue
from lib.core.common import readInput
from lib.core.common import removeReflectiveValues
2011-06-08 18:35:23 +04:00
from lib.core.common import singleTimeWarnMessage
2010-12-08 14:26:54 +03:00
from lib.core.common import stdev
2010-12-08 17:26:40 +03:00
from lib.core.common import wasLastRequestDelayed
2012-07-31 13:03:44 +04:00
from lib.core.common import unicodeencode
from lib.core.common import urlencode
2008-10-15 19:38:22 +04:00
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
2012-10-04 13:25:44 +04:00
from lib.core.dicts import POST_HINT_CONTENT_TYPES
2011-12-26 16:24:39 +04:00
from lib.core.enums import CUSTOM_LOGGING
2011-03-11 23:16:34 +03:00
from lib.core.enums import HTTPHEADER
2010-11-08 12:44:32 +03:00
from lib.core.enums import HTTPMETHOD
2010-11-08 12:49:57 +03:00
from lib.core.enums import NULLCONNECTION
from lib.core.enums import PAYLOAD
from lib.core.enums import PLACE
2012-10-04 13:25:44 +04:00
from lib.core.enums import POST_HINT
from lib.core.enums import REDIRECTION
2012-09-11 16:58:52 +04:00
from lib.core.exception import sqlmapCompressionException
2008-10-15 19:38:22 +04:00
from lib.core.exception import sqlmapConnectionException
2010-12-12 00:28:11 +03:00
from lib.core.exception import sqlmapSyntaxException
from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
2012-10-04 13:25:44 +04:00
from lib.core.settings import DEFAULT_CONTENT_TYPE
from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE
2012-07-23 16:14:22 +04:00
from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
2011-02-22 16:00:58 +03:00
from lib.core.settings import HTTP_SILENT_TIMEOUT
from lib.core.settings import MAX_CONNECTION_CHUNK_SIZE
2012-10-02 15:36:15 +04:00
from lib.core.settings import MAX_CONNECTIONS_REGEX
2012-08-07 02:50:58 +04:00
from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
from lib.core.settings import META_REFRESH_REGEX
2010-12-09 10:49:18 +03:00
from lib.core.settings import MIN_TIME_RESPONSES
from lib.core.settings import IS_WIN
from lib.core.settings import LARGE_CHUNK_TRIM_MARKER
2012-10-02 15:36:15 +04:00
from lib.core.settings import PERMISSION_DENIED_REGEX
from lib.core.settings import UNENCODED_ORIGINAL_VALUE
from lib.core.settings import URI_HTTP_HEADER
from lib.core.settings import WARN_TIME_STDEV
from lib.request.basic import decodePage
2008-10-15 19:38:22 +04:00
from lib.request.basic import forgeHeaders
from lib.request.basic import processResponse
from lib.request.direct import direct
from lib.request.comparison import comparison
from lib.request.methodrequest import MethodRequest
2010-10-25 22:38:54 +04:00
from lib.utils.checkpayload import checkPayload
2012-07-14 19:01:04 +04:00
from thirdparty.socks.socks import ProxyError
from thirdparty.multipart import multipartpost
2008-10-15 19:38:22 +04:00
class Connect:
"""
This class defines methods used to perform HTTP requests
"""
@staticmethod
def __getPageProxy(**kwargs):
    """
    Thin indirection over Connect.getPage used by the retry and
    refresh logic so that recursive re-entry goes through one place.
    """
    result = Connect.getPage(**kwargs)
    return result
@staticmethod
def __retryProxy(**kwargs):
    """
    Retries the last HTTP request (by re-entering Connect.getPage via
    __getPageProxy) after printing a one-time diagnostic warning that
    depends on the current run state (time-based test mode, Tor usage,
    unreachable target, multi-threading).

    Increments the per-thread retriesCount so getPage's error handlers
    can cap the number of retries at conf.retries.
    """
    threadData = getCurrentThreadData()
    threadData.retriesCount += 1

    if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME:
        # timed based payloads can cause web server unresponsiveness
        # if the injectable piece of code is some kind of JOIN-like query
        warnMsg = "most probably web server instance hasn't recovered yet "
        warnMsg += "from previous timed based payload. If the problem "
        warnMsg += "persists please wait for few minutes and rerun "
        warnMsg += "without flag T in option '--technique' "
        warnMsg += "(e.g. --flush-session --technique=BEUS) or try to "
        warnMsg += "lower the value of option '--time-sec' (e.g. --time-sec=2)"
        singleTimeWarnMessage(warnMsg)
    elif kb.originalPage is None:
        # no page was ever retrieved successfully - likely a connectivity issue
        if conf.tor:
            warnMsg = "please make sure that you have "
            warnMsg += "Tor installed and running so "
            warnMsg += "you could successfully use "
            warnMsg += "switch '--tor' "
            if IS_WIN:
                warnMsg += "(e.g. https://www.torproject.org/download/download.html.en)"
            else:
                warnMsg += "(e.g. https://help.ubuntu.com/community/Tor)"
        else:
            warnMsg = "if the problem persists please check that the provided "
            warnMsg += "target url is valid. In case that it is, you can try to rerun "
            warnMsg += "with the switch '--random-agent' turned on "
            warnMsg += "and/or proxy switches (--ignore-proxy, --proxy,...)"
        singleTimeWarnMessage(warnMsg)
    elif conf.threads > 1:
        warnMsg = "if the problem persists please try to lower "
        warnMsg += "the number of used threads (--threads)"
        singleTimeWarnMessage(warnMsg)

    # small grace period before retrying, giving the server time to recover
    time.sleep(1)

    kwargs['retrying'] = True
    return Connect.__getPageProxy(**kwargs)
@staticmethod
def __connReadProxy(conn):
    """
    Reads the response body from the given connection object.

    Compressed ("gzip"/"deflate") and non-"text" content is read in one
    call (it can't be safely trimmed mid-stream), while textual content
    is read in MAX_CONNECTION_CHUNK_SIZE chunks so that overly large
    responses can be trimmed on the fly and capped at
    MAX_CONNECTION_TOTAL_SIZE. Returns "" when kb.dnsMode is set or
    conn is falsy.
    """
    retVal = ""

    # when kb.dnsMode is set the body is not read at all
    if not kb.dnsMode and conn:
        if conn.headers and (conn.headers.getheader(HTTPHEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate")\
            or "text" not in conn.headers.getheader(HTTPHEADER.CONTENT_TYPE, "").lower()):
            # compressed/binary content - read everything at once
            retVal = conn.read()
        else:
            while True:
                _ = conn.read(MAX_CONNECTION_CHUNK_SIZE)

                if len(_) == MAX_CONNECTION_CHUNK_SIZE:
                    # a full chunk implies more data is (probably) coming
                    warnMsg = "large response detected. This could take a while"
                    singleTimeWarnMessage(warnMsg)
                    # collapse content between the injection stop/start
                    # marker characters to a trim placeholder, keeping
                    # only the interesting (injected) parts of the chunk
                    _ = re.sub(r"(?si)%s.+?%s" % (kb.chars.stop, kb.chars.start), "%s%s%s" % (kb.chars.stop, LARGE_CHUNK_TRIM_MARKER, kb.chars.start), _)
                    retVal += _
                else:
                    # short (or empty) read - end of the response body
                    retVal += _
                    break

                if len(retVal) > MAX_CONNECTION_TOTAL_SIZE:
                    # hard cap on the total amount of data kept in memory
                    warnMsg = "too large response detected. Automatically trimming it"
                    singleTimeWarnMessage(warnMsg)
                    break

    return retVal
2008-10-15 19:38:22 +04:00
@staticmethod
def getPage(**kwargs):
    """
    This method connects to the target url or proxy and returns
    the target url page content.

    Keyword arguments (all optional) include: url, get, post, method,
    cookie, ua, referer, host, direct, multipart, silent, raise404,
    auxHeaders, response, ignoreTimeout, refreshing, retrying, crawling.

    Returns a (page, responseHeaders, code) triple on the normal path,
    (conn, None, None) when response=True, the bare page when
    multipart is used, or (None, None, None) on tolerated failures
    (e.g. silent mode, ignored timeouts). Raises
    sqlmapConnectionException / sqlmapSyntaxException on fatal errors.
    """

    # optional pacing between requests (--delay / --cpu-throttle)
    if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
        time.sleep(conf.delay)
    elif conf.cpuThrottle:
        cpuThrottle(conf.cpuThrottle)

    threadData = getCurrentThreadData()
    threadData.lastRequestUID += 1

    url = kwargs.get('url', conf.url)
    get = kwargs.get('get', None)
    post = kwargs.get('post', None)
    method = kwargs.get('method', None)
    cookie = kwargs.get('cookie', None)
    ua = kwargs.get('ua', None)
    referer = kwargs.get('referer', None)
    host = kwargs.get('host', conf.host)
    direct = kwargs.get('direct', False)
    multipart = kwargs.get('multipart', False)
    silent = kwargs.get('silent', False)
    raise404 = kwargs.get('raise404', True)
    auxHeaders = kwargs.get('auxHeaders', None)
    response = kwargs.get('response', False)
    ignoreTimeout = kwargs.get('ignoreTimeout', kb.ignoreTimeout)
    refreshing = kwargs.get('refreshing', False)
    retrying = kwargs.get('retrying', False)
    crawling = kwargs.get('crawling', False)

    # resolve relative URLs against the configured target
    if not urlparse.urlsplit(url).netloc:
        url = urlparse.urljoin(conf.url, url)

    # flag to know if we are dealing with the same target host
    target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""]))

    if not retrying:
        # Reset the number of connection retries
        threadData.retriesCount = 0

    # fix for known issue when urllib2 just skips the other part of provided
    # url splitted with space char while urlencoding it in the later phase
    url = url.replace(" ", "%20")

    code = None
    page = None

    # build the outgoing traffic log line (method + path + query)
    _ = urlparse.urlsplit(url)
    requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post else HTTPMETHOD.GET))
    requestMsg += ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling)) else url
    responseMsg = u"HTTP response "
    requestHeaders = u""
    responseHeaders = None
    logHeaders = u""
    skipLogTraffic = False

    raise404 = raise404 and not kb.ignoreNotFound

    # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
    # support those by default
    url = asciifyUrl(url)

    # fix for known issues when using url in unicode format
    # (e.g. UnicodeDecodeError: "url = url + '?' + query" in redirect case)
    url = unicodeencode(url)

    try:
        # shorter socket timeout for requests that may silently fail
        if silent:
            socket.setdefaulttimeout(HTTP_SILENT_TIMEOUT)
        else:
            socket.setdefaulttimeout(conf.timeout)

        if direct:
            # direct request - just urlencode an eventual query string
            if "?" in url:
                url, params = url.split("?")
                params = urlencode(params)
                url = "%s?%s" % (url, params)
                requestMsg += "?%s" % params

        elif multipart:
            # Needed in this form because of potential circle dependency
            # problem (option -> update -> connect -> option)
            from lib.core.option import proxyHandler

            multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
            conn = multipartOpener.open(unicodeencode(url), multipart)
            page = Connect.__connReadProxy(conn)
            responseHeaders = conn.info()
            responseHeaders[URI_HTTP_HEADER] = conn.geturl()
            page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))

            # multipart requests short-circuit: only the page is returned
            return page

        elif any((refreshing, crawling)):
            # refresh/crawl requests use the provided url as-is
            pass

        elif target:
            # request against the original target: fall back to the
            # parsed command-line parameters when not explicitly given
            if PLACE.GET in conf.parameters and not get:
                get = conf.parameters[PLACE.GET]

            if get:
                url = "%s?%s" % (url, get)
                requestMsg += "?%s" % get

            if conf.method == HTTPMETHOD.POST and not post:
                for place in (PLACE.POST,):
                    if place in conf.parameters:
                        post = conf.parameters[place]
                        break

        elif get:
            url = "%s?%s" % (url, get)
            requestMsg += "?%s" % get

    requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

        # Prepare HTTP headers
        headers = forgeHeaders({HTTPHEADER.COOKIE: cookie, HTTPHEADER.USER_AGENT: ua, HTTPHEADER.REFERER: referer})

        # reuse authentication headers captured from earlier responses
        if kb.authHeader:
            headers[HTTPHEADER.AUTHORIZATION] = kb.authHeader

        if kb.proxyAuthHeader:
            headers[HTTPHEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

        headers[HTTPHEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
        # only advertise compression support when page compression is
        # enabled and the request is not a HEAD request
        headers[HTTPHEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if method != HTTPMETHOD.HEAD and kb.pageCompress else "identity"
        headers[HTTPHEADER.HOST] = host or getHostHeader(url)

        if post:
            # Content-Type depends on the detected POST hint (e.g. SOAP, JSON)
            headers[HTTPHEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)

        if auxHeaders:
            for key, item in auxHeaders.items():
                headers[key] = item

        # re-key all headers through the page encoding (Python 2
        # dict.items() returns a list, so mutation here is safe)
        for key, item in headers.items():
            del headers[key]
            headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding)

        post = unicodeencode(post, kb.pageEncoding)

        if method:
            # custom HTTP method (e.g. HEAD) via MethodRequest
            req = MethodRequest(url, post, headers)
            req.set_method(method)
        else:
            req = urllib2.Request(url, post, headers)

        requestHeaders += "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in req.header_items())

        # log cookies that the cookiejar would attach to this request
        if not getRequestHeader(req, HTTPHEADER.COOKIE) and conf.cj:
            conf.cj._policy._now = conf.cj._now = int(time.time())
            cookies = conf.cj._cookies_for_request(req)
            requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))

        if post:
            if not getRequestHeader(req, HTTPHEADER.CONTENT_LENGTH):
                requestHeaders += "\n%s: %d" % (string.capwords(HTTPHEADER.CONTENT_LENGTH), len(post))

        if not getRequestHeader(req, HTTPHEADER.CONNECTION):
            requestHeaders += "\n%s: close" % HTTPHEADER.CONNECTION

        requestMsg += "\n%s" % requestHeaders

        if post:
            requestMsg += "\n\n%s" % getUnicode(post)

        requestMsg += "\n"

        threadData.lastRequestMsg = requestMsg

        logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

        conn = urllib2.urlopen(req)

        # remember working (proxy) authentication headers for later requests
        if not kb.authHeader and getRequestHeader(req, HTTPHEADER.AUTHORIZATION):
            kb.authHeader = getRequestHeader(req, HTTPHEADER.AUTHORIZATION)

        if not kb.proxyAuthHeader and getRequestHeader(req, HTTPHEADER.PROXY_AUTHORIZATION):
            kb.proxyAuthHeader = getRequestHeader(req, HTTPHEADER.PROXY_AUTHORIZATION)

        # Return response object
        if response:
            return conn, None, None

        # Get HTTP response
        if hasattr(conn, 'redurl'):
            # redirected response: reuse the last redirect page when the
            # user chose not to follow redirections
            page = threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\
                else Connect.__connReadProxy(conn)
            skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
            code = conn.redcode
        else:
            page = Connect.__connReadProxy(conn)

        code = code or conn.code
        responseHeaders = conn.info()
        responseHeaders[URI_HTTP_HEADER] = conn.geturl()
        page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
        status = getUnicode(conn.msg)

        # handle HTML meta refresh (client-side "redirect")
        if extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE) and not refreshing:
            url = extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE)

            debugMsg = "got HTML meta refresh header"
            logger.debug(debugMsg)

            if kb.alwaysRefresh is None:
                # ask once; the answer is cached in kb.alwaysRefresh
                msg = "sqlmap got a refresh request "
                msg += "(redirect like response common to login pages). "
                msg += "Do you want to apply the refresh "
                msg += "from now on (or stay on the original page)? [Y/n]"
                choice = readInput(msg, default="Y")

                kb.alwaysRefresh = choice not in ("n", "N")

            if kb.alwaysRefresh:
                # absolute refresh targets are used verbatim, relative
                # ones are resolved against the original url's directory
                if url.lower().startswith('http://'):
                    kwargs['url'] = url
                else:
                    kwargs['url'] = conf.url[:conf.url.rfind('/')+1] + url

                threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
                kwargs['refreshing'] = True
                kwargs['get'] = None
                kwargs['post'] = None

                try:
                    return Connect.__getPageProxy(**kwargs)
                except sqlmapSyntaxException:
                    pass

        # Explicit closing of connection object
        if not conf.keepAlive:
            try:
                if hasattr(conn.fp, '_sock'):
                    conn.fp._sock.close()
                conn.close()
            except Exception, msg:
                warnMsg = "problem occured during connection closing ('%s')" % msg
                logger.warn(warnMsg)

    except urllib2.HTTPError, e:
        page = None
        responseHeaders = None

        # best-effort retrieval of the error page's body and headers
        try:
            page = e.read()
            responseHeaders = e.info()
            responseHeaders[URI_HTTP_HEADER] = e.geturl()
            page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
        except socket.timeout:
            warnMsg = "connection timed out while trying "
            warnMsg += "to get error page information (%d)" % e.code
            logger.warn(warnMsg)
            return None, None, None
        except KeyboardInterrupt:
            raise
        except:
            pass
        finally:
            page = page if isinstance(page, unicode) else getUnicode(page)

        code = e.code
        threadData.lastHTTPError = (threadData.lastRequestUID, code)

        # per-code error counter used elsewhere for statistics
        kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1

        status = getUnicode(e.msg)
        responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

        if responseHeaders:
            logHeaders = "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in responseHeaders.items())

        logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))

        # traffic already logged here - don't log it again in the outer epilogue
        skipLogTraffic = True

        if conf.verbose <= 5:
            responseMsg += getUnicode(logHeaders)
        elif conf.verbose > 5:
            responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])

        logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)

        if e.code == httplib.UNAUTHORIZED:
            # 401 is fatal - credentials are required
            errMsg = "not authorized, try to provide right HTTP "
            errMsg += "authentication type and valid credentials (%d)" % code
            raise sqlmapConnectionException, errMsg
        elif e.code == httplib.NOT_FOUND:
            # 404 is fatal only when the caller asked for it
            if raise404:
                errMsg = "page not found (%d)" % code
                raise sqlmapConnectionException, errMsg
            else:
                debugMsg = "page not found (%d)" % code
                logger.debug(debugMsg)
                processResponse(page, responseHeaders)
        elif e.code == httplib.GATEWAY_TIMEOUT:
            if ignoreTimeout:
                return None, None, None
            else:
                warnMsg = "unable to connect to the target url (%d - %s)" % (e.code, httplib.responses[e.code])
                if threadData.retriesCount < conf.retries and not kb.threadException:
                    warnMsg += ". sqlmap is going to retry the request"
                    logger.critical(warnMsg)
                    return Connect.__retryProxy(**kwargs)
                elif kb.testMode:
                    logger.critical(warnMsg)
                    return None, None, None
                else:
                    raise sqlmapConnectionException, warnMsg
        else:
            # other HTTP error codes are tolerated (handled by the caller)
            debugMsg = "got HTTP error code: %d (%s)" % (code, status)
            logger.debug(debugMsg)

    except (urllib2.URLError, socket.error, socket.timeout, httplib.BadStatusLine, httplib.IncompleteRead, ProxyError, sqlmapCompressionException), e:
        # classify the failure by inspecting the formatted traceback text
        tbMsg = traceback.format_exc()

        if "no host given" in tbMsg:
            warnMsg = "invalid url address used (%s)" % repr(url)
            raise sqlmapSyntaxException, warnMsg
        elif "forcibly closed" in tbMsg:
            warnMsg = "connection was forcibly closed by the target url"
        elif "timed out" in tbMsg:
            warnMsg = "connection timed out to the target url"
        elif "URLError" in tbMsg or "error" in tbMsg:
            warnMsg = "unable to connect to the target url"
        elif "BadStatusLine" in tbMsg:
            warnMsg = "connection dropped or unknown HTTP "
            warnMsg += "status code received. Try to force the HTTP User-Agent "
            warnMsg += "header with option '--user-agent' or switch '--random-agent'"
        elif "IncompleteRead" in tbMsg:
            warnMsg = "there was an incomplete read error while retrieving data "
            warnMsg += "from the target url"
        else:
            warnMsg = "unable to connect to the target url"

        if "BadStatusLine" not in tbMsg:
            warnMsg += " or proxy"

        if "forcibly closed" in tbMsg:
            logger.critical(warnMsg)
            return None, None, None
        elif silent or (ignoreTimeout and any(_ in tbMsg for _ in ("timed out", "IncompleteRead"))):
            # tolerated failure - caller deals with the None triple
            return None, None, None
        elif threadData.retriesCount < conf.retries and not kb.threadException:
            warnMsg += ". sqlmap is going to retry the request"
            logger.critical(warnMsg)
            return Connect.__retryProxy(**kwargs)
        elif kb.testMode:
            logger.critical(warnMsg)
            return None, None, None
        else:
            raise sqlmapConnectionException, warnMsg

    finally:
        # always normalize the page to unicode and restore the default timeout
        page = page if isinstance(page, unicode) else getUnicode(page)
        socket.setdefaulttimeout(conf.timeout)

    processResponse(page, responseHeaders)

    responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

    if responseHeaders:
        logHeaders = "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in responseHeaders.items())

    if not skipLogTraffic:
        logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))

    if conf.verbose <= 5:
        responseMsg += getUnicode(logHeaders)
    elif conf.verbose > 5:
        responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])

    logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)

    return page, responseHeaders, code
2008-10-15 19:38:22 +04:00
@staticmethod
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True):
    """
    This method calls a function to get the target url page content
    and returns its page MD5 hash or a boolean value in case of
    string match check ('--string' command line parameter).

    Depending on the flags it returns:
    - direct(value, content) when conf.direct is set
    - wasLastRequestDelayed() when timeBasedCompare is set
    - (page, headers) when content or response is set
    - a pair of comparison() results when getRatioValue is set
    - a single comparison() result, or False when nothing was fetched
    """

    if conf.direct:
        return direct(value, content)

    get = None
    post = None
    cookie = None
    ua = None
    referer = None
    host = None
    page = None
    pageLength = None
    uri = None
    code = None
    skipUrlEncode = conf.skipUrlEncode

    if not place:
        place = kb.injection.place or PLACE.GET

    # inside-URI injections must not fail hard on 404 responses
    raise404 = place != PLACE.URI if raise404 is None else raise404

    value = agent.adjustLateValues(value)
    payload = agent.extractPayload(value)
    threadData = getCurrentThreadData()

    # auto-detect whether url-encoding should be skipped based on the
    # declared request Content-Type (when not set explicitly)
    if skipUrlEncode is None and conf.httpHeaders:
        headers = dict(conf.httpHeaders)
        _ = max(headers[_] if _.upper() == HTTPHEADER.CONTENT_TYPE.upper() else None for _ in headers.keys())
        if _ and "urlencoded" not in _:
            skipUrlEncode = True

    if payload:
        # run the payload through all registered tamper functions
        if kb.tamperFunctions:
            for function in kb.tamperFunctions:
                payload, auxHeaders = function(payload=payload, headers=auxHeaders)
                value = agent.replacePayload(value, payload)

            logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload))

        if place == PLACE.CUSTOM_POST:
            if kb.postHint == POST_HINT.SOAP:
                # payloads in SOAP should have chars > and < replaced
                # with their HTML encoded counterparts
                payload = payload.replace('>', "&gt;").replace('<', "&lt;")
            elif kb.postHint == POST_HINT.JSON:
                # keep string payloads valid inside the JSON document
                if payload.startswith('"') and payload.endswith('"'):
                    payload = json.dumps(payload[1:-1])
                else:
                    payload = json.dumps(payload)[1:-1]
            value = agent.replacePayload(value, payload)
        else:
            # payloads in GET and/or POST need to be urlencoded
            # throughly without safe chars (especially & and =)
            # addendum: as we support url encoding in tampering
            # functions therefore we need to use % as a safe char
            if place != PLACE.URI or (value and payload and '?' in value and value.find('?') < value.find(payload)):
                payload = urlencode(payload, '%', False, True) if place not in (PLACE.POST, PLACE.CUSTOM_POST) and not skipUrlEncode else payload
                value = agent.replacePayload(value, payload)

    if place:
        value = agent.removePayloadDelimiters(value)

    if conf.checkPayload:
        checkPayload(value)

    # distribute the (possibly injected) value into the request part
    # being tested; all other parts keep their original parameters
    if PLACE.GET in conf.parameters:
        get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value

    if PLACE.POST in conf.parameters:
        post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value

    if PLACE.CUSTOM_POST in conf.parameters:
        post = conf.parameters[PLACE.CUSTOM_POST].replace(CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value

    if PLACE.COOKIE in conf.parameters:
        cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value

    if PLACE.USER_AGENT in conf.parameters:
        ua = conf.parameters[PLACE.USER_AGENT] if place != PLACE.USER_AGENT or not value else value

    if PLACE.REFERER in conf.parameters:
        referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value

    if PLACE.HOST in conf.parameters:
        host = conf.parameters[PLACE.HOST] if place != PLACE.HOST or not value else value

    if PLACE.URI in conf.parameters:
        uri = conf.url if place != PLACE.URI or not value else value
    else:
        uri = conf.url

    if conf.rParam:
        # replace values of parameters listed in --randomize with
        # random values of the same type/length
        def _randomizeParameter(paramString, randomParameter):
            retVal = paramString
            match = re.search("%s=(?P<value>[^&;]+)" % randomParameter, paramString)
            if match:
                origValue = match.group("value")
                retVal = re.sub("%s=[^&;]+" % randomParameter, "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString)
            return retVal

        for randomParameter in conf.rParam:
            for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE):
                if item in conf.parameters:
                    if item == PLACE.GET and get:
                        get = _randomizeParameter(get, randomParameter)
                    elif item == PLACE.POST and post:
                        post = _randomizeParameter(post, randomParameter)
                    elif item == PLACE.COOKIE and cookie:
                        cookie = _randomizeParameter(cookie, randomParameter)

    if conf.evalCode:
        # evaluate user-provided code (--eval) against the current
        # GET/POST parameters and write back any changed values.
        # NOTE: evaluateCode runs arbitrary user-supplied Python
        delimiter = conf.pDel or "&"
        variables = {}
        originals = {}

        # expose each parameter as a Python variable
        for item in filter(None, (get, post)):
            for part in item.split(delimiter):
                if '=' in part:
                    name, value = part.split('=', 1)
                    evaluateCode("%s=%s" % (name, repr(value)), variables)

        originals.update(variables)
        evaluateCode(conf.evalCode, variables)

        # propagate changed variables back into the request
        for name, value in variables.items():
            if name != "__builtins__" and originals.get(name, "") != value:
                if isinstance(value, (basestring, int)):
                    value = unicode(value)
                    if '%s=' % name in (get or ""):
                        get = re.sub("((\A|\W)%s=)([^%s]+)" % (name, delimiter), "\g<1>%s" % value, get)
                    elif '%s=' % name in (post or ""):
                        post = re.sub("((\A|\W)%s=)([^%s]+)" % (name, delimiter), "\g<1>%s" % value, post)
                    elif post:
                        post += "%s%s=%s" % (delimiter, name, value)
                    else:
                        get += "%s%s=%s" % (delimiter, name, value)

    get = urlencode(get, limit=True)

    if post:
        if place not in (PLACE.POST, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE):
            # reuse the preserved unencoded original POST value
            post = getattr(post, UNENCODED_ORIGINAL_VALUE)
        elif not skipUrlEncode and kb.postHint not in POST_HINT_CONTENT_TYPES.keys():
            post = urlencode(post)

    if timeBasedCompare:
        if len(kb.responseTimes) < MIN_TIME_RESPONSES:
            # gather a baseline of response times before any
            # time-based comparison can be trusted
            clearConsoleLine()

            if conf.tor:
                warnMsg = "it's highly recommended to avoid usage of switch '--tor' for "
                warnMsg += "time-based injections because of its high latency time"
                singleTimeWarnMessage(warnMsg)

            warnMsg = "time-based comparison needs larger statistical "
            warnMsg += "model. Making a few dummy requests, please wait.."
            singleTimeWarnMessage(warnMsg)

            while len(kb.responseTimes) < MIN_TIME_RESPONSES:
                Connect.queryPage(content=True)

            deviation = stdev(kb.responseTimes)

            if deviation > WARN_TIME_STDEV:
                # too jittery a connection for automatic delay adjustment
                kb.adjustTimeDelay = False

                warnMsg = "there is considerable lagging "
                warnMsg += "in connection response(s). Please use as high "
                warnMsg += "value for option '--time-sec' as possible (e.g. "
                warnMsg += "%d or more)" % (conf.timeSec * 2)
                logger.critical(warnMsg)
        elif not kb.testMode:
            warnMsg = "it is very important not to stress the network adapter's "
            warnMsg += "bandwidth during usage of time-based queries"
            singleTimeWarnMessage(warnMsg)

    # periodically visit the "safe" url (--safe-url/--safe-freq)
    if conf.safUrl and conf.saFreq > 0:
        kb.queryCounter += 1
        if kb.queryCounter % conf.saFreq == 0:
            Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host)

    start = time.time()

    if kb.nullConnection and not content and not response and not timeBasedCompare:
        # NULL connection: infer the page length without retrieving the
        # body (HEAD request or a one-byte Range request)
        noteResponseTime = False

        if kb.nullConnection == NULLCONNECTION.HEAD:
            method = HTTPMETHOD.HEAD
        elif kb.nullConnection == NULLCONNECTION.RANGE:
            if not auxHeaders:
                auxHeaders = {}

            auxHeaders[HTTPHEADER.RANGE] = "bytes=-1"

        _, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404)

        if headers:
            if kb.nullConnection == NULLCONNECTION.HEAD and HTTPHEADER.CONTENT_LENGTH in headers:
                pageLength = int(headers[HTTPHEADER.CONTENT_LENGTH])
            elif kb.nullConnection == NULLCONNECTION.RANGE and HTTPHEADER.CONTENT_RANGE in headers:
                # total size is the part after '/' in the Content-Range value
                pageLength = int(headers[HTTPHEADER.CONTENT_RANGE][headers[HTTPHEADER.CONTENT_RANGE].find('/') + 1:])

    if not pageLength:
        # regular full request (also the fallback when NULL connection failed)
        page, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)

    if conf.secondOrder:
        # fetch the page where a second-order payload result shows up
        page, headers, code = Connect.getPage(url=conf.secondOrder, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)

    threadData.lastQueryDuration = calculateDeltaSeconds(start)

    kb.originalCode = kb.originalCode or code

    if kb.testMode:
        kb.testQueryCount += 1

    if timeBasedCompare:
        return wasLastRequestDelayed()
    elif noteResponseTime:
        kb.responseTimes.append(threadData.lastQueryDuration)

    if not response and removeReflection:
        # strip payload values reflected back in the response
        page = removeReflectiveValues(page, payload)

    # flags consumed elsewhere (e.g. error-based checks)
    kb.maxConnectionsFlag = re.search(MAX_CONNECTIONS_REGEX, page or "", re.I) is not None
    kb.permissionFlag = re.search(PERMISSION_DENIED_REGEX, page or "", re.I) is not None

    if content or response:
        return page, headers

    if getRatioValue:
        return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength)
    elif pageLength or page:
        return comparison(page, headers, code, getRatioValue, pageLength)
    else:
        return False