2008-10-15 19:38:22 +04:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
|
|
|
"""
|
2008-10-15 19:56:32 +04:00
|
|
|
$Id$
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2011-07-08 00:10:03 +04:00
|
|
|
Copyright (c) 2006-2011 sqlmap developers (http://www.sqlmap.org/)
|
2010-10-15 03:18:29 +04:00
|
|
|
See the file 'doc/COPYING' for copying permission
|
2008-10-15 19:38:22 +04:00
|
|
|
"""
|
|
|
|
|
2008-11-15 15:25:19 +03:00
|
|
|
import httplib
|
2008-10-15 19:38:22 +04:00
|
|
|
import re
|
2008-11-09 19:57:47 +03:00
|
|
|
import socket
|
|
|
|
import time
|
2008-10-15 19:38:22 +04:00
|
|
|
import urllib2
|
|
|
|
import urlparse
|
2008-12-04 20:40:03 +03:00
|
|
|
import traceback
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2011-02-08 03:15:02 +03:00
|
|
|
from extra.multipart import multipartpost
|
2010-10-29 20:11:50 +04:00
|
|
|
from lib.core.agent import agent
|
2011-10-23 21:02:48 +04:00
|
|
|
from lib.core.common import asciifyUrl
|
2010-12-08 14:26:54 +03:00
|
|
|
from lib.core.common import average
|
2010-12-07 22:19:12 +03:00
|
|
|
from lib.core.common import calculateDeltaSeconds
|
2010-12-21 04:09:39 +03:00
|
|
|
from lib.core.common import clearConsoleLine
|
2011-02-22 15:54:22 +03:00
|
|
|
from lib.core.common import cpuThrottle
|
2011-11-21 20:41:02 +04:00
|
|
|
from lib.core.common import executeCode
|
2011-03-29 18:16:28 +04:00
|
|
|
from lib.core.common import extractRegexResult
|
2010-12-21 01:45:01 +03:00
|
|
|
from lib.core.common import getCurrentThreadData
|
2010-11-07 03:12:00 +03:00
|
|
|
from lib.core.common import getFilteredPageContent
|
2011-11-11 15:28:27 +04:00
|
|
|
from lib.core.common import getHostHeader
|
2010-06-10 15:34:17 +04:00
|
|
|
from lib.core.common import getUnicode
|
2010-11-08 14:22:47 +03:00
|
|
|
from lib.core.common import logHTTPTraffic
|
2011-01-24 17:45:35 +03:00
|
|
|
from lib.core.common import parseTargetUrl
|
2011-08-29 16:50:52 +04:00
|
|
|
from lib.core.common import randomizeParameterValue
|
2010-11-16 13:42:42 +03:00
|
|
|
from lib.core.common import readInput
|
2011-02-25 12:22:44 +03:00
|
|
|
from lib.core.common import removeReflectiveValues
|
2011-06-08 18:35:23 +04:00
|
|
|
from lib.core.common import singleTimeWarnMessage
|
2010-12-08 14:26:54 +03:00
|
|
|
from lib.core.common import stdev
|
2011-04-29 19:22:32 +04:00
|
|
|
from lib.core.common import urlEncodeCookieValues
|
2010-12-08 17:26:40 +03:00
|
|
|
from lib.core.common import wasLastRequestDelayed
|
2011-04-29 19:22:32 +04:00
|
|
|
from lib.core.convert import unicodeencode
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.convert import urlencode
|
|
|
|
from lib.core.data import conf
|
|
|
|
from lib.core.data import kb
|
|
|
|
from lib.core.data import logger
|
2011-03-11 23:16:34 +03:00
|
|
|
from lib.core.enums import HTTPHEADER
|
2010-11-08 12:44:32 +03:00
|
|
|
from lib.core.enums import HTTPMETHOD
|
2010-11-08 12:49:57 +03:00
|
|
|
from lib.core.enums import NULLCONNECTION
|
2011-05-27 01:54:19 +04:00
|
|
|
from lib.core.enums import PAYLOAD
|
2010-11-08 12:20:02 +03:00
|
|
|
from lib.core.enums import PLACE
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.exception import sqlmapConnectionException
|
2010-12-12 00:28:11 +03:00
|
|
|
from lib.core.exception import sqlmapSyntaxException
|
2011-07-06 09:44:47 +04:00
|
|
|
from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE
|
2011-02-22 16:00:58 +03:00
|
|
|
from lib.core.settings import HTTP_SILENT_TIMEOUT
|
2011-03-29 18:16:28 +04:00
|
|
|
from lib.core.settings import META_REFRESH_REGEX
|
2011-05-12 09:46:17 +04:00
|
|
|
from lib.core.settings import IS_WIN
|
2010-12-09 10:49:18 +03:00
|
|
|
from lib.core.settings import MIN_TIME_RESPONSES
|
2011-04-19 18:28:51 +04:00
|
|
|
from lib.core.settings import WARN_TIME_STDEV
|
2011-10-25 13:53:44 +04:00
|
|
|
from lib.core.settings import UNENCODED_ORIGINAL_VALUE
|
2011-04-19 18:50:09 +04:00
|
|
|
from lib.core.settings import URI_HTTP_HEADER
|
2010-12-24 15:13:48 +03:00
|
|
|
from lib.core.threads import getCurrentThreadData
|
2010-01-02 05:02:12 +03:00
|
|
|
from lib.request.basic import decodePage
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.request.basic import forgeHeaders
|
2010-12-25 13:16:20 +03:00
|
|
|
from lib.request.basic import processResponse
|
2010-03-31 14:50:47 +04:00
|
|
|
from lib.request.direct import direct
|
2008-12-05 18:34:13 +03:00
|
|
|
from lib.request.comparison import comparison
|
2010-09-15 16:45:41 +04:00
|
|
|
from lib.request.methodrequest import MethodRequest
|
2010-10-25 22:38:54 +04:00
|
|
|
from lib.utils.checkpayload import checkPayload
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
|
|
|
|
class Connect:
|
|
|
|
"""
|
|
|
|
This class defines methods used to perform HTTP requests
|
|
|
|
"""
|
|
|
|
|
2008-12-04 20:40:03 +03:00
|
|
|
    @staticmethod
    def __getPageProxy(**kwargs):
        # Thin indirection over getPage() used by the retry, redirect and
        # refresh paths so they can re-enter the request logic with a
        # (possibly modified) copy of the original keyword arguments.
        return Connect.getPage(**kwargs)
|
|
|
|
|
2011-06-19 13:57:41 +04:00
|
|
|
@staticmethod
|
|
|
|
def __retryProxy(**kwargs):
|
|
|
|
threadData = getCurrentThreadData()
|
|
|
|
threadData.retriesCount += 1
|
|
|
|
|
|
|
|
if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME:
|
|
|
|
# timed based payloads can cause web server unresponsiveness
|
|
|
|
# if the injectable piece of code is some kind of JOIN-like query
|
|
|
|
warnMsg = "most probably web server instance hasn't recovered yet "
|
2011-11-10 14:30:53 +04:00
|
|
|
warnMsg += "from previous timed based payload. If the problem "
|
2011-06-19 13:57:41 +04:00
|
|
|
warnMsg += "persists please wait for few minutes and rerun "
|
|
|
|
warnMsg += "without flag T in --technique option "
|
|
|
|
warnMsg += "(e.g. --flush-session --technique=BEUS) or try to "
|
|
|
|
warnMsg += "lower the --time-sec value (e.g. --time-sec=2)"
|
|
|
|
singleTimeWarnMessage(warnMsg)
|
|
|
|
elif kb.originalPage is None:
|
|
|
|
warnMsg = "if the problem persists please check that the provided "
|
|
|
|
warnMsg += "target url is valid. If it is, you can try to rerun "
|
|
|
|
warnMsg += "with the --random-agent switch turned on "
|
|
|
|
warnMsg += "and/or proxy switches (--ignore-proxy, --proxy,...)"
|
|
|
|
singleTimeWarnMessage(warnMsg)
|
|
|
|
elif conf.threads > 1:
|
|
|
|
warnMsg = "if the problem persists please try to lower "
|
|
|
|
warnMsg += "the number of used threads (--threads)"
|
|
|
|
singleTimeWarnMessage(warnMsg)
|
|
|
|
|
|
|
|
time.sleep(1)
|
|
|
|
|
|
|
|
kwargs['retrying'] = True
|
|
|
|
return Connect.__getPageProxy(**kwargs)
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
    @staticmethod
    def getPage(**kwargs):
        """
        This method connects to the target url or proxy and returns
        the target url page content

        Unless an early return is taken (direct/multipart/response modes or
        an error path), it returns the tuple (page, responseHeaders, code).
        """

        # Optional pacing between requests: fixed delay or CPU-based throttle
        if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)
        elif conf.cpuThrottle:
            cpuThrottle(conf.cpuThrottle)

        threadData = getCurrentThreadData()
        threadData.lastRequestUID += 1

        url = kwargs.get('url', conf.url)
        get = kwargs.get('get', None)
        post = kwargs.get('post', None)
        method = kwargs.get('method', None)
        cookie = kwargs.get('cookie', None)
        ua = kwargs.get('ua', None)
        referer = kwargs.get('referer', None)
        direct = kwargs.get('direct', False)
        multipart = kwargs.get('multipart', False)
        silent = kwargs.get('silent', False)
        raise404 = kwargs.get('raise404', True)
        auxHeaders = kwargs.get('auxHeaders', None)
        response = kwargs.get('response', False)
        ignoreTimeout = kwargs.get('ignoreTimeout', kb.ignoreTimeout)
        refreshing = kwargs.get('refreshing', False)
        retrying = kwargs.get('retrying', False)
        redirecting = kwargs.get('redirecting', None)
        crawling = kwargs.get('crawling', False)

        # Relative URLs (e.g. from redirects) are resolved against conf.url
        if not urlparse.urlsplit(url).netloc:
            url = urlparse.urljoin(conf.url, url)

        # flag to know if we are dealing with the same target host
        target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url]))

        if not retrying:
            # Reset the number of connection retries
            threadData.retriesCount = 0

        # fix for known issue when urllib2 just skips the other part of provided
        # url splitted with space char while urlencoding it in the later phase
        url = url.replace(" ", "%20")

        page = None
        cookieStr = u""
        requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post else HTTPMETHOD.GET))
        requestMsg += "%s" % urlparse.urlsplit(url)[2] or "/"
        responseMsg = u"HTTP response "
        requestHeaders = u""
        responseHeaders = None
        logHeaders = u""

        # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
        # support those by default
        url = asciifyUrl(url)

        # fix for known issues when using url in unicode format
        # (e.g. UnicodeDecodeError: "url = url + '?' + query" in redirect case)
        url = unicodeencode(url)

        try:
            # Shorter timeout for requests whose failure should be quiet;
            # restored to conf.timeout in the finally clause below
            if silent:
                socket.setdefaulttimeout(HTTP_SILENT_TIMEOUT)
            else:
                socket.setdefaulttimeout(conf.timeout)

            if direct:
                # Direct mode: only re-encode the query string already in url
                if "?" in url:
                    url, params = url.split("?")
                    params = urlencode(params)
                    url = "%s?%s" % (url, params)
                    requestMsg += "?%s" % params

            elif multipart:
                # Needed in this form because of potential circle dependency
                # problem (option -> update -> connect -> option)
                from lib.core.option import proxyHandler

                multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
                conn = multipartOpener.open(unicodeencode(url), multipart)
                page = conn.read()
                responseHeaders = conn.info()
                responseHeaders[URI_HTTP_HEADER] = conn.geturl()
                page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))

                # Multipart requests short-circuit: only the page is returned
                return page

            elif any([refreshing, crawling]):
                # Refresh/crawl re-entries keep the url exactly as given
                pass

            elif target:
                # Same-host request: fill in GET/POST data from conf.parameters
                # unless explicitly overridden by the caller
                if conf.parameters.has_key(PLACE.GET) and not get:
                    get = conf.parameters[PLACE.GET]

                if get:
                    url = "%s?%s" % (url, get)
                    requestMsg += "?%s" % get

                if conf.method == HTTPMETHOD.POST and not post:
                    for place in (PLACE.POST, PLACE.SOAP):
                        if conf.parameters.has_key(place):
                            post = conf.parameters[place]
                            break

            elif get:
                url = "%s?%s" % (url, get)
                requestMsg += "?%s" % get

            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

            # Perform HTTP request
            headers = forgeHeaders(cookie, ua, referer)

            if conf.realTest:
                headers[HTTPHEADER.REFERER] = "%s://%s" % (conf.scheme, conf.hostname)

            # Re-use authentication headers captured from earlier requests
            if kb.authHeader:
                headers[HTTPHEADER.AUTHORIZATION] = kb.authHeader

            if kb.proxyAuthHeader:
                headers[HTTPHEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

            headers[HTTPHEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE

            headers[HTTPHEADER.HOST] = getHostHeader(url)

            if auxHeaders:
                for key, item in auxHeaders.items():
                    headers[key] = item

            # Re-key all headers through the detected page encoding so that
            # urllib2 is handed plain byte strings
            for key, item in headers.items():
                del headers[key]
                headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding)

            post = unicodeencode(post, kb.pageEncoding)

            if method:
                # Non-standard verb (e.g. PUT) requires the MethodRequest subclass
                req = MethodRequest(url, post, headers)
                req.set_method(method)
            else:
                req = urllib2.Request(url, post, headers)

            # Reconstruct the Cookie header from the cookie jar for logging
            # purposes (relies on cookielib's Cookie repr format "<Cookie k=v for domain>")
            if not conf.dropSetCookie and conf.cj:
                for _, cookie in enumerate(conf.cj):
                    if not cookieStr:
                        cookieStr = "Cookie: "

                    cookie = getUnicode(cookie)
                    index = cookie.index(" for ")

                    cookieStr += "%s; " % cookie[8:index]

            # Build the human-readable request header dump for traffic logging
            if not req.has_header(HTTPHEADER.ACCEPT_ENCODING):
                requestHeaders += "%s: identity\n" % HTTPHEADER.ACCEPT_ENCODING

            requestHeaders += "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in req.header_items())

            if not req.has_header(HTTPHEADER.COOKIE) and cookieStr:
                requestHeaders += "\n%s" % cookieStr[:-2]

            if not req.has_header(HTTPHEADER.CONNECTION):
                requestHeaders += "\n%s: close" % HTTPHEADER.CONNECTION

            requestMsg += "\n%s" % requestHeaders

            if post:
                requestMsg += "\n\n%s" % post

            requestMsg += "\n"

            threadData.lastRequestMsg = requestMsg

            logger.log(8, requestMsg)

            conn = urllib2.urlopen(req)

            # Cache credentials urllib2 attached so later requests can reuse them
            if not kb.authHeader and req.has_header(HTTPHEADER.AUTHORIZATION):
                kb.authHeader = req.get_header(HTTPHEADER.AUTHORIZATION)

            if not kb.proxyAuthHeader and req.has_header(HTTPHEADER.PROXY_AUTHORIZATION):
                kb.proxyAuthHeader = req.get_header(HTTPHEADER.PROXY_AUTHORIZATION)

            # "setcookie"/"redurl"/"redcode" attributes are set by sqlmap's
            # custom redirect handler on the response object
            if hasattr(conn, "setcookie"):
                kb.redirectSetCookie = conn.setcookie

            if hasattr(conn, "redurl") and hasattr(conn, "redcode") and target \
                    and not redirecting and not conf.realTest:

                # Ask the user once whether redirects should be followed;
                # the answer is cached in kb.alwaysRedirect
                if kb.alwaysRedirect is None:
                    msg = "sqlmap got a %d redirect to " % conn.redcode
                    msg += "'%s'. Do you want to follow redirects " % conn.redurl
                    msg += "from now on (or stay on the original page otherwise)? [Y/n]"
                    choice = readInput(msg, default="Y")

                    kb.alwaysRedirect = choice not in ("n", "N")

                if kb.alwaysRedirect:
                    kwargs['url'] = conn.redurl
                    kwargs['redirecting'] = conn.redcode
                    return Connect.__getPageProxy(**kwargs)
                else:
                    # Stay on the original page content cached from the
                    # pre-redirect response
                    redirecting = conn.redcode
                    page = threadData.lastRedirectMsg[1]

            # Return response object
            if response:
                return conn, None, None

            # Get HTTP response
            page = conn.read() if page is None else page
            code = redirecting or conn.code
            responseHeaders = conn.info()
            responseHeaders[URI_HTTP_HEADER] = conn.geturl()
            page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
            status = getUnicode(conn.msg)

            # Honour <meta http-equiv="refresh"> only on the first pass
            # (refreshing flag prevents infinite refresh loops)
            if extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE) and not refreshing:
                url = extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE)

                debugMsg = "got HTML meta refresh header"
                logger.debug(debugMsg)

                if kb.alwaysRefresh is None:
                    msg = "sqlmap got a refresh request "
                    msg += "(redirect like response common to login pages). "
                    msg += "Do you want to apply the refresh "
                    msg += "from now on (or stay on the original page)? [Y/n]"
                    choice = readInput(msg, default="Y")

                    kb.alwaysRefresh = choice not in ("n", "N")

                if kb.alwaysRefresh:
                    if url.lower().startswith('http://'):
                        kwargs['url'] = url
                    else:
                        # Relative refresh target: resolve against conf.url's directory
                        kwargs['url'] = conf.url[:conf.url.rfind('/') + 1] + url

                    threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
                    kwargs['refreshing'] = True
                    kwargs['get'] = None
                    kwargs['post'] = None

                    try:
                        return Connect.__getPageProxy(**kwargs)
                    except sqlmapSyntaxException:
                        # Fall back to the already-read page on a bad refresh url
                        pass

            # Explicit closing of connection object
            if not conf.keepAlive:
                try:
                    if hasattr(conn.fp, '_sock'):
                        conn.fp._sock.close()
                    conn.close()
                except Exception, msg:
                    warnMsg = "problem occured during connection closing ('%s')" % msg
                    logger.warn(warnMsg)

        except urllib2.HTTPError, e:
            page = None
            responseHeaders = None

            # Best-effort retrieval of the error page body and headers
            try:
                page = e.read()
                responseHeaders = e.info()
                responseHeaders[URI_HTTP_HEADER] = e.geturl()
                page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
            except socket.timeout:
                warnMsg = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % e.code
                logger.warn(warnMsg)
                return None, None, None
            except:
                pass

            code = e.code
            threadData.lastHTTPError = (threadData.lastRequestUID, code)

            # Per-status-code error counters used elsewhere for heuristics
            if code not in kb.httpErrorCodes:
                kb.httpErrorCodes[code] = 0
            kb.httpErrorCodes[code] += 1

            status = getUnicode(e.msg)
            responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

            if responseHeaders:
                logHeaders = "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in responseHeaders.items())

            logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, page if isinstance(page, unicode) else getUnicode(page)))

            if conf.verbose <= 5:
                responseMsg += getUnicode(logHeaders)
            elif conf.verbose > 5:
                responseMsg += "%s\n\n%s\n" % (logHeaders, page)

            logger.log(7, responseMsg)

            if e.code == 401:
                errMsg = "not authorized, try to provide right HTTP "
                errMsg += "authentication type and valid credentials (%d)" % code
                raise sqlmapConnectionException, errMsg
            elif e.code == 404:
                if raise404:
                    errMsg = "page not found (%d)" % code
                    raise sqlmapConnectionException, errMsg
                else:
                    # 404 tolerated: log it, process the error page and fall
                    # through to the common logging/return tail below
                    debugMsg = "page not found (%d)" % code
                    logger.debug(debugMsg)
                    processResponse(page, responseHeaders)
            elif e.code == 504:
                # Gateway timeout: retry, swallow or raise depending on context
                if ignoreTimeout:
                    return None, None, None
                else:
                    warnMsg = "unable to connect to the target url (%d - %s)" % (e.code, httplib.responses[e.code])
                    if threadData.retriesCount < conf.retries and not kb.threadException and not conf.realTest:
                        warnMsg += ", sqlmap is going to retry the request"
                        logger.critical(warnMsg)
                        return Connect.__retryProxy(**kwargs)
                    elif kb.testMode:
                        logger.critical(warnMsg)
                        return None, None, None
                    else:
                        raise sqlmapConnectionException, warnMsg
            else:
                # Any other HTTP error: the error page itself is the result
                debugMsg = "got HTTP error code: %d (%s)" % (code, status)
                logger.debug(debugMsg)
                processResponse(page, responseHeaders)
                return page, responseHeaders, code

        except (urllib2.URLError, socket.error, socket.timeout, httplib.BadStatusLine, httplib.IncompleteRead), e:
            # Classify the low-level failure by inspecting the traceback text
            tbMsg = traceback.format_exc()

            if "no host given" in tbMsg:
                warnMsg = "invalid url address used (%s)" % repr(url)
                raise sqlmapSyntaxException, warnMsg
            elif "forcibly closed" in tbMsg:
                warnMsg = "connection was forcibly closed by the target url"
            elif "timed out" in tbMsg:
                warnMsg = "connection timed out to the target url"
            elif "URLError" in tbMsg or "error" in tbMsg:
                warnMsg = "unable to connect to the target url"
            elif "BadStatusLine" in tbMsg:
                warnMsg = "the target url responded with an unknown HTTP "
                warnMsg += "status code, try to force the HTTP User-Agent "
                warnMsg += "header with option --user-agent or --random-agent"
            elif "IncompleteRead" in tbMsg:
                warnMsg = "there was an incomplete read error while retrieving data "
                warnMsg += "from the target url"
            else:
                warnMsg = "unable to connect to the target url"

            if "BadStatusLine" not in tbMsg:
                warnMsg += " or proxy"

            if "forcibly closed" in tbMsg:
                logger.critical(warnMsg)
                return None, None, None
            elif silent or (ignoreTimeout and any(map(lambda x: x in tbMsg, ["timed out", "IncompleteRead"]))):
                return None, None, None
            elif threadData.retriesCount < conf.retries and not kb.threadException and not conf.realTest:
                warnMsg += ", sqlmap is going to retry the request"
                logger.critical(warnMsg)
                return Connect.__retryProxy(**kwargs)
            elif kb.testMode:
                logger.critical(warnMsg)
                return None, None, None
            else:
                raise sqlmapConnectionException, warnMsg

        finally:
            # Always restore the regular timeout (may have been shortened
            # for silent requests above)
            socket.setdefaulttimeout(conf.timeout)

        processResponse(page, responseHeaders)

        # Common tail for successful responses (and tolerated 404s):
        # log the traffic and return the page triple
        responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)
        if responseHeaders:
            logHeaders = "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in responseHeaders.items())

        logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, page if isinstance(page, unicode) else getUnicode(page)))

        if conf.verbose <= 5:
            responseMsg += getUnicode(logHeaders)
        elif conf.verbose > 5:
            responseMsg += "%s\n\n%s\n" % (logHeaders, page)

        logger.log(7, responseMsg)

        return page, responseHeaders, code
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
@staticmethod
|
2011-10-24 04:46:54 +04:00
|
|
|
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True):
|
2008-10-15 19:38:22 +04:00
|
|
|
"""
|
|
|
|
This method calls a function to get the target url page content
|
|
|
|
and returns its page MD5 hash or a boolean value in case of
|
|
|
|
string match check ('--string' command line parameter)
|
|
|
|
"""
|
|
|
|
|
2010-03-27 02:23:25 +03:00
|
|
|
if conf.direct:
|
2010-03-31 14:50:47 +04:00
|
|
|
return direct(value, content)
|
2010-03-27 02:23:25 +03:00
|
|
|
|
2011-04-30 17:20:05 +04:00
|
|
|
get = None
|
|
|
|
post = None
|
|
|
|
cookie = None
|
|
|
|
ua = None
|
|
|
|
referer = None
|
|
|
|
page = None
|
|
|
|
pageLength = None
|
|
|
|
uri = None
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
if not place:
|
2011-07-06 09:44:47 +04:00
|
|
|
place = kb.injection.place or PLACE.GET
|
|
|
|
|
|
|
|
raise404 = place != PLACE.URI if raise404 is None else raise404
|
2010-10-14 15:06:28 +04:00
|
|
|
|
2010-11-08 00:55:24 +03:00
|
|
|
payload = agent.extractPayload(value)
|
2010-12-21 01:45:01 +03:00
|
|
|
threadData = getCurrentThreadData()
|
2010-10-30 03:00:48 +04:00
|
|
|
|
2010-11-08 00:55:24 +03:00
|
|
|
if payload:
|
|
|
|
if kb.tamperFunctions:
|
2010-10-29 20:11:50 +04:00
|
|
|
for function in kb.tamperFunctions:
|
|
|
|
payload = function(payload)
|
2010-10-30 03:00:48 +04:00
|
|
|
|
2010-10-29 20:11:50 +04:00
|
|
|
value = agent.replacePayload(value, payload)
|
|
|
|
|
2010-11-08 01:34:29 +03:00
|
|
|
logger.log(9, payload)
|
2010-11-08 00:18:09 +03:00
|
|
|
|
2010-11-08 11:02:36 +03:00
|
|
|
if place == PLACE.COOKIE and conf.cookieUrlencode:
|
2011-01-27 22:44:24 +03:00
|
|
|
value = agent.removePayloadDelimiters(value)
|
2010-10-31 14:26:33 +03:00
|
|
|
value = urlEncodeCookieValues(value)
|
2011-03-11 22:57:44 +03:00
|
|
|
|
2010-10-31 14:26:33 +03:00
|
|
|
elif place:
|
2011-11-10 14:22:35 +04:00
|
|
|
if place in (PLACE.GET, PLACE.POST, PLACE.URI):
|
2011-03-11 23:07:52 +03:00
|
|
|
# payloads in GET and/or POST need to be urlencoded
|
2011-04-04 12:04:47 +04:00
|
|
|
# throughly without safe chars (especially & and =)
|
|
|
|
# addendum: as we support url encoding in tampering
|
|
|
|
# functions therefore we need to use % as a safe char
|
2011-11-10 14:22:35 +04:00
|
|
|
if place != PLACE.URI or ('?' in value and value.find('?') < value.find(payload)):
|
|
|
|
payload = urlencode(payload, "%", False, True)
|
|
|
|
value = agent.replacePayload(value, payload)
|
2011-04-18 18:29:52 +04:00
|
|
|
elif place == PLACE.SOAP:
|
|
|
|
# payloads in SOAP should have chars > and < replaced
|
|
|
|
# with their HTML encoded counterparts
|
|
|
|
payload = payload.replace('>', '>').replace('<', '<')
|
|
|
|
value = agent.replacePayload(value, payload)
|
2011-03-11 22:57:44 +03:00
|
|
|
|
2011-01-27 22:44:24 +03:00
|
|
|
value = agent.removePayloadDelimiters(value)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2010-10-25 19:37:43 +04:00
|
|
|
if conf.checkPayload:
|
|
|
|
checkPayload(value)
|
|
|
|
|
2010-11-08 11:02:36 +03:00
|
|
|
if PLACE.GET in conf.parameters:
|
2011-08-29 16:50:52 +04:00
|
|
|
get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2010-11-08 11:02:36 +03:00
|
|
|
if PLACE.POST in conf.parameters:
|
2011-08-29 16:50:52 +04:00
|
|
|
post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2011-04-18 01:39:00 +04:00
|
|
|
if PLACE.SOAP in conf.parameters:
|
|
|
|
post = conf.parameters[PLACE.SOAP] if place != PLACE.SOAP or not value else value
|
|
|
|
|
2010-11-08 11:02:36 +03:00
|
|
|
if PLACE.COOKIE in conf.parameters:
|
|
|
|
cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value
|
2010-05-14 19:20:34 +04:00
|
|
|
|
2010-11-08 11:02:36 +03:00
|
|
|
if PLACE.UA in conf.parameters:
|
2011-02-02 18:18:06 +03:00
|
|
|
ua = conf.parameters[PLACE.UA] if place != PLACE.UA or not value else value
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2011-02-12 02:07:03 +03:00
|
|
|
if PLACE.REFERER in conf.parameters:
|
|
|
|
referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value
|
|
|
|
|
2010-11-08 11:02:36 +03:00
|
|
|
if PLACE.URI in conf.parameters:
|
|
|
|
uri = conf.url if place != PLACE.URI or not value else value
|
2010-09-23 18:07:23 +04:00
|
|
|
else:
|
|
|
|
uri = conf.url
|
2010-09-22 15:56:35 +04:00
|
|
|
|
2011-08-29 16:50:52 +04:00
|
|
|
if conf.rParam:
|
|
|
|
def _randomizeParameter(paramString, randomParameter):
    """
    Returns paramString with the value of parameter randomParameter
    replaced by a randomized variant (e.g. 'id=100' -> 'id=237').
    paramString is returned unchanged when the parameter is not
    present in it.
    """
    retVal = paramString

    # escape the parameter name so regex metacharacters inside it
    # (e.g. '[', ']', '.') can't break or subvert the pattern
    match = re.search("%s=(?P<value>[^&;]+)" % re.escape(randomParameter), paramString)

    if match:
        origValue = match.group("value")

        # use a callable replacement so that backslash sequences in the
        # randomized value are inserted literally instead of being
        # interpreted as regex group references by re.sub
        retVal = re.sub("%s=[^&;]+" % re.escape(randomParameter), lambda _: "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString)

    return retVal
|
|
|
|
|
2011-08-29 17:08:25 +04:00
|
|
|
for randomParameter in conf.rParam:
|
|
|
|
for item in [PLACE.GET, PLACE.POST, PLACE.COOKIE]:
|
|
|
|
if item in conf.parameters:
|
|
|
|
origValue = conf.parameters[item]
|
|
|
|
if item == PLACE.GET and get:
|
|
|
|
get = _randomizeParameter(get, randomParameter)
|
|
|
|
elif item == PLACE.POST and post:
|
|
|
|
post = _randomizeParameter(post, randomParameter)
|
|
|
|
elif item == PLACE.COOKIE and cookie:
|
|
|
|
cookie = _randomizeParameter(cookie, randomParameter)
|
2011-08-29 16:50:52 +04:00
|
|
|
|
2011-11-21 20:41:02 +04:00
|
|
|
if conf.evalCode:
|
|
|
|
variables = {}
|
|
|
|
originals = {}
|
|
|
|
|
|
|
|
if get:
|
|
|
|
executeCode(get.replace("&", ";"), variables)
|
|
|
|
if post:
|
|
|
|
executeCode(post.replace("&", ";"), variables)
|
|
|
|
|
|
|
|
originals.update(variables)
|
|
|
|
executeCode(conf.evalCode, variables)
|
|
|
|
|
|
|
|
for name, value in variables.items():
|
|
|
|
if name != "__builtins__" and originals.get(name, "") != value:
|
|
|
|
if isinstance(value, (basestring, int)):
|
|
|
|
value = unicode(value)
|
|
|
|
if '%s=' % name in (get or ""):
|
|
|
|
get = re.sub("(%s=)([^&]+)" % name, "\g<1>%s" % value, get)
|
|
|
|
elif '%s=' % name in (post or ""):
|
|
|
|
post = re.sub("(%s=)([^&]+)" % name, "\g<1>%s" % value, post)
|
|
|
|
elif post:
|
|
|
|
post += "&%s=%s" % (name, value)
|
|
|
|
else:
|
|
|
|
get += "&%s=%s" % (name, value)
|
|
|
|
|
2011-08-29 16:50:52 +04:00
|
|
|
get = urlencode(get, limit=True)
|
2011-10-25 13:53:44 +04:00
|
|
|
if post and place != PLACE.POST and hasattr(post, UNENCODED_ORIGINAL_VALUE):
|
|
|
|
post = getattr(post, UNENCODED_ORIGINAL_VALUE)
|
|
|
|
else:
|
|
|
|
post = urlencode(post)
|
2011-08-29 16:50:52 +04:00
|
|
|
|
2010-12-09 10:49:18 +03:00
|
|
|
if timeBasedCompare:
|
|
|
|
if len(kb.responseTimes) < MIN_TIME_RESPONSES:
|
2010-12-21 04:09:39 +03:00
|
|
|
clearConsoleLine()
|
|
|
|
|
2011-11-21 03:17:57 +04:00
|
|
|
if conf.tor:
|
|
|
|
warnMsg = "it's highly recommended to avoid usage of --tor switch for "
|
|
|
|
warnMsg += "time-based injections because of it's high latency time"
|
|
|
|
singleTimeWarnMessage(warnMsg)
|
|
|
|
|
2010-12-09 16:57:38 +03:00
|
|
|
warnMsg = "time-based comparison needs larger statistical "
|
2010-12-10 18:28:56 +03:00
|
|
|
warnMsg += "model. Making a few dummy requests, please wait.."
|
2011-06-28 01:38:12 +04:00
|
|
|
singleTimeWarnMessage(warnMsg)
|
2010-12-09 10:49:18 +03:00
|
|
|
|
|
|
|
while len(kb.responseTimes) < MIN_TIME_RESPONSES:
|
2011-01-07 18:41:09 +03:00
|
|
|
Connect.queryPage(content=True)
|
2010-12-09 10:49:18 +03:00
|
|
|
|
2011-08-12 17:47:38 +04:00
|
|
|
deviation = stdev(kb.responseTimes)
|
|
|
|
|
|
|
|
if deviation > WARN_TIME_STDEV:
|
2011-04-19 18:28:51 +04:00
|
|
|
kb.adjustTimeDelay = False
|
|
|
|
|
2011-08-12 17:47:38 +04:00
|
|
|
warnMsg = "there is considerable lagging (standard deviation: "
|
2011-08-18 01:17:02 +04:00
|
|
|
warnMsg += "%.1f sec%s) " % (deviation, "s" if deviation > 1 else "")
|
2011-08-12 17:47:38 +04:00
|
|
|
warnMsg += "in connection response(s). Please use as high "
|
|
|
|
warnMsg += "value for --time-sec option as possible (e.g. "
|
|
|
|
warnMsg += "%d or more)" % (conf.timeSec * 2)
|
2011-04-19 18:50:09 +04:00
|
|
|
logger.critical(warnMsg)
|
2011-06-28 01:59:31 +04:00
|
|
|
elif not kb.testMode:
|
|
|
|
warnMsg = "it is very important not to stress the network adapter's "
|
|
|
|
warnMsg += "bandwidth during usage of time-based queries"
|
|
|
|
singleTimeWarnMessage(warnMsg)
|
2011-04-19 14:37:20 +04:00
|
|
|
|
2010-04-16 16:44:47 +04:00
|
|
|
if conf.safUrl and conf.saFreq > 0:
|
|
|
|
kb.queryCounter += 1
|
|
|
|
if kb.queryCounter % conf.saFreq == 0:
|
2011-02-12 02:07:03 +03:00
|
|
|
Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer)
|
2010-09-16 13:32:09 +04:00
|
|
|
|
2010-12-08 02:32:33 +03:00
|
|
|
start = time.time()
|
2010-12-08 02:49:00 +03:00
|
|
|
|
2010-12-20 19:45:41 +03:00
|
|
|
if kb.nullConnection and not content and not response and not timeBasedCompare:
|
2010-11-08 12:49:57 +03:00
|
|
|
if kb.nullConnection == NULLCONNECTION.HEAD:
|
2010-11-08 12:44:32 +03:00
|
|
|
method = HTTPMETHOD.HEAD
|
2010-11-08 12:49:57 +03:00
|
|
|
elif kb.nullConnection == NULLCONNECTION.RANGE:
|
2010-09-16 13:32:09 +04:00
|
|
|
if not auxHeaders:
|
|
|
|
auxHeaders = {}
|
2010-10-15 15:17:17 +04:00
|
|
|
|
2011-03-11 23:16:34 +03:00
|
|
|
auxHeaders[HTTPHEADER.RANGE] = "bytes=-1"
|
2010-10-10 22:56:43 +04:00
|
|
|
|
2011-08-12 20:48:11 +04:00
|
|
|
_, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404)
|
2010-10-10 22:56:43 +04:00
|
|
|
|
2011-06-08 01:46:49 +04:00
|
|
|
if headers:
|
|
|
|
if kb.nullConnection == NULLCONNECTION.HEAD and HTTPHEADER.CONTENT_LENGTH in headers:
|
|
|
|
pageLength = int(headers[HTTPHEADER.CONTENT_LENGTH])
|
|
|
|
elif kb.nullConnection == NULLCONNECTION.RANGE and HTTPHEADER.CONTENT_RANGE in headers:
|
|
|
|
pageLength = int(headers[HTTPHEADER.CONTENT_RANGE][headers[HTTPHEADER.CONTENT_RANGE].find('/') + 1:])
|
2010-09-16 13:47:33 +04:00
|
|
|
|
|
|
|
if not pageLength:
|
2011-08-12 20:48:11 +04:00
|
|
|
page, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)
|
2010-12-08 02:49:00 +03:00
|
|
|
|
2010-12-21 01:45:01 +03:00
|
|
|
threadData.lastQueryDuration = calculateDeltaSeconds(start)
|
2010-11-08 12:44:32 +03:00
|
|
|
|
2010-12-07 16:34:06 +03:00
|
|
|
if kb.testMode:
|
|
|
|
kb.testQueryCount += 1
|
2011-01-12 00:46:21 +03:00
|
|
|
|
2010-12-07 16:34:06 +03:00
|
|
|
if conf.cj:
|
|
|
|
conf.cj.clear()
|
2010-12-04 18:47:02 +03:00
|
|
|
|
2010-12-08 14:26:54 +03:00
|
|
|
if timeBasedCompare:
|
2010-12-08 17:26:40 +03:00
|
|
|
return wasLastRequestDelayed()
|
2010-12-08 17:33:10 +03:00
|
|
|
elif noteResponseTime:
|
2010-12-21 01:45:01 +03:00
|
|
|
kb.responseTimes.append(threadData.lastQueryDuration)
|
2010-12-07 19:04:53 +03:00
|
|
|
|
2011-10-24 04:46:54 +04:00
|
|
|
if not response and removeReflection:
|
|
|
|
page = removeReflectiveValues(page, payload)
|
|
|
|
|
2010-10-10 22:56:43 +04:00
|
|
|
if content or response:
|
2008-12-18 23:48:23 +03:00
|
|
|
return page, headers
|
2011-02-25 12:22:44 +03:00
|
|
|
|
|
|
|
if getRatioValue:
|
2011-08-12 20:48:11 +04:00
|
|
|
return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength)
|
2010-09-16 13:32:09 +04:00
|
|
|
elif pageLength or page:
|
2011-08-12 20:48:11 +04:00
|
|
|
return comparison(page, headers, code, getRatioValue, pageLength)
|
2008-12-18 23:38:57 +03:00
|
|
|
else:
|
|
|
|
return False
|