#!/usr/bin/env python
"""
2008-10-15 19:56:32 +04:00
$Id$
2008-10-15 19:38:22 +04:00
Copyright (c) 2006-2010 sqlmap developers (http://sqlmap.sourceforge.net/)
2010-10-15 03:18:29 +04:00
See the file 'doc/COPYING' for copying permission
2008-10-15 19:38:22 +04:00
"""

import httplib
import re
import socket
import time
import urllib2
import urlparse
import traceback

from extra.multipart import multipartpost
from lib.core.agent import agent
from lib.core.common import average
from lib.core.common import calculateDeltaSeconds
from lib.core.common import clearConsoleLine
from lib.core.common import cpuThrottle
from lib.core.common import extractRegexResult
from lib.core.common import getFilteredPageContent
from lib.core.common import getUnicode
from lib.core.common import logHTTPTraffic
from lib.core.common import parseTargetUrl
from lib.core.common import readInput
from lib.core.common import removeReflectiveValues
from lib.core.common import stdev
from lib.core.common import unicodeToSafeHTMLValue
from lib.core.common import urlEncodeCookieValues
from lib.core.common import wasLastRequestDelayed
from lib.core.convert import urlencode
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.enums import HTTPHEADER
from lib.core.enums import HTTPMETHOD
from lib.core.enums import NULLCONNECTION
from lib.core.enums import PLACE
from lib.core.exception import sqlmapConnectionException
from lib.core.exception import sqlmapSyntaxException
from lib.core.settings import HTTP_SILENT_TIMEOUT
from lib.core.settings import META_REFRESH_REGEX
from lib.core.settings import MIN_TIME_RESPONSES
from lib.core.settings import URI_HTTP_HEADER
from lib.core.threads import getCurrentThreadData
from lib.request.basic import decodePage
from lib.request.basic import forgeHeaders
from lib.request.basic import processResponse
from lib.request.comparison import comparison
from lib.request.direct import direct
from lib.request.methodrequest import MethodRequest
from lib.utils.checkpayload import checkPayload

class Connect:
    """
    This class defines methods used to perform HTTP requests
    """

    @staticmethod
    def __getPageProxy(**kwargs):
        return Connect.getPage(**kwargs)

    @staticmethod
    def getPage(**kwargs):
        """
        This method connects to the target url or proxy and returns
        the target url page content
        """

        if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)
        elif conf.cpuThrottle:
            cpuThrottle(conf.cpuThrottle)

        threadData = getCurrentThreadData()
        threadData.lastRequestUID += 1

        # Dirty hack because urllib2 just skips the part of the provided
        # url after a space char when urlencoding it in a later phase
        url = kwargs.get('url', conf.url).replace(" ", "%20")
        get = kwargs.get('get', None)
        post = kwargs.get('post', None)
        method = kwargs.get('method', None)
        cookie = kwargs.get('cookie', None)
        ua = kwargs.get('ua', None)
        referer = kwargs.get('referer', None)
        direct = kwargs.get('direct', False)
        multipart = kwargs.get('multipart', False)
        silent = kwargs.get('silent', False)
        raise404 = kwargs.get('raise404', True)
        auxHeaders = kwargs.get('auxHeaders', None)
        response = kwargs.get('response', False)
        ignoreTimeout = kwargs.get('ignoreTimeout', False)
        refreshing = kwargs.get('refreshing', False)
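
        # Bookkeeping for this request: requestMsg accumulates a textual copy
        # of the outgoing request that is later handed to logHTTPTraffic()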
        page = ""
        cookieStr = ""
        requestMsg = "HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, conf.method)
        requestMsg += "%s" % urlparse.urlsplit(url)[2] or "/"
        responseMsg = "HTTP response "
        requestHeaders = ""
        responseHeaders = None
        logHeaders = ""

        try:
            if silent:
                socket.setdefaulttimeout(HTTP_SILENT_TIMEOUT)

            if direct:
                if "?" in url:
                    url, params = url.split("?")
                    params = urlencode(params)
                    url = "%s?%s" % (url, params)
                    requestMsg += "?%s" % params

            elif multipart:
                # Needed in this form because of a potential circular dependency
                # problem (option -> update -> connect -> option)
                from lib.core.option import proxyHandler

                multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
                conn = multipartOpener.open(url, multipart)
                page = conn.read()
                responseHeaders = conn.info()
                responseHeaders[URI_HTTP_HEADER] = conn.geturl()
                page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))

                return page

            elif refreshing:
                # Reference(s):
                # http://vancouver-webpages.com/META/metatags.detail.html
                # http://webdesign.about.com/od/metataglibraries/a/aa080300a.htm
                get = None
                post = None

            else:
                if conf.parameters.has_key(PLACE.GET) and not get:
                    get = conf.parameters[PLACE.GET]

                if get:
                    url = "%s?%s" % (url, get)
                    requestMsg += "?%s" % get

                if conf.method == HTTPMETHOD.POST:
                    if conf.parameters.has_key(PLACE.POST) and not post:
                        post = conf.parameters[PLACE.POST]
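
            # Append the protocol version (e.g. "HTTP/1.1") to the logged
            # request line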
            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

            # Perform HTTP request
            headers = forgeHeaders(cookie, ua, referer)

            if conf.realTest:
                headers[HTTPHEADER.REFERER] = "%s://%s" % (conf.scheme, conf.hostname)

            if kb.authHeader:
                headers[HTTPHEADER.AUTHORIZATION] = kb.authHeader

            if kb.proxyAuthHeader:
                headers[HTTPHEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

            if auxHeaders:
                for key, item in auxHeaders.items():
                    headers[key] = item
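
            # Header names/values and the POST body are re-encoded with
            # unicodeToSafeHTMLValue, presumably so that non-ASCII characters
            # survive urllib2's byte-string handling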
            for key, item in headers.items():
                del headers[key]
                headers[unicodeToSafeHTMLValue(key)] = unicodeToSafeHTMLValue(item)

            post = unicodeToSafeHTMLValue(post)

            if method:
                req = MethodRequest(url, post, headers)
                req.set_method(method)
            else:
                req = urllib2.Request(url, post, headers)

            if not conf.dropSetCookie and conf.cj:
                for _, cookie in enumerate(conf.cj):
                    if not cookieStr:
                        cookieStr = "Cookie: "
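
                    # str() of a cookielib.Cookie looks like
                    # "<Cookie name=value for domain/path>"; the slicing below
                    # extracts just the "name=value" part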
                    cookie = getUnicode(cookie)
                    index = cookie.index(" for ")

                    cookieStr += "%s; " % cookie[8:index]
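
            # requestHeaders mirrors, for logging only, what urllib2 is about
            # to send; entries urllib2 adds on its own (e.g. "Accept-Encoding:
            # identity", "Connection: close") are reproduced here by hand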
            if not req.has_header(HTTPHEADER.ACCEPT_ENCODING):
                requestHeaders += "%s: identity\n" % HTTPHEADER.ACCEPT_ENCODING

            requestHeaders += "\n".join(["%s: %s" % (header, value) for header, value in req.header_items()])

            if not req.has_header(HTTPHEADER.COOKIE) and cookieStr:
                requestHeaders += "\n%s" % cookieStr[:-2]

            if not req.has_header(HTTPHEADER.CONNECTION):
                requestHeaders += "\n%s: close" % HTTPHEADER.CONNECTION

            requestMsg += "\n%s" % requestHeaders

            if post:
                requestMsg += "\n\n%s" % post

            requestMsg += "\n"

            logger.log(8, requestMsg)

            conn = urllib2.urlopen(req)

            if not kb.authHeader and req.has_header(HTTPHEADER.AUTHORIZATION):
                kb.authHeader = req.get_header(HTTPHEADER.AUTHORIZATION)

            if not kb.proxyAuthHeader and req.has_header(HTTPHEADER.PROXY_AUTHORIZATION):
                kb.proxyAuthHeader = req.get_header(HTTPHEADER.PROXY_AUTHORIZATION)

            if hasattr(conn, "setcookie"):
                kb.redirectSetCookie = conn.setcookie

            if hasattr(conn, "redurl") and hasattr(conn, "redcode") and not conf.redirectHandled and not conf.realTest:
                msg = "sqlmap got a %d redirect to " % conn.redcode
                msg += "%s - What target address do you " % conn.redurl
                msg += "want to use from now on? %s " % conf.url
                msg += "(default) or provide another target address based "
                msg += "on the redirection received from the application\n"

                while True:
                    choice = readInput(msg, default="1")

                    if not choice or choice == "1":
                        pass
                    else:
                        conf.url = choice

                        try:
                            parseTargetUrl()
                            return Connect.__getPageProxy(**kwargs)
                        except sqlmapSyntaxException:
                            continue

                    break

                conf.redirectHandled = True

            # Reset the number of connection retries
            kb.retriesCount = 0

            # Return response object
            if response:
                return conn, None

            # Get HTTP response
            page = conn.read()
            code = conn.code
            responseHeaders = conn.info()
            responseHeaders[URI_HTTP_HEADER] = conn.geturl()
            page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
            status = getUnicode(conn.msg)
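
            # If the page contains an HTML meta refresh tag, follow it once,
            # resolving a relative refresh url against the current target url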
            if extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE) and not refreshing:
                url = extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE)

                if url.lower().startswith('http://'):
                    kwargs['url'] = url
                else:
                    kwargs['url'] = conf.url[:conf.url.rfind('/')+1] + url

                kwargs['refreshing'] = True

                debugMsg = "got HTML meta refresh header"
                logger.debug(debugMsg)

                try:
                    return Connect.__getPageProxy(**kwargs)
                except sqlmapSyntaxException:
                    pass

            # Explicit closing of connection object
            if not conf.keepAlive:
                try:
                    conn.fp._sock.close()
                    conn.close()
                except Exception, msg:
                    warnMsg = "problem occurred during connection closing ('%s')" % msg
                    logger.warn(warnMsg)

        except urllib2.HTTPError, e:
            page = None
            responseHeaders = None

            try:
                page = e.read()
                responseHeaders = e.info()
                responseHeaders[URI_HTTP_HEADER] = e.geturl()
                page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
            except socket.timeout:
                warnMsg = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % e.code
                logger.warn(warnMsg)
                return None, None
            except:
                pass

            code = e.code
            threadData.lastHTTPError = (threadData.lastRequestUID, code)

            if code not in kb.httpErrorCodes:
                kb.httpErrorCodes[code] = 0

            kb.httpErrorCodes[code] += 1

            status = getUnicode(e.msg)
            responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

            if responseHeaders:
                logHeaders = "\n".join(["%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in responseHeaders.items()])

            logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, page if isinstance(page, unicode) else getUnicode(page)))

            if conf.verbose <= 5:
                responseMsg += getUnicode(logHeaders)
            elif conf.verbose > 5:
                responseMsg += "%s\n\n%s\n" % (logHeaders, page)

            logger.log(7, responseMsg)

            if e.code == 401:
                errMsg = "not authorized, try to provide the right HTTP "
                errMsg += "authentication type and valid credentials (%d)" % code
                raise sqlmapConnectionException, errMsg
            elif e.code == 404 and raise404:
                errMsg = "page not found (%d)" % code
                raise sqlmapConnectionException, errMsg
            else:
                debugMsg = "got HTTP error code: %d (%s)" % (code, status)
                logger.debug(debugMsg)
                page = processResponse(page, responseHeaders)
                return page, responseHeaders

        except (urllib2.URLError, socket.error, socket.timeout, httplib.BadStatusLine, httplib.IncompleteRead), e:
            tbMsg = traceback.format_exc()

            if "no host given" in tbMsg:
                warnMsg = "invalid url address used (%s)" % repr(url)
                raise sqlmapSyntaxException, warnMsg
            elif "forcibly closed" in tbMsg:
                warnMsg = "connection was forcibly closed by the target url"
            elif "timed out" in tbMsg:
                warnMsg = "connection timed out to the target url"
            elif "URLError" in tbMsg or "error" in tbMsg:
                warnMsg = "unable to connect to the target url"
            elif "BadStatusLine" in tbMsg:
                warnMsg = "the target url responded with an unknown HTTP "
                warnMsg += "status code, try to force the HTTP User-Agent "
                warnMsg += "header with option --user-agent or --random-agent"
            elif "IncompleteRead" in tbMsg:
                warnMsg = "there was an incomplete read error while retrieving data "
                warnMsg += "from the target url"
            else:
                warnMsg = "unable to connect to the target url"

            if "BadStatusLine" not in tbMsg:
                warnMsg += " or proxy"

            if "forcibly closed" in tbMsg:
                logger.critical(warnMsg)
                return None, None
            elif silent or (ignoreTimeout and any(map(lambda x: x in tbMsg, ["timed out", "IncompleteRead"]))):
                return None, None
            elif kb.retriesCount < conf.retries and not kb.threadException and not conf.realTest:
                kb.retriesCount += 1

                warnMsg += ", sqlmap is going to retry the request"
                logger.critical(warnMsg)

                time.sleep(1)

                socket.setdefaulttimeout(conf.timeout)
                return Connect.__getPageProxy(**kwargs)
            else:
                socket.setdefaulttimeout(conf.timeout)
                raise sqlmapConnectionException, warnMsg

        socket.setdefaulttimeout(conf.timeout)

        page = processResponse(page, responseHeaders)

        responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

        if responseHeaders:
            logHeaders = "\n".join(["%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in responseHeaders.items()])

        logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, page if isinstance(page, unicode) else getUnicode(page)))

        if conf.verbose <= 5:
            responseMsg += getUnicode(logHeaders)
        elif conf.verbose > 5:
            responseMsg += "%s\n\n%s\n" % (logHeaders, page)

        logger.log(7, responseMsg)

        return page, responseHeaders

    @staticmethod
    def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None):
        """
        This method calls a function to get the target url page content
        and returns its page MD5 hash or a boolean value in case of
        string match check ('--string' command line parameter)
        """

        if conf.direct:
            return direct(value, content)

        get = None
        post = None
        cookie = None
        ua = None
        referer = None
        page = None
        pageLength = None
        uri = None

        raise404 = place != PLACE.URI if raise404 is None else raise404

        if not place:
            place = kb.injection.place

        payload = agent.extractPayload(value)
        threadData = getCurrentThreadData()
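
        # Give tamper scripts (option --tamper), if any, a chance to mutate
        # the payload before it is encoded and placed into the request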
        if payload:
            if kb.tamperFunctions:
                for function in kb.tamperFunctions:
                    payload = function(payload)
                    value = agent.replacePayload(value, payload)

                logger.log(9, payload)

            if place == PLACE.COOKIE and conf.cookieUrlencode:
                value = agent.removePayloadDelimiters(value)
                value = urlEncodeCookieValues(value)
            elif place:
                if place in (PLACE.GET, PLACE.POST):
                    # payloads in GET and/or POST need to be urlencoded
                    # thoroughly, without safe chars (especially &, = and %)
                    payload = urlencode(payload, None, True, True)
                    value = agent.replacePayload(value, payload)

                value = agent.removePayloadDelimiters(value)

        if conf.checkPayload:
            checkPayload(value)

        if PLACE.GET in conf.parameters:
            get = urlencode(conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value, limit=True)

        if PLACE.POST in conf.parameters:
            post = urlencode(conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value)

        if PLACE.COOKIE in conf.parameters:
            cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value

        if PLACE.UA in conf.parameters:
            ua = conf.parameters[PLACE.UA] if place != PLACE.UA or not value else value

        if PLACE.REFERER in conf.parameters:
            referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value

        if PLACE.URI in conf.parameters:
            uri = conf.url if place != PLACE.URI or not value else value
        else:
            uri = conf.url
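
        # Time-based comparisons need a statistical baseline: before measuring
        # delays, sample enough ordinary response times with dummy requests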
        if timeBasedCompare:
            if len(kb.responseTimes) < MIN_TIME_RESPONSES:
                clearConsoleLine()

                warnMsg = "time-based comparison needs a larger statistical "
                warnMsg += "model, making a few dummy requests, please wait.."
                logger.warn(warnMsg)

                while len(kb.responseTimes) < MIN_TIME_RESPONSES:
                    Connect.queryPage(content=True)
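
        # Visit the safe url (option --safe-url) every conf.saFreq-th request
        # (option --safe-freq), e.g. to keep the web application session alive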
        if conf.safUrl and conf.saFreq > 0:
            kb.queryCounter += 1

            if kb.queryCounter % conf.saFreq == 0:
                Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer)

        start = time.time()
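
        # "Null connection" trick: obtain the page length without downloading
        # the body, either from Content-Length (HEAD request) or from the total
        # size inside Content-Range (Range request for a single byte)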
        if kb.nullConnection and not content and not response and not timeBasedCompare:
            if kb.nullConnection == NULLCONNECTION.HEAD:
                method = HTTPMETHOD.HEAD
            elif kb.nullConnection == NULLCONNECTION.RANGE:
                if not auxHeaders:
                    auxHeaders = {}

                auxHeaders[HTTPHEADER.RANGE] = "bytes=-1"

            _, headers = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404)

            if kb.nullConnection == NULLCONNECTION.HEAD and HTTPHEADER.CONTENT_LENGTH in headers:
                pageLength = int(headers[HTTPHEADER.CONTENT_LENGTH])
            elif kb.nullConnection == NULLCONNECTION.RANGE and HTTPHEADER.CONTENT_RANGE in headers:
                pageLength = int(headers[HTTPHEADER.CONTENT_RANGE][headers[HTTPHEADER.CONTENT_RANGE].find('/') + 1:])

        if not pageLength:
            page, headers = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)

        threadData.lastQueryDuration = calculateDeltaSeconds(start)

        if kb.testMode:
            kb.testQueryCount += 1

        if conf.cj:
            conf.cj.clear()

        if timeBasedCompare:
            return wasLastRequestDelayed()
        elif noteResponseTime:
            kb.responseTimes.append(threadData.lastQueryDuration)

        if content or response:
            return page, headers

        page = removeReflectiveValues(page, payload)
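
        # With getRatioValue set, return both the boolean comparison verdict
        # and the raw ratio, so the caller can inspect borderline matches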
        if getRatioValue:
            return comparison(page, getRatioValue=False, pageLength=pageLength), comparison(page, getRatioValue=True, pageLength=pageLength)
        elif pageLength or page:
            return comparison(page, getRatioValue, pageLength)
        else:
            return False