sqlmap/lib/request/connect.py

1401 lines
67 KiB
Python
Raw Normal View History

2019-05-08 13:47:52 +03:00
#!/usr/bin/env python
2008-10-15 19:38:22 +04:00
"""
2019-01-05 23:38:52 +03:00
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
2017-10-11 15:50:46 +03:00
See the file 'LICENSE' for copying permission
2008-10-15 19:38:22 +04:00
"""
2015-09-12 16:13:30 +03:00
import binascii
import logging
2019-07-19 13:17:07 +03:00
import random
2008-10-15 19:38:22 +04:00
import re
import socket
import string
2014-12-07 18:11:07 +03:00
import struct
import time
2014-03-21 23:28:16 +04:00
import traceback
2008-10-15 19:38:22 +04:00
try:
import websocket
from websocket import WebSocketException
except ImportError:
class WebSocketException(Exception):
pass
2008-10-15 19:38:22 +04:00
from lib.core.agent import agent
from lib.core.common import asciifyUrl
from lib.core.common import calculateDeltaSeconds
2016-12-20 11:53:44 +03:00
from lib.core.common import checkSameHost
2019-03-19 16:07:39 +03:00
from lib.core.common import chunkSplitPostData
2010-12-21 04:09:39 +03:00
from lib.core.common import clearConsoleLine
2014-03-07 00:08:31 +04:00
from lib.core.common import dataToStdout
2018-04-11 16:19:44 +03:00
from lib.core.common import escapeJsonValue
2012-02-16 18:42:28 +04:00
from lib.core.common import evaluateCode
from lib.core.common import extractRegexResult
2019-03-29 04:28:16 +03:00
from lib.core.common import filterNone
2012-10-16 14:32:58 +04:00
from lib.core.common import findMultipartPostBoundary
2010-12-21 01:45:01 +03:00
from lib.core.common import getCurrentThreadData
2015-06-05 18:02:56 +03:00
from lib.core.common import getHeader
2011-11-11 15:28:27 +04:00
from lib.core.common import getHostHeader
from lib.core.common import getRequestHeader
from lib.core.common import getSafeExString
from lib.core.common import isMultiThreadMode
2010-11-08 14:22:47 +03:00
from lib.core.common import logHTTPTraffic
2019-08-02 21:29:52 +03:00
from lib.core.common import openFile
2013-05-17 18:04:05 +04:00
from lib.core.common import popValue
2019-06-04 15:44:06 +03:00
from lib.core.common import pushValue
from lib.core.common import randomizeParameterValue
from lib.core.common import randomInt
from lib.core.common import randomStr
from lib.core.common import readInput
from lib.core.common import removeReflectiveValues
2017-10-10 17:08:13 +03:00
from lib.core.common import safeVariableNaming
from lib.core.common import singleTimeLogMessage
2011-06-08 18:35:23 +04:00
from lib.core.common import singleTimeWarnMessage
2010-12-08 14:26:54 +03:00
from lib.core.common import stdev
2017-10-10 17:08:13 +03:00
from lib.core.common import unsafeVariableNaming
from lib.core.common import urldecode
2012-07-31 13:03:44 +04:00
from lib.core.common import urlencode
2019-06-04 15:44:06 +03:00
from lib.core.common import wasLastResponseDelayed
2019-05-02 17:54:54 +03:00
from lib.core.compat import patchHeaders
2019-03-28 18:04:38 +03:00
from lib.core.compat import xrange
2019-05-03 14:20:15 +03:00
from lib.core.convert import getBytes
2019-08-02 21:29:52 +03:00
from lib.core.convert import getText
2019-05-06 01:54:21 +03:00
from lib.core.convert import getUnicode
2008-10-15 19:38:22 +04:00
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
2018-12-10 16:53:11 +03:00
from lib.core.datatype import AttribDict
from lib.core.decorators import stackedmethod
2012-10-04 13:25:44 +04:00
from lib.core.dicts import POST_HINT_CONTENT_TYPES
2012-10-09 17:19:47 +04:00
from lib.core.enums import ADJUST_TIME_DELAY
from lib.core.enums import AUTH_TYPE
2011-12-26 16:24:39 +04:00
from lib.core.enums import CUSTOM_LOGGING
2018-10-26 13:08:04 +03:00
from lib.core.enums import HINT
from lib.core.enums import HTTP_HEADER
2010-11-08 12:44:32 +03:00
from lib.core.enums import HTTPMETHOD
2010-11-08 12:49:57 +03:00
from lib.core.enums import NULLCONNECTION
from lib.core.enums import PAYLOAD
from lib.core.enums import PLACE
2012-10-04 13:25:44 +04:00
from lib.core.enums import POST_HINT
from lib.core.enums import REDIRECTION
2018-12-21 13:29:57 +03:00
from lib.core.enums import WEB_PLATFORM
from lib.core.exception import SqlmapCompressionException
from lib.core.exception import SqlmapConnectionException
2014-11-05 12:03:19 +03:00
from lib.core.exception import SqlmapGenericException
from lib.core.exception import SqlmapSyntaxException
2014-10-23 13:23:53 +04:00
from lib.core.exception import SqlmapTokenException
from lib.core.exception import SqlmapUserQuitException
from lib.core.exception import SqlmapValueException
from lib.core.settings import ASTERISK_MARKER
from lib.core.settings import BOUNDARY_BACKSLASH_MARKER
2012-10-04 13:25:44 +04:00
from lib.core.settings import DEFAULT_CONTENT_TYPE
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
2018-11-29 02:09:05 +03:00
from lib.core.settings import DEFAULT_USER_AGENT
2019-03-05 14:24:41 +03:00
from lib.core.settings import EVALCODE_ENCODED_PREFIX
2012-07-23 16:14:22 +04:00
from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
2019-06-04 15:44:06 +03:00
from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE
from lib.core.settings import IPS_WAF_CHECK_PAYLOAD
from lib.core.settings import IS_WIN
from lib.core.settings import JAVASCRIPT_HREF_REGEX
2019-06-13 11:59:56 +03:00
from lib.core.settings import LARGE_READ_TRIM_MARKER
2019-06-13 11:58:21 +03:00
from lib.core.settings import MAX_CONNECTION_READ_SIZE
2012-10-02 15:36:15 +04:00
from lib.core.settings import MAX_CONNECTIONS_REGEX
2012-08-07 02:50:58 +04:00
from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
2016-10-02 12:13:40 +03:00
from lib.core.settings import MAX_CONSECUTIVE_CONNECTION_ERRORS
2016-09-27 15:03:59 +03:00
from lib.core.settings import MAX_MURPHY_SLEEP_TIME
from lib.core.settings import META_REFRESH_REGEX
2018-11-22 10:07:27 +03:00
from lib.core.settings import MAX_TIME_RESPONSES
2019-06-04 15:44:06 +03:00
from lib.core.settings import MIN_TIME_RESPONSES
from lib.core.settings import PAYLOAD_DELIMITER
2012-10-02 15:36:15 +04:00
from lib.core.settings import PERMISSION_DENIED_REGEX
from lib.core.settings import PLAIN_TEXT_CONTENT_TYPE
2016-01-09 19:32:19 +03:00
from lib.core.settings import RANDOM_INTEGER_MARKER
from lib.core.settings import RANDOM_STRING_MARKER
from lib.core.settings import REPLACEMENT_MARKER
2014-08-21 01:42:40 +04:00
from lib.core.settings import TEXT_CONTENT_TYPE_REGEX
from lib.core.settings import UNENCODED_ORIGINAL_VALUE
from lib.core.settings import UNICODE_ENCODING
from lib.core.settings import URI_HTTP_HEADER
from lib.core.settings import WARN_TIME_STDEV
from lib.request.basic import decodePage
2008-10-15 19:38:22 +04:00
from lib.request.basic import forgeHeaders
from lib.request.basic import processResponse
from lib.request.comparison import comparison
2019-06-04 15:44:06 +03:00
from lib.request.direct import direct
from lib.request.methodrequest import MethodRequest
2019-09-11 15:05:25 +03:00
from lib.utils.safe2bin import safecharencode
from thirdparty import six
from thirdparty.odict import OrderedDict
2019-05-15 11:57:22 +03:00
from thirdparty.six import unichr as _unichr
2019-03-27 04:46:59 +03:00
from thirdparty.six.moves import http_client as _http_client
from thirdparty.six.moves import urllib as _urllib
2014-10-22 15:41:36 +04:00
from thirdparty.socks.socks import ProxyError
2008-10-15 19:38:22 +04:00
class Connect(object):
2008-10-15 19:38:22 +04:00
"""
This class defines methods used to perform HTTP requests
"""
@staticmethod
def _getPageProxy(**kwargs):
    """
    Thin guard around Connect.getPage() used by retry/refresh paths.

    Delegates all keyword arguments verbatim and converts a RuntimeError
    (e.g. maximum recursion depth exceeded during nested retries) into the
    neutral "no response" triple instead of propagating it.

    Returns the same (page, headers, code) tuple as getPage(), or
    (None, None, None) on RuntimeError.
    """
    try:
        result = Connect.getPage(**kwargs)
    except RuntimeError:
        result = (None, None, None)

    return result
@staticmethod
def _retryProxy(**kwargs):
    """
    Re-attempts the last HTTP request after a connection failure.

    Increments the per-thread retry counter, optionally rotates to another
    proxy from the user-supplied proxy list, emits a one-time diagnostic
    warning appropriate to the current situation, and finally re-issues the
    request via Connect._getPageProxy() with 'retrying' flagged so the
    retry counter is not reset downstream.

    Accepts the same keyword arguments as Connect.getPage() and returns its
    (page, headers, code) result tuple.
    """
    threadData = getCurrentThreadData()
    threadData.retriesCount += 1

    # Once this thread has exhausted its retries on the current proxy,
    # drop it and let the handlers pick a fresh one from the list.  The
    # lock check ensures only one thread re-installs the HTTP handlers.
    if conf.proxyList and threadData.retriesCount >= conf.retries and not kb.locks.handlers.locked():
        warnMsg = "changing proxy"
        logger.warn(warnMsg)

        conf.proxy = None
        threadData.retriesCount = 0

        # NOTE(review): setHTTPHandlers is not among the imports visible in
        # this chunk - confirm it is brought into scope elsewhere in the file
        setHTTPHandlers()

    # Pick the single most plausible explanation for the failure and warn
    # about it once (singleTimeWarnMessage dedups on the exact message text)
    if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME:
        # timed based payloads can cause web server unresponsiveness
        # if the injectable piece of code is some kind of JOIN-like query
        warnMsg = "most likely web server instance hasn't recovered yet "
        warnMsg += "from previous timed based payload. If the problem "
        warnMsg += "persists please wait for a few minutes and rerun "
        warnMsg += "without flag 'T' in option '--technique' "
        warnMsg += "(e.g. '--flush-session --technique=BEUS') or try to "
        warnMsg += "lower the value of option '--time-sec' (e.g. '--time-sec=2')"
        singleTimeWarnMessage(warnMsg)

    # No page was ever retrieved successfully: the target (or the Tor
    # circuit, if used) is probably unreachable
    elif kb.originalPage is None:
        if conf.tor:
            warnMsg = "please make sure that you have "
            warnMsg += "Tor installed and running so "
            warnMsg += "you could successfully use "
            warnMsg += "switch '--tor' "
            if IS_WIN:
                warnMsg += "(e.g. 'https://www.torproject.org/download/download.html.en')"
            else:
                warnMsg += "(e.g. 'https://help.ubuntu.com/community/Tor')"
        else:
            warnMsg = "if the problem persists please check that the provided "
            warnMsg += "target URL is reachable. In case that it is, "
            warnMsg += "you can try to rerun with "
            if not conf.randomAgent:
                warnMsg += "switch '--random-agent' and/or "
            warnMsg += "proxy switches ('--ignore-proxy', '--proxy',...)"
        singleTimeWarnMessage(warnMsg)

    # Connection worked before; heavy threading may be overloading the target
    elif conf.threads > 1:
        warnMsg = "if the problem persists please try to lower "
        warnMsg += "the number of used threads (option '--threads')"
        singleTimeWarnMessage(warnMsg)

    # Mark the re-issued request as a retry so the retry counter survives
    kwargs['retrying'] = True
    return Connect._getPageProxy(**kwargs)
@staticmethod
def _connReadProxy(conn):
    """
    Reads the HTTP response body from a connection object with size guards.

    Compressed (or non-textual) responses are read in a single bounded
    chunk; everything else is read incrementally in MAX_CONNECTION_READ_SIZE
    pieces, trimming payload-delimited regions of oversized chunks and
    capping the total at MAX_CONNECTION_TOTAL_SIZE.

    :param conn: connection-like object exposing read() and info()
    :return: response body as bytes (b"" in DNS exfiltration mode or
             when conn is falsy)
    :raises SqlmapCompressionException: when a compressed response hits the
            MAX_CONNECTION_TOTAL_SIZE cap; compression is disabled first so
            the retried request reads an uncompressed stream
    """
    retVal = b""

    if not kb.dnsMode and conn:
        headers = conn.info()

        # Compressed or non-text content can't be trimmed mid-stream
        # meaningfully, so it is read as one size-capped chunk
        if kb.pageCompress and headers and hasattr(headers, "getheader") and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate") or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()):
            retVal = conn.read(MAX_CONNECTION_TOTAL_SIZE)
            if len(retVal) == MAX_CONNECTION_TOTAL_SIZE:
                warnMsg = "large compressed response detected. Disabling compression"
                singleTimeWarnMessage(warnMsg)
                kb.pageCompress = False
                raise SqlmapCompressionException
        else:
            while conn:
                try:
                    part = conn.read(MAX_CONNECTION_READ_SIZE)
                except AssertionError:
                    # Fix: must be bytes - the original str "" raised
                    # TypeError on 'retVal += part' under Python 3
                    part = b""

                # A full-sized chunk means there is more to come: trim the
                # region between payload delimiter chars and keep reading
                if len(part) == MAX_CONNECTION_READ_SIZE:
                    warnMsg = "large response detected. This could take a while"
                    singleTimeWarnMessage(warnMsg)
                    # Fix: pattern/replacement converted with getBytes() so
                    # re.sub() operates bytes-on-bytes under Python 3
                    part = re.sub(getBytes(r"(?si)%s.+?%s" % (kb.chars.stop, kb.chars.start)), getBytes("%s%s%s" % (kb.chars.stop, LARGE_READ_TRIM_MARKER, kb.chars.start)), part)
                    retVal += part
                else:
                    # Short (or empty) read - end of response body
                    retVal += part
                    break

                if len(retVal) > MAX_CONNECTION_TOTAL_SIZE:
                    warnMsg = "too large response detected. Automatically trimming it"
                    singleTimeWarnMessage(warnMsg)
                    break

    return retVal
2008-10-15 19:38:22 +04:00
@staticmethod
def getPage(**kwargs):
"""
This method connects to the target URL or proxy and returns
the target URL page content
2008-10-15 19:38:22 +04:00
"""
if conf.offline:
return None, None, None
2010-11-08 14:22:47 +03:00
url = kwargs.get("url", None) or conf.url
get = kwargs.get("get", None)
post = kwargs.get("post", None)
method = kwargs.get("method", None)
cookie = kwargs.get("cookie", None)
ua = kwargs.get("ua", None) or conf.agent
referer = kwargs.get("referer", None) or conf.referer
host = kwargs.get("host", None) or conf.host
direct_ = kwargs.get("direct", False)
multipart = kwargs.get("multipart", None)
silent = kwargs.get("silent", False)
raise404 = kwargs.get("raise404", True)
timeout = kwargs.get("timeout", None) or conf.timeout
auxHeaders = kwargs.get("auxHeaders", None)
response = kwargs.get("response", False)
2016-10-14 00:17:54 +03:00
ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout or conf.ignoreTimeouts
refreshing = kwargs.get("refreshing", False)
retrying = kwargs.get("retrying", False)
crawling = kwargs.get("crawling", False)
checking = kwargs.get("checking", False)
skipRead = kwargs.get("skipRead", False)
finalCode = kwargs.get("finalCode", False)
2019-03-19 16:07:39 +03:00
chunked = kwargs.get("chunked", False) or conf.chunked
2015-05-11 11:56:10 +03:00
2019-05-24 11:58:47 +03:00
start = time.time()
if isinstance(conf.delay, (int, float)) and conf.delay > 0:
time.sleep(conf.delay)
threadData = getCurrentThreadData()
with kb.locks.request:
kb.requestCounter += 1
threadData.lastRequestUID = kb.requestCounter
if conf.dummy or conf.murphyRate and randomInt() % conf.murphyRate == 0:
if conf.murphyRate:
time.sleep(randomInt() % (MAX_MURPHY_SLEEP_TIME + 1))
page, headers, code = randomStr(int(randomInt()), alphabet=[_unichr(_) for _ in xrange(256)]), None, None if not conf.murphyRate else randomInt(3)
threadData.lastPage = page
threadData.lastCode = code
return page, headers, code
2016-09-02 15:14:17 +03:00
if multipart:
post = multipart
2019-05-24 16:01:43 +03:00
else:
if not post:
chunked = False
2019-03-19 16:07:39 +03:00
2019-05-24 16:01:43 +03:00
elif chunked:
post = _urllib.parse.unquote(post)
post = chunkSplitPostData(post)
2016-09-02 15:14:17 +03:00
2019-06-07 00:13:34 +03:00
webSocket = url.lower().startswith("ws")
2019-03-27 04:46:59 +03:00
if not _urllib.parse.urlsplit(url).netloc:
url = _urllib.parse.urljoin(conf.url, url)
# flag to know if we are dealing with the same target host
2016-12-20 11:53:44 +03:00
target = checkSameHost(url, conf.url)
if not retrying:
# Reset the number of connection retries
threadData.retriesCount = 0
# fix for known issue when urllib2 just skips the other part of provided
# url splitted with space char while urlencoding it in the later phase
url = url.replace(" ", "%20")
2011-04-30 17:20:05 +04:00
2017-05-21 23:52:27 +03:00
if "://" not in url:
url = "http://%s" % url
conn = None
2011-11-11 15:07:49 +04:00
page = None
2017-05-17 01:22:18 +03:00
code = None
status = None
2012-09-08 19:58:03 +04:00
2019-03-27 04:46:59 +03:00
_ = _urllib.parse.urlsplit(url)
2017-07-04 13:14:17 +03:00
requestMsg = u"HTTP request [#%d]:\r\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
2017-06-18 14:19:11 +03:00
requestMsg += getUnicode(("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling, checking)) else url)
2011-11-14 15:39:18 +04:00
responseMsg = u"HTTP response "
requestHeaders = u""
2011-01-25 19:05:06 +03:00
responseHeaders = None
2011-11-14 15:39:18 +04:00
logHeaders = u""
2012-03-14 18:31:41 +04:00
skipLogTraffic = False
2008-10-15 19:38:22 +04:00
2011-12-05 13:25:56 +04:00
raise404 = raise404 and not kb.ignoreNotFound
# support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
# support those by default
url = asciifyUrl(url)
2008-10-15 19:38:22 +04:00
try:
2013-03-19 22:24:14 +04:00
socket.setdefaulttimeout(timeout)
if direct_:
2013-05-27 12:38:47 +04:00
if '?' in url:
url, params = url.split('?', 1)
params = urlencode(params)
url = "%s?%s" % (url, params)
2017-05-08 00:12:42 +03:00
elif any((refreshing, crawling, checking)):
2011-05-27 20:26:00 +04:00
pass
2011-05-13 13:56:12 +04:00
elif target:
if conf.forceSSL:
url = re.sub(r"(?i)\A(http|ws):", r"\g<1>s:", url)
2017-10-31 13:38:09 +03:00
url = re.sub(r"(?i):80/", ":443/", url)
2012-07-14 13:01:30 +04:00
if PLACE.GET in conf.parameters and not get:
get = conf.parameters[PLACE.GET]
2013-06-04 02:05:25 +04:00
if not conf.skipUrlEncode:
get = urlencode(get, limit=True)
if get:
2015-01-17 19:31:00 +03:00
if '?' in url:
url = "%s%s%s" % (url, DEFAULT_GET_POST_DELIMITER, get)
requestMsg += "%s%s" % (DEFAULT_GET_POST_DELIMITER, get)
else:
url = "%s?%s" % (url, get)
requestMsg += "?%s" % get
2014-11-21 11:41:39 +03:00
if PLACE.POST in conf.parameters and not post and method != HTTPMETHOD.GET:
2013-06-03 17:14:56 +04:00
post = conf.parameters[PLACE.POST]
2011-05-13 13:56:12 +04:00
elif get:
url = "%s?%s" % (url, get)
requestMsg += "?%s" % get
2019-03-27 04:46:59 +03:00
requestMsg += " %s" % _http_client.HTTPConnection._http_vsn_str
# Prepare HTTP headers
2018-03-26 16:39:48 +03:00
headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: host}, base=None if target else {})
2015-12-03 03:43:37 +03:00
if HTTP_HEADER.COOKIE in headers:
cookie = headers[HTTP_HEADER.COOKIE]
2010-10-18 12:54:08 +04:00
if kb.authHeader:
headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader
2010-10-18 12:54:08 +04:00
if kb.proxyAuthHeader:
headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader
2018-11-29 02:09:05 +03:00
if not conf.requestFile or not target:
if not getHeader(headers, HTTP_HEADER.HOST):
headers[HTTP_HEADER.HOST] = getHostHeader(url)
2018-11-29 02:09:05 +03:00
if not getHeader(headers, HTTP_HEADER.ACCEPT):
headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
2015-03-20 02:56:36 +03:00
2018-11-29 02:09:05 +03:00
if not getHeader(headers, HTTP_HEADER.ACCEPT_ENCODING):
headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
elif conf.requestFile and getHeader(headers, HTTP_HEADER.USER_AGENT) == DEFAULT_USER_AGENT:
for header in headers:
if header.upper() == HTTP_HEADER.USER_AGENT.upper():
del headers[header]
break
2016-09-02 15:14:17 +03:00
if post is not None and not multipart and not getHeader(headers, HTTP_HEADER.CONTENT_TYPE):
headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)
2012-10-04 13:25:44 +04:00
if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
warnMsg = "missing 'boundary parameter' in '%s' header. " % HTTP_HEADER.CONTENT_TYPE
2012-10-16 14:32:58 +04:00
warnMsg += "Will try to reconstruct"
singleTimeWarnMessage(warnMsg)
boundary = findMultipartPostBoundary(conf.data)
if boundary:
headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary)
2012-10-16 14:32:58 +04:00
2016-05-17 11:47:17 +03:00
if conf.keepAlive:
headers[HTTP_HEADER.CONNECTION] = "keep-alive"
2019-04-19 14:54:48 +03:00
if chunked:
2019-03-19 16:07:39 +03:00
headers[HTTP_HEADER.TRANSFER_ENCODING] = "chunked"
2016-05-17 11:47:17 +03:00
2010-09-16 12:43:10 +04:00
if auxHeaders:
2017-08-16 04:08:58 +03:00
headers = forgeHeaders(auxHeaders, headers)
2010-09-16 12:43:10 +04:00
2019-08-02 21:29:52 +03:00
if kb.headersFile:
content = openFile(kb.headersFile, "rb").read()
for line in content.split("\n"):
line = getText(line.strip())
if ':' in line:
header, value = line.split(':', 1)
headers[header] = value
2019-05-02 01:45:44 +03:00
for key, value in list(headers.items()):
del headers[key]
2019-05-06 00:37:48 +03:00
if isinstance(value, six.string_types):
for char in (r"\r", r"\n"):
value = re.sub(r"(%s)([^ \t])" % char, r"\g<1>\t\g<2>", value)
headers[getBytes(key) if six.PY2 else key] = getBytes(value.strip("\r\n")) # Note: Python3 has_header() expects non-bytes value
2019-05-03 17:03:08 +03:00
if six.PY2:
url = getBytes(url) # Note: Python3 requires text while Python2 has problems when mixing text with binary POST
2019-04-18 12:52:33 +03:00
post = getBytes(post)
2019-06-07 00:13:34 +03:00
if webSocket:
2015-03-24 13:21:50 +03:00
ws = websocket.WebSocket()
ws.settimeout(timeout)
2015-05-11 11:56:10 +03:00
ws.connect(url, header=("%s: %s" % _ for _ in headers.items() if _[0] not in ("Host",)), cookie=cookie) # WebSocket will add Host field of headers automatically
ws.send(urldecode(post or ""))
page = ws.recv()
2015-03-24 13:21:50 +03:00
ws.close()
2015-05-11 11:56:10 +03:00
code = ws.status
status = _http_client.responses[code]
2018-06-10 00:38:00 +03:00
2015-05-11 11:56:10 +03:00
class _(dict):
pass
2018-06-10 00:38:00 +03:00
2015-05-11 11:56:10 +03:00
responseHeaders = _(ws.getheaders())
responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()]
requestHeaders += "\r\n".join(["%s: %s" % (getUnicode(key.capitalize() if hasattr(key, "capitalize") else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
2017-07-04 13:14:17 +03:00
requestMsg += "\r\n%s" % requestHeaders
2015-05-11 11:56:10 +03:00
if post is not None:
2017-07-04 13:14:17 +03:00
requestMsg += "\r\n\r\n%s" % getUnicode(post)
2017-07-04 13:14:17 +03:00
requestMsg += "\r\n"
2015-05-11 11:56:10 +03:00
threadData.lastRequestMsg = requestMsg
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
else:
2015-05-11 11:56:10 +03:00
if method and method not in (HTTPMETHOD.GET, HTTPMETHOD.POST):
req = MethodRequest(url, post, headers)
req.set_method(method)
elif url is not None:
2019-03-27 04:46:59 +03:00
req = _urllib.request.Request(url, post, headers)
else:
return None, None, None
requestHeaders += "\r\n".join(["%s: %s" % (getUnicode(key.capitalize() if hasattr(key, "capitalize") else key), getUnicode(value)) for (key, value) in req.header_items()])
2010-10-29 03:22:13 +04:00
2015-05-11 11:56:10 +03:00
if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
conf.cj._policy._now = conf.cj._now = int(time.time())
cookies = conf.cj._cookies_for_request(req)
2017-07-04 13:14:17 +03:00
requestHeaders += "\r\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))
2010-10-29 03:22:13 +04:00
2015-05-11 11:56:10 +03:00
if post is not None:
if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH) and not chunked:
2017-07-04 13:14:17 +03:00
requestHeaders += "\r\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))
2015-05-11 11:56:10 +03:00
if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
2017-07-04 13:14:17 +03:00
requestHeaders += "\r\n%s: %s" % (HTTP_HEADER.CONNECTION, "close" if not conf.keepAlive else "keep-alive")
2010-10-29 03:22:13 +04:00
2017-07-04 13:14:17 +03:00
requestMsg += "\r\n%s" % requestHeaders
2008-10-15 19:38:22 +04:00
2015-05-11 11:56:10 +03:00
if post is not None:
2017-07-04 13:14:17 +03:00
requestMsg += "\r\n\r\n%s" % getUnicode(post)
2008-10-15 19:38:22 +04:00
if not chunked:
requestMsg += "\r\n"
2008-10-15 19:38:22 +04:00
2016-09-02 15:14:17 +03:00
if not multipart:
threadData.lastRequestMsg = requestMsg
2016-09-02 15:14:17 +03:00
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
2008-10-15 19:38:22 +04:00
2015-10-13 14:31:28 +03:00
if conf.cj:
for cookie in conf.cj:
if cookie.value is None:
cookie.value = ""
2015-12-15 13:29:37 +03:00
else:
for char in (r"\r", r"\n"):
cookie.value = re.sub(r"(%s)([^ \t])" % char, r"\g<1>\t\g<2>", cookie.value)
2015-10-13 14:31:28 +03:00
2019-03-27 04:46:59 +03:00
conn = _urllib.request.urlopen(req)
2015-05-11 11:56:10 +03:00
if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and (conf.authType or "").lower() == AUTH_TYPE.BASIC.lower():
kb.authHeader = getRequestHeader(req, HTTP_HEADER.AUTHORIZATION)
2010-10-18 12:54:08 +04:00
2015-05-11 11:56:10 +03:00
if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION):
kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION)
2015-05-11 11:56:10 +03:00
# Return response object
if response:
return conn, None, None
2015-05-11 11:56:10 +03:00
# Get HTTP response
2017-01-16 16:29:23 +03:00
if hasattr(conn, "redurl"):
2018-02-07 18:05:41 +03:00
page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO else Connect._connReadProxy(conn)) if not skipRead else None
2015-05-11 11:56:10 +03:00
skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
code = conn.redcode if not finalCode else code
2015-05-11 11:56:10 +03:00
else:
page = Connect._connReadProxy(conn) if not skipRead else None
2011-12-22 02:59:23 +04:00
2017-03-12 11:52:37 +03:00
if conn:
code = (code or conn.code) if conn.code == kb.originalCode else conn.code # do not override redirection code (for comparison purposes)
2017-03-12 11:52:37 +03:00
responseHeaders = conn.info()
responseHeaders[URI_HTTP_HEADER] = conn.geturl()
2019-05-02 17:54:54 +03:00
patchHeaders(responseHeaders)
kb.serverHeader = responseHeaders.get(HTTP_HEADER.SERVER, kb.serverHeader)
2017-03-12 11:52:37 +03:00
else:
code = None
responseHeaders = {}
2019-11-13 00:51:11 +03:00
page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE), percentDecode=not crawling)
2018-04-06 02:13:04 +03:00
status = getUnicode(conn.msg) if conn and getattr(conn, "msg", None) else None
2016-10-02 12:13:40 +03:00
kb.connErrorCounter = 0
2017-01-16 16:29:23 +03:00
if not refreshing:
2017-01-16 17:23:38 +03:00
refresh = responseHeaders.get(HTTP_HEADER.REFRESH, "").split("url=")[-1].strip()
2017-01-16 16:29:23 +03:00
if extractRegexResult(META_REFRESH_REGEX, page):
refresh = extractRegexResult(META_REFRESH_REGEX, page)
2017-01-16 16:29:23 +03:00
debugMsg = "got HTML meta refresh header"
logger.debug(debugMsg)
if not refresh:
refresh = extractRegexResult(JAVASCRIPT_HREF_REGEX, page)
2019-11-09 01:28:51 +03:00
if refresh:
debugMsg = "got Javascript redirect request"
logger.debug(debugMsg)
2017-01-16 16:29:23 +03:00
if refresh:
if kb.alwaysRefresh is None:
2019-11-07 02:03:06 +03:00
msg = "got a refresh request "
msg += "(redirect like response common to login pages) to '%s'. " % refresh
2017-01-16 16:29:23 +03:00
msg += "Do you want to apply the refresh "
msg += "from now on (or stay on the original page)? [Y/n]"
2017-04-18 16:48:05 +03:00
kb.alwaysRefresh = readInput(msg, default='Y', boolean=True)
2017-01-16 16:29:23 +03:00
if kb.alwaysRefresh:
if re.search(r"\Ahttps?://", refresh, re.I):
url = refresh
else:
2019-03-27 04:46:59 +03:00
url = _urllib.parse.urljoin(url, refresh)
2017-01-16 16:29:23 +03:00
threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
kwargs["refreshing"] = True
kwargs["url"] = url
kwargs["get"] = None
kwargs["post"] = None
try:
return Connect._getPageProxy(**kwargs)
except SqlmapSyntaxException:
pass
# Explicit closing of connection object
2015-05-11 11:56:10 +03:00
if conn and not conf.keepAlive:
try:
2011-10-21 13:06:00 +04:00
if hasattr(conn.fp, '_sock'):
conn.fp._sock.close()
conn.close()
2019-01-22 02:40:48 +03:00
except Exception as ex:
warnMsg = "problem occurred during connection closing ('%s')" % getSafeExString(ex)
logger.warn(warnMsg)
2019-01-22 02:40:48 +03:00
except SqlmapConnectionException as ex:
2017-08-28 12:08:36 +03:00
if conf.proxyList and not kb.threadException:
2017-08-23 14:52:51 +03:00
warnMsg = "unable to connect to the target URL ('%s')" % ex
2017-08-28 12:08:36 +03:00
logger.critical(warnMsg)
threadData.retriesCount = conf.retries
return Connect._retryProxy(**kwargs)
2017-08-23 14:52:51 +03:00
else:
raise
2019-03-27 04:46:59 +03:00
except _urllib.error.HTTPError as ex:
page = None
2011-01-25 19:05:06 +03:00
responseHeaders = None
2011-02-01 01:51:14 +03:00
2017-05-08 00:12:42 +03:00
if checking:
return None, None, None
try:
page = ex.read() if not skipRead else None
responseHeaders = ex.info()
responseHeaders[URI_HTTP_HEADER] = ex.geturl()
2019-05-02 17:54:54 +03:00
patchHeaders(responseHeaders)
2019-11-13 00:51:11 +03:00
page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE), percentDecode=not crawling)
except socket.timeout:
2011-04-30 17:20:05 +04:00
warnMsg = "connection timed out while trying "
warnMsg += "to get error page information (%d)" % ex.code
logger.warn(warnMsg)
return None, None, None
2012-01-16 14:04:18 +04:00
except KeyboardInterrupt:
raise
except:
pass
2012-01-16 14:04:18 +04:00
finally:
2019-03-29 04:28:16 +03:00
page = getUnicode(page)
code = ex.code
2019-05-06 13:19:27 +03:00
status = getUnicode(getattr(ex, "reason", None) or getSafeExString(ex).split(": ", 1)[-1])
2014-12-03 15:22:55 +03:00
kb.originalCode = kb.originalCode or code
2017-05-17 01:22:18 +03:00
threadData.lastHTTPError = (threadData.lastRequestUID, code, status)
2012-03-15 15:10:58 +04:00
kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1
2018-11-04 16:17:53 +03:00
responseMsg += "[#%d] (%s %s):\r\n" % (threadData.lastRequestUID, code, status)
if responseHeaders:
2019-03-07 16:58:55 +03:00
logHeaders = getUnicode("".join(responseHeaders.headers).strip())
2019-06-13 11:58:21 +03:00
logHTTPTraffic(requestMsg, "%s%s\r\n\r\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE]), start, time.time())
2010-11-17 15:04:33 +03:00
skipLogTraffic = True
if conf.verbose <= 5:
responseMsg += getUnicode(logHeaders)
elif conf.verbose > 5:
2019-06-13 11:58:21 +03:00
responseMsg += "%s\r\n\r\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE])
2016-09-02 15:14:17 +03:00
if not multipart:
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
2019-07-17 14:20:24 +03:00
if ex.code not in (conf.ignoreCode or []):
2019-03-27 04:46:59 +03:00
if ex.code == _http_client.UNAUTHORIZED:
2017-08-23 14:17:37 +03:00
errMsg = "not authorized, try to provide right HTTP "
errMsg += "authentication type and valid credentials (%d)" % code
raise SqlmapConnectionException(errMsg)
2019-04-18 11:24:38 +03:00
elif chunked and ex.code in (_http_client.METHOD_NOT_ALLOWED, _http_client.LENGTH_REQUIRED):
2019-04-18 11:36:41 +03:00
warnMsg = "turning off HTTP chunked transfer encoding "
warnMsg += "as it seems that the target site doesn't support it (%d)" % code
singleTimeWarnMessage(warnMsg)
conf.chunked = kwargs["chunked"] = False
return Connect.getPage(**kwargs)
2019-03-27 04:46:59 +03:00
elif ex.code == _http_client.NOT_FOUND:
2017-08-23 14:17:37 +03:00
if raise404:
errMsg = "page not found (%d)" % code
raise SqlmapConnectionException(errMsg)
else:
2017-08-23 14:17:37 +03:00
debugMsg = "page not found (%d)" % code
singleTimeLogMessage(debugMsg, logging.DEBUG)
2019-03-27 04:46:59 +03:00
elif ex.code == _http_client.GATEWAY_TIMEOUT:
2017-08-23 14:17:37 +03:00
if ignoreTimeout:
return None if not conf.ignoreTimeouts else "", None, None
else:
warnMsg = "unable to connect to the target URL (%d - %s)" % (ex.code, _http_client.responses[ex.code])
2017-08-23 14:17:37 +03:00
if threadData.retriesCount < conf.retries and not kb.threadException:
warnMsg += ". sqlmap is going to retry the request"
logger.critical(warnMsg)
return Connect._retryProxy(**kwargs)
elif kb.testMode:
logger.critical(warnMsg)
return None, None, None
else:
raise SqlmapConnectionException(warnMsg)
else:
2019-11-09 02:54:47 +03:00
debugMsg = "got HTTP error code: %d ('%s')" % (code, status)
2017-08-23 14:17:37 +03:00
logger.debug(debugMsg)
2019-03-27 04:46:59 +03:00
except (_urllib.error.URLError, socket.error, socket.timeout, _http_client.HTTPException, struct.error, binascii.Error, ProxyError, SqlmapCompressionException, WebSocketException, TypeError, ValueError, OverflowError):
tbMsg = traceback.format_exc()
2019-05-08 13:28:50 +03:00
if conf.debug:
dataToStdout(tbMsg)
2017-05-08 00:12:42 +03:00
if checking:
return None, None, None
elif "no host given" in tbMsg:
warnMsg = "invalid URL address used (%s)" % repr(url)
raise SqlmapSyntaxException(warnMsg)
elif "forcibly closed" in tbMsg or "Connection is already closed" in tbMsg:
warnMsg = "connection was forcibly closed by the target URL"
elif "timed out" in tbMsg:
if kb.testMode and kb.testType not in (None, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
2018-09-14 11:01:31 +03:00
singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS) is dropping 'suspicious' requests")
2017-05-26 15:14:35 +03:00
kb.droppingRequests = True
warnMsg = "connection timed out to the target URL"
2017-03-30 13:05:05 +03:00
elif "Connection reset" in tbMsg:
if not conf.disablePrecon:
singleTimeWarnMessage("turning off pre-connect mechanism because of connection reset(s)")
conf.disablePrecon = True
if kb.testMode:
2018-09-14 11:01:31 +03:00
singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS) is resetting 'suspicious' requests")
2017-05-26 15:14:35 +03:00
kb.droppingRequests = True
2017-03-30 13:05:05 +03:00
warnMsg = "connection reset to the target URL"
2010-12-12 00:28:11 +03:00
elif "URLError" in tbMsg or "error" in tbMsg:
warnMsg = "unable to connect to the target URL"
2019-05-31 16:42:20 +03:00
match = re.search(r"Errno \d+\] ([^>\n]+)", tbMsg)
2016-06-01 11:53:32 +03:00
if match:
2016-06-15 08:54:47 +03:00
warnMsg += " ('%s')" % match.group(1).strip()
2014-12-07 18:14:48 +03:00
elif "NTLM" in tbMsg:
2014-12-07 18:11:07 +03:00
warnMsg = "there has been a problem with NTLM authentication"
2017-06-05 11:38:05 +03:00
elif "Invalid header name" in tbMsg: # (e.g. PostgreSQL ::Text payload)
return None, None, None
elif "BadStatusLine" in tbMsg:
2012-04-21 00:33:15 +04:00
warnMsg = "connection dropped or unknown HTTP "
2014-06-16 11:51:24 +04:00
warnMsg += "status code received"
if not conf.agent and not conf.randomAgent:
warnMsg += ". Try to force the HTTP User-Agent "
warnMsg += "header with option '--user-agent' or switch '--random-agent'"
elif "IncompleteRead" in tbMsg:
warnMsg = "there was an incomplete read error while retrieving data "
warnMsg += "from the target URL"
2015-03-24 13:21:50 +03:00
elif "Handshake status" in tbMsg:
2017-10-31 13:38:09 +03:00
status = re.search(r"Handshake status ([\d]{3})", tbMsg)
2015-05-11 12:01:21 +03:00
errMsg = "websocket handshake status %s" % status.group(1) if status else "unknown"
2015-03-24 13:21:50 +03:00
raise SqlmapConnectionException(errMsg)
2018-03-19 02:33:30 +03:00
elif "SqlmapCompressionException" in tbMsg:
warnMsg = "problems with response (de)compression"
retrying = True
2009-12-31 15:34:18 +03:00
else:
warnMsg = "unable to connect to the target URL"
2009-12-31 15:34:18 +03:00
2016-06-15 08:57:10 +03:00
if "BadStatusLine" not in tbMsg and any((conf.proxy, conf.tor)):
warnMsg += " or proxy"
2017-06-05 11:38:05 +03:00
if silent:
return None, None, None
2016-10-02 12:13:40 +03:00
with kb.locks.connError:
kb.connErrorCounter += 1
if kb.connErrorCounter >= MAX_CONSECUTIVE_CONNECTION_ERRORS and kb.connErrorChoice is None:
message = "there seems to be a continuous problem with connection to the target. "
2019-08-21 16:19:42 +03:00
message += "Are you sure that you want to continue? [y/N] "
2017-04-18 16:48:05 +03:00
kb.connErrorChoice = readInput(message, default='N', boolean=True)
2016-10-02 12:13:40 +03:00
2019-08-21 16:29:51 +03:00
if kb.connErrorChoice is False:
raise SqlmapUserQuitException
2016-10-02 12:13:40 +03:00
2017-06-05 11:38:05 +03:00
if "forcibly closed" in tbMsg:
logger.critical(warnMsg)
return None, None, None
2019-08-21 16:29:51 +03:00
elif ignoreTimeout and any(_ in tbMsg for _ in ("timed out", "IncompleteRead", "Interrupted system call")):
2016-10-14 00:25:46 +03:00
return None if not conf.ignoreTimeouts else "", None, None
2012-08-20 13:40:49 +04:00
elif threadData.retriesCount < conf.retries and not kb.threadException:
2012-10-04 20:28:36 +04:00
warnMsg += ". sqlmap is going to retry the request"
if not retrying:
warnMsg += "(s)"
logger.critical(warnMsg)
else:
logger.debug(warnMsg)
return Connect._retryProxy(**kwargs)
elif kb.testMode or isMultiThreadMode():
logger.critical(warnMsg)
return None, None, None
else:
raise SqlmapConnectionException(warnMsg)
finally:
2019-03-28 16:13:52 +03:00
if isinstance(page, six.binary_type):
2014-08-21 01:42:40 +04:00
if HTTP_HEADER.CONTENT_TYPE in (responseHeaders or {}) and not re.search(TEXT_CONTENT_TYPE_REGEX, responseHeaders[HTTP_HEADER.CONTENT_TYPE]):
2019-03-28 16:13:52 +03:00
page = six.text_type(page, errors="ignore")
2014-08-21 01:42:40 +04:00
else:
page = getUnicode(page)
2019-03-20 13:33:10 +03:00
for function in kb.preprocessFunctions:
try:
page, responseHeaders, code = function(page, responseHeaders, code)
except Exception as ex:
errMsg = "error occurred while running preprocess "
2019-03-29 04:28:16 +03:00
errMsg += "function '%s' ('%s')" % (function.__name__, getSafeExString(ex))
2019-03-20 13:33:10 +03:00
raise SqlmapGenericException(errMsg)
threadData.lastPage = page
threadData.lastCode = code
socket.setdefaulttimeout(conf.timeout)
2019-03-04 17:24:12 +03:00
2019-05-24 14:54:10 +03:00
processResponse(page, responseHeaders, code, status)
2010-05-04 12:43:14 +04:00
if not skipLogTraffic:
if conn and getattr(conn, "redurl", None):
_ = _urllib.parse.urlsplit(conn.redurl)
_ = ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else ""))
requestMsg = re.sub(r"(\n[A-Z]+ ).+?( HTTP/\d)", r"\g<1>%s\g<2>" % getUnicode(_).replace("\\", "\\\\"), requestMsg, 1)
if kb.resendPostOnRedirect is False:
requestMsg = re.sub(r"(\[#\d+\]:\n)POST ", r"\g<1>GET ", requestMsg)
requestMsg = re.sub(r"(?i)Content-length: \d+\n", "", requestMsg)
requestMsg = re.sub(r"(?s)\n\n.+", "\n", requestMsg)
responseMsg += "[#%d] (%s %s):\r\n" % (threadData.lastRequestUID, conn.code, status)
elif "\n" not in responseMsg:
responseMsg += "[#%d] (%s %s):\r\n" % (threadData.lastRequestUID, code, status)
if responseHeaders:
2019-05-08 00:00:15 +03:00
logHeaders = getUnicode("".join(responseHeaders.headers).strip())
2011-03-17 15:35:40 +03:00
2019-06-13 11:58:21 +03:00
logHTTPTraffic(requestMsg, "%s%s\r\n\r\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE]), start, time.time())
2010-05-04 12:43:14 +04:00
if conf.verbose <= 5:
responseMsg += getUnicode(logHeaders)
elif conf.verbose > 5:
2019-06-13 11:58:21 +03:00
responseMsg += "%s\r\n\r\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE])
2010-05-04 12:43:14 +04:00
if not multipart:
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
2008-10-15 19:38:22 +04:00
return page, responseHeaders, code
2008-10-15 19:38:22 +04:00
@staticmethod
@stackedmethod
2019-10-09 20:41:33 +03:00
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True, disableTampering=False, ignoreSecondOrder=False):
2008-10-15 19:38:22 +04:00
"""
This method calls a function to get the target URL page content
2017-09-11 11:00:35 +03:00
and returns its page ratio (0 <= ratio <= 1) or a boolean value
representing False/True match in case of !getRatioValue
2008-10-15 19:38:22 +04:00
"""
if conf.direct:
return direct(value, content)
2011-04-30 17:20:05 +04:00
get = None
post = None
cookie = None
ua = None
referer = None
host = None
2011-04-30 17:20:05 +04:00
page = None
pageLength = None
uri = None
code = None
2008-10-15 19:38:22 +04:00
if not place:
place = kb.injection.place or PLACE.GET
2014-07-10 10:49:20 +04:00
if not auxHeaders:
auxHeaders = {}
raise404 = place != PLACE.URI if raise404 is None else raise404
2014-11-21 11:41:39 +03:00
method = method or conf.method
2017-11-19 04:51:29 +03:00
postUrlEncode = kb.postUrlEncode
2017-11-13 16:07:12 +03:00
value = agent.adjustLateValues(value)
2010-11-08 00:55:24 +03:00
payload = agent.extractPayload(value)
2010-12-21 01:45:01 +03:00
threadData = getCurrentThreadData()
2010-10-30 03:00:48 +04:00
if conf.httpHeaders:
2014-10-22 15:41:36 +04:00
headers = OrderedDict(conf.httpHeaders)
2019-05-02 01:45:44 +03:00
contentType = max(headers[_] if _.upper() == HTTP_HEADER.CONTENT_TYPE.upper() else "" for _ in headers) or None
2017-11-19 04:51:29 +03:00
if (kb.postHint or conf.skipUrlEncode) and postUrlEncode:
postUrlEncode = False
conf.httpHeaders = [_ for _ in conf.httpHeaders if _[1] != contentType]
contentType = POST_HINT_CONTENT_TYPES.get(kb.postHint, PLAIN_TEXT_CONTENT_TYPE)
conf.httpHeaders.append((HTTP_HEADER.CONTENT_TYPE, contentType))
2012-08-31 14:38:02 +04:00
2010-11-08 00:55:24 +03:00
if payload:
2018-10-26 13:08:04 +03:00
delimiter = conf.paramDel or (DEFAULT_GET_POST_DELIMITER if place != PLACE.COOKIE else DEFAULT_COOKIE_DELIMITER)
if not disableTampering and kb.tamperFunctions:
for function in kb.tamperFunctions:
2018-10-26 13:08:04 +03:00
hints = {}
2014-11-05 12:03:19 +03:00
try:
2018-10-26 13:08:04 +03:00
payload = function(payload=payload, headers=auxHeaders, delimiter=delimiter, hints=hints)
2019-01-22 02:40:48 +03:00
except Exception as ex:
2014-11-05 12:03:19 +03:00
errMsg = "error occurred while running tamper "
2019-03-29 04:28:16 +03:00
errMsg += "function '%s' ('%s')" % (function.__name__, getSafeExString(ex))
2014-11-05 12:03:19 +03:00
raise SqlmapGenericException(errMsg)
if not isinstance(payload, six.string_types):
2019-03-29 04:28:16 +03:00
errMsg = "tamper function '%s' returns " % function.__name__
2012-11-10 14:01:29 +04:00
errMsg += "invalid payload type ('%s')" % type(payload)
raise SqlmapValueException(errMsg)
2010-10-30 03:00:48 +04:00
value = agent.replacePayload(value, payload)
2018-10-26 13:08:04 +03:00
if hints:
if HINT.APPEND in hints:
value = "%s%s%s" % (value, delimiter, hints[HINT.APPEND])
if HINT.PREPEND in hints:
2018-10-26 15:00:51 +03:00
if place == PLACE.URI:
match = re.search(r"\w+\s*=\s*%s" % PAYLOAD_DELIMITER, value) or re.search(r"[^?%s/]=\s*%s" % (re.escape(delimiter), PAYLOAD_DELIMITER), value)
if match:
value = value.replace(match.group(0), "%s%s%s" % (hints[HINT.PREPEND], delimiter, match.group(0)))
else:
value = "%s%s%s" % (hints[HINT.PREPEND], delimiter, value)
2018-10-26 13:08:04 +03:00
logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload.replace('\\', BOUNDARY_BACKSLASH_MARKER)).replace(BOUNDARY_BACKSLASH_MARKER, '\\'))
if place == PLACE.CUSTOM_POST and kb.postHint:
if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML):
# payloads in SOAP/XML should have chars > and < replaced
2012-10-04 13:25:44 +04:00
# with their HTML encoded counterparts
payload = payload.replace('&', "&amp;").replace('>', "&gt;").replace('<', "&lt;").replace('"', "&quot;").replace("'", "&apos;") # Reference: https://stackoverflow.com/a/1091953
2012-10-04 13:25:44 +04:00
elif kb.postHint == POST_HINT.JSON:
2018-04-11 16:19:44 +03:00
payload = escapeJsonValue(payload)
elif kb.postHint == POST_HINT.JSON_LIKE:
payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
2018-04-11 16:19:44 +03:00
payload = escapeJsonValue(payload)
payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
2012-09-22 22:59:40 +04:00
value = agent.replacePayload(value, payload)
else:
# GET, POST, URI and Cookie payload needs to be thoroughly URL encoded
2018-11-28 02:29:17 +03:00
if (place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) or place == PLACE.CUSTOM_HEADER and value.split(',')[0].upper() == HTTP_HEADER.COOKIE.upper()) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and postUrlEncode:
skip = False
2018-11-28 02:29:17 +03:00
if place == PLACE.COOKIE or place == PLACE.CUSTOM_HEADER and value.split(',')[0].upper() == HTTP_HEADER.COOKIE.upper():
if kb.cookieEncodeChoice is None:
msg = "do you want to URL encode cookie values (implementation specific)? %s" % ("[Y/n]" if not conf.url.endswith(".aspx") else "[y/N]") # Reference: https://support.microsoft.com/en-us/kb/313282
2017-07-26 01:54:29 +03:00
kb.cookieEncodeChoice = readInput(msg, default='Y' if not conf.url.endswith(".aspx") else 'N', boolean=True)
if not kb.cookieEncodeChoice:
skip = True
if not skip:
if place in (PLACE.POST, PLACE.CUSTOM_POST): # potential problems in other cases (e.g. URL encoding of whole URI - including path)
value = urlencode(value, spaceplus=kb.postSpaceToPlus)
payload = urlencode(payload, safe='%', spaceplus=kb.postSpaceToPlus)
value = agent.replacePayload(value, payload)
2017-11-19 04:51:29 +03:00
postUrlEncode = False
2012-05-10 17:39:54 +04:00
if conf.hpp:
2018-12-21 13:29:57 +03:00
if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_PLATFORM.ASP, WEB_PLATFORM.ASPX)):
warnMsg = "HTTP parameter pollution should work only against "
warnMsg += "ASP(.NET) targets"
singleTimeWarnMessage(warnMsg)
if place in (PLACE.GET, PLACE.POST):
_ = re.escape(PAYLOAD_DELIMITER)
2017-10-31 13:38:09 +03:00
match = re.search(r"(?P<name>\w+)=%s(?P<value>.+?)%s" % (_, _), value)
if match:
2012-12-10 16:05:41 +04:00
payload = match.group("value")
for splitter in (urlencode(' '), ' '):
if splitter in payload:
prefix, suffix = ("*/", "/*") if splitter == ' ' else (urlencode(_) for _ in ("*/", "/*"))
2012-12-10 16:07:36 +04:00
parts = payload.split(splitter)
parts[0] = "%s%s" % (parts[0], suffix)
2012-12-10 15:54:01 +04:00
parts[-1] = "%s%s=%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[-1])
for i in xrange(1, len(parts) - 1):
2012-12-10 15:54:01 +04:00
parts[i] = "%s%s=%s%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[i], suffix)
payload = "".join(parts)
2012-12-10 16:05:41 +04:00
for splitter in (urlencode(','), ','):
payload = payload.replace(splitter, "%s%s=" % (DEFAULT_GET_POST_DELIMITER, match.group("name")))
2012-12-10 16:05:41 +04:00
value = agent.replacePayload(value, payload)
else:
warnMsg = "HTTP parameter pollution works only with regular "
warnMsg += "GET and POST parameters"
singleTimeWarnMessage(warnMsg)
2012-05-10 17:39:54 +04:00
if place:
value = agent.removePayloadDelimiters(value)
2008-10-15 19:38:22 +04:00
if PLACE.GET in conf.parameters:
get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value
2016-05-27 14:33:14 +03:00
elif place == PLACE.GET: # Note: for (e.g.) checkWaf() when there are no GET parameters
get = value
2008-10-15 19:38:22 +04:00
if PLACE.POST in conf.parameters:
post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value
2016-05-27 14:33:14 +03:00
elif place == PLACE.POST:
post = value
2008-10-15 19:38:22 +04:00
if PLACE.CUSTOM_POST in conf.parameters:
2017-07-20 03:41:47 +03:00
post = conf.parameters[PLACE.CUSTOM_POST].replace(kb.customInjectionMark, "") if place != PLACE.CUSTOM_POST or not value else value
post = post.replace(ASTERISK_MARKER, '*') if post else post
if PLACE.COOKIE in conf.parameters:
cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value
2012-07-26 14:26:57 +04:00
if PLACE.USER_AGENT in conf.parameters:
ua = conf.parameters[PLACE.USER_AGENT] if place != PLACE.USER_AGENT or not value else value
2008-10-15 19:38:22 +04:00
2011-02-12 02:07:03 +03:00
if PLACE.REFERER in conf.parameters:
referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value
if PLACE.HOST in conf.parameters:
host = conf.parameters[PLACE.HOST] if place != PLACE.HOST or not value else value
if PLACE.URI in conf.parameters:
uri = conf.url if place != PLACE.URI or not value else value
2010-09-23 18:07:23 +04:00
else:
uri = conf.url
2010-09-22 15:56:35 +04:00
2013-01-25 15:41:51 +04:00
if value and place == PLACE.CUSTOM_HEADER:
2016-10-20 01:47:53 +03:00
if value.split(',')[0].capitalize() == PLACE.COOKIE:
2018-03-08 03:21:34 +03:00
cookie = value.split(',', 1)[-1]
2016-10-20 01:47:53 +03:00
else:
2018-03-08 03:21:34 +03:00
auxHeaders[value.split(',')[0]] = value.split(',', 1)[-1]
2013-01-13 19:22:43 +04:00
2014-10-23 13:23:53 +04:00
if conf.csrfToken:
def _adjustParameter(paramString, parameter, newValue):
    """
    Return *paramString* with the value of *parameter* replaced by *newValue*.

    Handles both regular 'name=value' pairs and JSON-like '"name": "value"'
    occurrences; falls back to returning the string unchanged when the
    parameter is not present.
    """
    # Anti-CSRF token names may appear URL-encoded in the parameter string
    if urlencode(parameter) in paramString:
        parameter = urlencode(parameter)

    # Regular query-string style occurrence (name=value)
    hit = re.search(r"%s=[^&]*" % re.escape(parameter), paramString, re.I)
    if hit:
        # Escape backslashes so re.sub does not interpret them in the replacement
        replacement = ("%s=%s" % (parameter, newValue)).replace('\\', r'\\')
        return re.sub("(?i)%s" % re.escape(hit.group(0)), replacement, paramString)

    # JSON-like occurrence ("name":"value")
    hit = re.search(r"(%s[\"']:[\"'])([^\"']+)" % re.escape(parameter), paramString, re.I)
    if hit:
        return re.sub("(?i)%s" % re.escape(hit.group(0)), "%s%s" % (hit.group(1), newValue), paramString)

    return paramString
2018-12-10 16:53:11 +03:00
token = AttribDict()
2019-09-09 14:56:37 +03:00
page, headers, code = Connect.getPage(url=conf.csrfUrl or conf.url, data=conf.data if conf.csrfUrl == conf.url else None, method=conf.csrfMethod or (conf.method if conf.csrfUrl == conf.url else None), cookie=conf.parameters.get(PLACE.COOKIE), direct=True, silent=True, ua=conf.parameters.get(PLACE.USER_AGENT), referer=conf.parameters.get(PLACE.REFERER), host=conf.parameters.get(PLACE.HOST))
2019-03-06 13:20:57 +03:00
page = urldecode(page) # for anti-CSRF tokens with special characters in their name (e.g. 'foo:bar=...')
2018-12-10 16:53:11 +03:00
match = re.search(r"(?i)<input[^>]+\bname=[\"']?(?P<name>%s)\b[^>]*\bvalue=[\"']?(?P<value>[^>'\"]*)" % conf.csrfToken, page or "", re.I)
2014-10-23 13:23:53 +04:00
2018-12-10 16:53:11 +03:00
if not match:
match = re.search(r"(?i)<input[^>]+\bvalue=[\"']?(?P<value>[^>'\"]*)[\"']?[^>]*\bname=[\"']?(?P<name>%s)\b" % conf.csrfToken, page or "", re.I)
2017-08-20 11:00:04 +03:00
2018-12-10 16:53:11 +03:00
if not match:
match = re.search(r"(?P<name>%s)[\"']:[\"'](?P<value>[^\"']+)" % conf.csrfToken, page or "", re.I)
2016-05-26 17:08:59 +03:00
2018-12-10 16:53:11 +03:00
if not match:
match = re.search(r"\b(?P<name>%s)\s*[:=]\s*(?P<value>\w+)" % conf.csrfToken, str(headers), re.I)
2018-12-10 16:53:11 +03:00
if not match:
match = re.search(r"\b(?P<name>%s)\s*=\s*['\"]?(?P<value>[^;'\"]+)" % conf.csrfToken, page or "", re.I)
2018-12-10 16:53:11 +03:00
if match:
token.name, token.value = match.group("name"), match.group("value")
2018-12-10 16:53:11 +03:00
match = re.search(r"String\.fromCharCode\(([\d+, ]+)\)", token.value)
if match:
2019-05-15 11:57:22 +03:00
token.value = "".join(_unichr(int(_)) for _ in match.group(1).replace(' ', "").split(','))
2014-10-23 13:23:53 +04:00
if not token:
2019-03-27 04:46:59 +03:00
if conf.csrfUrl and conf.csrfToken and conf.csrfUrl != conf.url and code == _http_client.OK:
if headers and "text/plain" in headers.get(HTTP_HEADER.CONTENT_TYPE, ""):
2018-12-14 12:09:40 +03:00
token.name = conf.csrfToken
token.value = page
2018-12-10 16:53:11 +03:00
if not token and conf.cj and any(re.search(conf.csrfToken, _.name, re.I) for _ in conf.cj):
2014-10-28 16:41:21 +03:00
for _ in conf.cj:
2018-12-10 16:53:11 +03:00
if re.search(conf.csrfToken, _.name, re.I):
token.name, token.value = _.name, _.value
if not any(re.search(conf.csrfToken, ' '.join(_), re.I) for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))):
if post:
2018-12-10 16:53:11 +03:00
post = "%s%s%s=%s" % (post, conf.paramDel or DEFAULT_GET_POST_DELIMITER, token.name, token.value)
elif get:
2018-12-10 16:53:11 +03:00
get = "%s%s%s=%s" % (get, conf.paramDel or DEFAULT_GET_POST_DELIMITER, token.name, token.value)
else:
2018-12-10 16:53:11 +03:00
get = "%s=%s" % (token.name, token.value)
break
if not token:
2018-12-10 16:53:11 +03:00
errMsg = "anti-CSRF token '%s' can't be found at '%s'" % (conf.csrfToken._original, conf.csrfUrl or conf.url)
if not conf.csrfUrl:
errMsg += ". You can try to rerun by providing "
errMsg += "a valid value for option '--csrf-url'"
2018-03-13 13:13:38 +03:00
raise SqlmapTokenException(errMsg)
2014-10-23 13:23:53 +04:00
if token:
2018-12-10 16:53:11 +03:00
token.value = token.value.strip("'\"")
2017-08-20 11:00:04 +03:00
for candidate in (PLACE.GET, PLACE.POST):
if candidate in conf.parameters:
if candidate == PLACE.GET and get:
2018-12-10 16:53:11 +03:00
get = _adjustParameter(get, token.name, token.value)
elif candidate == PLACE.POST and post:
2018-12-10 16:53:11 +03:00
post = _adjustParameter(post, token.name, token.value)
2014-10-23 13:23:53 +04:00
for i in xrange(len(conf.httpHeaders)):
2018-12-10 16:53:11 +03:00
if conf.httpHeaders[i][0].lower() == token.name.lower():
conf.httpHeaders[i] = (conf.httpHeaders[i][0], token.value)
if conf.rParam:
def _randomizeParameter(paramString, randomParameter):
    """
    Return *paramString* with the value of *randomParameter* swapped for a
    randomized one (either derived from the original value or drawn from a
    previously collected pool in kb.randomPool).
    """
    match = re.search(r"(\A|\b)%s=(?P<value>[^&;]*)" % re.escape(randomParameter), paramString)
    if not match:
        # Parameter not present - nothing to randomize
        return paramString

    if randomParameter in kb.randomPool:
        # Reuse a known-good value collected earlier
        freshValue = random.sample(kb.randomPool[randomParameter], 1)[0]
    else:
        freshValue = randomizeParameterValue(match.group("value"))

    return re.sub(r"(\A|\b)%s=[^&;]*" % re.escape(randomParameter), "%s=%s" % (randomParameter, freshValue), paramString)
2011-08-29 17:08:25 +04:00
for randomParameter in conf.rParam:
2015-01-17 23:47:57 +03:00
for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE, PLACE.URI, PLACE.CUSTOM_POST):
2011-08-29 17:08:25 +04:00
if item in conf.parameters:
if item == PLACE.GET and get:
get = _randomizeParameter(get, randomParameter)
2015-01-17 23:47:57 +03:00
elif item in (PLACE.POST, PLACE.CUSTOM_POST) and post:
2011-08-29 17:08:25 +04:00
post = _randomizeParameter(post, randomParameter)
elif item == PLACE.COOKIE and cookie:
cookie = _randomizeParameter(cookie, randomParameter)
2015-01-17 23:47:57 +03:00
elif item == PLACE.URI and uri:
uri = _randomizeParameter(uri, randomParameter)
2011-11-21 20:41:02 +04:00
if conf.evalCode:
2014-04-06 18:48:46 +04:00
delimiter = conf.paramDel or DEFAULT_GET_POST_DELIMITER
2015-09-17 16:25:40 +03:00
variables = {"uri": uri, "lastPage": threadData.lastPage, "_locals": locals()}
2011-11-21 20:41:02 +04:00
originals = {}
2015-08-25 03:03:56 +03:00
if not get and PLACE.URI in conf.parameters:
2019-03-27 04:46:59 +03:00
query = _urllib.parse.urlsplit(uri).query or ""
2015-08-25 03:03:56 +03:00
else:
query = None
2019-03-29 04:28:16 +03:00
for item in filterNone((get, post if not kb.postHint else None, query)):
2011-11-21 21:39:18 +04:00
for part in item.split(delimiter):
if '=' in part:
name, value = part.split('=', 1)
2017-10-10 17:08:13 +03:00
name = name.strip()
if safeVariableNaming(name) != name:
conf.evalCode = re.sub(r"\b%s\b" % re.escape(name), safeVariableNaming(name), conf.evalCode)
name = safeVariableNaming(name)
value = urldecode(value, convall=True, spaceplus=(item == post and kb.postSpaceToPlus))
variables[name] = value
2011-11-21 20:41:02 +04:00
if cookie:
2014-04-06 18:50:58 +04:00
for part in cookie.split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER):
if '=' in part:
name, value = part.split('=', 1)
2017-10-10 17:08:13 +03:00
name = name.strip()
if safeVariableNaming(name) != name:
conf.evalCode = re.sub(r"\b%s\b" % re.escape(name), safeVariableNaming(name), conf.evalCode)
name = safeVariableNaming(name)
value = urldecode(value, convall=True)
variables[name] = value
2015-01-09 17:33:53 +03:00
while True:
try:
2019-04-18 17:06:19 +03:00
compile(getBytes(conf.evalCode.replace(';', '\n')), "", "exec")
2019-01-22 02:40:48 +03:00
except SyntaxError as ex:
2017-06-24 00:46:25 +03:00
if ex.text:
original = replacement = ex.text.strip()
2019-03-05 14:24:41 +03:00
2017-10-10 17:08:13 +03:00
if '=' in original:
name, value = original.split('=', 1)
name = name.strip()
if safeVariableNaming(name) != name:
replacement = re.sub(r"\b%s\b" % re.escape(name), safeVariableNaming(name), replacement)
else:
for _ in re.findall(r"[A-Za-z_]+", original)[::-1]:
2019-03-05 14:24:41 +03:00
if safeVariableNaming(_) != _:
replacement = replacement.replace(_, safeVariableNaming(_))
2017-10-10 17:08:13 +03:00
break
2019-03-05 14:24:41 +03:00
2017-06-24 00:46:25 +03:00
if original == replacement:
2019-03-05 14:24:41 +03:00
conf.evalCode = conf.evalCode.replace(EVALCODE_ENCODED_PREFIX, "")
2015-01-09 17:33:53 +03:00
break
2017-06-24 00:46:25 +03:00
else:
conf.evalCode = conf.evalCode.replace(getUnicode(ex.text.strip(), UNICODE_ENCODING), replacement)
2015-01-09 17:33:53 +03:00
else:
2017-06-24 00:46:25 +03:00
break
2015-01-09 17:33:53 +03:00
else:
break
2011-11-21 20:41:02 +04:00
originals.update(variables)
2012-02-16 18:42:28 +04:00
evaluateCode(conf.evalCode, variables)
2015-01-09 17:33:53 +03:00
for variable in list(variables.keys()):
2017-10-10 17:08:13 +03:00
if unsafeVariableNaming(variable) != variable:
value = variables[variable]
del variables[variable]
variables[unsafeVariableNaming(variable)] = value
2014-09-28 15:38:09 +04:00
uri = variables["uri"]
2011-11-21 20:41:02 +04:00
for name, value in variables.items():
if name != "__builtins__" and originals.get(name, "") != value:
if isinstance(value, (int, six.string_types)):
2013-08-31 02:28:51 +04:00
found = False
value = getUnicode(value, UNICODE_ENCODING)
2013-08-31 02:28:51 +04:00
2017-03-30 11:16:35 +03:00
if kb.postHint and re.search(r"\b%s\b" % re.escape(name), post or ""):
if kb.postHint in (POST_HINT.XML, POST_HINT.SOAP):
if re.search(r"<%s\b" % re.escape(name), post):
found = True
2018-06-10 00:38:00 +03:00
post = re.sub(r"(?s)(<%s\b[^>]*>)(.*?)(</%s)" % (re.escape(name), re.escape(name)), r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
2017-03-30 11:16:35 +03:00
elif re.search(r"\b%s>" % re.escape(name), post):
found = True
2018-06-10 00:38:00 +03:00
post = re.sub(r"(?s)(\b%s>)(.*?)(</[^<]*\b%s>)" % (re.escape(name), re.escape(name)), r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
2017-03-30 11:16:35 +03:00
regex = r"\b(%s)\b([^\w]+)(\w+)" % re.escape(name)
if not found and re.search(regex, (post or "")):
found = True
2018-06-10 00:38:00 +03:00
post = re.sub(regex, r"\g<1>\g<2>%s" % value.replace('\\', r'\\'), post)
2017-02-06 15:57:33 +03:00
regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), re.escape(name), re.escape(delimiter))
2017-03-30 11:16:35 +03:00
if not found and re.search(regex, (post or "")):
2013-08-31 02:28:51 +04:00
found = True
2018-06-10 00:38:00 +03:00
post = re.sub(regex, r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
2013-08-31 02:28:51 +04:00
2017-03-30 11:16:35 +03:00
if re.search(regex, (get or "")):
2013-08-31 02:28:51 +04:00
found = True
2018-06-10 00:38:00 +03:00
get = re.sub(regex, r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), get)
2013-08-31 02:28:51 +04:00
2015-08-25 03:03:56 +03:00
if re.search(regex, (query or "")):
found = True
2018-06-10 00:38:00 +03:00
uri = re.sub(regex.replace(r"\A", r"\?"), r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), uri)
2015-08-25 03:03:56 +03:00
2017-11-22 15:07:04 +03:00
regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), re.escape(name), re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER))
2013-08-31 02:28:51 +04:00
if re.search(regex, (cookie or "")):
found = True
2018-06-10 00:38:00 +03:00
cookie = re.sub(regex, r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), cookie)
2013-08-31 02:28:51 +04:00
if not found:
if post is not None:
post += "%s%s=%s" % (delimiter, name, value)
elif get is not None:
get += "%s%s=%s" % (delimiter, name, value)
elif cookie is not None:
2014-04-06 18:50:58 +04:00
cookie += "%s%s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, name, value)
2011-11-28 15:21:39 +04:00
if not conf.skipUrlEncode:
get = urlencode(get, limit=True)
2012-11-13 13:21:11 +04:00
if post is not None:
if place not in (PLACE.POST, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE):
2012-07-20 11:48:09 +04:00
post = getattr(post, UNENCODED_ORIGINAL_VALUE)
2017-11-19 04:51:29 +03:00
elif postUrlEncode:
2013-01-19 21:06:36 +04:00
post = urlencode(post, spaceplus=kb.postSpaceToPlus)
if timeBasedCompare and not conf.disableStats:
2016-01-09 19:32:19 +03:00
if len(kb.responseTimes.get(kb.responseTimeMode, [])) < MIN_TIME_RESPONSES:
2010-12-21 04:09:39 +03:00
clearConsoleLine()
2016-01-09 19:32:19 +03:00
kb.responseTimes.setdefault(kb.responseTimeMode, [])
2011-11-21 03:17:57 +04:00
if conf.tor:
2012-02-01 18:49:42 +04:00
warnMsg = "it's highly recommended to avoid usage of switch '--tor' for "
2017-12-04 15:59:35 +03:00
warnMsg += "time-based injections because of inherent high latency time"
2011-11-21 03:17:57 +04:00
singleTimeWarnMessage(warnMsg)
2016-01-09 19:32:19 +03:00
warnMsg = "[%s] [WARNING] %stime-based comparison requires " % (time.strftime("%X"), "(case) " if kb.responseTimeMode else "")
warnMsg += "%s statistical model, please wait" % ("larger" if len(kb.responseTimes) == 1 else "reset of")
2014-03-07 00:08:31 +04:00
dataToStdout(warnMsg)
2010-12-09 10:49:18 +03:00
2016-01-09 19:32:19 +03:00
while len(kb.responseTimes[kb.responseTimeMode]) < MIN_TIME_RESPONSES:
value = kb.responseTimePayload.replace(RANDOM_INTEGER_MARKER, str(randomInt(6))).replace(RANDOM_STRING_MARKER, randomStr()) if kb.responseTimePayload else kb.responseTimePayload
Connect.queryPage(value=value, content=True, raise404=False)
2014-03-07 00:08:31 +04:00
dataToStdout('.')
2016-01-09 19:32:19 +03:00
dataToStdout(" (done)\n")
2010-12-09 10:49:18 +03:00
elif not kb.testMode:
2016-09-29 15:55:43 +03:00
warnMsg = "it is very important to not stress the network connection "
2014-03-07 00:08:31 +04:00
warnMsg += "during usage of time-based payloads to prevent potential "
2016-01-09 19:32:19 +03:00
warnMsg += "disruptions "
singleTimeWarnMessage(warnMsg)
if not kb.laggingChecked:
kb.laggingChecked = True
2016-01-09 19:32:19 +03:00
deviation = stdev(kb.responseTimes[kb.responseTimeMode])
2011-08-12 17:47:38 +04:00
if deviation > WARN_TIME_STDEV:
2012-10-09 17:19:47 +04:00
kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE
2014-09-08 16:48:31 +04:00
warnMsg = "considerable lagging has been detected "
2011-08-12 17:47:38 +04:00
warnMsg += "in connection response(s). Please use as high "
2012-02-01 19:10:06 +04:00
warnMsg += "value for option '--time-sec' as possible (e.g. "
warnMsg += "10 or more)"
logger.critical(warnMsg)
2012-11-10 14:01:29 +04:00
2019-05-02 01:45:44 +03:00
if (conf.safeFreq or 0) > 0:
kb.queryCounter += 1
2015-04-21 01:02:47 +03:00
if kb.queryCounter % conf.safeFreq == 0:
2015-04-22 17:28:54 +03:00
if conf.safeUrl:
Connect.getPage(url=conf.safeUrl, post=conf.safePost, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host)
elif kb.safeReq:
Connect.getPage(url=kb.safeReq.url, post=kb.safeReq.post, method=kb.safeReq.method, auxHeaders=kb.safeReq.headers)
2010-09-16 13:32:09 +04:00
start = time.time()
2010-12-08 02:49:00 +03:00
if kb.nullConnection and not content and not response and not timeBasedCompare:
2012-06-12 18:22:14 +04:00
noteResponseTime = False
2015-07-18 18:01:34 +03:00
try:
pushValue(kb.pageCompress)
kb.pageCompress = False
2010-10-10 22:56:43 +04:00
2015-07-18 18:01:34 +03:00
if kb.nullConnection == NULLCONNECTION.HEAD:
method = HTTPMETHOD.HEAD
elif kb.nullConnection == NULLCONNECTION.RANGE:
auxHeaders[HTTP_HEADER.RANGE] = "bytes=-1"
2010-10-10 22:56:43 +04:00
2015-07-18 18:01:34 +03:00
_, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ))
2010-09-16 13:47:33 +04:00
2015-07-18 18:01:34 +03:00
if headers:
2018-12-31 03:03:40 +03:00
try:
if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and headers.get(HTTP_HEADER.CONTENT_LENGTH):
pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH].split(',')[0])
elif kb.nullConnection == NULLCONNECTION.RANGE and headers.get(HTTP_HEADER.CONTENT_RANGE):
pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:])
except ValueError:
pass
2015-07-18 18:01:34 +03:00
finally:
kb.pageCompress = popValue()
2013-05-17 18:04:05 +04:00
2018-12-31 03:01:19 +03:00
if pageLength is None:
2012-12-18 12:36:26 +04:00
try:
2014-11-21 12:31:55 +03:00
page, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)
2012-12-18 12:36:26 +04:00
except MemoryError:
page, headers, code = None, None, None
warnMsg = "site returned insanely large response"
if kb.testMode:
warnMsg += " in testing phase. This is a common "
2018-09-14 11:01:31 +03:00
warnMsg += "behavior in custom WAF/IPS solutions"
2012-12-18 12:36:26 +04:00
singleTimeWarnMessage(warnMsg)
2010-12-08 02:49:00 +03:00
2019-10-09 20:41:33 +03:00
if not ignoreSecondOrder:
if conf.secondUrl:
page, headers, code = Connect.getPage(url=conf.secondUrl, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)
elif kb.secondReq and IPS_WAF_CHECK_PAYLOAD not in _urllib.parse.unquote(value or ""):
def _(value):
if kb.customInjectionMark in (value or ""):
if payload is None:
value = value.replace(kb.customInjectionMark, "")
else:
value = re.sub(r"\w*%s" % re.escape(kb.customInjectionMark), payload, value)
return value
page, headers, code = Connect.getPage(url=_(kb.secondReq[0]), post=_(kb.secondReq[2]), method=kb.secondReq[1], cookie=kb.secondReq[3], silent=silent, auxHeaders=dict(auxHeaders, **dict(kb.secondReq[4])), response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)
2012-07-26 16:07:05 +04:00
2010-12-21 01:45:01 +03:00
threadData.lastQueryDuration = calculateDeltaSeconds(start)
2010-11-08 12:44:32 +03:00
2019-03-20 13:33:10 +03:00
kb.originalCode = code if kb.originalCode is None else kb.originalCode
kb.originalPage = page if kb.originalPage is None else kb.originalPage
2010-12-07 16:34:06 +03:00
if kb.testMode:
kb.testQueryCount += 1
2011-01-12 00:46:21 +03:00
2010-12-08 14:26:54 +03:00
if timeBasedCompare:
2013-01-29 23:53:11 +04:00
return wasLastResponseDelayed()
2010-12-08 17:33:10 +03:00
elif noteResponseTime:
2016-01-09 19:32:19 +03:00
kb.responseTimes.setdefault(kb.responseTimeMode, [])
kb.responseTimes[kb.responseTimeMode].append(threadData.lastQueryDuration)
if len(kb.responseTimes[kb.responseTimeMode]) > MAX_TIME_RESPONSES:
kb.responseTimes[kb.responseTimeMode] = kb.responseTimes[kb.responseTimeMode][-MAX_TIME_RESPONSES // 2:]
2010-12-07 19:04:53 +03:00
2011-10-24 04:46:54 +04:00
if not response and removeReflection:
page = removeReflectiveValues(page, payload)
2012-10-02 15:36:15 +04:00
kb.maxConnectionsFlag = re.search(MAX_CONNECTIONS_REGEX, page or "", re.I) is not None
message = extractRegexResult(PERMISSION_DENIED_REGEX, page or "", re.I)
if message:
kb.permissionFlag = True
singleTimeWarnMessage("potential permission problems detected ('%s')" % message)
2012-02-08 16:00:03 +04:00
2019-05-02 17:54:54 +03:00
patchHeaders(headers)
2019-05-02 11:22:44 +03:00
2010-10-10 22:56:43 +04:00
if content or response:
2017-06-05 17:28:19 +03:00
return page, headers, code
if getRatioValue:
return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength)
else:
2013-06-10 14:20:58 +04:00
return comparison(page, headers, code, getRatioValue, pageLength)
2018-03-21 16:29:54 +03:00
def setHTTPHandlers():  # Cross-referenced function
    """
    Placeholder stub. Presumably rebound at runtime by another module
    (marked "cross-referenced") — calling it before that happens is a
    programming error, hence the unconditional NotImplementedError.
    """
    raise NotImplementedError