2013-02-14 15:32:17 +04:00
|
|
|
#!/usr/bin/env python
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
"""
|
2017-01-02 16:19:18 +03:00
|
|
|
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
|
2010-10-15 03:18:29 +04:00
|
|
|
See the file 'doc/COPYING' for copying permission
|
2008-10-15 19:38:22 +04:00
|
|
|
"""
|
|
|
|
|
2015-09-12 16:13:30 +03:00
|
|
|
import binascii
|
2015-01-09 17:33:53 +03:00
|
|
|
import compiler
|
2008-11-15 15:25:19 +03:00
|
|
|
import httplib
|
2012-10-04 13:25:44 +04:00
|
|
|
import json
|
2015-01-09 17:33:53 +03:00
|
|
|
import keyword
|
2013-02-19 12:46:51 +04:00
|
|
|
import logging
|
2008-10-15 19:38:22 +04:00
|
|
|
import re
|
2008-11-09 19:57:47 +03:00
|
|
|
import socket
|
2012-08-21 00:17:39 +04:00
|
|
|
import string
|
2014-12-07 18:11:07 +03:00
|
|
|
import struct
|
2008-11-09 19:57:47 +03:00
|
|
|
import time
|
2014-03-21 23:28:16 +04:00
|
|
|
import traceback
|
2008-10-15 19:38:22 +04:00
|
|
|
import urllib2
|
|
|
|
import urlparse
|
|
|
|
|
2015-03-24 17:25:16 +03:00
|
|
|
# Optional third-party dependency: the websocket-client package.
# When it is not installed, a stub WebSocketException is declared so that
# "except WebSocketException" clauses elsewhere remain syntactically valid
# (actual WebSocket requests would then fail on the missing 'websocket' name).
try:
    import websocket
    from websocket import WebSocketException
except ImportError:
    class WebSocketException(Exception):
        pass
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2012-04-07 18:06:11 +04:00
|
|
|
from extra.safe2bin.safe2bin import safecharencode
|
2010-10-29 20:11:50 +04:00
|
|
|
from lib.core.agent import agent
|
2011-10-23 21:02:48 +04:00
|
|
|
from lib.core.common import asciifyUrl
|
2010-12-07 22:19:12 +03:00
|
|
|
from lib.core.common import calculateDeltaSeconds
|
2016-12-20 11:53:44 +03:00
|
|
|
from lib.core.common import checkSameHost
|
2010-12-21 04:09:39 +03:00
|
|
|
from lib.core.common import clearConsoleLine
|
2014-03-07 00:08:31 +04:00
|
|
|
from lib.core.common import dataToStdout
|
2012-02-16 18:42:28 +04:00
|
|
|
from lib.core.common import evaluateCode
|
2011-03-29 18:16:28 +04:00
|
|
|
from lib.core.common import extractRegexResult
|
2012-10-16 14:32:58 +04:00
|
|
|
from lib.core.common import findMultipartPostBoundary
|
2010-12-21 01:45:01 +03:00
|
|
|
from lib.core.common import getCurrentThreadData
|
2015-06-05 18:02:56 +03:00
|
|
|
from lib.core.common import getHeader
|
2011-11-11 15:28:27 +04:00
|
|
|
from lib.core.common import getHostHeader
|
2012-08-31 14:15:09 +04:00
|
|
|
from lib.core.common import getRequestHeader
|
2015-09-10 16:51:33 +03:00
|
|
|
from lib.core.common import getSafeExString
|
2010-06-10 15:34:17 +04:00
|
|
|
from lib.core.common import getUnicode
|
2010-11-08 14:22:47 +03:00
|
|
|
from lib.core.common import logHTTPTraffic
|
2013-05-17 18:04:05 +04:00
|
|
|
from lib.core.common import pushValue
|
|
|
|
from lib.core.common import popValue
|
2011-08-29 16:50:52 +04:00
|
|
|
from lib.core.common import randomizeParameterValue
|
2013-02-28 23:20:08 +04:00
|
|
|
from lib.core.common import randomInt
|
|
|
|
from lib.core.common import randomStr
|
2010-11-16 13:42:42 +03:00
|
|
|
from lib.core.common import readInput
|
2011-02-25 12:22:44 +03:00
|
|
|
from lib.core.common import removeReflectiveValues
|
2013-02-19 12:46:51 +04:00
|
|
|
from lib.core.common import singleTimeLogMessage
|
2011-06-08 18:35:23 +04:00
|
|
|
from lib.core.common import singleTimeWarnMessage
|
2010-12-08 14:26:54 +03:00
|
|
|
from lib.core.common import stdev
|
2013-01-29 23:53:11 +04:00
|
|
|
from lib.core.common import wasLastResponseDelayed
|
2012-07-31 13:03:44 +04:00
|
|
|
from lib.core.common import unicodeencode
|
2013-02-12 20:01:47 +04:00
|
|
|
from lib.core.common import urldecode
|
2012-07-31 13:03:44 +04:00
|
|
|
from lib.core.common import urlencode
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.data import conf
|
|
|
|
from lib.core.data import kb
|
|
|
|
from lib.core.data import logger
|
2012-10-04 13:25:44 +04:00
|
|
|
from lib.core.dicts import POST_HINT_CONTENT_TYPES
|
2012-10-09 17:19:47 +04:00
|
|
|
from lib.core.enums import ADJUST_TIME_DELAY
|
2013-03-13 00:16:44 +04:00
|
|
|
from lib.core.enums import AUTH_TYPE
|
2011-12-26 16:24:39 +04:00
|
|
|
from lib.core.enums import CUSTOM_LOGGING
|
2013-03-20 14:10:24 +04:00
|
|
|
from lib.core.enums import HTTP_HEADER
|
2010-11-08 12:44:32 +03:00
|
|
|
from lib.core.enums import HTTPMETHOD
|
2010-11-08 12:49:57 +03:00
|
|
|
from lib.core.enums import NULLCONNECTION
|
2011-05-27 01:54:19 +04:00
|
|
|
from lib.core.enums import PAYLOAD
|
2010-11-08 12:20:02 +03:00
|
|
|
from lib.core.enums import PLACE
|
2012-10-04 13:25:44 +04:00
|
|
|
from lib.core.enums import POST_HINT
|
2011-12-05 02:42:19 +04:00
|
|
|
from lib.core.enums import REDIRECTION
|
2012-12-10 14:55:31 +04:00
|
|
|
from lib.core.enums import WEB_API
|
2012-12-06 17:14:19 +04:00
|
|
|
from lib.core.exception import SqlmapCompressionException
|
|
|
|
from lib.core.exception import SqlmapConnectionException
|
2014-11-05 12:03:19 +03:00
|
|
|
from lib.core.exception import SqlmapGenericException
|
2012-12-06 17:14:19 +04:00
|
|
|
from lib.core.exception import SqlmapSyntaxException
|
2014-10-23 13:23:53 +04:00
|
|
|
from lib.core.exception import SqlmapTokenException
|
2012-12-06 17:14:19 +04:00
|
|
|
from lib.core.exception import SqlmapValueException
|
2013-02-13 15:24:42 +04:00
|
|
|
from lib.core.settings import ASTERISK_MARKER
|
2016-02-05 14:00:57 +03:00
|
|
|
from lib.core.settings import BOUNDARY_BACKSLASH_MARKER
|
2012-04-17 18:23:00 +04:00
|
|
|
from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
|
2012-10-04 13:25:44 +04:00
|
|
|
from lib.core.settings import DEFAULT_CONTENT_TYPE
|
2013-07-31 19:28:22 +04:00
|
|
|
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
|
2012-12-10 14:55:31 +04:00
|
|
|
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
|
2015-01-09 17:33:53 +03:00
|
|
|
from lib.core.settings import EVALCODE_KEYWORD_SUFFIX
|
2011-07-06 09:44:47 +04:00
|
|
|
from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE
|
2012-07-23 16:14:22 +04:00
|
|
|
from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
|
2012-04-06 12:42:36 +04:00
|
|
|
from lib.core.settings import MAX_CONNECTION_CHUNK_SIZE
|
2012-10-02 15:36:15 +04:00
|
|
|
from lib.core.settings import MAX_CONNECTIONS_REGEX
|
2012-08-07 02:50:58 +04:00
|
|
|
from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
|
2016-10-02 12:13:40 +03:00
|
|
|
from lib.core.settings import MAX_CONSECUTIVE_CONNECTION_ERRORS
|
2016-09-27 15:03:59 +03:00
|
|
|
from lib.core.settings import MAX_MURPHY_SLEEP_TIME
|
2011-03-29 18:16:28 +04:00
|
|
|
from lib.core.settings import META_REFRESH_REGEX
|
2010-12-09 10:49:18 +03:00
|
|
|
from lib.core.settings import MIN_TIME_RESPONSES
|
2012-04-06 12:42:36 +04:00
|
|
|
from lib.core.settings import IS_WIN
|
|
|
|
from lib.core.settings import LARGE_CHUNK_TRIM_MARKER
|
2012-12-10 14:55:31 +04:00
|
|
|
from lib.core.settings import PAYLOAD_DELIMITER
|
2012-10-02 15:36:15 +04:00
|
|
|
from lib.core.settings import PERMISSION_DENIED_REGEX
|
2013-03-27 16:39:27 +04:00
|
|
|
from lib.core.settings import PLAIN_TEXT_CONTENT_TYPE
|
2016-01-09 19:32:19 +03:00
|
|
|
from lib.core.settings import RANDOM_INTEGER_MARKER
|
|
|
|
from lib.core.settings import RANDOM_STRING_MARKER
|
2014-02-26 12:30:37 +04:00
|
|
|
from lib.core.settings import REPLACEMENT_MARKER
|
2014-08-21 01:42:40 +04:00
|
|
|
from lib.core.settings import TEXT_CONTENT_TYPE_REGEX
|
2011-10-25 13:53:44 +04:00
|
|
|
from lib.core.settings import UNENCODED_ORIGINAL_VALUE
|
2011-04-19 18:50:09 +04:00
|
|
|
from lib.core.settings import URI_HTTP_HEADER
|
2012-04-06 12:42:36 +04:00
|
|
|
from lib.core.settings import WARN_TIME_STDEV
|
2010-01-02 05:02:12 +03:00
|
|
|
from lib.request.basic import decodePage
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.request.basic import forgeHeaders
|
2010-12-25 13:16:20 +03:00
|
|
|
from lib.request.basic import processResponse
|
2010-03-31 14:50:47 +04:00
|
|
|
from lib.request.direct import direct
|
2008-12-05 18:34:13 +03:00
|
|
|
from lib.request.comparison import comparison
|
2010-09-15 16:45:41 +04:00
|
|
|
from lib.request.methodrequest import MethodRequest
|
2014-10-22 15:41:36 +04:00
|
|
|
from thirdparty.odict.odict import OrderedDict
|
|
|
|
from thirdparty.socks.socks import ProxyError
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
|
2012-12-06 13:42:53 +04:00
|
|
|
class Connect(object):
|
2008-10-15 19:38:22 +04:00
|
|
|
"""
|
|
|
|
This class defines methods used to perform HTTP requests
|
|
|
|
"""
|
|
|
|
|
2008-12-04 20:40:03 +03:00
|
|
|
@staticmethod
|
2012-12-06 17:14:19 +04:00
|
|
|
def _getPageProxy(**kwargs):
|
2016-06-17 17:51:23 +03:00
|
|
|
try:
|
|
|
|
return Connect.getPage(**kwargs)
|
|
|
|
except RuntimeError:
|
|
|
|
return None, None, None
|
2008-12-04 20:40:03 +03:00
|
|
|
|
2011-06-19 13:57:41 +04:00
|
|
|
@staticmethod
|
2012-12-06 17:14:19 +04:00
|
|
|
def _retryProxy(**kwargs):
|
2011-06-19 13:57:41 +04:00
|
|
|
threadData = getCurrentThreadData()
|
|
|
|
threadData.retriesCount += 1
|
|
|
|
|
2013-08-13 08:42:49 +04:00
|
|
|
if conf.proxyList and threadData.retriesCount >= conf.retries:
|
2013-08-12 16:25:51 +04:00
|
|
|
warnMsg = "changing proxy"
|
|
|
|
logger.warn(warnMsg)
|
|
|
|
|
2013-08-20 21:35:49 +04:00
|
|
|
conf.proxy = None
|
2015-10-25 17:58:43 +03:00
|
|
|
threadData.retriesCount = 0
|
|
|
|
|
2015-11-08 18:37:46 +03:00
|
|
|
setHTTPHandlers()
|
2013-08-12 16:25:51 +04:00
|
|
|
|
2011-06-19 13:57:41 +04:00
|
|
|
if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME:
|
|
|
|
# timed based payloads can cause web server unresponsiveness
|
|
|
|
# if the injectable piece of code is some kind of JOIN-like query
|
|
|
|
warnMsg = "most probably web server instance hasn't recovered yet "
|
2011-11-10 14:30:53 +04:00
|
|
|
warnMsg += "from previous timed based payload. If the problem "
|
2011-06-19 13:57:41 +04:00
|
|
|
warnMsg += "persists please wait for few minutes and rerun "
|
2016-05-30 11:51:35 +03:00
|
|
|
warnMsg += "without flag 'T' in option '--technique' "
|
2013-01-17 22:55:56 +04:00
|
|
|
warnMsg += "(e.g. '--flush-session --technique=BEUS') or try to "
|
|
|
|
warnMsg += "lower the value of option '--time-sec' (e.g. '--time-sec=2')"
|
2011-06-19 13:57:41 +04:00
|
|
|
singleTimeWarnMessage(warnMsg)
|
2015-09-27 17:17:58 +03:00
|
|
|
|
2011-06-19 13:57:41 +04:00
|
|
|
elif kb.originalPage is None:
|
2011-11-24 01:17:08 +04:00
|
|
|
if conf.tor:
|
|
|
|
warnMsg = "please make sure that you have "
|
|
|
|
warnMsg += "Tor installed and running so "
|
|
|
|
warnMsg += "you could successfully use "
|
2012-02-01 18:49:42 +04:00
|
|
|
warnMsg += "switch '--tor' "
|
2011-11-24 01:17:08 +04:00
|
|
|
if IS_WIN:
|
2013-01-17 22:55:56 +04:00
|
|
|
warnMsg += "(e.g. 'https://www.torproject.org/download/download.html.en')"
|
2011-11-24 01:17:08 +04:00
|
|
|
else:
|
2013-01-17 22:55:56 +04:00
|
|
|
warnMsg += "(e.g. 'https://help.ubuntu.com/community/Tor')"
|
2011-11-24 01:17:08 +04:00
|
|
|
else:
|
|
|
|
warnMsg = "if the problem persists please check that the provided "
|
2013-04-09 13:48:42 +04:00
|
|
|
warnMsg += "target URL is valid. In case that it is, you can try to rerun "
|
2012-02-01 18:49:42 +04:00
|
|
|
warnMsg += "with the switch '--random-agent' turned on "
|
2013-01-17 22:55:56 +04:00
|
|
|
warnMsg += "and/or proxy switches ('--ignore-proxy', '--proxy',...)"
|
2011-06-19 13:57:41 +04:00
|
|
|
singleTimeWarnMessage(warnMsg)
|
2015-09-27 17:17:58 +03:00
|
|
|
|
2011-06-19 13:57:41 +04:00
|
|
|
elif conf.threads > 1:
|
|
|
|
warnMsg = "if the problem persists please try to lower "
|
2013-01-17 22:55:56 +04:00
|
|
|
warnMsg += "the number of used threads (option '--threads')"
|
2011-06-19 13:57:41 +04:00
|
|
|
singleTimeWarnMessage(warnMsg)
|
|
|
|
|
|
|
|
kwargs['retrying'] = True
|
2012-12-06 17:14:19 +04:00
|
|
|
return Connect._getPageProxy(**kwargs)
|
2011-06-19 13:57:41 +04:00
|
|
|
|
2012-04-06 12:42:36 +04:00
|
|
|
@staticmethod
|
2012-12-06 17:14:19 +04:00
|
|
|
def _connReadProxy(conn):
|
2012-04-06 12:42:36 +04:00
|
|
|
retVal = ""
|
2012-05-27 01:28:43 +04:00
|
|
|
|
2012-09-03 00:48:41 +04:00
|
|
|
if not kb.dnsMode and conn:
|
2012-11-20 15:10:29 +04:00
|
|
|
headers = conn.info()
|
2015-02-04 17:01:03 +03:00
|
|
|
if headers and hasattr(headers, "getheader") and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate")\
|
2013-03-20 14:10:24 +04:00
|
|
|
or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()):
|
2012-12-18 12:36:26 +04:00
|
|
|
retVal = conn.read(MAX_CONNECTION_TOTAL_SIZE)
|
|
|
|
if len(retVal) == MAX_CONNECTION_TOTAL_SIZE:
|
|
|
|
warnMsg = "large compressed response detected. Disabling compression"
|
|
|
|
singleTimeWarnMessage(warnMsg)
|
|
|
|
kb.pageCompress = False
|
2012-07-23 16:46:43 +04:00
|
|
|
else:
|
|
|
|
while True:
|
2015-10-07 10:25:14 +03:00
|
|
|
if not conn:
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
_ = conn.read(MAX_CONNECTION_CHUNK_SIZE)
|
|
|
|
|
2012-07-23 16:46:43 +04:00
|
|
|
if len(_) == MAX_CONNECTION_CHUNK_SIZE:
|
|
|
|
warnMsg = "large response detected. This could take a while"
|
|
|
|
singleTimeWarnMessage(warnMsg)
|
|
|
|
_ = re.sub(r"(?si)%s.+?%s" % (kb.chars.stop, kb.chars.start), "%s%s%s" % (kb.chars.stop, LARGE_CHUNK_TRIM_MARKER, kb.chars.start), _)
|
|
|
|
retVal += _
|
|
|
|
else:
|
|
|
|
retVal += _
|
|
|
|
break
|
2012-05-27 01:28:43 +04:00
|
|
|
|
2012-08-07 02:50:58 +04:00
|
|
|
if len(retVal) > MAX_CONNECTION_TOTAL_SIZE:
|
|
|
|
warnMsg = "too large response detected. Automatically trimming it"
|
|
|
|
singleTimeWarnMessage(warnMsg)
|
|
|
|
break
|
|
|
|
|
2012-04-06 12:42:36 +04:00
|
|
|
return retVal
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
@staticmethod
|
|
|
|
def getPage(**kwargs):
|
|
|
|
"""
|
2013-04-09 13:48:42 +04:00
|
|
|
This method connects to the target URL or proxy and returns
|
|
|
|
the target URL page content
|
2008-10-15 19:38:22 +04:00
|
|
|
"""
|
|
|
|
|
2014-04-06 20:09:54 +04:00
|
|
|
if isinstance(conf.delay, (int, float)) and conf.delay > 0:
|
2008-12-04 20:40:03 +03:00
|
|
|
time.sleep(conf.delay)
|
|
|
|
|
2015-07-10 17:10:24 +03:00
|
|
|
if conf.offline:
|
|
|
|
return None, None, None
|
2016-09-27 15:03:59 +03:00
|
|
|
elif conf.dummy or conf.murphyRate and randomInt() % conf.murphyRate == 0:
|
|
|
|
if conf.murphyRate:
|
|
|
|
time.sleep(randomInt() % (MAX_MURPHY_SLEEP_TIME + 1))
|
|
|
|
return getUnicode(randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())), None, None if not conf.murphyRate else randomInt(3)
|
2013-02-28 23:20:08 +04:00
|
|
|
|
2010-12-21 01:45:01 +03:00
|
|
|
threadData = getCurrentThreadData()
|
2013-01-16 19:04:00 +04:00
|
|
|
with kb.locks.request:
|
|
|
|
kb.requestCounter += 1
|
|
|
|
threadData.lastRequestUID = kb.requestCounter
|
2010-11-08 14:22:47 +03:00
|
|
|
|
2013-04-16 16:17:41 +04:00
|
|
|
url = kwargs.get("url", None) or conf.url
|
2013-03-09 22:41:24 +04:00
|
|
|
get = kwargs.get("get", None)
|
|
|
|
post = kwargs.get("post", None)
|
|
|
|
method = kwargs.get("method", None)
|
|
|
|
cookie = kwargs.get("cookie", None)
|
2013-04-16 16:17:41 +04:00
|
|
|
ua = kwargs.get("ua", None) or conf.agent
|
|
|
|
referer = kwargs.get("referer", None) or conf.referer
|
|
|
|
host = kwargs.get("host", None) or conf.host
|
2013-03-09 22:41:24 +04:00
|
|
|
direct_ = kwargs.get("direct", False)
|
2016-09-02 15:14:17 +03:00
|
|
|
multipart = kwargs.get("multipart", None)
|
2013-03-09 22:41:24 +04:00
|
|
|
silent = kwargs.get("silent", False)
|
|
|
|
raise404 = kwargs.get("raise404", True)
|
2013-04-16 16:17:41 +04:00
|
|
|
timeout = kwargs.get("timeout", None) or conf.timeout
|
2013-03-09 22:41:24 +04:00
|
|
|
auxHeaders = kwargs.get("auxHeaders", None)
|
|
|
|
response = kwargs.get("response", False)
|
2016-10-14 00:17:54 +03:00
|
|
|
ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout or conf.ignoreTimeouts
|
2013-03-09 22:41:24 +04:00
|
|
|
refreshing = kwargs.get("refreshing", False)
|
|
|
|
retrying = kwargs.get("retrying", False)
|
|
|
|
crawling = kwargs.get("crawling", False)
|
2013-05-17 17:04:25 +04:00
|
|
|
skipRead = kwargs.get("skipRead", False)
|
2015-05-11 11:56:10 +03:00
|
|
|
|
2016-09-02 15:14:17 +03:00
|
|
|
if multipart:
|
|
|
|
post = multipart
|
|
|
|
|
2015-05-11 11:56:10 +03:00
|
|
|
websocket_ = url.lower().startswith("ws")
|
2011-05-22 11:46:09 +04:00
|
|
|
|
2011-05-24 09:26:51 +04:00
|
|
|
if not urlparse.urlsplit(url).netloc:
|
|
|
|
url = urlparse.urljoin(conf.url, url)
|
|
|
|
|
2011-05-22 11:46:09 +04:00
|
|
|
# flag to know if we are dealing with the same target host
|
2016-12-20 11:53:44 +03:00
|
|
|
target = checkSameHost(url, conf.url)
|
2011-05-22 11:46:09 +04:00
|
|
|
|
2011-05-22 14:59:56 +04:00
|
|
|
if not retrying:
|
|
|
|
# Reset the number of connection retries
|
|
|
|
threadData.retriesCount = 0
|
|
|
|
|
2011-05-22 11:46:09 +04:00
|
|
|
# fix for known issue when urllib2 just skips the other part of provided
|
|
|
|
# url splitted with space char while urlencoding it in the later phase
|
|
|
|
url = url.replace(" ", "%20")
|
2011-04-30 17:20:05 +04:00
|
|
|
|
2013-04-30 19:46:26 +04:00
|
|
|
conn = None
|
2012-03-15 19:58:25 +04:00
|
|
|
code = None
|
2011-11-11 15:07:49 +04:00
|
|
|
page = None
|
2012-09-08 19:58:03 +04:00
|
|
|
|
|
|
|
_ = urlparse.urlsplit(url)
|
2012-11-13 13:21:11 +04:00
|
|
|
requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
|
2012-09-08 19:58:03 +04:00
|
|
|
requestMsg += ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling)) else url
|
2011-11-14 15:39:18 +04:00
|
|
|
responseMsg = u"HTTP response "
|
|
|
|
requestHeaders = u""
|
2011-01-25 19:05:06 +03:00
|
|
|
responseHeaders = None
|
2011-11-14 15:39:18 +04:00
|
|
|
logHeaders = u""
|
2012-03-14 18:31:41 +04:00
|
|
|
skipLogTraffic = False
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2011-12-05 13:25:56 +04:00
|
|
|
raise404 = raise404 and not kb.ignoreNotFound
|
|
|
|
|
2011-10-24 00:19:42 +04:00
|
|
|
# support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
|
2011-10-23 21:02:48 +04:00
|
|
|
# support those by default
|
|
|
|
url = asciifyUrl(url)
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
try:
|
2013-03-19 22:24:14 +04:00
|
|
|
socket.setdefaulttimeout(timeout)
|
2010-06-09 18:40:36 +04:00
|
|
|
|
2012-12-06 14:15:05 +04:00
|
|
|
if direct_:
|
2013-05-27 12:38:47 +04:00
|
|
|
if '?' in url:
|
|
|
|
url, params = url.split('?', 1)
|
2010-02-09 17:02:47 +03:00
|
|
|
params = urlencode(params)
|
|
|
|
url = "%s?%s" % (url, params)
|
2010-06-09 18:40:36 +04:00
|
|
|
|
2012-08-20 12:41:43 +04:00
|
|
|
elif any((refreshing, crawling)):
|
2011-05-27 20:26:00 +04:00
|
|
|
pass
|
2011-03-29 18:16:28 +04:00
|
|
|
|
2011-05-13 13:56:12 +04:00
|
|
|
elif target:
|
2013-04-24 14:35:39 +04:00
|
|
|
if conf.forceSSL and urlparse.urlparse(url).scheme != "https":
|
|
|
|
url = re.sub("\Ahttp:", "https:", url, re.I)
|
|
|
|
url = re.sub(":80/", ":443/", url, re.I)
|
|
|
|
|
2012-07-14 13:01:30 +04:00
|
|
|
if PLACE.GET in conf.parameters and not get:
|
2010-11-08 11:02:36 +03:00
|
|
|
get = conf.parameters[PLACE.GET]
|
2010-06-09 18:40:36 +04:00
|
|
|
|
2013-06-04 02:05:25 +04:00
|
|
|
if not conf.skipUrlEncode:
|
|
|
|
get = urlencode(get, limit=True)
|
|
|
|
|
2010-02-09 17:02:47 +03:00
|
|
|
if get:
|
2015-01-17 19:31:00 +03:00
|
|
|
if '?' in url:
|
|
|
|
url = "%s%s%s" % (url, DEFAULT_GET_POST_DELIMITER, get)
|
|
|
|
requestMsg += "%s%s" % (DEFAULT_GET_POST_DELIMITER, get)
|
|
|
|
else:
|
|
|
|
url = "%s?%s" % (url, get)
|
|
|
|
requestMsg += "?%s" % get
|
2010-06-10 18:42:17 +04:00
|
|
|
|
2014-11-21 11:41:39 +03:00
|
|
|
if PLACE.POST in conf.parameters and not post and method != HTTPMETHOD.GET:
|
2013-06-03 17:14:56 +04:00
|
|
|
post = conf.parameters[PLACE.POST]
|
2010-03-23 13:27:39 +03:00
|
|
|
|
2011-05-13 13:56:12 +04:00
|
|
|
elif get:
|
|
|
|
url = "%s?%s" % (url, get)
|
|
|
|
requestMsg += "?%s" % get
|
|
|
|
|
2010-10-21 13:10:07 +04:00
|
|
|
requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
|
2010-02-09 17:02:47 +03:00
|
|
|
|
2011-12-20 16:52:41 +04:00
|
|
|
# Prepare HTTP headers
|
2015-03-20 02:56:36 +03:00
|
|
|
headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: host})
|
2010-09-15 16:45:41 +04:00
|
|
|
|
2015-12-03 03:43:37 +03:00
|
|
|
if HTTP_HEADER.COOKIE in headers:
|
|
|
|
cookie = headers[HTTP_HEADER.COOKIE]
|
|
|
|
|
2010-10-18 12:54:08 +04:00
|
|
|
if kb.authHeader:
|
2013-03-20 14:10:24 +04:00
|
|
|
headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader
|
2010-10-18 12:54:08 +04:00
|
|
|
|
2010-10-18 13:02:56 +04:00
|
|
|
if kb.proxyAuthHeader:
|
2013-03-20 14:10:24 +04:00
|
|
|
headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader
|
2010-10-18 13:02:56 +04:00
|
|
|
|
2015-06-05 18:02:56 +03:00
|
|
|
if not getHeader(headers, HTTP_HEADER.ACCEPT):
|
2015-02-03 00:07:16 +03:00
|
|
|
headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
|
|
|
|
|
2015-06-05 18:02:56 +03:00
|
|
|
if not getHeader(headers, HTTP_HEADER.HOST) or not target:
|
2015-03-20 02:56:36 +03:00
|
|
|
headers[HTTP_HEADER.HOST] = getHostHeader(url)
|
|
|
|
|
2015-06-05 18:02:56 +03:00
|
|
|
if not getHeader(headers, HTTP_HEADER.ACCEPT_ENCODING):
|
2015-03-03 16:37:36 +03:00
|
|
|
headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
|
|
|
|
|
2016-09-02 15:14:17 +03:00
|
|
|
if post is not None and not multipart and not getHeader(headers, HTTP_HEADER.CONTENT_TYPE):
|
2013-03-20 14:10:24 +04:00
|
|
|
headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)
|
2012-10-04 13:25:44 +04:00
|
|
|
|
2013-03-20 14:10:24 +04:00
|
|
|
if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
|
|
|
|
warnMsg = "missing 'boundary parameter' in '%s' header. " % HTTP_HEADER.CONTENT_TYPE
|
2012-10-16 14:32:58 +04:00
|
|
|
warnMsg += "Will try to reconstruct"
|
|
|
|
singleTimeWarnMessage(warnMsg)
|
|
|
|
|
|
|
|
boundary = findMultipartPostBoundary(conf.data)
|
|
|
|
if boundary:
|
2013-03-20 14:10:24 +04:00
|
|
|
headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary)
|
2012-10-16 14:32:58 +04:00
|
|
|
|
2016-05-17 11:47:17 +03:00
|
|
|
if conf.keepAlive:
|
|
|
|
headers[HTTP_HEADER.CONNECTION] = "keep-alive"
|
|
|
|
|
2015-12-03 03:43:37 +03:00
|
|
|
# Reset header values to original in case of provided request file
|
|
|
|
if target and conf.requestFile:
|
2017-02-06 14:03:18 +03:00
|
|
|
headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie})
|
2015-12-03 03:43:37 +03:00
|
|
|
|
2010-09-16 12:43:10 +04:00
|
|
|
if auxHeaders:
|
2015-11-02 00:56:26 +03:00
|
|
|
for key, value in auxHeaders.items():
|
2013-08-23 13:54:08 +04:00
|
|
|
for _ in headers.keys():
|
|
|
|
if _.upper() == key.upper():
|
|
|
|
del headers[_]
|
2015-11-02 00:56:26 +03:00
|
|
|
headers[key] = value
|
2010-09-16 12:43:10 +04:00
|
|
|
|
2015-11-02 00:56:26 +03:00
|
|
|
for key, value in headers.items():
|
2011-02-25 12:43:04 +03:00
|
|
|
del headers[key]
|
2016-03-06 22:04:45 +03:00
|
|
|
value = unicodeencode(value, kb.pageEncoding)
|
2015-11-02 00:56:26 +03:00
|
|
|
for char in (r"\r", r"\n"):
|
|
|
|
value = re.sub(r"(%s)([^ \t])" % char, r"\g<1>\t\g<2>", value)
|
2016-03-06 22:04:45 +03:00
|
|
|
headers[unicodeencode(key, kb.pageEncoding)] = value.strip("\r\n")
|
2010-12-28 17:40:34 +03:00
|
|
|
|
2015-01-07 13:40:11 +03:00
|
|
|
url = unicodeencode(url)
|
2015-08-30 03:13:07 +03:00
|
|
|
post = unicodeencode(post)
|
2010-12-28 17:40:34 +03:00
|
|
|
|
2015-05-11 11:56:10 +03:00
|
|
|
if websocket_:
|
2015-03-24 13:21:50 +03:00
|
|
|
ws = websocket.WebSocket()
|
2016-10-20 22:13:39 +03:00
|
|
|
ws.settimeout(timeout)
|
2015-05-11 11:56:10 +03:00
|
|
|
ws.connect(url, header=("%s: %s" % _ for _ in headers.items() if _[0] not in ("Host",)), cookie=cookie) # WebSocket will add Host field of headers automatically
|
|
|
|
ws.send(urldecode(post or ""))
|
|
|
|
page = ws.recv()
|
2015-03-24 13:21:50 +03:00
|
|
|
ws.close()
|
2015-05-11 11:56:10 +03:00
|
|
|
code = ws.status
|
|
|
|
status = httplib.responses[code]
|
|
|
|
class _(dict):
|
|
|
|
pass
|
|
|
|
responseHeaders = _(ws.getheaders())
|
|
|
|
responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()]
|
|
|
|
|
2016-09-09 12:06:38 +03:00
|
|
|
requestHeaders += "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
|
2015-05-11 11:56:10 +03:00
|
|
|
requestMsg += "\n%s" % requestHeaders
|
|
|
|
|
|
|
|
if post is not None:
|
|
|
|
requestMsg += "\n\n%s" % getUnicode(post)
|
2015-03-24 12:19:37 +03:00
|
|
|
|
2015-05-11 11:56:10 +03:00
|
|
|
requestMsg += "\n"
|
|
|
|
|
|
|
|
threadData.lastRequestMsg = requestMsg
|
|
|
|
|
|
|
|
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
|
2010-09-15 16:45:41 +04:00
|
|
|
else:
|
2015-05-11 11:56:10 +03:00
|
|
|
if method and method not in (HTTPMETHOD.GET, HTTPMETHOD.POST):
|
|
|
|
method = unicodeencode(method)
|
|
|
|
req = MethodRequest(url, post, headers)
|
|
|
|
req.set_method(method)
|
|
|
|
else:
|
|
|
|
req = urllib2.Request(url, post, headers)
|
2008-12-04 20:40:03 +03:00
|
|
|
|
2016-09-09 12:06:38 +03:00
|
|
|
requestHeaders += "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items()])
|
2010-10-29 03:22:13 +04:00
|
|
|
|
2015-05-11 11:56:10 +03:00
|
|
|
if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
|
|
|
|
conf.cj._policy._now = conf.cj._now = int(time.time())
|
|
|
|
cookies = conf.cj._cookies_for_request(req)
|
|
|
|
requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))
|
2010-10-29 03:22:13 +04:00
|
|
|
|
2015-05-11 11:56:10 +03:00
|
|
|
if post is not None:
|
|
|
|
if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH):
|
|
|
|
requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))
|
2012-08-21 00:17:39 +04:00
|
|
|
|
2015-05-11 11:56:10 +03:00
|
|
|
if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
|
2016-07-20 21:04:44 +03:00
|
|
|
requestHeaders += "\n%s: %s" % (HTTP_HEADER.CONNECTION, "close" if not conf.keepAlive else "keep-alive")
|
2010-10-29 03:22:13 +04:00
|
|
|
|
2015-05-11 11:56:10 +03:00
|
|
|
requestMsg += "\n%s" % requestHeaders
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2015-05-11 11:56:10 +03:00
|
|
|
if post is not None:
|
|
|
|
requestMsg += "\n\n%s" % getUnicode(post)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2015-05-11 11:56:10 +03:00
|
|
|
requestMsg += "\n"
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2016-09-02 15:14:17 +03:00
|
|
|
if not multipart:
|
|
|
|
threadData.lastRequestMsg = requestMsg
|
2011-09-28 12:13:46 +04:00
|
|
|
|
2016-09-02 15:14:17 +03:00
|
|
|
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2015-10-13 14:31:28 +03:00
|
|
|
if conf.cj:
|
|
|
|
for cookie in conf.cj:
|
|
|
|
if cookie.value is None:
|
|
|
|
cookie.value = ""
|
2015-12-15 13:29:37 +03:00
|
|
|
else:
|
|
|
|
for char in (r"\r", r"\n"):
|
|
|
|
cookie.value = re.sub(r"(%s)([^ \t])" % char, r"\g<1>\t\g<2>", cookie.value)
|
2015-10-13 14:31:28 +03:00
|
|
|
|
2015-05-11 11:56:10 +03:00
|
|
|
conn = urllib2.urlopen(req)
|
2010-12-22 16:41:36 +03:00
|
|
|
|
2015-05-11 11:56:10 +03:00
|
|
|
if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and (conf.authType or "").lower() == AUTH_TYPE.BASIC.lower():
|
|
|
|
kb.authHeader = getRequestHeader(req, HTTP_HEADER.AUTHORIZATION)
|
2010-10-18 12:54:08 +04:00
|
|
|
|
2015-05-11 11:56:10 +03:00
|
|
|
if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION):
|
|
|
|
kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION)
|
2010-10-18 13:02:56 +04:00
|
|
|
|
2015-05-11 11:56:10 +03:00
|
|
|
# Return response object
|
|
|
|
if response:
|
|
|
|
return conn, None, None
|
2010-06-10 18:42:17 +04:00
|
|
|
|
2015-05-11 11:56:10 +03:00
|
|
|
# Get HTTP response
|
2017-01-16 16:29:23 +03:00
|
|
|
if hasattr(conn, "redurl"):
|
2015-05-11 11:56:10 +03:00
|
|
|
page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\
|
|
|
|
else Connect._connReadProxy(conn)) if not skipRead else None
|
|
|
|
skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
|
|
|
|
code = conn.redcode
|
|
|
|
else:
|
|
|
|
page = Connect._connReadProxy(conn) if not skipRead else None
|
2011-12-22 02:59:23 +04:00
|
|
|
|
2016-12-10 01:10:14 +03:00
|
|
|
code = code or (conn.code if conn else None)
|
2015-05-11 11:56:10 +03:00
|
|
|
responseHeaders = conn.info()
|
|
|
|
responseHeaders[URI_HTTP_HEADER] = conn.geturl()
|
|
|
|
page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
|
|
|
|
status = getUnicode(conn.msg)
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2016-10-02 12:13:40 +03:00
|
|
|
kb.connErrorCounter = 0
|
|
|
|
|
2017-01-16 16:29:23 +03:00
|
|
|
if not refreshing:
|
2017-01-16 17:23:38 +03:00
|
|
|
refresh = responseHeaders.get(HTTP_HEADER.REFRESH, "").split("url=")[-1].strip()
|
2011-03-29 18:16:28 +04:00
|
|
|
|
2017-01-16 16:29:23 +03:00
|
|
|
if extractRegexResult(META_REFRESH_REGEX, page):
|
|
|
|
refresh = extractRegexResult(META_REFRESH_REGEX, page)
|
2011-03-29 18:16:28 +04:00
|
|
|
|
2017-01-16 16:29:23 +03:00
|
|
|
debugMsg = "got HTML meta refresh header"
|
|
|
|
logger.debug(debugMsg)
|
2011-05-28 02:42:23 +04:00
|
|
|
|
2017-01-16 16:29:23 +03:00
|
|
|
if refresh:
|
|
|
|
if kb.alwaysRefresh is None:
|
|
|
|
msg = "sqlmap got a refresh request "
|
|
|
|
msg += "(redirect like response common to login pages). "
|
|
|
|
msg += "Do you want to apply the refresh "
|
|
|
|
msg += "from now on (or stay on the original page)? [Y/n]"
|
|
|
|
choice = readInput(msg, default="Y")
|
2011-05-28 02:42:23 +04:00
|
|
|
|
2017-01-16 16:29:23 +03:00
|
|
|
kb.alwaysRefresh = choice not in ("n", "N")
|
|
|
|
|
|
|
|
if kb.alwaysRefresh:
|
|
|
|
if re.search(r"\Ahttps?://", refresh, re.I):
|
|
|
|
url = refresh
|
|
|
|
else:
|
|
|
|
url = urlparse.urljoin(url, refresh)
|
|
|
|
|
|
|
|
threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
|
|
|
|
kwargs["refreshing"] = True
|
|
|
|
kwargs["url"] = url
|
|
|
|
kwargs["get"] = None
|
|
|
|
kwargs["post"] = None
|
|
|
|
|
|
|
|
try:
|
|
|
|
return Connect._getPageProxy(**kwargs)
|
|
|
|
except SqlmapSyntaxException:
|
|
|
|
pass
|
2011-03-29 18:16:28 +04:00
|
|
|
|
2010-12-26 17:36:51 +03:00
|
|
|
# Explicit closing of connection object
|
2015-05-11 11:56:10 +03:00
|
|
|
if conn and not conf.keepAlive:
|
2010-12-26 17:36:51 +03:00
|
|
|
try:
|
2011-10-21 13:06:00 +04:00
|
|
|
if hasattr(conn.fp, '_sock'):
|
|
|
|
conn.fp._sock.close()
|
2010-12-26 17:36:51 +03:00
|
|
|
conn.close()
|
2015-09-10 16:51:33 +03:00
|
|
|
except Exception, ex:
|
|
|
|
warnMsg = "problem occurred during connection closing ('%s')" % getSafeExString(ex)
|
2010-12-26 17:36:51 +03:00
|
|
|
logger.warn(warnMsg)
|
|
|
|
|
2015-09-10 16:51:33 +03:00
|
|
|
except urllib2.HTTPError, ex:
|
2011-01-04 01:02:58 +03:00
|
|
|
page = None
|
2011-01-25 19:05:06 +03:00
|
|
|
responseHeaders = None
|
2011-02-01 01:51:14 +03:00
|
|
|
|
2010-11-17 15:16:48 +03:00
|
|
|
try:
|
2015-09-10 16:51:33 +03:00
|
|
|
page = ex.read() if not skipRead else None
|
|
|
|
responseHeaders = ex.info()
|
|
|
|
responseHeaders[URI_HTTP_HEADER] = ex.geturl()
|
2013-03-20 14:10:24 +04:00
|
|
|
page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
|
2010-11-17 15:16:48 +03:00
|
|
|
except socket.timeout:
|
2011-04-30 17:20:05 +04:00
|
|
|
warnMsg = "connection timed out while trying "
|
2015-09-10 16:51:33 +03:00
|
|
|
warnMsg += "to get error page information (%d)" % ex.code
|
2010-11-17 15:16:48 +03:00
|
|
|
logger.warn(warnMsg)
|
2011-08-12 20:48:11 +04:00
|
|
|
return None, None, None
|
2012-01-16 14:04:18 +04:00
|
|
|
except KeyboardInterrupt:
|
|
|
|
raise
|
2010-11-17 15:16:48 +03:00
|
|
|
except:
|
|
|
|
pass
|
2012-01-16 14:04:18 +04:00
|
|
|
finally:
|
|
|
|
page = page if isinstance(page, unicode) else getUnicode(page)
|
2010-11-17 15:16:48 +03:00
|
|
|
|
2015-09-10 16:51:33 +03:00
|
|
|
code = ex.code
|
2011-01-04 01:02:58 +03:00
|
|
|
|
2014-12-03 15:22:55 +03:00
|
|
|
kb.originalCode = kb.originalCode or code
|
|
|
|
threadData.lastHTTPError = (threadData.lastRequestUID, code)
|
2012-03-15 15:10:58 +04:00
|
|
|
kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1
|
2011-01-04 01:02:58 +03:00
|
|
|
|
2015-09-10 16:51:33 +03:00
|
|
|
status = getUnicode(ex.msg)
|
2010-12-22 16:41:36 +03:00
|
|
|
responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)
|
2011-01-04 01:02:58 +03:00
|
|
|
|
2010-11-17 15:16:48 +03:00
|
|
|
if responseHeaders:
|
2016-09-09 12:06:38 +03:00
|
|
|
logHeaders = "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
|
2011-01-04 01:02:58 +03:00
|
|
|
|
2012-07-23 12:16:47 +04:00
|
|
|
logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))
|
2010-11-17 15:04:33 +03:00
|
|
|
|
2012-03-15 18:51:16 +04:00
|
|
|
skipLogTraffic = True
|
|
|
|
|
2010-12-22 16:41:36 +03:00
|
|
|
if conf.verbose <= 5:
|
|
|
|
responseMsg += getUnicode(logHeaders)
|
|
|
|
elif conf.verbose > 5:
|
2012-08-21 11:58:40 +04:00
|
|
|
responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])
|
2010-12-22 16:41:36 +03:00
|
|
|
|
2016-09-02 15:14:17 +03:00
|
|
|
if not multipart:
|
|
|
|
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
|
2010-12-22 16:41:36 +03:00
|
|
|
|
2015-09-10 16:51:33 +03:00
|
|
|
if ex.code == httplib.UNAUTHORIZED and not conf.ignore401:
|
2011-04-30 17:20:05 +04:00
|
|
|
errMsg = "not authorized, try to provide right HTTP "
|
2010-11-04 01:07:13 +03:00
|
|
|
errMsg += "authentication type and valid credentials (%d)" % code
|
2013-01-04 02:20:55 +04:00
|
|
|
raise SqlmapConnectionException(errMsg)
|
2015-09-10 16:51:33 +03:00
|
|
|
elif ex.code == httplib.NOT_FOUND:
|
2011-08-12 18:48:44 +04:00
|
|
|
if raise404:
|
|
|
|
errMsg = "page not found (%d)" % code
|
2013-01-04 02:20:55 +04:00
|
|
|
raise SqlmapConnectionException(errMsg)
|
2011-08-12 18:48:44 +04:00
|
|
|
else:
|
|
|
|
debugMsg = "page not found (%d)" % code
|
2013-02-19 12:46:51 +04:00
|
|
|
singleTimeLogMessage(debugMsg, logging.DEBUG)
|
2011-08-12 18:48:44 +04:00
|
|
|
processResponse(page, responseHeaders)
|
2015-09-10 16:51:33 +03:00
|
|
|
elif ex.code == httplib.GATEWAY_TIMEOUT:
|
2011-06-19 13:57:41 +04:00
|
|
|
if ignoreTimeout:
|
2016-10-14 00:25:46 +03:00
|
|
|
return None if not conf.ignoreTimeouts else "", None, None
|
2011-06-19 13:57:41 +04:00
|
|
|
else:
|
2015-09-10 16:51:33 +03:00
|
|
|
warnMsg = "unable to connect to the target URL (%d - %s)" % (ex.code, httplib.responses[ex.code])
|
2012-08-20 13:40:49 +04:00
|
|
|
if threadData.retriesCount < conf.retries and not kb.threadException:
|
2012-10-04 20:28:36 +04:00
|
|
|
warnMsg += ". sqlmap is going to retry the request"
|
2011-06-19 13:57:41 +04:00
|
|
|
logger.critical(warnMsg)
|
2012-12-06 17:14:19 +04:00
|
|
|
return Connect._retryProxy(**kwargs)
|
2011-06-19 14:11:27 +04:00
|
|
|
elif kb.testMode:
|
2011-06-19 13:57:41 +04:00
|
|
|
logger.critical(warnMsg)
|
2011-08-12 20:48:11 +04:00
|
|
|
return None, None, None
|
2011-06-19 13:57:41 +04:00
|
|
|
else:
|
2013-01-04 02:20:55 +04:00
|
|
|
raise SqlmapConnectionException(warnMsg)
|
2008-10-15 19:38:22 +04:00
|
|
|
else:
|
2010-06-10 18:15:32 +04:00
|
|
|
debugMsg = "got HTTP error code: %d (%s)" % (code, status)
|
2010-01-19 13:27:54 +03:00
|
|
|
logger.debug(debugMsg)
|
|
|
|
|
2015-11-03 16:53:41 +03:00
|
|
|
except (urllib2.URLError, socket.error, socket.timeout, httplib.HTTPException, struct.error, binascii.Error, ProxyError, SqlmapCompressionException, WebSocketException, TypeError):
|
2008-12-04 20:40:03 +03:00
|
|
|
tbMsg = traceback.format_exc()
|
2010-12-21 13:31:56 +03:00
|
|
|
|
2010-12-12 00:28:11 +03:00
|
|
|
if "no host given" in tbMsg:
|
2013-04-09 13:48:42 +04:00
|
|
|
warnMsg = "invalid URL address used (%s)" % repr(url)
|
2013-01-04 02:20:55 +04:00
|
|
|
raise SqlmapSyntaxException(warnMsg)
|
2015-03-24 13:50:57 +03:00
|
|
|
elif "forcibly closed" in tbMsg or "Connection is already closed" in tbMsg:
|
2013-04-09 13:48:42 +04:00
|
|
|
warnMsg = "connection was forcibly closed by the target URL"
|
2011-01-03 16:04:20 +03:00
|
|
|
elif "timed out" in tbMsg:
|
2016-05-02 11:45:50 +03:00
|
|
|
if not conf.disablePrecon:
|
|
|
|
singleTimeWarnMessage("turning off pre-connect mechanism because of connection time out(s)")
|
|
|
|
conf.disablePrecon = True
|
|
|
|
|
|
|
|
if kb.testMode and kb.testType not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
|
|
|
|
kb.responseTimes.clear()
|
2016-01-11 02:34:03 +03:00
|
|
|
|
2014-09-08 16:33:13 +04:00
|
|
|
if kb.testMode and kb.testType not in (None, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
|
2016-10-14 00:06:04 +03:00
|
|
|
singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS/IDS) is dropping 'suspicious' requests")
|
2013-04-09 13:48:42 +04:00
|
|
|
warnMsg = "connection timed out to the target URL"
|
2010-12-12 00:28:11 +03:00
|
|
|
elif "URLError" in tbMsg or "error" in tbMsg:
|
2013-04-09 13:48:42 +04:00
|
|
|
warnMsg = "unable to connect to the target URL"
|
2016-06-01 11:53:32 +03:00
|
|
|
match = re.search(r"Errno \d+\] ([^>]+)", tbMsg)
|
|
|
|
if match:
|
2016-06-15 08:54:47 +03:00
|
|
|
warnMsg += " ('%s')" % match.group(1).strip()
|
2014-12-07 18:14:48 +03:00
|
|
|
elif "NTLM" in tbMsg:
|
2014-12-07 18:11:07 +03:00
|
|
|
warnMsg = "there has been a problem with NTLM authentication"
|
2008-12-04 20:40:03 +03:00
|
|
|
elif "BadStatusLine" in tbMsg:
|
2012-04-21 00:33:15 +04:00
|
|
|
warnMsg = "connection dropped or unknown HTTP "
|
2014-06-16 11:51:24 +04:00
|
|
|
warnMsg += "status code received"
|
|
|
|
if not conf.agent and not conf.randomAgent:
|
|
|
|
warnMsg += ". Try to force the HTTP User-Agent "
|
|
|
|
warnMsg += "header with option '--user-agent' or switch '--random-agent'"
|
2010-11-13 01:57:33 +03:00
|
|
|
elif "IncompleteRead" in tbMsg:
|
|
|
|
warnMsg = "there was an incomplete read error while retrieving data "
|
2013-04-09 13:48:42 +04:00
|
|
|
warnMsg += "from the target URL"
|
2015-03-24 13:21:50 +03:00
|
|
|
elif "Handshake status" in tbMsg:
|
|
|
|
status = re.search("Handshake status ([\d]{3})", tbMsg)
|
2015-05-11 12:01:21 +03:00
|
|
|
errMsg = "websocket handshake status %s" % status.group(1) if status else "unknown"
|
2015-03-24 13:21:50 +03:00
|
|
|
raise SqlmapConnectionException(errMsg)
|
2009-12-31 15:34:18 +03:00
|
|
|
else:
|
2013-04-09 13:48:42 +04:00
|
|
|
warnMsg = "unable to connect to the target URL"
|
2009-12-31 15:34:18 +03:00
|
|
|
|
2016-06-15 08:57:10 +03:00
|
|
|
if "BadStatusLine" not in tbMsg and any((conf.proxy, conf.tor)):
|
2008-12-05 18:34:13 +03:00
|
|
|
warnMsg += " or proxy"
|
|
|
|
|
2016-10-02 12:13:40 +03:00
|
|
|
with kb.locks.connError:
|
|
|
|
kb.connErrorCounter += 1
|
|
|
|
|
|
|
|
if kb.connErrorCounter >= MAX_CONSECUTIVE_CONNECTION_ERRORS and kb.connErrorChoice is None:
|
|
|
|
message = "there seems to be a continuous problem with connection to the target. "
|
|
|
|
message += "Are you sure that you want to continue "
|
|
|
|
message += "with further target testing? [y/N] "
|
|
|
|
kb.connErrorChoice = readInput(message, default="N") in ("Y", "y")
|
|
|
|
|
|
|
|
if kb.connErrorChoice is False:
|
|
|
|
raise SqlmapConnectionException(warnMsg)
|
|
|
|
|
2013-03-19 22:24:14 +04:00
|
|
|
if silent:
|
|
|
|
return None, None, None
|
|
|
|
elif "forcibly closed" in tbMsg:
|
2011-01-03 16:04:20 +03:00
|
|
|
logger.critical(warnMsg)
|
2011-08-12 20:48:11 +04:00
|
|
|
return None, None, None
|
2013-03-19 22:24:14 +04:00
|
|
|
elif ignoreTimeout and any(_ in tbMsg for _ in ("timed out", "IncompleteRead")):
|
2016-10-14 00:25:46 +03:00
|
|
|
return None if not conf.ignoreTimeouts else "", None, None
|
2012-08-20 13:40:49 +04:00
|
|
|
elif threadData.retriesCount < conf.retries and not kb.threadException:
|
2012-10-04 20:28:36 +04:00
|
|
|
warnMsg += ". sqlmap is going to retry the request"
|
2015-09-27 17:36:20 +03:00
|
|
|
if not retrying:
|
|
|
|
warnMsg += "(s)"
|
|
|
|
logger.critical(warnMsg)
|
|
|
|
else:
|
|
|
|
logger.debug(warnMsg)
|
2012-12-06 17:14:19 +04:00
|
|
|
return Connect._retryProxy(**kwargs)
|
2011-05-22 14:29:25 +04:00
|
|
|
elif kb.testMode:
|
2011-05-22 14:59:56 +04:00
|
|
|
logger.critical(warnMsg)
|
2011-08-12 20:48:11 +04:00
|
|
|
return None, None, None
|
2008-11-15 15:25:19 +03:00
|
|
|
else:
|
2013-01-04 02:20:55 +04:00
|
|
|
raise SqlmapConnectionException(warnMsg)
|
2008-11-15 15:25:19 +03:00
|
|
|
|
2011-06-19 13:57:41 +04:00
|
|
|
finally:
|
2015-09-15 11:48:23 +03:00
|
|
|
if isinstance(page, basestring) and not isinstance(page, unicode):
|
2014-08-21 01:42:40 +04:00
|
|
|
if HTTP_HEADER.CONTENT_TYPE in (responseHeaders or {}) and not re.search(TEXT_CONTENT_TYPE_REGEX, responseHeaders[HTTP_HEADER.CONTENT_TYPE]):
|
|
|
|
page = unicode(page, errors="ignore")
|
|
|
|
else:
|
|
|
|
page = getUnicode(page)
|
2011-06-19 13:57:41 +04:00
|
|
|
socket.setdefaulttimeout(conf.timeout)
|
2009-12-18 01:04:01 +03:00
|
|
|
|
2011-08-12 17:40:37 +04:00
|
|
|
processResponse(page, responseHeaders)
|
2010-05-04 12:43:14 +04:00
|
|
|
|
2013-06-11 00:26:34 +04:00
|
|
|
if conn and getattr(conn, "redurl", None):
|
2013-06-05 01:46:39 +04:00
|
|
|
_ = urlparse.urlsplit(conn.redurl)
|
|
|
|
_ = ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else ""))
|
2015-11-09 16:05:53 +03:00
|
|
|
requestMsg = re.sub("(\n[A-Z]+ ).+?( HTTP/\d)", "\g<1>%s\g<2>" % getUnicode(_).replace("\\", "\\\\"), requestMsg, 1)
|
2015-06-16 13:00:56 +03:00
|
|
|
|
|
|
|
if kb.resendPostOnRedirect is False:
|
|
|
|
requestMsg = re.sub("(\[#\d+\]:\n)POST ", "\g<1>GET ", requestMsg)
|
|
|
|
requestMsg = re.sub("(?i)Content-length: \d+\n", "", requestMsg)
|
|
|
|
requestMsg = re.sub("(?s)\n\n.+", "\n", requestMsg)
|
|
|
|
|
2013-04-30 19:46:26 +04:00
|
|
|
responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, conn.code, status)
|
|
|
|
else:
|
|
|
|
responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)
|
|
|
|
|
2010-12-22 16:41:36 +03:00
|
|
|
if responseHeaders:
|
2016-09-09 12:06:38 +03:00
|
|
|
logHeaders = "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
|
2011-03-17 15:35:40 +03:00
|
|
|
|
2012-03-14 18:31:41 +04:00
|
|
|
if not skipLogTraffic:
|
2012-07-23 12:16:47 +04:00
|
|
|
logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))
|
2010-05-04 12:43:14 +04:00
|
|
|
|
2010-11-08 01:34:29 +03:00
|
|
|
if conf.verbose <= 5:
|
2010-11-08 14:55:56 +03:00
|
|
|
responseMsg += getUnicode(logHeaders)
|
2010-11-08 01:34:29 +03:00
|
|
|
elif conf.verbose > 5:
|
2012-08-21 11:58:40 +04:00
|
|
|
responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])
|
2010-05-04 12:43:14 +04:00
|
|
|
|
2016-09-02 15:14:17 +03:00
|
|
|
if not multipart:
|
|
|
|
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2011-08-12 20:48:11 +04:00
|
|
|
return page, responseHeaders, code
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
@staticmethod
|
2011-10-24 04:46:54 +04:00
|
|
|
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True):
|
2008-10-15 19:38:22 +04:00
|
|
|
"""
|
2013-04-09 13:48:42 +04:00
|
|
|
This method calls a function to get the target URL page content
|
2008-10-15 19:38:22 +04:00
|
|
|
and returns its page MD5 hash or a boolean value in case of
|
|
|
|
string match check ('--string' command line parameter)
|
|
|
|
"""
|
|
|
|
|
2010-03-27 02:23:25 +03:00
|
|
|
if conf.direct:
|
2010-03-31 14:50:47 +04:00
|
|
|
return direct(value, content)
|
2010-03-27 02:23:25 +03:00
|
|
|
|
2011-04-30 17:20:05 +04:00
|
|
|
get = None
|
|
|
|
post = None
|
|
|
|
cookie = None
|
|
|
|
ua = None
|
|
|
|
referer = None
|
2011-12-20 16:52:41 +04:00
|
|
|
host = None
|
2011-04-30 17:20:05 +04:00
|
|
|
page = None
|
|
|
|
pageLength = None
|
|
|
|
uri = None
|
2012-03-16 00:17:40 +04:00
|
|
|
code = None
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
if not place:
|
2011-07-06 09:44:47 +04:00
|
|
|
place = kb.injection.place or PLACE.GET
|
|
|
|
|
2014-07-10 10:49:20 +04:00
|
|
|
if not auxHeaders:
|
|
|
|
auxHeaders = {}
|
|
|
|
|
2011-07-06 09:44:47 +04:00
|
|
|
raise404 = place != PLACE.URI if raise404 is None else raise404
|
2014-11-21 11:41:39 +03:00
|
|
|
method = method or conf.method
|
2010-10-14 15:06:28 +04:00
|
|
|
|
2012-05-22 13:33:22 +04:00
|
|
|
value = agent.adjustLateValues(value)
|
2010-11-08 00:55:24 +03:00
|
|
|
payload = agent.extractPayload(value)
|
2010-12-21 01:45:01 +03:00
|
|
|
threadData = getCurrentThreadData()
|
2010-10-30 03:00:48 +04:00
|
|
|
|
2013-03-27 16:39:27 +04:00
|
|
|
if conf.httpHeaders:
|
2014-10-22 15:41:36 +04:00
|
|
|
headers = OrderedDict(conf.httpHeaders)
|
2013-03-27 16:39:27 +04:00
|
|
|
contentType = max(headers[_] if _.upper() == HTTP_HEADER.CONTENT_TYPE.upper() else None for _ in headers.keys())
|
|
|
|
|
2014-03-21 23:28:16 +04:00
|
|
|
if (kb.postHint or conf.skipUrlEncode) and kb.postUrlEncode:
|
|
|
|
kb.postUrlEncode = False
|
2013-03-27 16:39:27 +04:00
|
|
|
conf.httpHeaders = [_ for _ in conf.httpHeaders if _[1] != contentType]
|
|
|
|
contentType = POST_HINT_CONTENT_TYPES.get(kb.postHint, PLAIN_TEXT_CONTENT_TYPE)
|
|
|
|
conf.httpHeaders.append((HTTP_HEADER.CONTENT_TYPE, contentType))
|
2012-08-31 14:38:02 +04:00
|
|
|
|
2010-11-08 00:55:24 +03:00
|
|
|
if payload:
|
|
|
|
if kb.tamperFunctions:
|
2010-10-29 20:11:50 +04:00
|
|
|
for function in kb.tamperFunctions:
|
2014-11-05 12:03:19 +03:00
|
|
|
try:
|
|
|
|
payload = function(payload=payload, headers=auxHeaders)
|
|
|
|
except Exception, ex:
|
|
|
|
errMsg = "error occurred while running tamper "
|
2015-09-10 16:51:33 +03:00
|
|
|
errMsg += "function '%s' ('%s')" % (function.func_name, getSafeExString(ex))
|
2014-11-05 12:03:19 +03:00
|
|
|
raise SqlmapGenericException(errMsg)
|
|
|
|
|
2012-11-10 14:01:29 +04:00
|
|
|
if not isinstance(payload, basestring):
|
|
|
|
errMsg = "tamper function '%s' returns " % function.func_name
|
|
|
|
errMsg += "invalid payload type ('%s')" % type(payload)
|
2013-01-04 02:20:55 +04:00
|
|
|
raise SqlmapValueException(errMsg)
|
2010-10-30 03:00:48 +04:00
|
|
|
|
2010-10-29 20:11:50 +04:00
|
|
|
value = agent.replacePayload(value, payload)
|
|
|
|
|
2016-02-05 14:00:57 +03:00
|
|
|
logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload.replace('\\', BOUNDARY_BACKSLASH_MARKER)).replace(BOUNDARY_BACKSLASH_MARKER, '\\'))
|
2010-11-08 00:18:09 +03:00
|
|
|
|
2013-12-04 13:09:54 +04:00
|
|
|
if place == PLACE.CUSTOM_POST and kb.postHint:
|
2012-10-04 20:44:12 +04:00
|
|
|
if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML):
|
|
|
|
# payloads in SOAP/XML should have chars > and < replaced
|
2012-10-04 13:25:44 +04:00
|
|
|
# with their HTML encoded counterparts
|
|
|
|
payload = payload.replace('>', ">").replace('<', "<")
|
|
|
|
elif kb.postHint == POST_HINT.JSON:
|
2012-10-04 18:08:37 +04:00
|
|
|
if payload.startswith('"') and payload.endswith('"'):
|
|
|
|
payload = json.dumps(payload[1:-1])
|
|
|
|
else:
|
|
|
|
payload = json.dumps(payload)[1:-1]
|
2014-02-26 11:56:17 +04:00
|
|
|
elif kb.postHint == POST_HINT.JSON_LIKE:
|
2014-02-26 12:30:37 +04:00
|
|
|
payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
|
|
|
|
if payload.startswith('"') and payload.endswith('"'):
|
2014-02-26 11:56:17 +04:00
|
|
|
payload = json.dumps(payload[1:-1])
|
|
|
|
else:
|
|
|
|
payload = json.dumps(payload)[1:-1]
|
2014-02-26 12:30:37 +04:00
|
|
|
payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
|
2012-09-22 22:59:40 +04:00
|
|
|
value = agent.replacePayload(value, payload)
|
|
|
|
else:
|
2015-03-04 15:31:29 +03:00
|
|
|
# GET, POST, URI and Cookie payload needs to be thoroughly URL encoded
|
2016-05-30 15:09:53 +03:00
|
|
|
if (place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) or place == PLACE.CUSTOM_HEADER and value.split(',')[0] == HTTP_HEADER.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and kb.postUrlEncode:
|
2016-05-30 18:47:08 +03:00
|
|
|
skip = False
|
|
|
|
|
|
|
|
if place == PLACE.COOKIE or place == PLACE.CUSTOM_HEADER and value.split(',')[0] == HTTP_HEADER.COOKIE:
|
|
|
|
if kb.cookieEncodeChoice is None:
|
|
|
|
msg = "do you want to URL encode cookie values (implementation specific)? %s" % ("[Y/n]" if not conf.url.endswith(".aspx") else "[y/N]") # Reference: https://support.microsoft.com/en-us/kb/313282
|
|
|
|
choice = readInput(msg, default='Y' if not conf.url.endswith(".aspx") else 'N')
|
|
|
|
kb.cookieEncodeChoice = choice.upper().strip() == "Y"
|
|
|
|
if not kb.cookieEncodeChoice:
|
|
|
|
skip = True
|
|
|
|
|
|
|
|
if not skip:
|
|
|
|
payload = urlencode(payload, '%', False, place != PLACE.URI) # spaceplus is handled down below
|
|
|
|
value = agent.replacePayload(value, payload)
|
2012-05-10 17:39:54 +04:00
|
|
|
|
2012-12-10 14:55:31 +04:00
|
|
|
if conf.hpp:
|
|
|
|
if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_API.ASP, WEB_API.ASPX)):
|
|
|
|
warnMsg = "HTTP parameter pollution should work only against "
|
|
|
|
warnMsg += "ASP(.NET) targets"
|
|
|
|
singleTimeWarnMessage(warnMsg)
|
|
|
|
if place in (PLACE.GET, PLACE.POST):
|
|
|
|
_ = re.escape(PAYLOAD_DELIMITER)
|
2012-12-10 15:54:01 +04:00
|
|
|
match = re.search("(?P<name>\w+)=%s(?P<value>.+?)%s" % (_, _), value)
|
2012-12-10 14:55:31 +04:00
|
|
|
if match:
|
2012-12-10 16:05:41 +04:00
|
|
|
payload = match.group("value")
|
|
|
|
|
2012-12-10 15:00:15 +04:00
|
|
|
for splitter in (urlencode(' '), ' '):
|
2012-12-10 15:58:17 +04:00
|
|
|
if splitter in payload:
|
2012-12-10 15:00:15 +04:00
|
|
|
prefix, suffix = ("*/", "/*") if splitter == ' ' else (urlencode(_) for _ in ("*/", "/*"))
|
2012-12-10 16:07:36 +04:00
|
|
|
parts = payload.split(splitter)
|
2012-12-10 15:00:15 +04:00
|
|
|
parts[0] = "%s%s" % (parts[0], suffix)
|
2012-12-10 15:54:01 +04:00
|
|
|
parts[-1] = "%s%s=%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[-1])
|
2012-12-10 15:00:15 +04:00
|
|
|
for i in xrange(1, len(parts) - 1):
|
2012-12-10 15:54:01 +04:00
|
|
|
parts[i] = "%s%s=%s%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[i], suffix)
|
2012-12-10 15:00:15 +04:00
|
|
|
payload = "".join(parts)
|
2012-12-10 16:05:41 +04:00
|
|
|
|
2012-12-10 15:58:17 +04:00
|
|
|
for splitter in (urlencode(','), ','):
|
|
|
|
payload = payload.replace(splitter, "%s%s=" % (DEFAULT_GET_POST_DELIMITER, match.group("name")))
|
2012-12-10 16:05:41 +04:00
|
|
|
|
2012-12-10 15:58:17 +04:00
|
|
|
value = agent.replacePayload(value, payload)
|
2012-12-10 14:55:31 +04:00
|
|
|
else:
|
|
|
|
warnMsg = "HTTP parameter pollution works only with regular "
|
|
|
|
warnMsg += "GET and POST parameters"
|
|
|
|
singleTimeWarnMessage(warnMsg)
|
|
|
|
|
2012-05-10 17:39:54 +04:00
|
|
|
if place:
|
2011-01-27 22:44:24 +03:00
|
|
|
value = agent.removePayloadDelimiters(value)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2010-11-08 11:02:36 +03:00
|
|
|
if PLACE.GET in conf.parameters:
|
2011-08-29 16:50:52 +04:00
|
|
|
get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value
|
2016-05-27 14:33:14 +03:00
|
|
|
elif place == PLACE.GET: # Note: for (e.g.) checkWaf() when there are no GET parameters
|
|
|
|
get = value
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2010-11-08 11:02:36 +03:00
|
|
|
if PLACE.POST in conf.parameters:
|
2011-08-29 16:50:52 +04:00
|
|
|
post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value
|
2016-05-27 14:33:14 +03:00
|
|
|
elif place == PLACE.POST:
|
|
|
|
post = value
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2012-04-17 18:23:00 +04:00
|
|
|
if PLACE.CUSTOM_POST in conf.parameters:
|
|
|
|
post = conf.parameters[PLACE.CUSTOM_POST].replace(CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value
|
2013-02-13 15:24:42 +04:00
|
|
|
post = post.replace(ASTERISK_MARKER, '*') if post else post
|
2012-04-17 18:23:00 +04:00
|
|
|
|
2010-11-08 11:02:36 +03:00
|
|
|
if PLACE.COOKIE in conf.parameters:
|
|
|
|
cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value
|
2010-05-14 19:20:34 +04:00
|
|
|
|
2012-07-26 14:26:57 +04:00
|
|
|
if PLACE.USER_AGENT in conf.parameters:
|
|
|
|
ua = conf.parameters[PLACE.USER_AGENT] if place != PLACE.USER_AGENT or not value else value
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2011-02-12 02:07:03 +03:00
|
|
|
if PLACE.REFERER in conf.parameters:
|
|
|
|
referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value
|
|
|
|
|
2011-12-20 16:52:41 +04:00
|
|
|
if PLACE.HOST in conf.parameters:
|
|
|
|
host = conf.parameters[PLACE.HOST] if place != PLACE.HOST or not value else value
|
|
|
|
|
2010-11-08 11:02:36 +03:00
|
|
|
if PLACE.URI in conf.parameters:
|
|
|
|
uri = conf.url if place != PLACE.URI or not value else value
|
2010-09-23 18:07:23 +04:00
|
|
|
else:
|
|
|
|
uri = conf.url
|
2010-09-22 15:56:35 +04:00
|
|
|
|
2013-01-25 15:41:51 +04:00
|
|
|
if value and place == PLACE.CUSTOM_HEADER:
|
2016-10-20 01:47:53 +03:00
|
|
|
if value.split(',')[0].capitalize() == PLACE.COOKIE:
|
|
|
|
cookie = value.split(',', 1)[1]
|
|
|
|
else:
|
|
|
|
auxHeaders[value.split(',')[0]] = value.split(',', 1)[1]
|
2013-01-13 19:22:43 +04:00
|
|
|
|
2014-10-23 13:23:53 +04:00
|
|
|
if conf.csrfToken:
|
|
|
|
def _adjustParameter(paramString, parameter, newValue):
|
|
|
|
retVal = paramString
|
2016-05-26 17:47:38 +03:00
|
|
|
match = re.search("%s=[^&]*" % re.escape(parameter), paramString)
|
2014-10-23 13:23:53 +04:00
|
|
|
if match:
|
2016-10-20 01:19:16 +03:00
|
|
|
retVal = re.sub(re.escape(match.group(0)), "%s=%s" % (parameter, newValue), paramString)
|
2016-05-26 17:47:38 +03:00
|
|
|
else:
|
|
|
|
match = re.search("(%s[\"']:[\"'])([^\"']+)" % re.escape(parameter), paramString)
|
|
|
|
if match:
|
2016-10-20 01:19:16 +03:00
|
|
|
retVal = re.sub(re.escape(match.group(0)), "%s%s" % (match.group(1), newValue), paramString)
|
2014-10-23 13:23:53 +04:00
|
|
|
return retVal
|
|
|
|
|
2014-11-21 12:31:55 +03:00
|
|
|
page, headers, code = Connect.getPage(url=conf.csrfUrl or conf.url, data=conf.data if conf.csrfUrl == conf.url else None, method=conf.method if conf.csrfUrl == conf.url else None, cookie=conf.parameters.get(PLACE.COOKIE), direct=True, silent=True, ua=conf.parameters.get(PLACE.USER_AGENT), referer=conf.parameters.get(PLACE.REFERER), host=conf.parameters.get(PLACE.HOST))
|
2014-10-28 16:02:55 +03:00
|
|
|
match = re.search(r"<input[^>]+name=[\"']?%s[\"']?\s[^>]*value=(\"([^\"]+)|'([^']+)|([^ >]+))" % re.escape(conf.csrfToken), page or "")
|
2014-10-23 13:23:53 +04:00
|
|
|
token = (match.group(2) or match.group(3) or match.group(4)) if match else None
|
|
|
|
|
2016-05-26 17:08:59 +03:00
|
|
|
if not token:
|
|
|
|
match = re.search(r"%s[\"']:[\"']([^\"']+)" % re.escape(conf.csrfToken), page or "")
|
|
|
|
token = match.group(1) if match else None
|
|
|
|
|
2014-10-23 13:23:53 +04:00
|
|
|
if not token:
|
2014-10-23 16:33:22 +04:00
|
|
|
if conf.csrfUrl != conf.url and code == httplib.OK:
|
|
|
|
if headers and "text/plain" in headers.get(HTTP_HEADER.CONTENT_TYPE, ""):
|
|
|
|
token = page
|
|
|
|
|
2015-10-14 16:19:44 +03:00
|
|
|
if not token and conf.cj and any(_.name == conf.csrfToken for _ in conf.cj):
|
2014-10-28 16:41:21 +03:00
|
|
|
for _ in conf.cj:
|
|
|
|
if _.name == conf.csrfToken:
|
|
|
|
token = _.value
|
2016-10-17 13:36:42 +03:00
|
|
|
if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))):
|
2014-10-24 11:37:51 +04:00
|
|
|
if post:
|
|
|
|
post = "%s%s%s=%s" % (post, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token)
|
|
|
|
elif get:
|
|
|
|
get = "%s%s%s=%s" % (get, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token)
|
|
|
|
else:
|
|
|
|
get = "%s=%s" % (conf.csrfToken, token)
|
|
|
|
break
|
|
|
|
|
2014-10-23 16:33:22 +04:00
|
|
|
if not token:
|
2014-11-17 13:50:05 +03:00
|
|
|
errMsg = "anti-CSRF token '%s' can't be found at '%s'" % (conf.csrfToken, conf.csrfUrl or conf.url)
|
2014-10-23 16:33:22 +04:00
|
|
|
if not conf.csrfUrl:
|
|
|
|
errMsg += ". You can try to rerun by providing "
|
|
|
|
errMsg += "a valid value for option '--csrf-url'"
|
|
|
|
raise SqlmapTokenException, errMsg
|
2014-10-23 13:23:53 +04:00
|
|
|
|
|
|
|
if token:
|
2014-10-24 11:37:51 +04:00
|
|
|
for place in (PLACE.GET, PLACE.POST):
|
|
|
|
if place in conf.parameters:
|
|
|
|
if place == PLACE.GET and get:
|
2014-10-23 13:23:53 +04:00
|
|
|
get = _adjustParameter(get, conf.csrfToken, token)
|
2014-10-24 11:37:51 +04:00
|
|
|
elif place == PLACE.POST and post:
|
2014-10-23 13:23:53 +04:00
|
|
|
post = _adjustParameter(post, conf.csrfToken, token)
|
|
|
|
|
2014-10-23 16:33:22 +04:00
|
|
|
for i in xrange(len(conf.httpHeaders)):
|
|
|
|
if conf.httpHeaders[i][0].lower() == conf.csrfToken.lower():
|
|
|
|
conf.httpHeaders[i] = (conf.httpHeaders[i][0], token)
|
|
|
|
|
2011-08-29 16:50:52 +04:00
|
|
|
if conf.rParam:
|
|
|
|
def _randomizeParameter(paramString, randomParameter):
|
|
|
|
retVal = paramString
|
2015-01-17 23:47:57 +03:00
|
|
|
match = re.search(r"(\A|\b)%s=(?P<value>[^&;]+)" % re.escape(randomParameter), paramString)
|
2011-08-29 16:50:52 +04:00
|
|
|
if match:
|
|
|
|
origValue = match.group("value")
|
2015-01-17 23:47:57 +03:00
|
|
|
retVal = re.sub(r"(\A|\b)%s=[^&;]+" % re.escape(randomParameter), "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString)
|
2011-08-29 16:50:52 +04:00
|
|
|
return retVal
|
|
|
|
|
2011-08-29 17:08:25 +04:00
|
|
|
for randomParameter in conf.rParam:
|
2015-01-17 23:47:57 +03:00
|
|
|
for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE, PLACE.URI, PLACE.CUSTOM_POST):
|
2011-08-29 17:08:25 +04:00
|
|
|
if item in conf.parameters:
|
|
|
|
if item == PLACE.GET and get:
|
|
|
|
get = _randomizeParameter(get, randomParameter)
|
2015-01-17 23:47:57 +03:00
|
|
|
elif item in (PLACE.POST, PLACE.CUSTOM_POST) and post:
|
2011-08-29 17:08:25 +04:00
|
|
|
post = _randomizeParameter(post, randomParameter)
|
|
|
|
elif item == PLACE.COOKIE and cookie:
|
|
|
|
cookie = _randomizeParameter(cookie, randomParameter)
|
2015-01-17 23:47:57 +03:00
|
|
|
elif item == PLACE.URI and uri:
|
|
|
|
uri = _randomizeParameter(uri, randomParameter)
|
2011-08-29 16:50:52 +04:00
|
|
|
|
2011-11-21 20:41:02 +04:00
|
|
|
if conf.evalCode:
|
2014-04-06 18:48:46 +04:00
|
|
|
delimiter = conf.paramDel or DEFAULT_GET_POST_DELIMITER
|
2015-09-17 16:25:40 +03:00
|
|
|
variables = {"uri": uri, "lastPage": threadData.lastPage, "_locals": locals()}
|
2011-11-21 20:41:02 +04:00
|
|
|
originals = {}
|
2015-01-09 17:33:53 +03:00
|
|
|
keywords = keyword.kwlist
|
2011-11-21 20:41:02 +04:00
|
|
|
|
2015-08-25 03:03:56 +03:00
|
|
|
if not get and PLACE.URI in conf.parameters:
|
|
|
|
query = urlparse.urlsplit(uri).query or ""
|
|
|
|
else:
|
|
|
|
query = None
|
|
|
|
|
|
|
|
for item in filter(None, (get, post if not kb.postHint else None, query)):
|
2011-11-21 21:39:18 +04:00
|
|
|
for part in item.split(delimiter):
|
|
|
|
if '=' in part:
|
|
|
|
name, value = part.split('=', 1)
|
2015-02-24 17:05:44 +03:00
|
|
|
name = re.sub(r"[^\w]", "", name.strip())
|
2015-01-09 17:33:53 +03:00
|
|
|
if name in keywords:
|
|
|
|
name = "%s%s" % (name, EVALCODE_KEYWORD_SUFFIX)
|
2013-02-12 20:01:47 +04:00
|
|
|
value = urldecode(value, convall=True, plusspace=(item==post and kb.postSpaceToPlus))
|
2015-02-25 12:19:51 +03:00
|
|
|
variables[name] = value
|
2011-11-21 20:41:02 +04:00
|
|
|
|
2013-07-31 19:28:22 +04:00
|
|
|
if cookie:
|
2014-04-06 18:50:58 +04:00
|
|
|
for part in cookie.split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER):
|
2013-07-31 19:28:22 +04:00
|
|
|
if '=' in part:
|
|
|
|
name, value = part.split('=', 1)
|
2015-02-24 17:05:44 +03:00
|
|
|
name = re.sub(r"[^\w]", "", name.strip())
|
2015-01-09 17:33:53 +03:00
|
|
|
if name in keywords:
|
|
|
|
name = "%s%s" % (name, EVALCODE_KEYWORD_SUFFIX)
|
2013-07-31 19:28:22 +04:00
|
|
|
value = urldecode(value, convall=True)
|
2015-02-25 12:19:51 +03:00
|
|
|
variables[name] = value
|
2015-01-09 17:33:53 +03:00
|
|
|
|
|
|
|
while True:
|
|
|
|
try:
|
2016-06-01 11:56:42 +03:00
|
|
|
compiler.parse(unicodeencode(conf.evalCode.replace(';', '\n')))
|
2015-01-09 17:33:53 +03:00
|
|
|
except SyntaxError, ex:
|
|
|
|
original = replacement = ex.text.strip()
|
|
|
|
for _ in re.findall(r"[A-Za-z_]+", original)[::-1]:
|
|
|
|
if _ in keywords:
|
|
|
|
replacement = replacement.replace(_, "%s%s" % (_, EVALCODE_KEYWORD_SUFFIX))
|
|
|
|
break
|
|
|
|
if original == replacement:
|
|
|
|
conf.evalCode = conf.evalCode.replace(EVALCODE_KEYWORD_SUFFIX, "")
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
conf.evalCode = conf.evalCode.replace(ex.text.strip(), replacement)
|
|
|
|
else:
|
|
|
|
break
|
2013-07-31 19:28:22 +04:00
|
|
|
|
2011-11-21 20:41:02 +04:00
|
|
|
originals.update(variables)
|
2012-02-16 18:42:28 +04:00
|
|
|
evaluateCode(conf.evalCode, variables)
|
2015-01-09 17:33:53 +03:00
|
|
|
|
|
|
|
for variable in variables.keys():
|
|
|
|
if variable.endswith(EVALCODE_KEYWORD_SUFFIX):
|
|
|
|
value = variables[variable]
|
|
|
|
del variables[variable]
|
|
|
|
variables[variable.replace(EVALCODE_KEYWORD_SUFFIX, "")] = value
|
|
|
|
|
2014-09-28 15:38:09 +04:00
|
|
|
uri = variables["uri"]
|
2011-11-21 20:41:02 +04:00
|
|
|
|
|
|
|
for name, value in variables.items():
|
|
|
|
if name != "__builtins__" and originals.get(name, "") != value:
|
|
|
|
if isinstance(value, (basestring, int)):
|
2013-08-31 02:28:51 +04:00
|
|
|
found = False
|
2016-01-11 01:15:43 +03:00
|
|
|
value = getUnicode(value)
|
2013-08-31 02:28:51 +04:00
|
|
|
|
2014-10-28 16:02:55 +03:00
|
|
|
regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), re.escape(name), re.escape(delimiter))
|
2013-08-31 02:28:51 +04:00
|
|
|
if re.search(regex, (get or "")):
|
|
|
|
found = True
|
|
|
|
get = re.sub(regex, "\g<1>%s\g<3>" % value, get)
|
|
|
|
|
|
|
|
if re.search(regex, (post or "")):
|
|
|
|
found = True
|
|
|
|
post = re.sub(regex, "\g<1>%s\g<3>" % value, post)
|
|
|
|
|
2015-08-25 03:03:56 +03:00
|
|
|
if re.search(regex, (query or "")):
|
|
|
|
found = True
|
|
|
|
uri = re.sub(regex.replace(r"\A", r"\?"), "\g<1>%s\g<3>" % value, uri)
|
|
|
|
|
2014-04-06 18:50:58 +04:00
|
|
|
regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), name, re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER))
|
2013-08-31 02:28:51 +04:00
|
|
|
if re.search(regex, (cookie or "")):
|
|
|
|
found = True
|
|
|
|
cookie = re.sub(regex, "\g<1>%s\g<3>" % value, cookie)
|
|
|
|
|
|
|
|
if not found:
|
|
|
|
if post is not None:
|
|
|
|
post += "%s%s=%s" % (delimiter, name, value)
|
|
|
|
elif get is not None:
|
|
|
|
get += "%s%s=%s" % (delimiter, name, value)
|
|
|
|
elif cookie is not None:
|
2014-04-06 18:50:58 +04:00
|
|
|
cookie += "%s%s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, name, value)
|
2011-11-28 15:21:39 +04:00
|
|
|
|
2013-03-27 16:39:27 +04:00
|
|
|
if not conf.skipUrlEncode:
|
2013-01-15 13:14:02 +04:00
|
|
|
get = urlencode(get, limit=True)
|
|
|
|
|
2012-11-13 13:21:11 +04:00
|
|
|
if post is not None:
|
2013-03-27 16:39:27 +04:00
|
|
|
if place not in (PLACE.POST, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE):
|
2012-07-20 11:48:09 +04:00
|
|
|
post = getattr(post, UNENCODED_ORIGINAL_VALUE)
|
2014-03-21 23:28:16 +04:00
|
|
|
elif kb.postUrlEncode:
|
2013-01-19 21:06:36 +04:00
|
|
|
post = urlencode(post, spaceplus=kb.postSpaceToPlus)
|
2011-08-29 16:50:52 +04:00
|
|
|
|
2010-12-09 10:49:18 +03:00
|
|
|
if timeBasedCompare:
|
2016-01-09 19:32:19 +03:00
|
|
|
if len(kb.responseTimes.get(kb.responseTimeMode, [])) < MIN_TIME_RESPONSES:
|
2010-12-21 04:09:39 +03:00
|
|
|
clearConsoleLine()
|
|
|
|
|
2016-01-09 19:32:19 +03:00
|
|
|
kb.responseTimes.setdefault(kb.responseTimeMode, [])
|
|
|
|
|
2011-11-21 03:17:57 +04:00
|
|
|
if conf.tor:
|
2012-02-01 18:49:42 +04:00
|
|
|
warnMsg = "it's highly recommended to avoid usage of switch '--tor' for "
|
2012-03-14 02:03:23 +04:00
|
|
|
warnMsg += "time-based injections because of its high latency time"
|
2011-11-21 03:17:57 +04:00
|
|
|
singleTimeWarnMessage(warnMsg)
|
|
|
|
|
2016-01-09 19:32:19 +03:00
|
|
|
warnMsg = "[%s] [WARNING] %stime-based comparison requires " % (time.strftime("%X"), "(case) " if kb.responseTimeMode else "")
|
2014-03-07 00:08:31 +04:00
|
|
|
warnMsg += "larger statistical model, please wait"
|
|
|
|
dataToStdout(warnMsg)
|
2010-12-09 10:49:18 +03:00
|
|
|
|
2016-01-09 19:32:19 +03:00
|
|
|
while len(kb.responseTimes[kb.responseTimeMode]) < MIN_TIME_RESPONSES:
|
|
|
|
value = kb.responseTimePayload.replace(RANDOM_INTEGER_MARKER, str(randomInt(6))).replace(RANDOM_STRING_MARKER, randomStr()) if kb.responseTimePayload else kb.responseTimePayload
|
|
|
|
Connect.queryPage(value=value, content=True, raise404=False)
|
2014-03-07 00:08:31 +04:00
|
|
|
dataToStdout('.')
|
|
|
|
|
2016-01-09 19:32:19 +03:00
|
|
|
dataToStdout(" (done)\n")
|
2010-12-09 10:49:18 +03:00
|
|
|
|
2013-05-18 23:30:21 +04:00
|
|
|
elif not kb.testMode:
|
2016-09-29 15:55:43 +03:00
|
|
|
warnMsg = "it is very important to not stress the network connection "
|
2014-03-07 00:08:31 +04:00
|
|
|
warnMsg += "during usage of time-based payloads to prevent potential "
|
2016-01-09 19:32:19 +03:00
|
|
|
warnMsg += "disruptions "
|
2013-05-18 23:30:21 +04:00
|
|
|
singleTimeWarnMessage(warnMsg)
|
|
|
|
|
|
|
|
if not kb.laggingChecked:
|
|
|
|
kb.laggingChecked = True
|
|
|
|
|
2016-01-09 19:32:19 +03:00
|
|
|
deviation = stdev(kb.responseTimes[kb.responseTimeMode])
|
2011-08-12 17:47:38 +04:00
|
|
|
|
|
|
|
if deviation > WARN_TIME_STDEV:
|
2012-10-09 17:19:47 +04:00
|
|
|
kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE
|
2011-04-19 18:28:51 +04:00
|
|
|
|
2014-09-08 16:48:31 +04:00
|
|
|
warnMsg = "considerable lagging has been detected "
|
2011-08-12 17:47:38 +04:00
|
|
|
warnMsg += "in connection response(s). Please use as high "
|
2012-02-01 19:10:06 +04:00
|
|
|
warnMsg += "value for option '--time-sec' as possible (e.g. "
|
2013-05-18 23:30:21 +04:00
|
|
|
warnMsg += "10 or more)"
|
2011-04-19 18:50:09 +04:00
|
|
|
logger.critical(warnMsg)
|
2012-11-10 14:01:29 +04:00
|
|
|
|
2015-04-22 17:28:54 +03:00
|
|
|
if conf.safeFreq > 0:
|
2010-04-16 16:44:47 +04:00
|
|
|
kb.queryCounter += 1
|
2015-04-21 01:02:47 +03:00
|
|
|
if kb.queryCounter % conf.safeFreq == 0:
|
2015-04-22 17:28:54 +03:00
|
|
|
if conf.safeUrl:
|
|
|
|
Connect.getPage(url=conf.safeUrl, post=conf.safePost, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host)
|
|
|
|
elif kb.safeReq:
|
|
|
|
Connect.getPage(url=kb.safeReq.url, post=kb.safeReq.post, method=kb.safeReq.method, auxHeaders=kb.safeReq.headers)
|
2010-09-16 13:32:09 +04:00
|
|
|
|
2010-12-08 02:32:33 +03:00
|
|
|
start = time.time()
|
2010-12-08 02:49:00 +03:00
|
|
|
|
2010-12-20 19:45:41 +03:00
|
|
|
if kb.nullConnection and not content and not response and not timeBasedCompare:
|
2012-06-12 18:22:14 +04:00
|
|
|
noteResponseTime = False
|
|
|
|
|
2015-07-18 18:01:34 +03:00
|
|
|
try:
|
|
|
|
pushValue(kb.pageCompress)
|
|
|
|
kb.pageCompress = False
|
2010-10-10 22:56:43 +04:00
|
|
|
|
2015-07-18 18:01:34 +03:00
|
|
|
if kb.nullConnection == NULLCONNECTION.HEAD:
|
|
|
|
method = HTTPMETHOD.HEAD
|
|
|
|
elif kb.nullConnection == NULLCONNECTION.RANGE:
|
|
|
|
auxHeaders[HTTP_HEADER.RANGE] = "bytes=-1"
|
2010-10-10 22:56:43 +04:00
|
|
|
|
2015-07-18 18:01:34 +03:00
|
|
|
_, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ))
|
2010-09-16 13:47:33 +04:00
|
|
|
|
2015-07-18 18:01:34 +03:00
|
|
|
if headers:
|
2015-09-17 18:09:36 +03:00
|
|
|
if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and headers.get(HTTP_HEADER.CONTENT_LENGTH):
|
2015-07-18 18:01:34 +03:00
|
|
|
pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH])
|
2015-09-17 18:09:36 +03:00
|
|
|
elif kb.nullConnection == NULLCONNECTION.RANGE and headers.get(HTTP_HEADER.CONTENT_RANGE):
|
2015-07-18 18:01:34 +03:00
|
|
|
pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:])
|
|
|
|
finally:
|
|
|
|
kb.pageCompress = popValue()
|
2013-05-17 18:04:05 +04:00
|
|
|
|
2010-09-16 13:47:33 +04:00
|
|
|
if not pageLength:
|
2012-12-18 12:36:26 +04:00
|
|
|
try:
|
2014-11-21 12:31:55 +03:00
|
|
|
page, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)
|
2012-12-18 12:36:26 +04:00
|
|
|
except MemoryError:
|
|
|
|
page, headers, code = None, None, None
|
|
|
|
warnMsg = "site returned insanely large response"
|
|
|
|
if kb.testMode:
|
|
|
|
warnMsg += " in testing phase. This is a common "
|
2016-10-11 01:35:39 +03:00
|
|
|
warnMsg += "behavior in custom WAF/IPS/IDS solutions"
|
2012-12-18 12:36:26 +04:00
|
|
|
singleTimeWarnMessage(warnMsg)
|
2010-12-08 02:49:00 +03:00
|
|
|
|
2012-07-26 16:07:05 +04:00
|
|
|
if conf.secondOrder:
|
|
|
|
page, headers, code = Connect.getPage(url=conf.secondOrder, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)
|
|
|
|
|
2010-12-21 01:45:01 +03:00
|
|
|
threadData.lastQueryDuration = calculateDeltaSeconds(start)
|
2015-08-15 00:29:31 +03:00
|
|
|
threadData.lastPage = page
|
2016-09-27 11:20:36 +03:00
|
|
|
threadData.lastCode = code
|
2010-11-08 12:44:32 +03:00
|
|
|
|
2012-03-16 00:17:40 +04:00
|
|
|
kb.originalCode = kb.originalCode or code
|
|
|
|
|
2010-12-07 16:34:06 +03:00
|
|
|
if kb.testMode:
|
|
|
|
kb.testQueryCount += 1
|
2011-01-12 00:46:21 +03:00
|
|
|
|
2010-12-08 14:26:54 +03:00
|
|
|
if timeBasedCompare:
|
2013-01-29 23:53:11 +04:00
|
|
|
return wasLastResponseDelayed()
|
2010-12-08 17:33:10 +03:00
|
|
|
elif noteResponseTime:
|
2016-01-09 19:32:19 +03:00
|
|
|
kb.responseTimes.setdefault(kb.responseTimeMode, [])
|
|
|
|
kb.responseTimes[kb.responseTimeMode].append(threadData.lastQueryDuration)
|
2010-12-07 19:04:53 +03:00
|
|
|
|
2011-10-24 04:46:54 +04:00
|
|
|
if not response and removeReflection:
|
|
|
|
page = removeReflectiveValues(page, payload)
|
|
|
|
|
2012-10-02 15:36:15 +04:00
|
|
|
kb.maxConnectionsFlag = re.search(MAX_CONNECTIONS_REGEX, page or "", re.I) is not None
|
|
|
|
kb.permissionFlag = re.search(PERMISSION_DENIED_REGEX, page or "", re.I) is not None
|
2012-02-08 16:00:03 +04:00
|
|
|
|
2010-10-10 22:56:43 +04:00
|
|
|
if content or response:
|
2008-12-18 23:48:23 +03:00
|
|
|
return page, headers
|
2011-02-25 12:22:44 +03:00
|
|
|
|
|
|
|
if getRatioValue:
|
2011-08-12 20:48:11 +04:00
|
|
|
return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength)
|
2008-12-18 23:38:57 +03:00
|
|
|
else:
|
2013-06-10 14:20:58 +04:00
|
|
|
return comparison(page, headers, code, getRatioValue, pageLength)
|
2013-08-20 21:35:49 +04:00
|
|
|
|
2015-11-08 18:37:46 +03:00
|
|
|
def setHTTPHandlers():  # Cross-linked function
    """
    Placeholder that unconditionally raises NotImplementedError.

    NOTE(review): the "Cross-linked function" comment suggests this name is
    rebound to a real implementation elsewhere at runtime; the binding site
    is not visible in this file chunk -- confirm against the module that
    performs the cross-linking before relying on this stub.
    """
    raise NotImplementedError
|