#!/usr/bin/env python

"""
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

import compiler
import httplib
import json
import keyword
import logging
import re
import socket
import string
import struct
import time
import traceback
import urllib2
import urlparse
import websocket

from extra.safe2bin.safe2bin import safecharencode
from lib.core.agent import agent
from lib.core.common import asciifyUrl
from lib.core.common import calculateDeltaSeconds
from lib.core.common import clearConsoleLine
from lib.core.common import cpuThrottle
from lib.core.common import dataToStdout
from lib.core.common import evaluateCode
from lib.core.common import extractRegexResult
from lib.core.common import findMultipartPostBoundary
from lib.core.common import getCurrentThreadData
from lib.core.common import getHostHeader
from lib.core.common import getRequestHeader
from lib.core.common import getUnicode
from lib.core.common import logHTTPTraffic
from lib.core.common import pushValue
from lib.core.common import popValue
from lib.core.common import randomizeParameterValue
from lib.core.common import randomInt
from lib.core.common import randomStr
from lib.core.common import readInput
from lib.core.common import removeReflectiveValues
from lib.core.common import singleTimeLogMessage
from lib.core.common import singleTimeWarnMessage
from lib.core.common import stdev
from lib.core.common import wasLastResponseDelayed
from lib.core.common import unicodeencode
from lib.core.common import urldecode
from lib.core.common import urlencode
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.dicts import POST_HINT_CONTENT_TYPES
from lib.core.enums import ADJUST_TIME_DELAY
from lib.core.enums import AUTH_TYPE
from lib.core.enums import CUSTOM_LOGGING
from lib.core.enums import HTTP_HEADER
from lib.core.enums import HTTPMETHOD
from lib.core.enums import NULLCONNECTION
from lib.core.enums import PAYLOAD
from lib.core.enums import PLACE
from lib.core.enums import POST_HINT
from lib.core.enums import REDIRECTION
from lib.core.enums import WEB_API
from lib.core.exception import SqlmapCompressionException
from lib.core.exception import SqlmapConnectionException
from lib.core.exception import SqlmapGenericException
from lib.core.exception import SqlmapSyntaxException
from lib.core.exception import SqlmapTokenException
from lib.core.exception import SqlmapValueException
from lib.core.settings import ASTERISK_MARKER
from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
from lib.core.settings import DEFAULT_CONTENT_TYPE
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
from lib.core.settings import EVALCODE_KEYWORD_SUFFIX
from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE
from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
from lib.core.settings import MAX_CONNECTION_CHUNK_SIZE
from lib.core.settings import MAX_CONNECTIONS_REGEX
from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
from lib.core.settings import META_REFRESH_REGEX
from lib.core.settings import MIN_TIME_RESPONSES
from lib.core.settings import IS_WIN
from lib.core.settings import LARGE_CHUNK_TRIM_MARKER
from lib.core.settings import PAYLOAD_DELIMITER
from lib.core.settings import PERMISSION_DENIED_REGEX
from lib.core.settings import PLAIN_TEXT_CONTENT_TYPE
from lib.core.settings import REPLACEMENT_MARKER
from lib.core.settings import TEXT_CONTENT_TYPE_REGEX
from lib.core.settings import UNENCODED_ORIGINAL_VALUE
from lib.core.settings import URI_HTTP_HEADER
from lib.core.settings import WARN_TIME_STDEV
from lib.request.basic import decodePage
from lib.request.basic import forgeHeaders
from lib.request.basic import processResponse
from lib.request.direct import direct
from lib.request.comparison import comparison
from lib.request.methodrequest import MethodRequest
from thirdparty.multipart import multipartpost
from thirdparty.odict.odict import OrderedDict
from thirdparty.socks.socks import ProxyError

class Connect(object):
    """
    This class defines methods used to perform HTTP requests
    """

    @staticmethod
    def _getPageProxy(**kwargs):
        return Connect.getPage(**kwargs)
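
    # NOTE: _retryProxy() re-issues the last request after incrementing the
    # per-thread retry counter; beforehand it emits a (single-time) warning
    # about the most likely cause of the failure - an unrecovered time-based
    # payload, an unreachable target or an excessive number of threads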

    @staticmethod
    def _retryProxy(**kwargs):
        threadData = getCurrentThreadData()
        threadData.retriesCount += 1

        if conf.proxyList and threadData.retriesCount >= conf.retries:
            warnMsg = "changing proxy"
            logger.warn(warnMsg)

            conf.proxy = None
            setHTTPProxy()

        if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME:
            # time-based payloads can cause web server unresponsiveness
            # if the injectable piece of code is some kind of JOIN-like query
            warnMsg = "most likely the web server instance hasn't recovered yet "
            warnMsg += "from the previous time-based payload. If the problem "
            warnMsg += "persists please wait for a few minutes and rerun "
            warnMsg += "without flag 'T' in option '--technique' "
            warnMsg += "(e.g. '--flush-session --technique=BEUS') or try to "
            warnMsg += "lower the value of option '--time-sec' (e.g. '--time-sec=2')"
            singleTimeWarnMessage(warnMsg)

        elif kb.originalPage is None:
            if conf.tor:
                warnMsg = "please make sure that you have "
                warnMsg += "Tor installed and running so "
                warnMsg += "you could successfully use "
                warnMsg += "switch '--tor' "
                if IS_WIN:
                    warnMsg += "(e.g. 'https://www.torproject.org/download/download.html.en')"
                else:
                    warnMsg += "(e.g. 'https://help.ubuntu.com/community/Tor')"
            else:
                warnMsg = "if the problem persists please check that the provided "
                warnMsg += "target URL is valid. In case that it is, you can try to rerun "
                warnMsg += "with the switch '--random-agent' turned on "
                warnMsg += "and/or proxy switches ('--ignore-proxy', '--proxy',...)"
            singleTimeWarnMessage(warnMsg)

        elif conf.threads > 1:
            warnMsg = "if the problem persists please try to lower "
            warnMsg += "the number of used threads (option '--threads')"
            singleTimeWarnMessage(warnMsg)

        time.sleep(1)

        kwargs['retrying'] = True
        return Connect._getPageProxy(**kwargs)
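
    # NOTE: _connReadProxy() reads the response body in chunks of
    # MAX_CONNECTION_CHUNK_SIZE bytes so that overly large (or compressed)
    # responses can be detected and trimmed before they exhaust memory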

    @staticmethod
    def _connReadProxy(conn):
        retVal = ""

        if not kb.dnsMode and conn:
            headers = conn.info()
            if headers and hasattr(headers, "getheader") and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate")\
              or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()):
                retVal = conn.read(MAX_CONNECTION_TOTAL_SIZE)
                if len(retVal) == MAX_CONNECTION_TOTAL_SIZE:
                    warnMsg = "large compressed response detected. Disabling compression"
                    singleTimeWarnMessage(warnMsg)
                    kb.pageCompress = False
            else:
                while True:
                    _ = conn.read(MAX_CONNECTION_CHUNK_SIZE)
                    if len(_) == MAX_CONNECTION_CHUNK_SIZE:
                        warnMsg = "large response detected. This could take a while"
                        singleTimeWarnMessage(warnMsg)
                        _ = re.sub(r"(?si)%s.+?%s" % (kb.chars.stop, kb.chars.start), "%s%s%s" % (kb.chars.stop, LARGE_CHUNK_TRIM_MARKER, kb.chars.start), _)
                        retVal += _
                    else:
                        retVal += _
                        break

                    if len(retVal) > MAX_CONNECTION_TOTAL_SIZE:
                        warnMsg = "too large response detected. Automatically trimming it"
                        singleTimeWarnMessage(warnMsg)
                        break

        return retVal

    @staticmethod
    def getPage(**kwargs):
        """
        This method connects to the target URL or proxy and returns
        the target URL page content (along with the response headers
        and the HTTP status code)
        """
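
        # Recognized keyword arguments include (among others): url, get, post,
        # method, cookie, ua, referer, host, direct, multipart, silent,
        # raise404, timeout, auxHeaders, response, ignoreTimeout, refreshing,
        # retrying, crawling and skipRead (see the kwargs.get() calls below)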

        if isinstance(conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)
        elif conf.cpuThrottle:
            cpuThrottle(conf.cpuThrottle)

        if conf.dummy:
            return randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())

        threadData = getCurrentThreadData()
        with kb.locks.request:
            kb.requestCounter += 1
            threadData.lastRequestUID = kb.requestCounter

        url = kwargs.get("url", None) or conf.url
        get = kwargs.get("get", None)
        post = kwargs.get("post", None)
        method = kwargs.get("method", None)
        cookie = kwargs.get("cookie", None)
        ua = kwargs.get("ua", None) or conf.agent
        referer = kwargs.get("referer", None) or conf.referer
        host = kwargs.get("host", None) or conf.host
        direct_ = kwargs.get("direct", False)
        multipart = kwargs.get("multipart", False)
        silent = kwargs.get("silent", False)
        raise404 = kwargs.get("raise404", True)
        timeout = kwargs.get("timeout", None) or conf.timeout
        auxHeaders = kwargs.get("auxHeaders", None)
        response = kwargs.get("response", False)
        ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout
        refreshing = kwargs.get("refreshing", False)
        retrying = kwargs.get("retrying", False)
        crawling = kwargs.get("crawling", False)
        skipRead = kwargs.get("skipRead", False)
        is_websocket = conf.url.startswith("ws")

        if not urlparse.urlsplit(url).netloc:
            url = urlparse.urljoin(conf.url, url)

        # flag to know if we are dealing with the same target host
        target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""]))

        if not retrying:
            # Reset the number of connection retries
            threadData.retriesCount = 0

        # fix for known issue when urllib2 just skips the other part of
        # provided URL split with space char while urlencoding it in the
        # later phase
        url = url.replace(" ", "%20")

        conn = None
        code = None
        page = None

        _ = urlparse.urlsplit(url)
        requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
        requestMsg += ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling)) else url
        responseMsg = u"HTTP response "
        requestHeaders = u""
        responseHeaders = None
        logHeaders = u""
        skipLogTraffic = False

        raise404 = raise404 and not kb.ignoreNotFound

        # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
        # support those by default
        url = asciifyUrl(url)

        try:
            socket.setdefaulttimeout(timeout)

            if direct_:
                if '?' in url:
                    url, params = url.split('?', 1)
                    params = urlencode(params)
                    url = "%s?%s" % (url, params)

            elif multipart:
                # Needed in this form because of potential circular dependency
                # problem (option -> update -> connect -> option)
                from lib.core.option import proxyHandler

                multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
                conn = multipartOpener.open(unicodeencode(url), multipart)
                page = Connect._connReadProxy(conn) if not skipRead else None
                responseHeaders = conn.info()
                responseHeaders[URI_HTTP_HEADER] = conn.geturl()
                page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))

                return page

            elif any((refreshing, crawling)):
                pass

            elif target:
                if conf.forceSSL and urlparse.urlparse(url).scheme != "https":
                    url = re.sub(r"(?i)\Ahttp:", "https:", url)
                    url = re.sub(r":80/", ":443/", url)

                if PLACE.GET in conf.parameters and not get:
                    get = conf.parameters[PLACE.GET]

                    if not conf.skipUrlEncode:
                        get = urlencode(get, limit=True)

                if get:
                    if '?' in url:
                        url = "%s%s%s" % (url, DEFAULT_GET_POST_DELIMITER, get)
                        requestMsg += "%s%s" % (DEFAULT_GET_POST_DELIMITER, get)
                    else:
                        url = "%s?%s" % (url, get)
                        requestMsg += "?%s" % get

                if PLACE.POST in conf.parameters and not post and method != HTTPMETHOD.GET:
                    post = conf.parameters[PLACE.POST]

            elif get:
                url = "%s?%s" % (url, get)
                requestMsg += "?%s" % get

            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

            # Prepare HTTP headers
            headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: host})

            if kb.authHeader:
                headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader

            if kb.proxyAuthHeader:
                headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

            if HTTP_HEADER.ACCEPT not in headers:
                headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE

            if HTTP_HEADER.HOST not in headers:
                headers[HTTP_HEADER.HOST] = getHostHeader(url)

            if HTTP_HEADER.ACCEPT_ENCODING not in headers:
                headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"

            if post is not None and HTTP_HEADER.CONTENT_TYPE not in headers:
                headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)

            if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
                warnMsg = "missing boundary parameter in '%s' header. " % HTTP_HEADER.CONTENT_TYPE
                warnMsg += "Will try to reconstruct it"
                singleTimeWarnMessage(warnMsg)

                boundary = findMultipartPostBoundary(conf.data)
                if boundary:
                    headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary)

            if auxHeaders:
                for key, item in auxHeaders.items():
                    for _ in headers.keys():
                        if _.upper() == key.upper():
                            del headers[_]
                    headers[key] = item

            for key, item in headers.items():
                del headers[key]
                headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding)

            url = unicodeencode(url)
            post = unicodeencode(post, kb.pageEncoding)
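
            # WebSocket targets (ws:// and wss://) are handled by the
            # websocket client library instead of urllib2 - the POST data is
            # sent as a single frame and the first received frame is returned
            # as the page content (101 being the "Switching Protocols" code)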

            if is_websocket:
                try:
                    ws = websocket.WebSocket()
                    ws.connect(url)
                    ws.send(urldecode(post) if post else '')
                    response = ws.recv()
                    ws.close()
                    return response, {}, 101
                except websocket.WebSocketConnectionClosedException:
                    # TODO: more exceptions to handle
                    warnMsg = "connection was forcibly closed by the target URL"
                    logger.critical(warnMsg)
                    return Connect._retryProxy(**kwargs)
                except Exception:
                    return None, None, None

            elif method and method not in (HTTPMETHOD.GET, HTTPMETHOD.POST):
                method = unicodeencode(method)
                req = MethodRequest(url, post, headers)
                req.set_method(method)
            else:
                req = urllib2.Request(url, post, headers)

            requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items())

            if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
                conf.cj._policy._now = conf.cj._now = int(time.time())
                cookies = conf.cj._cookies_for_request(req)
                requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))

            if post is not None:
                if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH):
                    requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))

            if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
                requestHeaders += "\n%s: close" % HTTP_HEADER.CONNECTION

            requestMsg += "\n%s" % requestHeaders

            if post is not None:
                requestMsg += "\n\n%s" % getUnicode(post)

            requestMsg += "\n"

            threadData.lastRequestMsg = requestMsg

            logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

            conn = urllib2.urlopen(req)

            if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and (conf.authType or "").lower() == AUTH_TYPE.BASIC.lower():
                kb.authHeader = getRequestHeader(req, HTTP_HEADER.AUTHORIZATION)

            if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION):
                kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION)

            # Return response object
            if response:
                return conn, None, None

            # Get HTTP response
            if hasattr(conn, 'redurl'):
                page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\
                  else Connect._connReadProxy(conn)) if not skipRead else None
                skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
                code = conn.redcode
            else:
                page = Connect._connReadProxy(conn) if not skipRead else None

            code = code or conn.code
            responseHeaders = conn.info()
            responseHeaders[URI_HTTP_HEADER] = conn.geturl()
            page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
            status = getUnicode(conn.msg)
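
            # Detection of an HTML meta refresh (e.g. <meta http-equiv="refresh"
            # content="0;URL=index.php">) - if found (and accepted by the user)
            # the request is transparently repeated against the refresh target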

            if extractRegexResult(META_REFRESH_REGEX, page) and not refreshing:
                url = extractRegexResult(META_REFRESH_REGEX, page)

                debugMsg = "got HTML meta refresh header"
                logger.debug(debugMsg)

                if kb.alwaysRefresh is None:
                    msg = "sqlmap got a refresh request "
                    msg += "(redirect-like response common to login pages). "
                    msg += "Do you want to apply the refresh "
                    msg += "from now on (or stay on the original page)? [Y/n]"
                    choice = readInput(msg, default="Y")

                    kb.alwaysRefresh = choice not in ("n", "N")

                if kb.alwaysRefresh:
                    if url.lower().startswith('http://'):
                        kwargs['url'] = url
                    else:
                        kwargs['url'] = conf.url[:conf.url.rfind('/') + 1] + url

                    threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
                    kwargs['refreshing'] = True
                    kwargs['get'] = None
                    kwargs['post'] = None

                    try:
                        return Connect._getPageProxy(**kwargs)
                    except SqlmapSyntaxException:
                        pass

            # Explicit closing of connection object
            if not conf.keepAlive:
                try:
                    if hasattr(conn.fp, '_sock'):
                        conn.fp._sock.close()
                    conn.close()
                except Exception, msg:
                    warnMsg = "problem occurred during connection closing ('%s')" % msg
                    logger.warn(warnMsg)

        except urllib2.HTTPError, e:
            page = None
            responseHeaders = None

            try:
                page = e.read() if not skipRead else None
                responseHeaders = e.info()
                responseHeaders[URI_HTTP_HEADER] = e.geturl()
                page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
            except socket.timeout:
                warnMsg = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % e.code
                logger.warn(warnMsg)
                return None, None, None
            except KeyboardInterrupt:
                raise
            except:
                pass
            finally:
                page = page if isinstance(page, unicode) else getUnicode(page)

            code = e.code

            kb.originalCode = kb.originalCode or code
            threadData.lastHTTPError = (threadData.lastRequestUID, code)
            kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1

            status = getUnicode(e.msg)
            responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

            if responseHeaders:
                logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())

            logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))

            skipLogTraffic = True

            if conf.verbose <= 5:
                responseMsg += getUnicode(logHeaders)
            elif conf.verbose > 5:
                responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])

            logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)

            if e.code == httplib.UNAUTHORIZED and not conf.ignore401:
                errMsg = "not authorized, try to provide the right HTTP "
                errMsg += "authentication type and valid credentials (%d)" % code
                raise SqlmapConnectionException(errMsg)
            elif e.code == httplib.NOT_FOUND:
                if raise404:
                    errMsg = "page not found (%d)" % code
                    raise SqlmapConnectionException(errMsg)
                else:
                    debugMsg = "page not found (%d)" % code
                    singleTimeLogMessage(debugMsg, logging.DEBUG)
                    processResponse(page, responseHeaders)
            elif e.code == httplib.GATEWAY_TIMEOUT:
                if ignoreTimeout:
                    return None, None, None
                else:
                    warnMsg = "unable to connect to the target URL (%d - %s)" % (e.code, httplib.responses[e.code])
                    if threadData.retriesCount < conf.retries and not kb.threadException:
                        warnMsg += ". sqlmap is going to retry the request"
                        logger.critical(warnMsg)
                        return Connect._retryProxy(**kwargs)
                    elif kb.testMode:
                        logger.critical(warnMsg)
                        return None, None, None
                    else:
                        raise SqlmapConnectionException(warnMsg)
            else:
                debugMsg = "got HTTP error code: %d (%s)" % (code, status)
                logger.debug(debugMsg)
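
        # Network level errors are mapped to user friendly warnings here;
        # depending on the context the request is then retried, silently
        # skipped or turned into a fatal SqlmapConnectionException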

        except (urllib2.URLError, socket.error, socket.timeout, httplib.BadStatusLine, httplib.IncompleteRead, struct.error, ProxyError, SqlmapCompressionException), e:
            tbMsg = traceback.format_exc()

            if "no host given" in tbMsg:
                warnMsg = "invalid URL address used (%s)" % repr(url)
                raise SqlmapSyntaxException(warnMsg)
            elif "forcibly closed" in tbMsg:
                warnMsg = "connection was forcibly closed by the target URL"
            elif "timed out" in tbMsg:
                if kb.testMode and kb.testType not in (None, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
                    singleTimeWarnMessage("there is a possibility that the target (or WAF) is dropping 'suspicious' requests")
                warnMsg = "connection timed out to the target URL"
            elif "URLError" in tbMsg or "error" in tbMsg:
                warnMsg = "unable to connect to the target URL"
            elif "NTLM" in tbMsg:
                warnMsg = "there has been a problem with NTLM authentication"
            elif "BadStatusLine" in tbMsg:
                warnMsg = "connection dropped or unknown HTTP "
                warnMsg += "status code received"
                if not conf.agent and not conf.randomAgent:
                    warnMsg += ". Try to force the HTTP User-Agent "
                    warnMsg += "header with option '--user-agent' or switch '--random-agent'"
            elif "IncompleteRead" in tbMsg:
                warnMsg = "there was an incomplete read error while retrieving data "
                warnMsg += "from the target URL"
            else:
                warnMsg = "unable to connect to the target URL"

            if "BadStatusLine" not in tbMsg:
                warnMsg += " or proxy"

            if silent:
                return None, None, None
            elif "forcibly closed" in tbMsg:
                logger.critical(warnMsg)
                return None, None, None
            elif ignoreTimeout and any(_ in tbMsg for _ in ("timed out", "IncompleteRead")):
                return None, None, None
            elif threadData.retriesCount < conf.retries and not kb.threadException:
                warnMsg += ". sqlmap is going to retry the request"
                logger.critical(warnMsg)
                return Connect._retryProxy(**kwargs)
            elif kb.testMode:
                logger.critical(warnMsg)
                return None, None, None
            else:
                raise SqlmapConnectionException(warnMsg)

        finally:
            if not isinstance(page, unicode):
                if HTTP_HEADER.CONTENT_TYPE in (responseHeaders or {}) and not re.search(TEXT_CONTENT_TYPE_REGEX, responseHeaders[HTTP_HEADER.CONTENT_TYPE]):
                    page = unicode(page, errors="ignore")
                else:
                    page = getUnicode(page)
            socket.setdefaulttimeout(conf.timeout)

        processResponse(page, responseHeaders)

        if conn and getattr(conn, "redurl", None):
            _ = urlparse.urlsplit(conn.redurl)
            _ = ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else ""))
            requestMsg = re.sub("(\n[A-Z]+ ).+?( HTTP/\d)", "\g<1>%s\g<2>" % re.escape(getUnicode(_)), requestMsg, 1)
            responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, conn.code, status)
        else:
            responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

        if responseHeaders:
            logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())

        if not skipLogTraffic:
            logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))

        if conf.verbose <= 5:
            responseMsg += getUnicode(logHeaders)
        elif conf.verbose > 5:
            responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])

        logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)

        return page, responseHeaders, code

    @staticmethod
    def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True):
        """
        This method calls a function to get the target URL page content
        and returns its page MD5 hash or a boolean value in case of
        string match check ('--string' command line parameter)
        """

        if conf.direct:
            return direct(value, content)

        get = None
        post = None
        cookie = None
        ua = None
        referer = None
        host = None
        page = None
        pageLength = None
        uri = None
        code = None

        if not place:
            place = kb.injection.place or PLACE.GET

        if not auxHeaders:
            auxHeaders = {}

        raise404 = place != PLACE.URI if raise404 is None else raise404
        method = method or conf.method
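
        # The raw payload is carved out of the full parameter string so that
        # tamper scripts and encoding routines only ever see the injected part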

        value = agent.adjustLateValues(value)
        payload = agent.extractPayload(value)
        threadData = getCurrentThreadData()

        if conf.httpHeaders:
            headers = OrderedDict(conf.httpHeaders)
            contentType = max(headers[_] if _.upper() == HTTP_HEADER.CONTENT_TYPE.upper() else None for _ in headers.keys())

            if (kb.postHint or conf.skipUrlEncode) and kb.postUrlEncode:
                kb.postUrlEncode = False
                conf.httpHeaders = [_ for _ in conf.httpHeaders if _[1] != contentType]
                contentType = POST_HINT_CONTENT_TYPES.get(kb.postHint, PLAIN_TEXT_CONTENT_TYPE)
                conf.httpHeaders.append((HTTP_HEADER.CONTENT_TYPE, contentType))

        if payload:
            if kb.tamperFunctions:
                for function in kb.tamperFunctions:
                    try:
                        payload = function(payload=payload, headers=auxHeaders)
                    except Exception, ex:
                        errMsg = "error occurred while running tamper "
                        errMsg += "function '%s' ('%s')" % (function.func_name, ex)
                        raise SqlmapGenericException(errMsg)

                    if not isinstance(payload, basestring):
                        errMsg = "tamper function '%s' returns " % function.func_name
                        errMsg += "invalid payload type ('%s')" % type(payload)
                        raise SqlmapValueException(errMsg)

                value = agent.replacePayload(value, payload)

            logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload))
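
            # Payloads going into structured POST bodies (SOAP/XML, JSON and
            # "JSON-like") are escaped according to the detected content type
            # before being put back into the request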

            if place == PLACE.CUSTOM_POST and kb.postHint:
                if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML):
                    # payloads in SOAP/XML should have chars > and < replaced
                    # with their HTML encoded counterparts
                    payload = payload.replace('>', "&gt;").replace('<', "&lt;")
                elif kb.postHint == POST_HINT.JSON:
                    if payload.startswith('"') and payload.endswith('"'):
                        payload = json.dumps(payload[1:-1])
                    else:
                        payload = json.dumps(payload)[1:-1]
                elif kb.postHint == POST_HINT.JSON_LIKE:
                    payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
                    if payload.startswith('"') and payload.endswith('"'):
                        payload = json.dumps(payload[1:-1])
                    else:
                        payload = json.dumps(payload)[1:-1]
                    payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
                value = agent.replacePayload(value, payload)
            else:
                # GET, POST, URI and Cookie payloads need to be thoroughly URL encoded
                if place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and kb.postUrlEncode:
                    payload = urlencode(payload, '%', False, place != PLACE.URI)  # spaceplus is handled down below
                    value = agent.replacePayload(value, payload)

            if conf.hpp:
                if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_API.ASP, WEB_API.ASPX)):
                    warnMsg = "HTTP parameter pollution should work only against "
                    warnMsg += "ASP(.NET) targets"
                    singleTimeWarnMessage(warnMsg)
                if place in (PLACE.GET, PLACE.POST):
                    _ = re.escape(PAYLOAD_DELIMITER)
                    match = re.search("(?P<name>\w+)=%s(?P<value>.+?)%s" % (_, _), value)
                    if match:
                        payload = match.group("value")

                        for splitter in (urlencode(' '), ' '):
                            if splitter in payload:
                                prefix, suffix = ("*/", "/*") if splitter == ' ' else (urlencode(_) for _ in ("*/", "/*"))
                                parts = payload.split(splitter)
                                parts[0] = "%s%s" % (parts[0], suffix)
                                parts[-1] = "%s%s=%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[-1])
                                for i in xrange(1, len(parts) - 1):
                                    parts[i] = "%s%s=%s%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[i], suffix)
                                payload = "".join(parts)

                        for splitter in (urlencode(','), ','):
                            payload = payload.replace(splitter, "%s%s=" % (DEFAULT_GET_POST_DELIMITER, match.group("name")))

                        value = agent.replacePayload(value, payload)
                else:
                    warnMsg = "HTTP parameter pollution works only with regular "
                    warnMsg += "GET and POST parameters"
                    singleTimeWarnMessage(warnMsg)
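
        # Once the (possibly tampered and encoded) payload is final, the
        # delimiter markers are stripped and the value is dispatched into
        # whichever request part (GET, POST, Cookie, User-Agent, Referer,
        # Host or URI) is currently being injected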

        if place:
            value = agent.removePayloadDelimiters(value)

        if PLACE.GET in conf.parameters:
            get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value

        if PLACE.POST in conf.parameters:
            post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value

        if PLACE.CUSTOM_POST in conf.parameters:
            post = conf.parameters[PLACE.CUSTOM_POST].replace(CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value
            post = post.replace(ASTERISK_MARKER, '*') if post else post

        if PLACE.COOKIE in conf.parameters:
            cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value

        if PLACE.USER_AGENT in conf.parameters:
            ua = conf.parameters[PLACE.USER_AGENT] if place != PLACE.USER_AGENT or not value else value

        if PLACE.REFERER in conf.parameters:
            referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value

        if PLACE.HOST in conf.parameters:
            host = conf.parameters[PLACE.HOST] if place != PLACE.HOST or not value else value

        if PLACE.URI in conf.parameters:
            uri = conf.url if place != PLACE.URI or not value else value
        else:
            uri = conf.url

        if value and place == PLACE.CUSTOM_HEADER:
            auxHeaders[value.split(',')[0]] = value.split(',', 1)[1]
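
        # Anti-CSRF token support: a fresh token is scraped from the page at
        # --csrf-url (or the target URL itself) before each request and then
        # substituted into the outgoing parameters and/or HTTP headers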

        if conf.csrfToken:
            def _adjustParameter(paramString, parameter, newValue):
                retVal = paramString
                match = re.search("%s=(?P<value>[^&]*)" % re.escape(parameter), paramString)
                if match:
                    origValue = match.group("value")
                    retVal = re.sub("%s=[^&]*" % re.escape(parameter), "%s=%s" % (parameter, newValue), paramString)
                return retVal

            page, headers, code = Connect.getPage(url=conf.csrfUrl or conf.url, data=conf.data if conf.csrfUrl == conf.url else None, method=conf.method if conf.csrfUrl == conf.url else None, cookie=conf.parameters.get(PLACE.COOKIE), direct=True, silent=True, ua=conf.parameters.get(PLACE.USER_AGENT), referer=conf.parameters.get(PLACE.REFERER), host=conf.parameters.get(PLACE.HOST))
            match = re.search(r"<input[^>]+name=[\"']?%s[\"']?\s[^>]*value=(\"([^\"]+)|'([^']+)|([^ >]+))" % re.escape(conf.csrfToken), page or "")
            token = (match.group(2) or match.group(3) or match.group(4)) if match else None

            if not token:
                if conf.csrfUrl != conf.url and code == httplib.OK:
                    if headers and "text/plain" in headers.get(HTTP_HEADER.CONTENT_TYPE, ""):
                        token = page

                if not token and any(_.name == conf.csrfToken for _ in conf.cj):
                    for _ in conf.cj:
                        if _.name == conf.csrfToken:
                            token = _.value
                            if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))):
                                if post:
                                    post = "%s%s%s=%s" % (post, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token)
                                elif get:
                                    get = "%s%s%s=%s" % (get, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token)
                                else:
                                    get = "%s=%s" % (conf.csrfToken, token)
                            break

                if not token:
                    errMsg = "anti-CSRF token '%s' can't be found at '%s'" % (conf.csrfToken, conf.csrfUrl or conf.url)
                    if not conf.csrfUrl:
                        errMsg += ". You can try to rerun by providing "
                        errMsg += "a valid value for option '--csrf-url'"
                    raise SqlmapTokenException(errMsg)

            if token:
                for place in (PLACE.GET, PLACE.POST):
                    if place in conf.parameters:
                        if place == PLACE.GET and get:
                            get = _adjustParameter(get, conf.csrfToken, token)
                        elif place == PLACE.POST and post:
                            post = _adjustParameter(post, conf.csrfToken, token)

                for i in xrange(len(conf.httpHeaders)):
                    if conf.httpHeaders[i][0].lower() == conf.csrfToken.lower():
                        conf.httpHeaders[i] = (conf.httpHeaders[i][0], token)

        if conf.rParam:
            def _randomizeParameter(paramString, randomParameter):
                retVal = paramString
                match = re.search(r"(\A|\b)%s=(?P<value>[^&;]+)" % re.escape(randomParameter), paramString)
                if match:
                    origValue = match.group("value")
                    retVal = re.sub(r"(\A|\b)%s=[^&;]+" % re.escape(randomParameter), "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString)
                return retVal

            for randomParameter in conf.rParam:
                for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE, PLACE.URI, PLACE.CUSTOM_POST):
                    if item in conf.parameters:
                        if item == PLACE.GET and get:
                            get = _randomizeParameter(get, randomParameter)
                        elif item in (PLACE.POST, PLACE.CUSTOM_POST) and post:
                            post = _randomizeParameter(post, randomParameter)
                        elif item == PLACE.COOKIE and cookie:
                            cookie = _randomizeParameter(cookie, randomParameter)
                        elif item == PLACE.URI and uri:
                            uri = _randomizeParameter(uri, randomParameter)
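
        # --eval support: request parameters and cookies are exposed to the
        # user supplied Python code as local variables (names clashing with
        # Python keywords get a temporary suffix), the code is evaluated and
        # the resulting values are written back into the request afterwards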

        if conf.evalCode:
            delimiter = conf.paramDel or DEFAULT_GET_POST_DELIMITER
            variables = {"uri": uri}
            originals = {}
            keywords = keyword.kwlist

            for item in filter(None, (get, post if not kb.postHint else None)):
                for part in item.split(delimiter):
                    if '=' in part:
                        name, value = part.split('=', 1)
                        name = re.sub(r"[^\w]", "", name.strip())
                        if name in keywords:
                            name = "%s%s" % (name, EVALCODE_KEYWORD_SUFFIX)
                        value = urldecode(value, convall=True, plusspace=(item == post and kb.postSpaceToPlus))
                        variables[name] = value

            if cookie:
                for part in cookie.split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER):
                    if '=' in part:
                        name, value = part.split('=', 1)
                        name = re.sub(r"[^\w]", "", name.strip())
                        if name in keywords:
                            name = "%s%s" % (name, EVALCODE_KEYWORD_SUFFIX)
                        value = urldecode(value, convall=True)
                        variables[name] = value

            while True:
                try:
                    compiler.parse(conf.evalCode.replace(';', '\n'))
                except SyntaxError, ex:
                    original = replacement = ex.text.strip()
                    for _ in re.findall(r"[A-Za-z_]+", original)[::-1]:
                        if _ in keywords:
                            replacement = replacement.replace(_, "%s%s" % (_, EVALCODE_KEYWORD_SUFFIX))
                            break
                    if original == replacement:
                        conf.evalCode = conf.evalCode.replace(EVALCODE_KEYWORD_SUFFIX, "")
                        break
                    else:
                        conf.evalCode = conf.evalCode.replace(ex.text.strip(), replacement)
                else:
                    break

            originals.update(variables)
            evaluateCode(conf.evalCode, variables)

            for variable in variables.keys():
                if variable.endswith(EVALCODE_KEYWORD_SUFFIX):
                    value = variables[variable]
                    del variables[variable]
                    variables[variable.replace(EVALCODE_KEYWORD_SUFFIX, "")] = value
2014-09-28 15:38:09 +04:00
|
|
|
uri = variables["uri"]
|
2011-11-21 20:41:02 +04:00
|
|
|
|
|
|
|
            for name, value in variables.items():
                if name != "__builtins__" and originals.get(name, "") != value:
                    if isinstance(value, (basestring, int)):
                        found = False
                        value = unicode(value)

                        regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), re.escape(name), re.escape(delimiter))

                        if re.search(regex, (get or "")):
                            found = True
                            get = re.sub(regex, r"\g<1>%s\g<3>" % value, get)

                        if re.search(regex, (post or "")):
                            found = True
                            post = re.sub(regex, r"\g<1>%s\g<3>" % value, post)

                        regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), re.escape(name), re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER))

                        if re.search(regex, (cookie or "")):
                            found = True
                            cookie = re.sub(regex, r"\g<1>%s\g<3>" % value, cookie)

                        if not found:
                            if post is not None:
                                post += "%s%s=%s" % (delimiter, name, value)
                            elif get is not None:
                                get += "%s%s=%s" % (delimiter, name, value)
                            elif cookie is not None:
                                cookie += "%s%s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, name, value)
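
        # GET data is URL-encoded before sending unless the user explicitly
        # disabled encoding (option '--skip-urlencode')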
        if not conf.skipUrlEncode:
            get = urlencode(get, limit=True)
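
        # For POST data, a previously attached unencoded original value takes
        # precedence (when the injection point is not in the body itself);
        # otherwise the body is URL-encoded when kb.postUrlEncode indicates a
        # form-encoded payload (e.g. not a JSON or XML POST hint)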
        if post is not None:
            if place not in (PLACE.POST, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE):
                post = getattr(post, UNENCODED_ORIGINAL_VALUE)
            elif kb.postUrlEncode:
                post = urlencode(post, spaceplus=kb.postSpaceToPlus)
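
        # Time-based comparisons need a statistical model of normal response
        # times: until MIN_TIME_RESPONSES baseline requests have been
        # collected, plain requests are issued and their durations recorded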
        if timeBasedCompare:
            if len(kb.responseTimes) < MIN_TIME_RESPONSES:
                clearConsoleLine()

                if conf.tor:
                    warnMsg = "it's highly recommended to avoid usage of switch '--tor' for "
                    warnMsg += "time-based injections because of its high latency"
                    singleTimeWarnMessage(warnMsg)

                warnMsg = "[%s] [WARNING] time-based comparison requires " % time.strftime("%X")
                warnMsg += "a larger statistical model, please wait"
                dataToStdout(warnMsg)

                while len(kb.responseTimes) < MIN_TIME_RESPONSES:
                    Connect.queryPage(content=True)
                    dataToStdout('.')

                dataToStdout("\n")

            elif not kb.testMode:
                warnMsg = "it is very important not to stress the network adapter "
                warnMsg += "during usage of time-based payloads to prevent potential "
                warnMsg += "errors"
                singleTimeWarnMessage(warnMsg)

            if not kb.laggingChecked:
                kb.laggingChecked = True
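
                # A large standard deviation in the baseline response times
                # makes deliberate delays hard to tell apart from noise, so
                # automatic time-delay adjustment is disabled and the user is
                # urged to raise '--time-sec'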
                deviation = stdev(kb.responseTimes)

                if deviation > WARN_TIME_STDEV:
                    kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE

                    warnMsg = "considerable lagging has been detected "
                    warnMsg += "in connection response(s). Please use as high "
                    warnMsg += "a value for option '--time-sec' as possible (e.g. "
                    warnMsg += "10 or more)"
                    logger.critical(warnMsg)
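
        # Periodically visit the user-provided "safe" URL (every conf.saFreq
        # requests), presumably to keep session state alive on targets that
        # would otherwise drop it mid-scan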
        if conf.safUrl and conf.saFreq > 0:
            kb.queryCounter += 1

            if kb.queryCounter % conf.saFreq == 0:
                Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host)

        start = time.time()
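
        # NULL connection optimization: when only the page length matters (no
        # content, raw response or time-based test requested), fetch just the
        # size via a HEAD request, a one-byte Range request or a skipped body
        # read, and derive pageLength from the response headers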
        if kb.nullConnection and not content and not response and not timeBasedCompare:
            noteResponseTime = False

            pushValue(kb.pageCompress)
            kb.pageCompress = False

            if kb.nullConnection == NULLCONNECTION.HEAD:
                method = HTTPMETHOD.HEAD
            elif kb.nullConnection == NULLCONNECTION.RANGE:
                auxHeaders[HTTP_HEADER.RANGE] = "bytes=-1"

            _, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ))

            if headers:
                if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and HTTP_HEADER.CONTENT_LENGTH in headers:
                    pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH])
                elif kb.nullConnection == NULLCONNECTION.RANGE and HTTP_HEADER.CONTENT_RANGE in headers:
                    pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:])

            kb.pageCompress = popValue()
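
        # Fall back to a full page retrieval whenever the NULL connection
        # path did not (or could not) establish a page length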
        if not pageLength:
            try:
                page, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)
            except MemoryError:
                page, headers, code = None, None, None
                warnMsg = "site returned an insanely large response"
                if kb.testMode:
                    warnMsg += " in testing phase. This is a common "
                    warnMsg += "behavior in custom WAF/IDS/IPS solutions"
                singleTimeWarnMessage(warnMsg)
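
        # Second-order injection: the payload's effect shows up on a
        # different page, so that user-provided URL is fetched and its
        # response evaluated instead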
        if conf.secondOrder:
            page, headers, code = Connect.getPage(url=conf.secondOrder, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)

        threadData.lastQueryDuration = calculateDeltaSeconds(start)

        kb.originalCode = kb.originalCode or code

        if kb.testMode:
            kb.testQueryCount += 1

        if timeBasedCompare:
            return wasLastResponseDelayed()
        elif noteResponseTime:
            kb.responseTimes.append(threadData.lastQueryDuration)
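
        # Remove payload values reflected in the response so that the echoed
        # payload itself does not skew the page comparison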
        if not response and removeReflection:
            page = removeReflectiveValues(page, payload)

        kb.maxConnectionsFlag = re.search(MAX_CONNECTIONS_REGEX, page or "", re.I) is not None
        kb.permissionFlag = re.search(PERMISSION_DENIED_REGEX, page or "", re.I) is not None

        if content or response:
            return page, headers
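
        # When the caller asks for the ratio, return both the boolean
        # comparison outcome and the raw comparison ratio as a pair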
        if getRatioValue:
            return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength)
        else:
            return comparison(page, headers, code, getRatioValue, pageLength)
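

# NOTE: intentional stub; at runtime this name appears to be re-bound
# ("cross-linked") to the real implementation defined elsewhere in the
# code base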
def setHTTPProxy():  # Cross-linked function
    raise NotImplementedError