#!/usr/bin/env python

"""
Copyright (c) 2006-2021 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

import binascii
import logging
import os
import random
import re
import socket
import string
import struct
import time
import traceback

try:
    import websocket
    from websocket import WebSocketException
except ImportError:
    class WebSocketException(Exception):
        pass
from lib.core.agent import agent
from lib.core.common import asciifyUrl
from lib.core.common import calculateDeltaSeconds
from lib.core.common import checkFile
from lib.core.common import checkSameHost
from lib.core.common import chunkSplitPostData
from lib.core.common import clearConsoleLine
from lib.core.common import dataToStdout
from lib.core.common import escapeJsonValue
from lib.core.common import evaluateCode
from lib.core.common import extractRegexResult
from lib.core.common import filterNone
from lib.core.common import findMultipartPostBoundary
from lib.core.common import getCurrentThreadData
from lib.core.common import getHeader
from lib.core.common import getHostHeader
from lib.core.common import getRequestHeader
from lib.core.common import getSafeExString
from lib.core.common import isMultiThreadMode
from lib.core.common import logHTTPTraffic
from lib.core.common import openFile
from lib.core.common import popValue
from lib.core.common import pushValue
from lib.core.common import randomizeParameterValue
from lib.core.common import randomInt
from lib.core.common import randomStr
from lib.core.common import readInput
from lib.core.common import removeReflectiveValues
from lib.core.common import safeVariableNaming
from lib.core.common import singleTimeLogMessage
from lib.core.common import singleTimeWarnMessage
from lib.core.common import stdev
from lib.core.common import unsafeVariableNaming
from lib.core.common import urldecode
from lib.core.common import urlencode
from lib.core.common import wasLastResponseDelayed
from lib.core.compat import patchHeaders
from lib.core.compat import xrange
from lib.core.convert import getBytes
from lib.core.convert import getText
from lib.core.convert import getUnicode
from lib.core.data import cmdLineOptions
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.datatype import AttribDict
from lib.core.decorators import stackedmethod
from lib.core.dicts import POST_HINT_CONTENT_TYPES
from lib.core.enums import ADJUST_TIME_DELAY
from lib.core.enums import AUTH_TYPE
from lib.core.enums import CUSTOM_LOGGING
from lib.core.enums import HINT
from lib.core.enums import HTTP_HEADER
from lib.core.enums import HTTPMETHOD
from lib.core.enums import NULLCONNECTION
from lib.core.enums import PAYLOAD
from lib.core.enums import PLACE
from lib.core.enums import POST_HINT
from lib.core.enums import REDIRECTION
from lib.core.enums import WEB_PLATFORM
from lib.core.exception import SqlmapCompressionException
from lib.core.exception import SqlmapConnectionException
from lib.core.exception import SqlmapGenericException
from lib.core.exception import SqlmapSkipTargetException
from lib.core.exception import SqlmapSyntaxException
from lib.core.exception import SqlmapTokenException
from lib.core.exception import SqlmapValueException
from lib.core.settings import ASTERISK_MARKER
from lib.core.settings import BOUNDARY_BACKSLASH_MARKER
from lib.core.settings import DEFAULT_CONTENT_TYPE
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
from lib.core.settings import DEFAULT_USER_AGENT
from lib.core.settings import EVALCODE_ENCODED_PREFIX
from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE
from lib.core.settings import IPS_WAF_CHECK_PAYLOAD
from lib.core.settings import IS_WIN
from lib.core.settings import JAVASCRIPT_HREF_REGEX
from lib.core.settings import LARGE_READ_TRIM_MARKER
from lib.core.settings import LIVE_COOKIES_TIMEOUT
from lib.core.settings import MAX_CONNECTION_READ_SIZE
from lib.core.settings import MAX_CONNECTIONS_REGEX
from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
from lib.core.settings import MAX_CONSECUTIVE_CONNECTION_ERRORS
from lib.core.settings import MAX_MURPHY_SLEEP_TIME
from lib.core.settings import META_REFRESH_REGEX
from lib.core.settings import MAX_TIME_RESPONSES
from lib.core.settings import MIN_TIME_RESPONSES
from lib.core.settings import PAYLOAD_DELIMITER
from lib.core.settings import PERMISSION_DENIED_REGEX
from lib.core.settings import PLAIN_TEXT_CONTENT_TYPE
from lib.core.settings import RANDOM_INTEGER_MARKER
from lib.core.settings import RANDOM_STRING_MARKER
from lib.core.settings import REPLACEMENT_MARKER
from lib.core.settings import TEXT_CONTENT_TYPE_REGEX
from lib.core.settings import UNENCODED_ORIGINAL_VALUE
from lib.core.settings import UNICODE_ENCODING
from lib.core.settings import URI_HTTP_HEADER
from lib.core.settings import WARN_TIME_STDEV
from lib.core.settings import WEBSOCKET_INITIAL_TIMEOUT
from lib.request.basic import decodePage
from lib.request.basic import forgeHeaders
from lib.request.basic import processResponse
from lib.request.comparison import comparison
from lib.request.direct import direct
from lib.request.methodrequest import MethodRequest
from lib.utils.safe2bin import safecharencode
from thirdparty import six
from thirdparty.odict import OrderedDict
from thirdparty.six import unichr as _unichr
from thirdparty.six.moves import http_client as _http_client
from thirdparty.six.moves import urllib as _urllib
from thirdparty.socks.socks import ProxyError

class Connect(object):
    """
    This class defines methods used to perform HTTP requests
    """

    @staticmethod
    def _getPageProxy(**kwargs):
        try:
            return Connect.getPage(**kwargs)
        except RuntimeError:
            return None, None, None

    @staticmethod
    def _retryProxy(**kwargs):
        threadData = getCurrentThreadData()
        threadData.retriesCount += 1

        if conf.proxyList and threadData.retriesCount >= conf.retries and not kb.locks.handlers.locked():
            warnMsg = "changing proxy"
            logger.warn(warnMsg)

            conf.proxy = None
            threadData.retriesCount = 0

            setHTTPHandlers()

        if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME:
            # time-based payloads can cause web server unresponsiveness
            # if the injectable piece of code is some kind of JOIN-like query
            warnMsg = "most likely web server instance hasn't recovered yet "
            warnMsg += "from previous time-based payload. If the problem "
            warnMsg += "persists please wait for a few minutes and rerun "
            warnMsg += "without flag 'T' in option '--technique' "
            warnMsg += "(e.g. '--flush-session --technique=BEUS') or try to "
            warnMsg += "lower the value of option '--time-sec' (e.g. '--time-sec=2')"
            singleTimeWarnMessage(warnMsg)

        elif kb.originalPage is None:
            if conf.tor:
                warnMsg = "please make sure that you have "
                warnMsg += "Tor installed and running so "
                warnMsg += "you could successfully use "
                warnMsg += "switch '--tor' "
                if IS_WIN:
                    warnMsg += "(e.g. 'https://www.torproject.org/download/download.html.en')"
                else:
                    warnMsg += "(e.g. 'https://help.ubuntu.com/community/Tor')"
            else:
                warnMsg = "if the problem persists please check that the provided "
                warnMsg += "target URL is reachable. In case that it is, "
                warnMsg += "you can try to rerun with "
                if not conf.randomAgent:
                    warnMsg += "switch '--random-agent' and/or "
                warnMsg += "proxy switches ('--ignore-proxy', '--proxy',...)"
            singleTimeWarnMessage(warnMsg)

        elif conf.threads > 1:
            warnMsg = "if the problem persists please try to lower "
            warnMsg += "the number of used threads (option '--threads')"
            singleTimeWarnMessage(warnMsg)

        kwargs['retrying'] = True
        return Connect._getPageProxy(**kwargs)

    @staticmethod
    def _connReadProxy(conn):
        retVal = b""

        if not kb.dnsMode and conn:
            headers = conn.info()
            if kb.pageCompress and headers and hasattr(headers, "getheader") and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate") or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()):
                retVal = conn.read(MAX_CONNECTION_TOTAL_SIZE)
                if len(retVal) == MAX_CONNECTION_TOTAL_SIZE:
                    warnMsg = "large compressed response detected. Disabling compression"
                    singleTimeWarnMessage(warnMsg)

                    kb.pageCompress = False
                    raise SqlmapCompressionException
            else:
                while True:
                    if not conn:
                        break
                    else:
                        try:
                            part = conn.read(MAX_CONNECTION_READ_SIZE)
                        except AssertionError:
                            part = b""

                    if len(part) == MAX_CONNECTION_READ_SIZE:
                        warnMsg = "large response detected. This could take a while"
                        singleTimeWarnMessage(warnMsg)

                        part = re.sub(r"(?si)%s.+?%s" % (kb.chars.stop, kb.chars.start), "%s%s%s" % (kb.chars.stop, LARGE_READ_TRIM_MARKER, kb.chars.start), part)
                        retVal += part
                    else:
                        retVal += part
                        break

                    if len(retVal) > MAX_CONNECTION_TOTAL_SIZE:
                        warnMsg = "too large response detected. Automatically trimming it"
                        singleTimeWarnMessage(warnMsg)
                        break

        return retVal

    @staticmethod
    def getPage(**kwargs):
        """
        This method connects to the target URL or proxy and returns
        the target URL page content
        """

        if conf.offline:
            return None, None, None

        url = kwargs.get("url", None) or conf.url
        get = kwargs.get("get", None)
        post = kwargs.get("post", None)
        method = kwargs.get("method", None)
        cookie = kwargs.get("cookie", None)
        ua = kwargs.get("ua", None) or conf.agent
        referer = kwargs.get("referer", None) or conf.referer
        host = kwargs.get("host", None) or conf.host
        direct_ = kwargs.get("direct", False)
        multipart = kwargs.get("multipart", None)
        silent = kwargs.get("silent", False)
        raise404 = kwargs.get("raise404", True)
        timeout = kwargs.get("timeout", None) or conf.timeout
        auxHeaders = kwargs.get("auxHeaders", None)
        response = kwargs.get("response", False)
        ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout or conf.ignoreTimeouts
        refreshing = kwargs.get("refreshing", False)
        retrying = kwargs.get("retrying", False)
        crawling = kwargs.get("crawling", False)
        checking = kwargs.get("checking", False)
        skipRead = kwargs.get("skipRead", False)
        finalCode = kwargs.get("finalCode", False)
        chunked = kwargs.get("chunked", False) or conf.chunked

        start = time.time()

        if isinstance(conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)

        threadData = getCurrentThreadData()

        with kb.locks.request:
            kb.requestCounter += 1
            threadData.lastRequestUID = kb.requestCounter

        if conf.proxyFreq:
            if kb.requestCounter % conf.proxyFreq == 1:
                conf.proxy = None

                warnMsg = "changing proxy"
                logger.warn(warnMsg)

                setHTTPHandlers()

        if conf.dummy or conf.murphyRate and randomInt() % conf.murphyRate == 0:
            if conf.murphyRate:
                time.sleep(randomInt() % (MAX_MURPHY_SLEEP_TIME + 1))

            page, headers, code = randomStr(int(randomInt()), alphabet=[_unichr(_) for _ in xrange(256)]), None, None if not conf.murphyRate else randomInt(3)

            threadData.lastPage = page
            threadData.lastCode = code

            return page, headers, code

        if conf.liveCookies:
            with kb.locks.liveCookies:
                if not checkFile(conf.liveCookies, raiseOnError=False) or os.path.getsize(conf.liveCookies) == 0:
                    warnMsg = "[%s] [WARNING] live cookies file '%s' is empty or non-existent. Waiting for timeout (%d seconds) " % (time.strftime("%X"), conf.liveCookies, LIVE_COOKIES_TIMEOUT)
                    dataToStdout(warnMsg)

                    valid = False
                    for _ in xrange(LIVE_COOKIES_TIMEOUT):
                        if checkFile(conf.liveCookies, raiseOnError=False) and os.path.getsize(conf.liveCookies) > 0:
                            valid = True
                            break
                        else:
                            dataToStdout('.')
                            time.sleep(1)

                    dataToStdout("\n")

                    if not valid:
                        errMsg = "problem occurred while loading cookies from file '%s'" % conf.liveCookies
                        raise SqlmapValueException(errMsg)

                cookie = openFile(conf.liveCookies).read().strip()
                cookie = re.sub(r"(?i)\ACookie:\s*", "", cookie)

        if multipart:
            post = multipart
        else:
            if not post:
                chunked = False

            elif chunked:
                post = _urllib.parse.unquote(post)
                post = chunkSplitPostData(post)

        webSocket = url.lower().startswith("ws")

        if not _urllib.parse.urlsplit(url).netloc:
            url = _urllib.parse.urljoin(conf.url, url)

        # flag to know if we are dealing with the same target host
        target = checkSameHost(url, conf.url)

        if not retrying:
            # Reset the number of connection retries
            threadData.retriesCount = 0

        # fix for known issue when urllib2 just skips the other part of provided
        # URL split with space char while urlencoding it in the later phase
        url = url.replace(" ", "%20")

        if "://" not in url:
            url = "http://%s" % url

        conn = None
        page = None
        code = None
        status = None

        _ = _urllib.parse.urlsplit(url)
        requestMsg = u"HTTP request [#%d]:\r\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
        requestMsg += getUnicode(("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling, checking)) else url)
        responseMsg = u"HTTP response "
        requestHeaders = u""
        responseHeaders = None
        logHeaders = u""
        skipLogTraffic = False

        raise404 = raise404 and not kb.ignoreNotFound

        # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
        # support those by default
        url = asciifyUrl(url)

        try:
            socket.setdefaulttimeout(timeout)

            if direct_:
                if '?' in url:
                    url, params = url.split('?', 1)
                    params = urlencode(params)
                    url = "%s?%s" % (url, params)

            elif any((refreshing, crawling, checking)):
                pass

            elif target:
                if conf.forceSSL:
                    url = re.sub(r"(?i)\A(http|ws):", r"\g<1>s:", url)
                    url = re.sub(r"(?i):80/", ":443/", url)

                if PLACE.GET in conf.parameters and not get:
                    get = conf.parameters[PLACE.GET]

                    if not conf.skipUrlEncode:
                        get = urlencode(get, limit=True)

                if get:
                    if '?' in url:
                        url = "%s%s%s" % (url, DEFAULT_GET_POST_DELIMITER, get)
                        requestMsg += "%s%s" % (DEFAULT_GET_POST_DELIMITER, get)
                    else:
                        url = "%s?%s" % (url, get)
                        requestMsg += "?%s" % get

                if PLACE.POST in conf.parameters and not post and method != HTTPMETHOD.GET:
                    post = conf.parameters[PLACE.POST]

            elif get:
                url = "%s?%s" % (url, get)
                requestMsg += "?%s" % get

            requestMsg += " %s" % _http_client.HTTPConnection._http_vsn_str

            # Prepare HTTP headers
            headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: host}, base=None if target else {})

            if HTTP_HEADER.COOKIE in headers:
                cookie = headers[HTTP_HEADER.COOKIE]

            if kb.authHeader:
                headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader

            if kb.proxyAuthHeader:
                headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

            if not conf.requestFile or not target:
                if not getHeader(headers, HTTP_HEADER.HOST):
                    headers[HTTP_HEADER.HOST] = getHostHeader(url)

                if not getHeader(headers, HTTP_HEADER.ACCEPT):
                    headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE

                if not getHeader(headers, HTTP_HEADER.ACCEPT_ENCODING):
                    headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"

            elif conf.requestFile and getHeader(headers, HTTP_HEADER.USER_AGENT) == DEFAULT_USER_AGENT:
                for header in headers:
                    if header.upper() == HTTP_HEADER.USER_AGENT.upper():
                        del headers[header]
                        break

            if post is not None and not multipart and not getHeader(headers, HTTP_HEADER.CONTENT_TYPE):
                headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)

            if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
                warnMsg = "missing 'boundary parameter' in '%s' header. " % HTTP_HEADER.CONTENT_TYPE
                warnMsg += "Will try to reconstruct"
                singleTimeWarnMessage(warnMsg)

                boundary = findMultipartPostBoundary(conf.data)
                if boundary:
                    headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary)

            if conf.keepAlive:
                headers[HTTP_HEADER.CONNECTION] = "keep-alive"

            if chunked:
                headers[HTTP_HEADER.TRANSFER_ENCODING] = "chunked"

            if auxHeaders:
                headers = forgeHeaders(auxHeaders, headers)

            if kb.headersFile:
                content = openFile(kb.headersFile, "rb").read()
                for line in content.split("\n"):
                    line = getText(line.strip())
                    if ':' in line:
                        header, value = line.split(':', 1)
                        headers[header] = value

            for key, value in list(headers.items()):
                del headers[key]
                if isinstance(value, six.string_types):
                    for char in (r"\r", r"\n"):
                        value = re.sub(r"(%s)([^ \t])" % char, r"\g<1>\t\g<2>", value)
                headers[getBytes(key) if six.PY2 else key] = getBytes(value.strip("\r\n"))  # Note: Python3 has_header() expects non-bytes value

            if six.PY2:
                url = getBytes(url)  # Note: Python3 requires text while Python2 has problems when mixing text with binary POST

            post = getBytes(post)

            if webSocket:
                ws = websocket.WebSocket()
                ws.settimeout(WEBSOCKET_INITIAL_TIMEOUT if kb.webSocketRecvCount is None else timeout)
                ws.connect(url, header=("%s: %s" % _ for _ in headers.items() if _[0] not in ("Host",)), cookie=cookie)  # WebSocket will add Host field of headers automatically
                ws.send(urldecode(post or ""))

                _page = []

                if kb.webSocketRecvCount is None:
                    while True:
                        try:
                            _page.append(ws.recv())
                        except websocket.WebSocketTimeoutException:
                            kb.webSocketRecvCount = len(_page)
                            break
                else:
                    for i in xrange(max(1, kb.webSocketRecvCount)):
                        _page.append(ws.recv())

                page = "\n".join(_page)

                ws.close()
                code = ws.status
                status = _http_client.responses[code]

                class _(dict):
                    pass

                responseHeaders = _(ws.getheaders())
                responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()]

                requestHeaders += "\r\n".join(["%s: %s" % (getUnicode(key.capitalize() if hasattr(key, "capitalize") else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
                requestMsg += "\r\n%s" % requestHeaders

                if post is not None:
                    requestMsg += "\r\n\r\n%s" % getUnicode(post)

                requestMsg += "\r\n"

                threadData.lastRequestMsg = requestMsg

                logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
            else:
                if target and cmdLineOptions.method or method and method not in (HTTPMETHOD.GET, HTTPMETHOD.POST):
                    req = MethodRequest(url, post, headers)
                    req.set_method(cmdLineOptions.method or method)
                elif url is not None:
                    req = _urllib.request.Request(url, post, headers)
                else:
                    return None, None, None

                for function in kb.preprocessFunctions:
                    try:
                        function(req)
                    except Exception as ex:
                        errMsg = "error occurred while running preprocess "
                        errMsg += "function '%s' ('%s')" % (function.__name__, getSafeExString(ex))
                        raise SqlmapGenericException(errMsg)
                    else:
                        post, headers = req.data, req.headers

                requestHeaders += "\r\n".join(["%s: %s" % (getUnicode(key.capitalize() if hasattr(key, "capitalize") else key), getUnicode(value)) for (key, value) in req.header_items()])

                if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
                    conf.cj._policy._now = conf.cj._now = int(time.time())
                    cookies = conf.cj._cookies_for_request(req)
                    requestHeaders += "\r\n%s" % ("Cookie: %s" % "; ".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))

                if post is not None:
                    if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH) and not chunked:
                        requestHeaders += "\r\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))

                if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
                    requestHeaders += "\r\n%s: %s" % (HTTP_HEADER.CONNECTION, "close" if not conf.keepAlive else "keep-alive")

                requestMsg += "\r\n%s" % requestHeaders

                if post is not None:
                    requestMsg += "\r\n\r\n%s" % getUnicode(post)

                if not chunked:
                    requestMsg += "\r\n"

                if not multipart:
                    threadData.lastRequestMsg = requestMsg

                    logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

                if conf.cj:
                    for cookie in conf.cj:
                        if cookie.value is None:
                            cookie.value = ""
                        else:
                            for char in (r"\r", r"\n"):
                                cookie.value = re.sub(r"(%s)([^ \t])" % char, r"\g<1>\t\g<2>", cookie.value)

                conn = _urllib.request.urlopen(req)

                if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and (conf.authType or "").lower() == AUTH_TYPE.BASIC.lower():
                    kb.authHeader = getUnicode(getRequestHeader(req, HTTP_HEADER.AUTHORIZATION))

                if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION):
                    kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION)

                # Return response object
                if response:
                    return conn, None, None

                # Get HTTP response
                if hasattr(conn, "redurl"):
                    page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO else Connect._connReadProxy(conn)) if not skipRead else None
                    skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
                    code = conn.redcode if not finalCode else code
                else:
                    page = Connect._connReadProxy(conn) if not skipRead else None

                if conn:
                    code = (code or conn.code) if conn.code == kb.originalCode else conn.code  # do not override redirection code (for comparison purposes)
                    responseHeaders = conn.info()
                    responseHeaders[URI_HTTP_HEADER] = conn.geturl()

                    if hasattr(conn, "redurl"):
                        responseHeaders[HTTP_HEADER.LOCATION] = conn.redurl

                    patchHeaders(responseHeaders)
                    kb.serverHeader = responseHeaders.get(HTTP_HEADER.SERVER, kb.serverHeader)
                else:
                    code = None
                    responseHeaders = {}

                page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE), percentDecode=not crawling)
                status = getUnicode(conn.msg) if conn and getattr(conn, "msg", None) else None

            kb.connErrorCounter = 0

            if not refreshing:
                refresh = responseHeaders.get(HTTP_HEADER.REFRESH, "").split("url=")[-1].strip()

                if extractRegexResult(META_REFRESH_REGEX, page):
                    refresh = extractRegexResult(META_REFRESH_REGEX, page)

                    debugMsg = "got HTML meta refresh header"
                    logger.debug(debugMsg)

                if not refresh:
                    refresh = extractRegexResult(JAVASCRIPT_HREF_REGEX, page)

                    if refresh:
                        debugMsg = "got Javascript redirect logic"
                        logger.debug(debugMsg)

                if refresh:
                    if kb.alwaysRefresh is None:
                        msg = "got a refresh intent "
                        msg += "(redirect like response common to login pages) to '%s'. " % refresh
                        msg += "Do you want to apply it from now on? [Y/n]"

                        kb.alwaysRefresh = readInput(msg, default='Y', boolean=True)

                    if kb.alwaysRefresh:
                        if re.search(r"\Ahttps?://", refresh, re.I):
                            url = refresh
                        else:
                            url = _urllib.parse.urljoin(url, refresh)

                        threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
                        kwargs["refreshing"] = True
                        kwargs["url"] = url
                        kwargs["get"] = None
                        kwargs["post"] = None

                        try:
                            return Connect._getPageProxy(**kwargs)
                        except SqlmapSyntaxException:
                            pass

            # Explicit closing of connection object
            if conn and not conf.keepAlive:
                try:
                    if hasattr(conn.fp, '_sock'):
                        conn.fp._sock.close()

                    conn.close()
                except Exception as ex:
                    warnMsg = "problem occurred during connection closing ('%s')" % getSafeExString(ex)
                    logger.warn(warnMsg)

        except SqlmapConnectionException as ex:
            if conf.proxyList and not kb.threadException:
                warnMsg = "unable to connect to the target URL ('%s')" % ex
                logger.critical(warnMsg)

                threadData.retriesCount = conf.retries
                return Connect._retryProxy(**kwargs)
            else:
                raise

        except _urllib.error.HTTPError as ex:
            page = None
            responseHeaders = None

            if checking:
                return None, None, None

            try:
                page = ex.read() if not skipRead else None
                responseHeaders = ex.info()
                responseHeaders[URI_HTTP_HEADER] = ex.geturl()
                patchHeaders(responseHeaders)
                page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE), percentDecode=not crawling)
            except socket.timeout:
                warnMsg = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % ex.code
                logger.warn(warnMsg)
                return None, None, None
            except KeyboardInterrupt:
                raise
            except:
                pass
            finally:
                page = getUnicode(page)

            code = ex.code
            status = getUnicode(getattr(ex, "reason", None) or getSafeExString(ex).split(":", 1)[-1])

            kb.originalCode = kb.originalCode or code
            threadData.lastHTTPError = (threadData.lastRequestUID, code, status)
            kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1

            responseMsg += "[#%d] (%s %s):\r\n" % (threadData.lastRequestUID, code, status)

            if responseHeaders:
                logHeaders = "".join(getUnicode(responseHeaders.headers)).strip()

            logHTTPTraffic(requestMsg, "%s%s\r\n\r\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE]), start, time.time())

            skipLogTraffic = True

            if conf.verbose <= 5:
                responseMsg += getUnicode(logHeaders)
            elif conf.verbose > 5:
                responseMsg += "%s\r\n\r\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE])

            if not multipart:
                logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)

            if ex.code not in (conf.ignoreCode or []):
                if ex.code == _http_client.UNAUTHORIZED:
                    errMsg = "not authorized, try to provide right HTTP "
                    errMsg += "authentication type and valid credentials (%d)" % code
                    raise SqlmapConnectionException(errMsg)
                elif chunked and ex.code in (_http_client.METHOD_NOT_ALLOWED, _http_client.LENGTH_REQUIRED):
                    warnMsg = "turning off HTTP chunked transfer encoding "
                    warnMsg += "as it seems that the target site doesn't support it (%d)" % code
                    singleTimeWarnMessage(warnMsg)

                    conf.chunked = kwargs["chunked"] = False
                    return Connect.getPage(**kwargs)
                elif ex.code == _http_client.NOT_FOUND:
                    if raise404:
                        errMsg = "page not found (%d)" % code
                        raise SqlmapConnectionException(errMsg)
                    else:
                        debugMsg = "page not found (%d)" % code
                        singleTimeLogMessage(debugMsg, logging.DEBUG)
                elif ex.code == _http_client.GATEWAY_TIMEOUT:
                    if ignoreTimeout:
                        return None if not conf.ignoreTimeouts else "", None, None
                    else:
                        warnMsg = "unable to connect to the target URL (%d - %s)" % (ex.code, _http_client.responses[ex.code])
                        if threadData.retriesCount < conf.retries and not kb.threadException:
                            warnMsg += ". sqlmap is going to retry the request"
                            logger.critical(warnMsg)
                            return Connect._retryProxy(**kwargs)
                        elif kb.testMode:
                            logger.critical(warnMsg)
                            return None, None, None
                        else:
                            raise SqlmapConnectionException(warnMsg)
            else:
                debugMsg = "got HTTP error code: %d ('%s')" % (code, status)
                logger.debug(debugMsg)

        except (_urllib.error.URLError, socket.error, socket.timeout, _http_client.HTTPException, struct.error, binascii.Error, ProxyError, SqlmapCompressionException, WebSocketException, TypeError, ValueError, OverflowError):
            tbMsg = traceback.format_exc()

            if conf.debug:
                dataToStdout(tbMsg)

            if checking:
                return None, None, None
            elif "no host given" in tbMsg:
                warnMsg = "invalid URL address used (%s)" % repr(url)
                raise SqlmapSyntaxException(warnMsg)
            elif "forcibly closed" in tbMsg or "Connection is already closed" in tbMsg:
                warnMsg = "connection was forcibly closed by the target URL"
            elif "timed out" in tbMsg:
                if kb.testMode and kb.testType not in (None, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
                    singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS) is dropping 'suspicious' requests")
                    kb.droppingRequests = True
                warnMsg = "connection timed out to the target URL"
            elif "Connection reset" in tbMsg:
                if not conf.disablePrecon:
                    singleTimeWarnMessage("turning off pre-connect mechanism because of connection reset(s)")
                    conf.disablePrecon = True

                if kb.testMode:
                    singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS) is resetting 'suspicious' requests")
                    kb.droppingRequests = True

                warnMsg = "connection reset to the target URL"
            elif "URLError" in tbMsg or "error" in tbMsg:
                warnMsg = "unable to connect to the target URL"
                match = re.search(r"Errno \d+\] ([^>\n]+)", tbMsg)
                if match:
                    warnMsg += " ('%s')" % match.group(1).strip()
            elif "NTLM" in tbMsg:
                warnMsg = "there has been a problem with NTLM authentication"
            elif "Invalid header name" in tbMsg:  # (e.g. PostgreSQL ::Text payload)
                return None, None, None
            elif "BadStatusLine" in tbMsg:
                warnMsg = "connection dropped or unknown HTTP "
                warnMsg += "status code received"
                if not conf.agent and not conf.randomAgent:
                    warnMsg += ". Try to force the HTTP User-Agent "
                    warnMsg += "header with option '--user-agent' or switch '--random-agent'"
            elif "IncompleteRead" in tbMsg:
                warnMsg = "there was an incomplete read error while retrieving data "
                warnMsg += "from the target URL"
            elif "Handshake status" in tbMsg:
                status = re.search(r"Handshake status ([\d]{3})", tbMsg)
                errMsg = "websocket handshake status %s" % status.group(1) if status else "unknown"
                raise SqlmapConnectionException(errMsg)
            elif "SqlmapCompressionException" in tbMsg:
                warnMsg = "problems with response (de)compression"
                retrying = True
            else:
                warnMsg = "unable to connect to the target URL"

            if "BadStatusLine" not in tbMsg and any((conf.proxy, conf.tor)):
                warnMsg += " or proxy"

            if silent:
                return None, None, None

            with kb.locks.connError:
                kb.connErrorCounter += 1

                if kb.connErrorCounter >= MAX_CONSECUTIVE_CONNECTION_ERRORS and kb.connErrorChoice is None:
                    message = "there seems to be a continuous problem with connection to the target. "
                    message += "Are you sure that you want to continue? [y/N] "

                    kb.connErrorChoice = readInput(message, default='N', boolean=True)

                if kb.connErrorChoice is False:
                    raise SqlmapSkipTargetException

            if "forcibly closed" in tbMsg:
                logger.critical(warnMsg)
                return None, None, None
            elif ignoreTimeout and any(_ in tbMsg for _ in ("timed out", "IncompleteRead", "Interrupted system call")):
                return None if not conf.ignoreTimeouts else "", None, None
            elif threadData.retriesCount < conf.retries and not kb.threadException:
                warnMsg += ". sqlmap is going to retry the request"
                if not retrying:
                    warnMsg += "(s)"
                    logger.critical(warnMsg)
                else:
                    logger.debug(warnMsg)
                return Connect._retryProxy(**kwargs)
            elif kb.testMode or isMultiThreadMode():
                logger.critical(warnMsg)
                return None, None, None
            else:
                raise SqlmapConnectionException(warnMsg)

        finally:
            if isinstance(page, six.binary_type):
                if HTTP_HEADER.CONTENT_TYPE in (responseHeaders or {}) and not re.search(TEXT_CONTENT_TYPE_REGEX, responseHeaders[HTTP_HEADER.CONTENT_TYPE]):
                    page = six.text_type(page, errors="ignore")
                else:
                    page = getUnicode(page)

            for function in kb.postprocessFunctions:
                try:
                    page, responseHeaders, code = function(page, responseHeaders, code)
                except Exception as ex:
                    errMsg = "error occurred while running postprocess "
                    errMsg += "function '%s' ('%s')" % (function.__name__, getSafeExString(ex))
                    raise SqlmapGenericException(errMsg)

            threadData.lastPage = page
            threadData.lastCode = code

            socket.setdefaulttimeout(conf.timeout)

        processResponse(page, responseHeaders, code, status)

        if not skipLogTraffic:
            if conn and getattr(conn, "redurl", None):
                _ = _urllib.parse.urlsplit(conn.redurl)
                _ = ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else ""))
                requestMsg = re.sub(r"(\n[A-Z]+ ).+?( HTTP/\d)", r"\g<1>%s\g<2>" % getUnicode(_).replace("\\", "\\\\"), requestMsg, 1)

                if kb.resendPostOnRedirect is False:
                    requestMsg = re.sub(r"(\[#\d+\]:\n)POST ", r"\g<1>GET ", requestMsg)
                    requestMsg = re.sub(r"(?i)Content-length: \d+\n", "", requestMsg)
                    requestMsg = re.sub(r"(?s)\n\n.+", "\n", requestMsg)

                responseMsg += "[#%d] (%s %s):\r\n" % (threadData.lastRequestUID, conn.code, status)
            elif "\n" not in responseMsg:
                responseMsg += "[#%d] (%s %s):\r\n" % (threadData.lastRequestUID, code, status)

            if responseHeaders:
                logHeaders = "".join(getUnicode(responseHeaders.headers)).strip()

            logHTTPTraffic(requestMsg, "%s%s\r\n\r\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE]), start, time.time())

            if conf.verbose <= 5:
                responseMsg += getUnicode(logHeaders)
            elif conf.verbose > 5:
                responseMsg += "%s\r\n\r\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE])

            if not multipart:
                logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)

        return page, responseHeaders, code

    @staticmethod
    @stackedmethod
    def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True, disableTampering=False, ignoreSecondOrder=False):
        """
        This method calls a function to get the target URL page content
        and returns its page ratio (0 <= ratio <= 1) or a boolean value
        representing False/True match in case of !getRatioValue
        """
2010-03-27 02:23:25 +03:00
if conf . direct :
2010-03-31 14:50:47 +04:00
return direct ( value , content )
2010-03-27 02:23:25 +03:00
2011-04-30 17:20:05 +04:00
get = None
post = None
cookie = None
ua = None
referer = None
2011-12-20 16:52:41 +04:00
host = None
2011-04-30 17:20:05 +04:00
page = None
pageLength = None
uri = None
2012-03-16 00:17:40 +04:00
code = None
2008-10-15 19:38:22 +04:00
if not place :
2011-07-06 09:44:47 +04:00
place = kb . injection . place or PLACE . GET
2014-07-10 10:49:20 +04:00
if not auxHeaders :
auxHeaders = { }
2011-07-06 09:44:47 +04:00
raise404 = place != PLACE . URI if raise404 is None else raise404
2014-11-21 11:41:39 +03:00
method = method or conf . method
2010-10-14 15:06:28 +04:00
2017-11-19 04:51:29 +03:00
postUrlEncode = kb . postUrlEncode
2017-11-13 16:07:12 +03:00
2012-05-22 13:33:22 +04:00
value = agent . adjustLateValues ( value )
2010-11-08 00:55:24 +03:00
payload = agent . extractPayload ( value )
2010-12-21 01:45:01 +03:00
threadData = getCurrentThreadData ( )
2010-10-30 03:00:48 +04:00
2013-03-27 16:39:27 +04:00
if conf . httpHeaders :
2014-10-22 15:41:36 +04:00
headers = OrderedDict ( conf . httpHeaders )
2019-05-02 01:45:44 +03:00
contentType = max ( headers [ _ ] if _ . upper ( ) == HTTP_HEADER . CONTENT_TYPE . upper ( ) else " " for _ in headers ) or None
2013-03-27 16:39:27 +04:00
2017-11-19 04:51:29 +03:00
if ( kb . postHint or conf . skipUrlEncode ) and postUrlEncode :
postUrlEncode = False
2013-03-27 16:39:27 +04:00
conf . httpHeaders = [ _ for _ in conf . httpHeaders if _ [ 1 ] != contentType ]
contentType = POST_HINT_CONTENT_TYPES . get ( kb . postHint , PLAIN_TEXT_CONTENT_TYPE )
conf . httpHeaders . append ( ( HTTP_HEADER . CONTENT_TYPE , contentType ) )
2012-08-31 14:38:02 +04:00
2010-11-08 00:55:24 +03:00
if payload :
2018-10-26 13:08:04 +03:00
delimiter = conf . paramDel or ( DEFAULT_GET_POST_DELIMITER if place != PLACE . COOKIE else DEFAULT_COOKIE_DELIMITER )
2018-04-11 15:48:54 +03:00
if not disableTampering and kb . tamperFunctions :
2010-10-29 20:11:50 +04:00
for function in kb . tamperFunctions :
2018-10-26 13:08:04 +03:00
hints = { }
2014-11-05 12:03:19 +03:00
try :
2018-10-26 13:08:04 +03:00
payload = function ( payload = payload , headers = auxHeaders , delimiter = delimiter , hints = hints )
2019-01-22 02:40:48 +03:00
except Exception as ex :
2014-11-05 12:03:19 +03:00
errMsg = " error occurred while running tamper "
2019-03-29 04:28:16 +03:00
errMsg + = " function ' %s ' ( ' %s ' ) " % ( function . __name__ , getSafeExString ( ex ) )
2014-11-05 12:03:19 +03:00
raise SqlmapGenericException ( errMsg )
2019-03-28 15:53:54 +03:00
if not isinstance ( payload , six . string_types ) :
2019-03-29 04:28:16 +03:00
errMsg = " tamper function ' %s ' returns " % function . __name__
2012-11-10 14:01:29 +04:00
errMsg + = " invalid payload type ( ' %s ' ) " % type ( payload )
2013-01-04 02:20:55 +04:00
raise SqlmapValueException ( errMsg )
2010-10-30 03:00:48 +04:00
2010-10-29 20:11:50 +04:00
value = agent . replacePayload ( value , payload )
2018-10-26 13:08:04 +03:00
if hints :
if HINT . APPEND in hints :
value = " %s %s %s " % ( value , delimiter , hints [ HINT . APPEND ] )
if HINT . PREPEND in hints :
2018-10-26 15:00:51 +03:00
if place == PLACE . URI :
match = re . search ( r " \ w+ \ s*= \ s* %s " % PAYLOAD_DELIMITER , value ) or re . search ( r " [^? %s /]= \ s* %s " % ( re . escape ( delimiter ) , PAYLOAD_DELIMITER ) , value )
if match :
value = value . replace ( match . group ( 0 ) , " %s %s %s " % ( hints [ HINT . PREPEND ] , delimiter , match . group ( 0 ) ) )
else :
value = " %s %s %s " % ( hints [ HINT . PREPEND ] , delimiter , value )
2018-10-26 13:08:04 +03:00
2016-02-05 14:00:57 +03:00
logger . log ( CUSTOM_LOGGING . PAYLOAD , safecharencode ( payload . replace ( ' \\ ' , BOUNDARY_BACKSLASH_MARKER ) ) . replace ( BOUNDARY_BACKSLASH_MARKER , ' \\ ' ) )
2010-11-08 00:18:09 +03:00
2013-12-04 13:09:54 +04:00
if place == PLACE . CUSTOM_POST and kb . postHint :
2012-10-04 20:44:12 +04:00
if kb . postHint in ( POST_HINT . SOAP , POST_HINT . XML ) :
# payloads in SOAP/XML should have chars > and < replaced
2012-10-04 13:25:44 +04:00
# with their HTML encoded counterparts
2019-11-14 13:49:30 +03:00
payload = payload . replace ( ' & ' , " & " ) . replace ( ' > ' , " > " ) . replace ( ' < ' , " < " ) . replace ( ' " ' , " " " ) . replace ( " ' " , " ' " ) # Reference: https://stackoverflow.com/a/1091953
2012-10-04 13:25:44 +04:00
elif kb . postHint == POST_HINT . JSON :
2018-04-11 16:19:44 +03:00
payload = escapeJsonValue ( payload )
2014-02-26 11:56:17 +04:00
elif kb . postHint == POST_HINT . JSON_LIKE :
2014-02-26 12:30:37 +04:00
payload = payload . replace ( " ' " , REPLACEMENT_MARKER ) . replace ( ' " ' , " ' " ) . replace ( REPLACEMENT_MARKER , ' " ' )
2018-04-11 16:19:44 +03:00
payload = escapeJsonValue ( payload )
2014-02-26 12:30:37 +04:00
payload = payload . replace ( " ' " , REPLACEMENT_MARKER ) . replace ( ' " ' , " ' " ) . replace ( REPLACEMENT_MARKER , ' " ' )
2012-09-22 22:59:40 +04:00
value = agent . replacePayload ( value , payload )
else:
    # GET, POST, URI and Cookie payload needs to be thoroughly URL encoded
    if (place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) or place == PLACE.CUSTOM_HEADER and value.split(',')[0].upper() == HTTP_HEADER.COOKIE.upper()) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and postUrlEncode:
        skip = False

        if place == PLACE.COOKIE or place == PLACE.CUSTOM_HEADER and value.split(',')[0].upper() == HTTP_HEADER.COOKIE.upper():
            if kb.cookieEncodeChoice is None:
                msg = "do you want to URL encode cookie values (implementation specific)? %s" % ("[Y/n]" if not conf.url.endswith(".aspx") else "[y/N]")  # Reference: https://support.microsoft.com/en-us/kb/313282
                kb.cookieEncodeChoice = readInput(msg, default='Y' if not conf.url.endswith(".aspx") else 'N', boolean=True)
            if not kb.cookieEncodeChoice:
                skip = True

        if not skip:
            if place in (PLACE.POST, PLACE.CUSTOM_POST):  # potential problems in other cases (e.g. URL encoding of whole URI - including path)
                value = urlencode(value, spaceplus=kb.postSpaceToPlus)
                payload = urlencode(payload, safe='%', spaceplus=kb.postSpaceToPlus)

            value = agent.replacePayload(value, payload)
            postUrlEncode = False
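# Minimal sketch of the two encodings handled above, shown with standard
# library calls only (an approximation, not the project's own escapeJsonValue()
# or urlencode() helpers): a payload headed into a JSON POST body needs JSON
# string escaping, while GET/POST/cookie payloads are URL encoded instead.
def _sketch_encode_payload(payload, json_body=False):
    import json
    from urllib.parse import quote  # Python 3 assumed for this sketch

    if json_body:
        return json.dumps(payload)[1:-1]  # escape quotes/backslashes, drop the surrounding '"'
    return quote(payload, safe='')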
if conf.hpp:
    if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_PLATFORM.ASP, WEB_PLATFORM.ASPX)):
        warnMsg = "HTTP parameter pollution should work only against "
        warnMsg += "ASP(.NET) targets"
        singleTimeWarnMessage(warnMsg)

    if place in (PLACE.GET, PLACE.POST):
        _ = re.escape(PAYLOAD_DELIMITER)
        match = re.search(r"(?P<name>\w+)=%s(?P<value>.+?)%s" % (_, _), value)

        if match:
            payload = match.group("value")

            for splitter in (urlencode(' '), ' '):
                if splitter in payload:
                    prefix, suffix = ("*/", "/*") if splitter == ' ' else (urlencode(_) for _ in ("*/", "/*"))
                    parts = payload.split(splitter)
                    parts[0] = "%s%s" % (parts[0], suffix)
                    parts[-1] = "%s%s=%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[-1])

                    for i in xrange(1, len(parts) - 1):
                        parts[i] = "%s%s=%s%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[i], suffix)

                    payload = "".join(parts)

            for splitter in (urlencode(','), ','):
                payload = payload.replace(splitter, "%s%s=" % (DEFAULT_GET_POST_DELIMITER, match.group("name")))

            value = agent.replacePayload(value, payload)
    else:
        warnMsg = "HTTP parameter pollution works only with regular "
        warnMsg += "GET and POST parameters"
        singleTimeWarnMessage(warnMsg)
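# Rough standalone illustration of the HTTP parameter pollution idea applied
# above (the parameter name, payload and helper are made up): a space-separated
# payload is split across repeated occurrences of the same parameter, with
# inline comment markers so that a backend concatenating duplicate values with
# ',' reassembles the original statement.
def _sketch_hpp_split(name, payload, delimiter="&"):
    # e.g. ("id", "1 UNION SELECT NULL") ->
    # "id=1/*&id=*/UNION/*&id=*/SELECT/*&id=*/NULL"
    parts = payload.split(' ')
    out = ["%s=%s%s" % (name, parts[0], "/*" if len(parts) > 1 else "")]
    for i, part in enumerate(parts[1:], 1):
        out.append("%s=*/%s%s" % (name, part, "/*" if i < len(parts) - 1 else ""))
    return delimiter.join(out)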
if place:
    value = agent.removePayloadDelimiters(value)

if PLACE.GET in conf.parameters:
    get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value
elif place == PLACE.GET:  # Note: for (e.g.) checkWaf() when there are no GET parameters
    get = value

if PLACE.POST in conf.parameters:
    post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value
elif place == PLACE.POST:
    post = value

if PLACE.CUSTOM_POST in conf.parameters:
    post = conf.parameters[PLACE.CUSTOM_POST].replace(kb.customInjectionMark, "") if place != PLACE.CUSTOM_POST or not value else value
    post = post.replace(ASTERISK_MARKER, '*') if post else post

if PLACE.COOKIE in conf.parameters:
    cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value

if PLACE.USER_AGENT in conf.parameters:
    ua = conf.parameters[PLACE.USER_AGENT] if place != PLACE.USER_AGENT or not value else value

if PLACE.REFERER in conf.parameters:
    referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value

if PLACE.HOST in conf.parameters:
    host = conf.parameters[PLACE.HOST] if place != PLACE.HOST or not value else value

if PLACE.URI in conf.parameters:
    uri = conf.url if place != PLACE.URI or not value else value
else:
    uri = conf.url

if value and place == PLACE.CUSTOM_HEADER:
    if value.split(',')[0].capitalize() == PLACE.COOKIE:
        cookie = value.split(',', 1)[-1]
    else:
        auxHeaders[value.split(',')[0]] = value.split(',', 1)[-1]
if conf.csrfToken:
    token = AttribDict()

    def _adjustParameter(paramString, parameter, newValue):
        retVal = paramString

        if urlencode(parameter) in paramString:
            parameter = urlencode(parameter)

        match = re.search(r"%s=[^&]*" % re.escape(parameter), paramString, re.I)
        if match:
            retVal = re.sub(r"(?i)%s" % re.escape(match.group(0)), ("%s=%s" % (parameter, newValue)).replace('\\', r'\\'), paramString)
        else:
            match = re.search(r"(%s[\"']:[\"'])([^\"']+)" % re.escape(parameter), paramString, re.I)
            if match:
                retVal = re.sub(r"(?i)%s" % re.escape(match.group(0)), "%s%s" % (match.group(1), newValue), paramString)

        return retVal
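    # Minimal usage illustration of the parameter adjustment above (sample
    # strings are hypothetical; this standalone helper mirrors only the first,
    # query-string branch of _adjustParameter()):
    def _sketch_adjust_query_parameter(paramString, parameter, newValue):
        # ("id=1&csrftoken=abc", "csrftoken", "def") -> "id=1&csrftoken=def"
        return re.sub(r"(?i)%s=[^&]*" % re.escape(parameter), "%s=%s" % (parameter, newValue), paramString)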
    for attempt in xrange(conf.csrfRetries + 1):
        if token:
            break

        if attempt > 0:
            warnMsg = "unable to find anti-CSRF token '%s' at '%s'" % (conf.csrfToken._original, conf.csrfUrl or conf.url)
            warnMsg += ". sqlmap is going to retry the request"
            logger.warn(warnMsg)

        page, headers, code = Connect.getPage(url=conf.csrfUrl or conf.url, data=conf.data if conf.csrfUrl == conf.url else None, method=conf.csrfMethod or (conf.method if conf.csrfUrl == conf.url else None), cookie=conf.parameters.get(PLACE.COOKIE), direct=True, silent=True, ua=conf.parameters.get(PLACE.USER_AGENT), referer=conf.parameters.get(PLACE.REFERER), host=conf.parameters.get(PLACE.HOST))
        page = urldecode(page)  # for anti-CSRF tokens with special characters in their name (e.g. 'foo:bar=...')

        match = re.search(r"(?i)<input[^>]+\bname=[\"']?(?P<name>%s)\b[^>]*\bvalue=[\"']?(?P<value>[^>'\"]*)" % conf.csrfToken, page or "", re.I)

        if not match:
            match = re.search(r"(?i)<input[^>]+\bvalue=[\"']?(?P<value>[^>'\"]*)[\"']?[^>]*\bname=[\"']?(?P<name>%s)\b" % conf.csrfToken, page or "", re.I)

        if not match:
            match = re.search(r"(?P<name>%s)[\"']:[\"'](?P<value>[^\"']+)" % conf.csrfToken, page or "", re.I)

        if not match:
            match = re.search(r"\b(?P<name>%s)\s*[:=]\s*(?P<value>\w+)" % conf.csrfToken, getUnicode(headers), re.I)

        if not match:
            match = re.search(r"\b(?P<name>%s)\s*=\s*['\"]?(?P<value>[^;'\"]+)" % conf.csrfToken, page or "", re.I)

        if not match:
            match = re.search(r"<meta\s+name=[\"']?(?P<name>%s)[\"']?[^>]+\b(value|content)=[\"']?(?P<value>[^>\"']+)" % conf.csrfToken, page or "", re.I)

        if match:
            token.name, token.value = match.group("name"), match.group("value")

            match = re.search(r"String\.fromCharCode\(([\d+, ]+)\)", token.value)
            if match:
                token.value = "".join(_unichr(int(_)) for _ in match.group(1).replace(' ', "").split(','))
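    # Standalone sketch of the token scraping performed above (hypothetical
    # page content and token name; the real loop tries several patterns,
    # including response headers and <meta> tags, and retries up to
    # --csrf-retries times):
    def _sketch_extract_csrf_token(page, name="csrftoken"):
        match = re.search(r"(?i)<input[^>]+\bname=[\"']?(%s)\b[^>]*\bvalue=[\"']?([^>'\"]*)" % re.escape(name), page or "")
        return match.group(2) if match else None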
    if not token:
        if conf.csrfUrl and conf.csrfToken and conf.csrfUrl != conf.url and code == _http_client.OK:
            if headers and "text/plain" in headers.get(HTTP_HEADER.CONTENT_TYPE, ""):
                token.name = conf.csrfToken
                token.value = page

        if not token and conf.cj and any(re.search(conf.csrfToken, _.name, re.I) for _ in conf.cj):
            for _ in conf.cj:
                if re.search(conf.csrfToken, _.name, re.I):
                    token.name, token.value = _.name, _.value

                    if not any(re.search(conf.csrfToken, ' '.join(_), re.I) for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))):
                        if post:
                            post = "%s%s%s=%s" % (post, conf.paramDel or DEFAULT_GET_POST_DELIMITER, token.name, token.value)
                        elif get:
                            get = "%s%s%s=%s" % (get, conf.paramDel or DEFAULT_GET_POST_DELIMITER, token.name, token.value)
                        else:
                            get = "%s=%s" % (token.name, token.value)

                    break

        if not token:
            errMsg = "anti-CSRF token '%s' can't be found at '%s'" % (conf.csrfToken._original, conf.csrfUrl or conf.url)
            if not conf.csrfUrl:
                errMsg += ". You can try to rerun by providing "
                errMsg += "a valid value for option '--csrf-url'"
            raise SqlmapTokenException(errMsg)
    if token:
        token.value = token.value.strip("'\"")

        for candidate in (PLACE.GET, PLACE.POST, PLACE.CUSTOM_POST, PLACE.URI):
            if candidate in conf.parameters:
                if candidate == PLACE.URI and uri:
                    uri = _adjustParameter(uri, token.name, token.value)
                elif candidate == PLACE.GET and get:
                    get = _adjustParameter(get, token.name, token.value)
                elif candidate in (PLACE.POST, PLACE.CUSTOM_POST) and post:
                    post = _adjustParameter(post, token.name, token.value)

        for i in xrange(len(conf.httpHeaders)):
            if conf.httpHeaders[i][0].lower() == token.name.lower():
                conf.httpHeaders[i] = (conf.httpHeaders[i][0], token.value)
if conf.rParam:
    def _randomizeParameter(paramString, randomParameter):
        retVal = paramString

        match = re.search(r"(\A|\b)%s=(?P<value>[^&;]*)" % re.escape(randomParameter), paramString)
        if match:
            origValue = match.group("value")
            newValue = randomizeParameterValue(origValue) if randomParameter not in kb.randomPool else random.sample(kb.randomPool[randomParameter], 1)[0]
            retVal = re.sub(r"(\A|\b)%s=[^&;]*" % re.escape(randomParameter), "%s=%s" % (randomParameter, newValue), paramString)

        return retVal

    for randomParameter in conf.rParam:
        for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE, PLACE.URI, PLACE.CUSTOM_POST):
            if item in conf.parameters:
                if item == PLACE.GET and get:
                    get = _randomizeParameter(get, randomParameter)
                elif item in (PLACE.POST, PLACE.CUSTOM_POST) and post:
                    post = _randomizeParameter(post, randomParameter)
                elif item == PLACE.COOKIE and cookie:
                    cookie = _randomizeParameter(cookie, randomParameter)
                elif item == PLACE.URI and uri:
                    uri = _randomizeParameter(uri, randomParameter)
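# Rough standalone illustration of the --randomize behaviour implemented above
# (the helper and its value generation are simplified assumptions; the real
# randomizeParameterValue() preserves the original value's format much more
# faithfully and can also draw from a pre-collected random pool):
def _sketch_randomize_parameter(paramString, name):
    match = re.search(r"(\A|\b)%s=(?P<value>[^&;]*)" % re.escape(name), paramString)
    if not match:
        return paramString
    newValue = "".join(random.choice(string.digits if match.group("value").isdigit() else string.ascii_letters) for _ in match.group("value"))
    return re.sub(r"(\A|\b)%s=[^&;]*" % re.escape(name), "%s=%s" % (name, newValue), paramString)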
if conf.evalCode:
    delimiter = conf.paramDel or DEFAULT_GET_POST_DELIMITER
    variables = {"uri": uri, "lastPage": threadData.lastPage, "_locals": locals(), "cookie": cookie}
    originals = {}

    if not get and PLACE.URI in conf.parameters:
        query = _urllib.parse.urlsplit(uri).query or ""
    else:
        query = None

    for item in filterNone((get, post if not kb.postHint else None, query)):
        for part in item.split(delimiter):
            if '=' in part:
                name, value = part.split('=', 1)
                name = name.strip()
                if safeVariableNaming(name) != name:
                    conf.evalCode = re.sub(r"\b%s\b" % re.escape(name), safeVariableNaming(name), conf.evalCode)
                    name = safeVariableNaming(name)
                value = urldecode(value, convall=True, spaceplus=(item == post and kb.postSpaceToPlus))
                variables[name] = value

    if cookie:
        for part in cookie.split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER):
            if '=' in part:
                name, value = part.split('=', 1)
                name = name.strip()
                if safeVariableNaming(name) != name:
                    conf.evalCode = re.sub(r"\b%s\b" % re.escape(name), safeVariableNaming(name), conf.evalCode)
                    name = safeVariableNaming(name)
                value = urldecode(value, convall=True)
                variables[name] = value
    while True:
        try:
            compile(getBytes(conf.evalCode.replace(';', '\n')), "", "exec")
        except SyntaxError as ex:
            if ex.text:
                original = replacement = ex.text.strip()

                if '=' in original:
                    name, value = original.split('=', 1)
                    name = name.strip()
                    if safeVariableNaming(name) != name:
                        replacement = re.sub(r"\b%s\b" % re.escape(name), safeVariableNaming(name), replacement)
                else:
                    for _ in re.findall(r"[A-Za-z_]+", original)[::-1]:
                        if safeVariableNaming(_) != _:
                            replacement = replacement.replace(_, safeVariableNaming(_))
                            break

                if original == replacement:
                    conf.evalCode = conf.evalCode.replace(EVALCODE_ENCODED_PREFIX, "")
                    break
                else:
                    conf.evalCode = conf.evalCode.replace(getUnicode(ex.text.strip(), UNICODE_ENCODING), replacement)
            else:
                break
        else:
            break
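    # Standalone sketch of the safe-variable-renaming idea used above (the
    # "EVAL_" prefix and hex encoding are assumptions, not the project's actual
    # safeVariableNaming()/EVALCODE_ENCODED_PREFIX internals): parameter names
    # that are not valid Python identifiers (e.g. "user-id") are mapped to
    # encoded placeholders before compile()/exec and mapped back afterwards.
    def _sketch_safe_variable_name(name, prefix="EVAL_"):
        if re.search(r"\A[A-Za-z_][A-Za-z0-9_]*\Z", name):
            return name
        return "%s%s" % (prefix, binascii.hexlify(name.encode("utf8")).decode("ascii"))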
    originals.update(variables)
    evaluateCode(conf.evalCode, variables)

    for variable in list(variables.keys()):
        if unsafeVariableNaming(variable) != variable:
            value = variables[variable]
            del variables[variable]
            variables[unsafeVariableNaming(variable)] = value

    uri = variables["uri"]
    cookie = variables["cookie"]
    for name, value in variables.items():
        if name != "__builtins__" and originals.get(name, "") != value:
            if isinstance(value, (int, float, six.string_types, six.binary_type)):
                found = False
                value = getUnicode(value, UNICODE_ENCODING)

                if kb.postHint and re.search(r"\b%s\b" % re.escape(name), post or ""):
                    if kb.postHint in (POST_HINT.XML, POST_HINT.SOAP):
                        if re.search(r"<%s\b" % re.escape(name), post):
                            found = True
                            post = re.sub(r"(?s)(<%s\b[^>]*>)(.*?)(</%s)" % (re.escape(name), re.escape(name)), r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
                        elif re.search(r"\b%s>" % re.escape(name), post):
                            found = True
                            post = re.sub(r"(?s)(\b%s>)(.*?)(</[^<]*\b%s>)" % (re.escape(name), re.escape(name)), r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)

                    regex = r"\b(%s)\b([^\w]+)(\w+)" % re.escape(name)
                    if not found and re.search(regex, (post or "")):
                        found = True
                        post = re.sub(regex, r"\g<1>\g<2>%s" % value.replace('\\', r'\\'), post)

                regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), re.escape(name), re.escape(delimiter))
                if not found and re.search(regex, (post or "")):
                    found = True
                    post = re.sub(regex, r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)

                if re.search(regex, (get or "")):
                    found = True
                    get = re.sub(regex, r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), get)

                if re.search(regex, (query or "")):
                    found = True
                    uri = re.sub(regex.replace(r"\A", r"\?"), r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), uri)

                regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), re.escape(name), re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER))
                if re.search(regex, (cookie or "")):
                    found = True
                    cookie = re.sub(regex, r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), cookie)

                if not found:
                    if post is not None:
                        post += "%s%s=%s" % (delimiter, name, value)
                    elif get is not None:
                        get += "%s%s=%s" % (delimiter, name, value)
                    elif cookie is not None:
                        cookie += "%s%s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, name, value)
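# Usage-style illustration of the --eval write-back handled above (parameter
# names, the hashing scheme and the helper are hypothetical): the user-supplied
# snippet runs before every request with request parameters exposed as local
# variables, e.g.
#
#   --eval="import hashlib; hash=hashlib.md5(id.encode()).hexdigest()"
#
# and any variable it changes is substituted back into the outgoing data:
def _sketch_writeback_variable(paramString, name, value, delimiter="&"):
    # ("id=1&hash=OLD", "hash", "NEW") -> "id=1&hash=NEW"
    regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), re.escape(name), re.escape(delimiter))
    if re.search(regex, paramString):
        return re.sub(regex, r"\g<1>%s\g<3>" % value, paramString)
    return "%s%s%s=%s" % (paramString, delimiter, name, value)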
if not conf.skipUrlEncode:
    get = urlencode(get, limit=True)

if post is not None:
    if place not in (PLACE.POST, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE):
        post = getattr(post, UNENCODED_ORIGINAL_VALUE)
    elif postUrlEncode:
        post = urlencode(post, spaceplus=kb.postSpaceToPlus)
if timeBasedCompare and not conf.disableStats:
    if len(kb.responseTimes.get(kb.responseTimeMode, [])) < MIN_TIME_RESPONSES:
        clearConsoleLine()

        kb.responseTimes.setdefault(kb.responseTimeMode, [])

        if conf.tor:
            warnMsg = "it's highly recommended to avoid usage of switch '--tor' for "
            warnMsg += "time-based injections because of inherent high latency time"
            singleTimeWarnMessage(warnMsg)

        warnMsg = "[%s] [WARNING] %stime-based comparison requires " % (time.strftime("%X"), "(case) " if kb.responseTimeMode else "")
        warnMsg += "%s statistical model, please wait" % ("larger" if len(kb.responseTimes) == 1 else "reset of")
        dataToStdout(warnMsg)

        while len(kb.responseTimes[kb.responseTimeMode]) < MIN_TIME_RESPONSES:
            value = kb.responseTimePayload.replace(RANDOM_INTEGER_MARKER, str(randomInt(6))).replace(RANDOM_STRING_MARKER, randomStr()) if kb.responseTimePayload else kb.responseTimePayload
            Connect.queryPage(value=value, content=True, raise404=False)
            dataToStdout('.')

        dataToStdout(" (done)\n")

    elif not kb.testMode:
        warnMsg = "it is very important to not stress the network connection "
        warnMsg += "during usage of time-based payloads to prevent potential "
        warnMsg += "disruptions"
        singleTimeWarnMessage(warnMsg)

    if not kb.laggingChecked:
        kb.laggingChecked = True

        deviation = stdev(kb.responseTimes[kb.responseTimeMode])

        if deviation > WARN_TIME_STDEV:
            kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE

            warnMsg = "considerable lagging has been detected "
            warnMsg += "in connection response(s). Please use as high "
            warnMsg += "value for option '--time-sec' as possible (e.g. "
            warnMsg += "10 or more)"
            logger.critical(warnMsg)
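# Compact sketch of the statistical model referenced above (the sample size and
# multiplier are illustrative, not the project's MIN_TIME_RESPONSES /
# deviation constants): baseline response times are collected first, and a
# later response counts as "delayed" when it exceeds the baseline mean plus a
# multiple of its standard deviation.
def _sketch_is_delayed(baseline, measured, k=3):
    import statistics
    return measured > statistics.mean(baseline) + k * statistics.stdev(baseline)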
if (conf.safeFreq or 0) > 0:
    kb.queryCounter += 1

    if kb.queryCounter % conf.safeFreq == 0:
        if conf.safeUrl:
            Connect.getPage(url=conf.safeUrl, post=conf.safePost, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host)
        elif kb.safeReq:
            Connect.getPage(url=kb.safeReq.url, post=kb.safeReq.post, method=kb.safeReq.method, auxHeaders=kb.safeReq.headers)
start = time.time()

if kb.nullConnection and not content and not response and not timeBasedCompare:
    noteResponseTime = False

    try:
        pushValue(kb.pageCompress)
        kb.pageCompress = False

        if kb.nullConnection == NULLCONNECTION.HEAD:
            method = HTTPMETHOD.HEAD
        elif kb.nullConnection == NULLCONNECTION.RANGE:
            auxHeaders[HTTP_HEADER.RANGE] = "bytes=-1"

        _, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ))

        if headers:
            try:
                if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and headers.get(HTTP_HEADER.CONTENT_LENGTH):
                    pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH].split(',')[0])
                elif kb.nullConnection == NULLCONNECTION.RANGE and headers.get(HTTP_HEADER.CONTENT_RANGE):
                    pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:])
            except ValueError:
                pass
    finally:
        kb.pageCompress = popValue()
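# Standalone sketch of the "null connection" trick used above, with the
# standard library only (hypothetical URL; error handling reduced to a bare
# fallback): the page length is read from Content-Length of a HEAD request, or
# from the total reported in Content-Range when asking for a single byte, so
# the body itself is never transferred.
def _sketch_null_connection_length(url):
    from urllib.request import Request, urlopen  # Python 3 assumed for this sketch
    try:
        response = urlopen(Request(url, method="HEAD"))
        return int(response.headers.get("Content-Length") or 0)
    except Exception:
        response = urlopen(Request(url, headers={"Range": "bytes=-1"}))
        content_range = response.headers.get("Content-Range") or ""
        return int(content_range.rsplit('/', 1)[-1]) if '/' in content_range else None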
if pageLength is None:
    try:
        page, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)
    except MemoryError:
        page, headers, code = None, None, None
        warnMsg = "site returned insanely large response"
        if kb.testMode:
            warnMsg += " in testing phase. This is a common "
            warnMsg += "behavior in custom WAF/IPS solutions"
        singleTimeWarnMessage(warnMsg)

if not ignoreSecondOrder:
    if conf.secondUrl:
        page, headers, code = Connect.getPage(url=conf.secondUrl, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)
    elif kb.secondReq and IPS_WAF_CHECK_PAYLOAD not in _urllib.parse.unquote(value or ""):
        def _(value):
            if kb.customInjectionMark in (value or ""):
                if payload is None:
                    value = value.replace(kb.customInjectionMark, "")
                else:
                    value = re.sub(r"\w*%s" % re.escape(kb.customInjectionMark), payload, value)
            return value

        page, headers, code = Connect.getPage(url=_(kb.secondReq[0]), post=_(kb.secondReq[2]), method=kb.secondReq[1], cookie=kb.secondReq[3], silent=silent, auxHeaders=dict(auxHeaders, **dict(kb.secondReq[4])), response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)
threadData.lastQueryDuration = calculateDeltaSeconds(start)

kb.originalCode = code if kb.originalCode is None else kb.originalCode
kb.originalPage = page if kb.originalPage is None else kb.originalPage

if kb.testMode:
    kb.testQueryCount += 1

if timeBasedCompare:
    return wasLastResponseDelayed()
elif noteResponseTime:
    kb.responseTimes.setdefault(kb.responseTimeMode, [])
    kb.responseTimes[kb.responseTimeMode].append(threadData.lastQueryDuration)

    if len(kb.responseTimes[kb.responseTimeMode]) > MAX_TIME_RESPONSES:
        kb.responseTimes[kb.responseTimeMode] = kb.responseTimes[kb.responseTimeMode][-MAX_TIME_RESPONSES // 2:]
if not response and removeReflection:
    page = removeReflectiveValues(page, payload)

kb.maxConnectionsFlag = re.search(MAX_CONNECTIONS_REGEX, page or "", re.I) is not None

message = extractRegexResult(PERMISSION_DENIED_REGEX, page or "", re.I)
if message:
    kb.permissionFlag = True
    singleTimeWarnMessage("potential permission problems detected ('%s')" % message)

patchHeaders(headers)

if content or response:
    return page, headers, code

if getRatioValue:
    return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength)
else:
    return comparison(page, headers, code, getRatioValue, pageLength)

def setHTTPHandlers():  # Cross-referenced function
    raise NotImplementedError