sqlmap/lib/request/redirecthandler.py

#!/usr/bin/env python

"""
Copyright (c) 2006-2023 sqlmap developers (https://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

import io
import time
import types

from lib.core.common import getHostHeader
from lib.core.common import getSafeExString
from lib.core.common import logHTTPTraffic
from lib.core.common import readInput
from lib.core.convert import getBytes
from lib.core.convert import getUnicode
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.enums import CUSTOM_LOGGING
from lib.core.enums import HTTP_HEADER
from lib.core.enums import HTTPMETHOD
from lib.core.enums import REDIRECTION
from lib.core.exception import SqlmapConnectionException
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
from lib.core.settings import MAX_CONNECTION_READ_SIZE
from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
from lib.core.settings import MAX_SINGLE_URL_REDIRECTIONS
from lib.core.settings import MAX_TOTAL_REDIRECTIONS
from lib.core.threads import getCurrentThreadData
from lib.request.basic import decodePage
from lib.request.basic import parseResponse
from thirdparty import six
from thirdparty.six.moves import urllib as _urllib

class SmartRedirectHandler(_urllib.request.HTTPRedirectHandler):
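    # The redirect target can arrive either in the standard "Location" header or
    # in the (rarely used) "URI" header; "Location" takes precedence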
    def _get_header_redirect(self, headers):
        retVal = None

        if headers:
            if HTTP_HEADER.LOCATION in headers:
                retVal = headers[HTTP_HEADER.LOCATION]
            elif HTTP_HEADER.URI in headers:
                retVal = headers[HTTP_HEADER.URI]

        return retVal

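    # Ask the user (only once, guarded by a lock so parallel threads do not repeat
    # the prompt) whether redirects should be followed and, when the redirect was
    # caused by a POST, whether the original POST data should be re-sent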
    def _ask_redirect_choice(self, redcode, redurl, method):
        with kb.locks.redirect:
            if kb.choices.redirect is None:
                msg = "got a %d redirect to " % redcode
                msg += "'%s'. Do you want to follow? [Y/n] " % redurl

                kb.choices.redirect = REDIRECTION.YES if readInput(msg, default='Y', boolean=True) else REDIRECTION.NO

            if kb.choices.redirect == REDIRECTION.YES and method == HTTPMETHOD.POST and kb.resendPostOnRedirect is None:
                msg = "redirect is a result of a "
                msg += "POST request. Do you want to "
                msg += "resend original POST data to a new "
                msg += "location? [%s] " % ("Y/n" if not kb.originalPage else "y/N")

                kb.resendPostOnRedirect = readInput(msg, default=('Y' if not kb.originalPage else 'N'), boolean=True)

            if kb.resendPostOnRedirect:
                self.redirect_request = self._redirect_request

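    # Used in place of the inherited redirect_request() when the user chose to
    # re-send POST data: reuse the original body and headers and percent-encode
    # stray spaces in the new URL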
    def _redirect_request(self, req, fp, code, msg, headers, newurl):
        return _urllib.request.Request(newurl.replace(' ', '%20'), data=req.data, headers=req.headers, origin_req_host=req.get_origin_req_host())

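    # Main redirect hook: read and log the intermediate response, consult the user,
    # carry the session cookies over to the new location and finally delegate the
    # actual redirect to the stock urllib handler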
    def http_error_302(self, req, fp, code, msg, headers):
        start = time.time()

        content = None
        redurl = self._get_header_redirect(headers) if not conf.ignoreRedirects else None

        try:
            content = fp.read(MAX_CONNECTION_TOTAL_SIZE)
        except:  # e.g. IncompleteRead
            content = b""
        finally:
            if content:
                try:  # try to write it back to the read buffer so we could reuse it in further steps
                    fp.fp._rbuf.truncate(0)
                    fp.fp._rbuf.write(content)
                except:
                    pass

        content = decodePage(content, headers.get(HTTP_HEADER.CONTENT_ENCODING), headers.get(HTTP_HEADER.CONTENT_TYPE))

        threadData = getCurrentThreadData()
        threadData.lastRedirectMsg = (threadData.lastRequestUID, content)

        redirectMsg = "HTTP redirect "
        redirectMsg += "[#%d] (%d %s):\r\n" % (threadData.lastRequestUID, code, getUnicode(msg))

        if headers:
            logHeaders = "\r\n".join("%s: %s" % (getUnicode(key.capitalize() if hasattr(key, "capitalize") else key), getUnicode(value)) for (key, value) in headers.items())
        else:
            logHeaders = ""

        redirectMsg += logHeaders

        if content:
            redirectMsg += "\r\n\r\n%s" % getUnicode(content[:MAX_CONNECTION_READ_SIZE])

        logHTTPTraffic(threadData.lastRequestMsg, redirectMsg, start, time.time())

        logger.log(CUSTOM_LOGGING.TRAFFIC_IN, redirectMsg)

        if redurl:
            try:
                if not _urllib.parse.urlsplit(redurl).netloc:
                    redurl = _urllib.parse.urljoin(req.get_full_url(), redurl)

                self._infinite_loop_check(req)
                self._ask_redirect_choice(code, redurl, req.get_method())
            except ValueError:
                redurl = None
                result = fp

        if redurl and kb.choices.redirect == REDIRECTION.YES:
            parseResponse(content, headers)

            req.headers[HTTP_HEADER.HOST] = getHostHeader(redurl)

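            # Merge Set-Cookie values from the redirect response into the request's
            # Cookie header so the (possibly new) session survives the redirect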
            if headers and HTTP_HEADER.SET_COOKIE in headers:
                cookies = dict()
                delimiter = conf.cookieDel or DEFAULT_COOKIE_DELIMITER
                last = None

                for part in getUnicode(req.headers.get(HTTP_HEADER.COOKIE, "")).split(delimiter) + ([headers[HTTP_HEADER.SET_COOKIE]] if HTTP_HEADER.SET_COOKIE in headers else []):
                    if '=' in part:
                        part = part.strip()
                        key, value = part.split('=', 1)
                        cookies[key] = value
                        last = key
                    elif last:
                        cookies[last] += "%s%s" % (delimiter, part)

                req.headers[HTTP_HEADER.COOKIE] = delimiter.join("%s=%s" % (key, cookies[key]) for key in cookies)

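            # Let the stock handler perform the actual redirect; if the final
            # response is an HTTP error, use the error object itself as the result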
            try:
                result = _urllib.request.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers)
            except _urllib.error.HTTPError as ex:
                result = ex

                # Dirty hack for https://github.com/sqlmapproject/sqlmap/issues/4046
                try:
                    hasattr(result, "read")
                except KeyError:
                    class _(object):
                        pass
                    result = _()

                # Dirty hack for http://bugs.python.org/issue15701
                try:
                    result.info()
                except AttributeError:
                    def _(self):
                        return getattr(self, "hdrs", {})

                    result.info = types.MethodType(_, result)

                if not hasattr(result, "read"):
                    def _(self, length=None):
                        try:
                            retVal = getSafeExString(ex)  # Note: pyflakes mistakenly marks 'ex' as undefined (NOTE: tested in both Python2 and Python3)
                        except:
                            retVal = ""

                        return getBytes(retVal)

                    result.read = types.MethodType(_, result)

                if not getattr(result, "url", None):
                    result.url = redurl

                if not getattr(result, "code", None):
                    result.code = 999
            except:
                redurl = None
                result = fp
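                # make any further reads of the original (already consumed) response return empty content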
                fp.read = io.BytesIO(b"").read
        else:
            result = fp

        threadData.lastRedirectURL = (threadData.lastRequestUID, redurl)

        result.redcode = code
        result.redurl = getUnicode(redurl) if six.PY3 else redurl
        return result

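    # 301, 303 and 307 redirects are handled exactly like 302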
    http_error_301 = http_error_303 = http_error_307 = http_error_302

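    # Raise when the same URL has been redirected to too many times or the total
    # number of visited redirect URLs exceeds the configured limit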
    def _infinite_loop_check(self, req):
        if hasattr(req, 'redirect_dict') and (req.redirect_dict.get(req.get_full_url(), 0) >= MAX_SINGLE_URL_REDIRECTIONS or len(req.redirect_dict) >= MAX_TOTAL_REDIRECTIONS):
            errMsg = "infinite redirect loop detected (%s). " % ", ".join(item for item in req.redirect_dict.keys())
            errMsg += "Please check all provided parameters and/or provide missing ones"
            raise SqlmapConnectionException(errMsg)
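
# Usage sketch (illustrative only, not part of this module): a handler like
# SmartRedirectHandler is meant to be installed into urllib's opener chain, which
# sqlmap does elsewhere in the codebase. Roughly:
#
#     opener = _urllib.request.build_opener(SmartRedirectHandler())
#     response = opener.open("http://www.example.com/redirecting-page")
#
# Responses that went through a redirect are then annotated with the custom
# "redcode" and "redurl" attributes used by the rest of the request machinery.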