2013-02-14 15:32:17 +04:00
|
|
|
#!/usr/bin/env python
|
2010-03-15 17:24:43 +03:00
|
|
|
|
|
|
|
"""
|
2017-01-02 16:19:18 +03:00
|
|
|
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
|
2010-10-15 03:18:29 +04:00
|
|
|
See the file 'doc/COPYING' for copying permission
|
2010-03-15 17:24:43 +03:00
|
|
|
"""
|
|
|
|
|
2017-03-27 23:36:04 +03:00
|
|
|
import re
|
2017-07-04 13:14:17 +03:00
|
|
|
import time
|
2015-02-04 17:01:03 +03:00
|
|
|
import types
|
2010-03-15 17:24:43 +03:00
|
|
|
import urllib2
|
2011-05-24 03:20:03 +04:00
|
|
|
import urlparse
|
2010-03-15 17:24:43 +03:00
|
|
|
|
2014-06-12 11:08:55 +04:00
|
|
|
from StringIO import StringIO
|
|
|
|
|
2013-08-01 21:48:20 +04:00
|
|
|
from lib.core.data import conf
|
2012-03-15 15:10:58 +04:00
|
|
|
from lib.core.data import kb
|
2011-03-17 14:25:37 +03:00
|
|
|
from lib.core.data import logger
|
2011-11-11 15:28:27 +04:00
|
|
|
from lib.core.common import getHostHeader
|
2011-03-17 15:21:27 +03:00
|
|
|
from lib.core.common import getUnicode
|
2011-09-28 12:13:46 +04:00
|
|
|
from lib.core.common import logHTTPTraffic
|
2012-03-15 15:10:58 +04:00
|
|
|
from lib.core.common import readInput
|
2012-12-07 14:52:21 +04:00
|
|
|
from lib.core.enums import CUSTOM_LOGGING
|
2013-03-20 14:10:24 +04:00
|
|
|
from lib.core.enums import HTTP_HEADER
|
2013-01-18 00:49:58 +04:00
|
|
|
from lib.core.enums import HTTPMETHOD
|
2012-03-15 15:10:58 +04:00
|
|
|
from lib.core.enums import REDIRECTION
|
2012-12-06 17:14:19 +04:00
|
|
|
from lib.core.exception import SqlmapConnectionException
|
2013-02-28 16:51:08 +04:00
|
|
|
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
|
2012-12-07 15:14:33 +04:00
|
|
|
from lib.core.settings import MAX_CONNECTION_CHUNK_SIZE
|
2012-12-07 18:29:54 +04:00
|
|
|
from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
|
2012-03-15 15:10:58 +04:00
|
|
|
from lib.core.settings import MAX_SINGLE_URL_REDIRECTIONS
|
|
|
|
from lib.core.settings import MAX_TOTAL_REDIRECTIONS
|
2011-03-17 09:39:05 +03:00
|
|
|
from lib.core.threads import getCurrentThreadData
|
2011-03-18 03:24:02 +03:00
|
|
|
from lib.request.basic import decodePage
|
2015-10-15 14:07:43 +03:00
|
|
|
from lib.request.basic import parseResponse
|
2010-07-19 16:38:30 +04:00
|
|
|
|
2010-03-15 17:24:43 +03:00
|
|
|
class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
|
2011-11-11 15:28:27 +04:00
|
|
|
def _get_header_redirect(self, headers):
|
|
|
|
retVal = None
|
|
|
|
|
2012-03-15 15:10:58 +04:00
|
|
|
if headers:
|
|
|
|
if "location" in headers:
|
2015-11-09 16:11:08 +03:00
|
|
|
retVal = headers.getheaders("location")[0]
|
2012-03-15 15:10:58 +04:00
|
|
|
elif "uri" in headers:
|
2015-11-09 16:11:08 +03:00
|
|
|
retVal = headers.getheaders("uri")[0]
|
2011-11-11 15:28:27 +04:00
|
|
|
|
|
|
|
return retVal
|
|
|
|
|
2013-01-18 00:49:58 +04:00
|
|
|
def _ask_redirect_choice(self, redcode, redurl, method):
    """
    Asks the user (once per run, answers cached in kb) whether the
    redirect should be followed and, for POST requests, whether the
    original POST data should be resent to the new location.
    """

    with kb.locks.redirect:
        if kb.redirectChoice is None:
            msg = "sqlmap got a %d redirect to '%s'. Do you want to follow? [Y/n] " % (redcode, redurl)
            follow = readInput(msg, default='Y', boolean=True)
            kb.redirectChoice = REDIRECTION.YES if follow else REDIRECTION.NO

        resendUndecided = kb.resendPostOnRedirect is None
        if resendUndecided and kb.redirectChoice == REDIRECTION.YES and method == HTTPMETHOD.POST:
            # Default answer depends on whether an original (pre-redirect) page was stored
            msg = "redirect is a result of a POST request. Do you want to resend original POST data to a new location? [%s] " % ("Y/n" if not kb.originalPage else "y/N")
            kb.resendPostOnRedirect = readInput(msg, default=('Y' if not kb.originalPage else 'N'), boolean=True)

            if kb.resendPostOnRedirect:
                # Swap in the POST-preserving redirect request builder
                self.redirect_request = self._redirect_request
def _redirect_request(self, req, fp, code, msg, headers, newurl):
    """
    Builds the follow-up request for a redirect while preserving the
    original request's POST data and headers (used only when the user
    chose to resend POST data on redirect).
    """

    # Percent-encode spaces so strict servers accept the target URL
    target = newurl.replace(' ', '%20')
    return urllib2.Request(target,
                           data=req.data,
                           headers=req.headers,
                           origin_req_host=req.get_origin_req_host())
def http_error_302(self, req, fp, code, msg, headers):
    """
    Handles HTTP redirect responses (shared by 301/302/303/307 via the
    aliases defined below).

    Reads and logs the redirect response, optionally asks the user
    whether to follow (and whether to resend POST data), carries
    Set-Cookie values and the Host header over to the follow-up request
    and finally delegates to urllib2's stock redirect handling.

    Returns the response object enriched with `redcode` and `redurl`
    attributes consumed by callers.
    """

    start = time.time()
    content = None

    # --ignore-redirects makes redirects be logged but never followed
    redurl = self._get_header_redirect(headers) if not conf.ignoreRedirects else None

    try:
        # Read a bounded amount of the response body for logging/parsing
        content = fp.read(MAX_CONNECTION_TOTAL_SIZE)
    except Exception, msg:
        dbgMsg = "there was a problem while retrieving "
        dbgMsg += "redirect response content (%s)" % msg
        logger.debug(dbgMsg)
    finally:
        if content:
            try:  # try to write it back to the read buffer so we could reuse it in further steps
                fp.fp._rbuf.truncate(0)
                fp.fp._rbuf.write(content)
            except:
                pass

    # Decompress/decode body according to response headers
    content = decodePage(content, headers.get(HTTP_HEADER.CONTENT_ENCODING), headers.get(HTTP_HEADER.CONTENT_TYPE))

    threadData = getCurrentThreadData()
    threadData.lastRedirectMsg = (threadData.lastRequestUID, content)

    # Build the traffic-log entry for this redirect response
    redirectMsg = "HTTP redirect "
    redirectMsg += "[#%d] (%d %s):\r\n" % (threadData.lastRequestUID, code, getUnicode(msg))

    if headers:
        logHeaders = "\r\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in headers.items())
    else:
        logHeaders = ""

    redirectMsg += logHeaders
    if content:
        # Log at most one chunk of the body
        redirectMsg += "\r\n\r\n%s" % getUnicode(content[:MAX_CONNECTION_CHUNK_SIZE])

    logHTTPTraffic(threadData.lastRequestMsg, redirectMsg, start, time.time())
    logger.log(CUSTOM_LOGGING.TRAFFIC_IN, redirectMsg)

    if redurl:
        try:
            # Resolve relative redirect targets against the original URL
            if not urlparse.urlsplit(redurl).netloc:
                redurl = urlparse.urljoin(req.get_full_url(), redurl)

            self._infinite_loop_check(req)
            self._ask_redirect_choice(code, redurl, req.get_method())
        except ValueError:
            # Unparsable redirect target - fall back to original response
            redurl = None
            result = fp

    if redurl and kb.redirectChoice == REDIRECTION.YES:
        parseResponse(content, headers)

        req.headers[HTTP_HEADER.HOST] = getHostHeader(redurl)
        if headers and HTTP_HEADER.SET_COOKIE in headers:
            # Carry the (first) freshly set cookie over to the follow-up request
            delimiter = conf.cookieDel or DEFAULT_COOKIE_DELIMITER
            _ = headers[HTTP_HEADER.SET_COOKIE].split(delimiter)[0]
            if HTTP_HEADER.COOKIE not in req.headers:
                req.headers[HTTP_HEADER.COOKIE] = _
            else:
                # Replace any existing cookie of the same name, then squash duplicate delimiters
                req.headers[HTTP_HEADER.COOKIE] = re.sub("%s{2,}" % delimiter, delimiter, ("%s%s%s" % (re.sub(r"\b%s=[^%s]*%s?" % (re.escape(_.split('=')[0]), delimiter, delimiter), "", req.headers[HTTP_HEADER.COOKIE]), delimiter, _)).strip(delimiter))
        try:
            result = urllib2.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers)
        except urllib2.HTTPError, e:
            result = e

            # Dirty hack for http://bugs.python.org/issue15701
            try:
                result.info()
            except AttributeError:
                def _(self):
                    return getattr(self, "hdrs") or {}
                result.info = types.MethodType(_, result)

            if not hasattr(result, "read"):
                def _(self, length=None):
                    return e.msg
                result.read = types.MethodType(_, result)

            if not getattr(result, "url", None):
                result.url = redurl

            if not getattr(result, "code", None):
                # Sentinel status code for HTTPError objects lacking one
                result.code = 999
        except:
            # Any other failure: give back the original response with a drained body
            redurl = None
            result = fp
            fp.read = StringIO("").read
    else:
        # Redirect refused/absent - return the original response
        result = fp

    threadData.lastRedirectURL = (threadData.lastRequestUID, redurl)

    result.redcode = code
    result.redurl = redurl
    return result
# 301, 303 and 307 redirects are handled identically to 302
http_error_301 = http_error_303 = http_error_307 = http_error_302
def _infinite_loop_check(self, req):
|
|
|
|
if hasattr(req, 'redirect_dict') and (req.redirect_dict.get(req.get_full_url(), 0) >= MAX_SINGLE_URL_REDIRECTIONS or len(req.redirect_dict) >= MAX_TOTAL_REDIRECTIONS):
|
2011-04-30 17:20:05 +04:00
|
|
|
errMsg = "infinite redirect loop detected (%s). " % ", ".join(item for item in req.redirect_dict.keys())
|
2014-01-02 14:06:19 +04:00
|
|
|
errMsg += "Please check all provided parameters and/or provide missing ones"
|
2013-01-04 02:20:55 +04:00
|
|
|
raise SqlmapConnectionException(errMsg)
|