#!/usr/bin/env python

"""
$Id$

Copyright (c) 2006-2011 sqlmap developers (http://www.sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

import urllib2
import urlparse

from lib.core.common import getUnicode
from lib.core.data import conf
from lib.core.data import logger
from lib.core.enums import HTTPHEADER
from lib.core.exception import sqlmapConnectionException
from lib.core.threads import getCurrentThreadData
from lib.request.basic import decodePage

class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
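    """
    urllib2 redirect handler that logs intermediate redirect responses,
    records the redirect target, code and cookies on the result, and
    aborts on (apparently) infinite redirect loops
    """
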
    # maximum number of redirections to any single URL
    # this is needed because of the state that cookies introduce
    max_repeats = 4

    # maximum total number of redirections (regardless of URL) before
    # assuming we're in a loop
    max_redirections = 10

    def common_http_redirect(self, result, headers, code, content, msg):
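        """
        Common handling for 301/302 redirects: logs the intermediate
        response and decorates the final result with redurl, redcode
        and setcookie attributes
        """
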
        # decode the redirect body according to its declared
        # Content-Encoding and Content-Type before caching it
        content = decodePage(content, headers.get(HTTPHEADER.CONTENT_ENCODING), headers.get(HTTPHEADER.CONTENT_TYPE))

        # expose the last redirect response to the rest of the engine
        # via per-thread data
        threadData = getCurrentThreadData()
        threadData.lastRedirectMsg = (threadData.lastRequestUID, content)

        responseMsg = "HTTP response "
        responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, getUnicode(msg))

        if headers:
            logHeaders = "\n".join(["%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in headers.items()])
        else:
            logHeaders = ""

        # headers are always logged; the response body is included only
        # at verbosity levels above 5
        if conf.verbose <= 5:
            responseMsg += getUnicode(logHeaders)
        else:
            responseMsg += "%s\n\n%s\n" % (logHeaders, content)

        logger.log(7, responseMsg)

        if result:
            # the redirect target comes from the Location (or legacy URI)
            # header; any query string is stripped from it
            if "location" in headers:
                result.redurl = headers.getheaders("location")[0].split("?")[0]
            elif "uri" in headers:
                result.redurl = headers.getheaders("uri")[0].split("?")[0]

            # relative redirect targets are resolved against the original URL
            if hasattr(result, 'redurl'):
                if not urlparse.urlsplit(result.redurl).netloc:
                    result.redurl = urlparse.urljoin(conf.url, result.redurl)

            # preserve cookies set during the redirection
            if "set-cookie" in headers:
                result.setcookie = headers["set-cookie"].split("; path")[0]

            result.redcode = code

        return result

    def http_error_301(self, req, fp, code, msg, headers):
        self.infinite_loop_check(req)

        # read the response body up front, as the parent handler consumes fp
        content = None

        try:
            content = fp.read()
        except Exception, ex:
            dbgMsg = "there was a problem while retrieving "
            dbgMsg += "redirect response content (%s)" % ex
            logger.debug(dbgMsg)

        result = urllib2.HTTPRedirectHandler.http_error_301(self, req, fp, code, msg, headers)
        return self.common_http_redirect(result, headers, code, content, msg)

    def http_error_302(self, req, fp, code, msg, headers):
        self.infinite_loop_check(req)

        content = None

        try:
            content = fp.read()
        except Exception, ex:
            dbgMsg = "there was a problem while retrieving "
            dbgMsg += "redirect response content (%s)" % ex
            logger.debug(dbgMsg)

        result = urllib2.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers)
        return self.common_http_redirect(result, headers, code, content, msg)

    def infinite_loop_check(self, req):
        # urllib2's HTTPRedirectHandler keeps a per-request redirect_dict
        # mapping visited URLs to hit counts; give up once any single URL
        # has been visited max_repeats times or the redirect chain grows
        # past max_redirections entries
        if hasattr(req, 'redirect_dict') and (req.redirect_dict.get(req.get_full_url(), 0) >= self.max_repeats or len(req.redirect_dict) >= self.max_redirections):
            errMsg = "infinite redirect loop detected (%s). " % ", ".join(req.redirect_dict.keys())
            errMsg += "please check all provided parameters and/or provide missing ones."
            raise sqlmapConnectionException, errMsg
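
# A minimal usage sketch (for illustration only; inside sqlmap this handler
# is wired into the shared urllib2 opener chain elsewhere in the codebase):
#
#   opener = urllib2.build_opener(SmartRedirectHandler())
#   response = opener.open("http://target/redirecting-page")
#
# On a followed 301/302 the returned response carries the extra attributes
# set above: response.redurl, response.redcode and, if the server sent a
# Set-Cookie header, response.setcookie.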