#!/usr/bin/env python

"""
$Id$

Copyright (c) 2006-2010 sqlmap developers (http://sqlmap.sourceforge.net/)
See the file 'doc/COPYING' for copying permission
"""

import httplib
import re
import socket
import time
import urllib2
import urlparse
import traceback

from extra.multipart import multipartpost
from lib.core.agent import agent
from lib.core.common import average
from lib.core.common import calculateDeltaSeconds
from lib.core.common import clearConsoleLine
from lib.core.common import cpuThrottle
from lib.core.common import extractRegexResult
from lib.core.common import getCurrentThreadData
from lib.core.common import getFilteredPageContent
from lib.core.common import unicodeToSafeHTMLValue
from lib.core.common import getUnicode
from lib.core.common import logHTTPTraffic
from lib.core.common import parseTargetUrl
from lib.core.common import readInput
from lib.core.common import removeReflectiveValues
from lib.core.common import stdev
from lib.core.common import wasLastRequestDelayed
from lib.core.convert import urlencode
from lib.core.common import urlEncodeCookieValues
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.enums import HTTPHEADER
from lib.core.enums import HTTPMETHOD
from lib.core.enums import NULLCONNECTION
from lib.core.enums import PLACE
from lib.core.exception import sqlmapConnectionException
from lib.core.exception import sqlmapSyntaxException
from lib.core.settings import HTTP_SILENT_TIMEOUT
from lib.core.settings import META_REFRESH_REGEX
from lib.core.settings import MIN_TIME_RESPONSES
from lib.core.settings import URI_HTTP_HEADER
from lib.core.threads import getCurrentThreadData
from lib.request.basic import decodePage
from lib.request.basic import forgeHeaders
from lib.request.basic import processResponse
from lib.request.direct import direct
from lib.request.comparison import comparison
from lib.request.methodrequest import MethodRequest
from lib.utils.checkpayload import checkPayload

class Connect:
    """
    This class defines methods used to perform HTTP requests
    """

    @staticmethod
    def __getPageProxy(**kwargs):
        return Connect.getPage(**kwargs)

    @staticmethod
    def getPage(**kwargs):
        """
        This method connects to the target url or proxy and returns
        the target url page content
        """

        if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)
        elif conf.cpuThrottle:
            cpuThrottle(conf.cpuThrottle)

        threadData = getCurrentThreadData()
        threadData.lastRequestUID += 1

        # dirty hack because urllib2 just skips the part of the provided url
        # split with a space char while urlencoding it in the later phase
        url = kwargs.get('url', conf.url).replace(" ", "%20")

        get = kwargs.get('get', None)
        post = kwargs.get('post', None)
        method = kwargs.get('method', None)
        cookie = kwargs.get('cookie', None)
        ua = kwargs.get('ua', None)
        referer = kwargs.get('referer', None)
        direct = kwargs.get('direct', False)
        multipart = kwargs.get('multipart', False)
        silent = kwargs.get('silent', False)
        raise404 = kwargs.get('raise404', True)
        auxHeaders = kwargs.get('auxHeaders', None)
        response = kwargs.get('response', False)
        ignoreTimeout = kwargs.get('ignoreTimeout', False)
        refreshing = kwargs.get('refreshing', False)

        page = ""
        cookieStr = ""
        requestMsg = "HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, conf.method)
        requestMsg += "%s" % urlparse.urlsplit(url)[2] or "/"
        responseMsg = "HTTP response "
        requestHeaders = ""
        responseHeaders = None
        logHeaders = ""

        try:
            if silent:
                socket.setdefaulttimeout(HTTP_SILENT_TIMEOUT)

            if direct:
                if "?" in url:
                    url, params = url.split("?")
                    params = urlencode(params)
                    url = "%s?%s" % (url, params)
                    requestMsg += "?%s" % params

            elif multipart:
                # Needed in this form because of a potential circular dependency
                # problem (option -> update -> connect -> option)
                from lib.core.option import proxyHandler

                multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
                conn = multipartOpener.open(url, multipart)
                page = conn.read()
                responseHeaders = conn.info()
                responseHeaders[URI_HTTP_HEADER] = conn.geturl()
                page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))

                return page

            elif refreshing:
                # Reference(s):
                # http://vancouver-webpages.com/META/metatags.detail.html
                # http://webdesign.about.com/od/metataglibraries/a/aa080300a.htm
                get = None
                post = None

            else:
                if conf.parameters.has_key(PLACE.GET) and not get:
                    get = conf.parameters[PLACE.GET]

                if get:
                    url = "%s?%s" % (url, get)
                    requestMsg += "?%s" % get

                if conf.method == HTTPMETHOD.POST:
                    if conf.parameters.has_key(PLACE.POST) and not post:
                        post = conf.parameters[PLACE.POST]

            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

            # Perform HTTP request
            headers = forgeHeaders(cookie, ua, referer)

            if conf.realTest:
                headers[HTTPHEADER.REFERER] = "%s://%s" % (conf.scheme, conf.hostname)

            if kb.authHeader:
                headers[HTTPHEADER.AUTHORIZATION] = kb.authHeader

            if kb.proxyAuthHeader:
                headers[HTTPHEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

            if auxHeaders:
                for key, item in auxHeaders.items():
                    headers[key] = item

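            # Run header names and values (and the POST body below) through
            # unicodeToSafeHTMLValue before sending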
            for key, item in headers.items():
                del headers[key]
                headers[unicodeToSafeHTMLValue(key)] = unicodeToSafeHTMLValue(item)

            post = unicodeToSafeHTMLValue(post)

            if method:
                req = MethodRequest(url, post, headers)
                req.set_method(method)
            else:
                req = urllib2.Request(url, post, headers)

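            # Rebuild the Cookie header value from the cookie jar entries so it
            # can be shown in the raw request log below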
            if not conf.dropSetCookie and conf.cj:
                for _, cookie in enumerate(conf.cj):
                    if not cookieStr:
                        cookieStr = "Cookie: "

                    cookie = getUnicode(cookie)
                    index = cookie.index(" for ")

                    cookieStr += "%s; " % cookie[8:index]

            if not req.has_header(HTTPHEADER.ACCEPT_ENCODING):
                requestHeaders += "%s: identity\n" % HTTPHEADER.ACCEPT_ENCODING

            requestHeaders += "\n".join(["%s: %s" % (header, value) for header, value in req.header_items()])

            if not req.has_header(HTTPHEADER.COOKIE) and cookieStr:
                requestHeaders += "\n%s" % cookieStr[:-2]

            if not req.has_header(HTTPHEADER.CONNECTION):
                requestHeaders += "\n%s: close" % HTTPHEADER.CONNECTION

            requestMsg += "\n%s" % requestHeaders

            if post:
                requestMsg += "\n\n%s" % post

            requestMsg += "\n"

            logger.log(8, requestMsg)

            conn = urllib2.urlopen(req)

            if not kb.authHeader and req.has_header(HTTPHEADER.AUTHORIZATION):
                kb.authHeader = req.get_header(HTTPHEADER.AUTHORIZATION)

            if not kb.proxyAuthHeader and req.has_header(HTTPHEADER.PROXY_AUTHORIZATION):
                kb.proxyAuthHeader = req.get_header(HTTPHEADER.PROXY_AUTHORIZATION)

            if hasattr(conn, "setcookie"):
                kb.redirectSetCookie = conn.setcookie

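            # On the first redirect received, ask the user whether to keep the
            # original target address or switch to another one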
            if hasattr(conn, "redurl") and hasattr(conn, "redcode") and not conf.redirectHandled and not conf.realTest:
                msg = "sqlmap got a %d redirect to " % conn.redcode
                msg += "%s - What target address do you " % conn.redurl
                msg += "want to use from now on? %s " % conf.url
                msg += "(default) or provide another target address, possibly "
                msg += "based on the redirection received from the application\n"

                while True:
                    choice = readInput(msg, default="1")

                    if not choice or choice == "1":
                        pass
                    else:
                        conf.url = choice

                        try:
                            parseTargetUrl()
                            return Connect.__getPageProxy(**kwargs)
                        except sqlmapSyntaxException:
                            continue

                    break

                conf.redirectHandled = True

            # Reset the number of connection retries
            kb.retriesCount = 0

            # Return response object
            if response:
                return conn, None

            # Get HTTP response
            page = conn.read()
            code = conn.code
            responseHeaders = conn.info()
            responseHeaders[URI_HTTP_HEADER] = conn.geturl()
            page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
            status = getUnicode(conn.msg)

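            # Follow an HTML meta refresh redirection found in the response body
            # by re-issuing the request against the refresh target (done at most
            # once, guarded by the 'refreshing' flag)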
            if extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE) and not refreshing:
                url = extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE)

                if url.lower().startswith('http://'):
                    kwargs['url'] = url
                else:
                    kwargs['url'] = conf.url[:conf.url.rfind('/')+1] + url

                kwargs['refreshing'] = True

                debugMsg = "got HTML meta refresh header"
                logger.debug(debugMsg)

                try:
                    return Connect.__getPageProxy(**kwargs)
                except sqlmapSyntaxException:
                    pass

            # Explicit closing of connection object
            if not conf.keepAlive:
                try:
                    conn.fp._sock.close()
                    conn.close()
                except Exception, msg:
                    warnMsg = "problem occurred during connection closing ('%s')" % msg
                    logger.warn(warnMsg)

        except urllib2.HTTPError, e:
            page = None
            responseHeaders = None

            try:
                page = e.read()
                responseHeaders = e.info()
                responseHeaders[URI_HTTP_HEADER] = e.geturl()
                page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
            except socket.timeout:
                warnMsg = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % e.code
                logger.warn(warnMsg)
                return None, None
            except:
                pass

            code = e.code
            threadData.lastHTTPError = (threadData.lastRequestUID, code)

            if code not in kb.httpErrorCodes:
                kb.httpErrorCodes[code] = 0
            kb.httpErrorCodes[code] += 1

            status = getUnicode(e.msg)
            responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

            if responseHeaders:
                logHeaders = "\n".join(["%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in responseHeaders.items()])

            logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, page if isinstance(page, unicode) else getUnicode(page)))

            if conf.verbose <= 5:
                responseMsg += getUnicode(logHeaders)
            elif conf.verbose > 5:
                responseMsg += "%s\n\n%s\n" % (logHeaders, page)

            logger.log(7, responseMsg)

            if e.code == 401:
                errMsg = "not authorized, try to provide the right HTTP "
                errMsg += "authentication type and valid credentials (%d)" % code
                raise sqlmapConnectionException, errMsg
            elif e.code == 404 and raise404:
                errMsg = "page not found (%d)" % code
                raise sqlmapConnectionException, errMsg
            else:
                debugMsg = "got HTTP error code: %d (%s)" % (code, status)
                logger.debug(debugMsg)
                page = processResponse(page, responseHeaders)
                return page, responseHeaders

        except (urllib2.URLError, socket.error, socket.timeout, httplib.BadStatusLine, httplib.IncompleteRead), e:
            tbMsg = traceback.format_exc()

            if "no host given" in tbMsg:
                warnMsg = "invalid url address used (%s)" % repr(url)
                raise sqlmapSyntaxException, warnMsg
            elif "forcibly closed" in tbMsg:
                warnMsg = "connection was forcibly closed by the target url"
            elif "timed out" in tbMsg:
                warnMsg = "connection timed out to the target url"
            elif "URLError" in tbMsg or "error" in tbMsg:
                warnMsg = "unable to connect to the target url"
            elif "BadStatusLine" in tbMsg:
                warnMsg = "the target url responded with an unknown HTTP "
                warnMsg += "status code, try to force the HTTP User-Agent "
                warnMsg += "header with option --user-agent or --random-agent"
            elif "IncompleteRead" in tbMsg:
                warnMsg = "there was an incomplete read error while retrieving data "
                warnMsg += "from the target url"
            else:
                warnMsg = "unable to connect to the target url"

            if "BadStatusLine" not in tbMsg:
                warnMsg += " or proxy"

            if "forcibly closed" in tbMsg:
                logger.critical(warnMsg)
                return None, None
            elif silent or (ignoreTimeout and any(map(lambda x: x in tbMsg, ["timed out", "IncompleteRead"]))):
                return None, None
            elif kb.retriesCount < conf.retries and not kb.threadException and not conf.realTest:
                kb.retriesCount += 1

                warnMsg += ", sqlmap is going to retry the request"
                logger.critical(warnMsg)

                time.sleep(1)

                socket.setdefaulttimeout(conf.timeout)
                return Connect.__getPageProxy(**kwargs)
            else:
                socket.setdefaulttimeout(conf.timeout)
                raise sqlmapConnectionException, warnMsg

        socket.setdefaulttimeout(conf.timeout)

        page = processResponse(page, responseHeaders)

        responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

        if responseHeaders:
            logHeaders = "\n".join(["%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in responseHeaders.items()])

        logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, page if isinstance(page, unicode) else getUnicode(page)))

        if conf.verbose <= 5:
            responseMsg += getUnicode(logHeaders)
        elif conf.verbose > 5:
            responseMsg += "%s\n\n%s\n" % (logHeaders, page)

        logger.log(7, responseMsg)

        return page, responseHeaders

    @staticmethod
    def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None):
        """
        This method calls a function to get the target url page content
        and returns its page MD5 hash or a boolean value in case of
        string match check ('--string' command line parameter)
        """

        if conf.direct:
            return direct(value, content)

        get = None
        post = None
        cookie = None
        ua = None
        referer = None
        page = None
        pageLength = None
        uri = None
        raise404 = place != PLACE.URI if raise404 is None else raise404

        if not place:
            place = kb.injection.place

        payload = agent.extractPayload(value)
        threadData = getCurrentThreadData()

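        # Apply any loaded tamper functions to the payload before it is put
        # back into the tested value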
        if payload:
            if kb.tamperFunctions:
                for function in kb.tamperFunctions:
                    payload = function(payload)

                value = agent.replacePayload(value, payload)

            logger.log(9, payload)

            if place == PLACE.COOKIE and conf.cookieUrlencode:
                value = agent.removePayloadDelimiters(value)
                value = urlEncodeCookieValues(value)

            elif place:
                if place in (PLACE.GET, PLACE.POST):
                    # payloads in GET and/or POST need to be urlencoded
                    # thoroughly without safe chars (especially &, = and %)
                    payload = urlencode(payload, None, True, True)
                    value = agent.replacePayload(value, payload)

                value = agent.removePayloadDelimiters(value)

        if conf.checkPayload:
            checkPayload(value)

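        # Put the tested value into whichever request location is being
        # injected (GET, POST, Cookie, User-Agent, Referer or URI) and keep
        # the original parameters for all other locations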
        if PLACE.GET in conf.parameters:
            get = urlencode(conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value, limit=True)

        if PLACE.POST in conf.parameters:
            post = urlencode(conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value)

        if PLACE.COOKIE in conf.parameters:
            cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value

        if PLACE.UA in conf.parameters:
            ua = conf.parameters[PLACE.UA] if place != PLACE.UA or not value else value

        if PLACE.REFERER in conf.parameters:
            referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value

        if PLACE.URI in conf.parameters:
            uri = conf.url if place != PLACE.URI or not value else value
        else:
            uri = conf.url

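        # Time-based comparisons need a baseline: collect a minimal sample of
        # response times (MIN_TIME_RESPONSES) with dummy requests first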
        if timeBasedCompare:
            if len(kb.responseTimes) < MIN_TIME_RESPONSES:
                clearConsoleLine()

                warnMsg = "time-based comparison needs a larger statistical "
                warnMsg += "model. Making a few dummy requests, please wait.."
                logger.warn(warnMsg)

                while len(kb.responseTimes) < MIN_TIME_RESPONSES:
                    Connect.queryPage(content=True)

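        # Visit the configured safe url (conf.safUrl) every conf.saFreq
        # requests, typically to keep the web application session from being
        # invalidated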
        if conf.safUrl and conf.saFreq > 0:
            kb.queryCounter += 1
            if kb.queryCounter % conf.saFreq == 0:
                Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer)

        start = time.time()

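        # NULL connection optimization: when only the page length is needed,
        # issue a HEAD request or request a single byte via the Range header
        # and read the length from Content-Length/Content-Range instead of
        # downloading the whole body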
        if kb.nullConnection and not content and not response and not timeBasedCompare:
            if kb.nullConnection == NULLCONNECTION.HEAD:
                method = HTTPMETHOD.HEAD
            elif kb.nullConnection == NULLCONNECTION.RANGE:
                if not auxHeaders:
                    auxHeaders = {}

                auxHeaders[HTTPHEADER.RANGE] = "bytes=-1"

            _, headers = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404)

            if kb.nullConnection == NULLCONNECTION.HEAD and HTTPHEADER.CONTENT_LENGTH in headers:
                pageLength = int(headers[HTTPHEADER.CONTENT_LENGTH])
            elif kb.nullConnection == NULLCONNECTION.RANGE and HTTPHEADER.CONTENT_RANGE in headers:
                pageLength = int(headers[HTTPHEADER.CONTENT_RANGE][headers[HTTPHEADER.CONTENT_RANGE].find('/') + 1:])

        if not pageLength:
            page, headers = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)

        threadData.lastQueryDuration = calculateDeltaSeconds(start)

        if kb.testMode:
            kb.testQueryCount += 1

            if conf.cj:
                conf.cj.clear()

        if timeBasedCompare:
            return wasLastRequestDelayed()
        elif noteResponseTime:
            kb.responseTimes.append(threadData.lastQueryDuration)

        if content or response:
            return page, headers

        page = removeReflectiveValues(page, payload)

        if getRatioValue:
            return comparison(page, getRatioValue=False, pageLength=pageLength), comparison(page, getRatioValue=True, pageLength=pageLength)
        elif pageLength or page:
            return comparison(page, getRatioValue, pageLength)
        else:
            return False