sqlmap/lib/request/connect.py

#!/usr/bin/env python
"""
2008-10-15 19:56:32 +04:00
$Id$
2008-10-15 19:38:22 +04:00
This file is part of the sqlmap project, http://sqlmap.sourceforge.net.
2010-03-03 18:26:27 +03:00
Copyright (c) 2007-2010 Bernardo Damele A. G. <bernardo.damele@gmail.com>
Copyright (c) 2006 Daniele Bellucci <daniele.bellucci@gmail.com>
2008-10-15 19:38:22 +04:00
sqlmap is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation version 2 of the License.
sqlmap is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.
You should have received a copy of the GNU General Public License along
with sqlmap; if not, write to the Free Software Foundation, Inc., 51
Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import httplib
import re
import socket
import time
import urllib2
import urlparse
import traceback

from lib.contrib import multipartpost
from lib.core.common import readInput
from lib.core.common import sanitizeAsciiString
from lib.core.convert import urlencode
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.exception import sqlmapConnectionException
from lib.request.basic import decodePage
from lib.request.basic import forgeHeaders
from lib.request.basic import parseResponse
from lib.request.direct import direct
from lib.request.comparison import comparison

class Connect:
    """
    This class defines methods used to perform HTTP requests
    """
    @staticmethod
    def __getPageProxy(**kwargs):
        return Connect.getPage(**kwargs)

    @staticmethod
    def getPage(**kwargs):
        """
        This method connects to the target url or proxy and returns
        the target url page content
        """

        if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)

        url = kwargs.get('url', conf.url).replace(" ", "%20")
        get = kwargs.get('get', None)
        post = kwargs.get('post', None)
        cookie = kwargs.get('cookie', None)
        ua = kwargs.get('ua', None)
        direct = kwargs.get('direct', False)
        multipart = kwargs.get('multipart', False)
        silent = kwargs.get('silent', False)
        raise404 = kwargs.get('raise404', True)

        page = ""
        cookieStr = ""
        requestMsg = "HTTP request:\n%s " % conf.method
        requestMsg += "%s" % urlparse.urlsplit(url)[2] or "/"
        responseMsg = "HTTP response "
        requestHeaders = ""
        responseHeaders = ""

        try:
            if silent:
                socket.setdefaulttimeout(3)

            if direct:
                if "?" in url:
                    url, params = url.split("?")
                    params = urlencode(params)
                    url = "%s?%s" % (url, params)
                    requestMsg += "?%s" % params

            elif multipart:
                # imported here because of a potential circular dependency (option -> update -> connect -> option)
                from lib.core.option import proxyHandler

                multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
                conn = multipartOpener.open(url, multipart)
                page = conn.read()
                responseHeaders = conn.info()
                encoding = responseHeaders.get("Content-Encoding")
                page = decodePage(page, encoding)

                return page

            else:
                if conf.parameters.has_key("GET") and not get:
                    get = conf.parameters["GET"]

                if get:
                    get = urlencode(get)
                    url = "%s?%s" % (url, get)
                    requestMsg += "?%s" % get

                if conf.method == "POST":
                    if conf.parameters.has_key("POST") and not post:
                        post = conf.parameters["POST"]

            requestMsg += " HTTP/1.1"

            # Perform HTTP request
            headers = forgeHeaders(cookie, ua)
            req = urllib2.Request(url, post, headers)
            conn = urllib2.urlopen(req)
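
            # A redirect flagged elsewhere in sqlmap (the response object then
            # carries redurl/redcode attributes) is handled interactively,
            # once: the user may keep the original target or switch to the
            # redirected address.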
            if hasattr(conn, "redurl") and hasattr(conn, "redcode") and not conf.redirectHandled:
                msg = "sqlmap got a %d redirect to " % conn.redcode
                msg += "%s - what target address do you " % conn.redurl
                msg += "want to use from now on? %s " % conf.url
                msg += "(default) or provide another target address, "
                msg += "possibly based on the redirection received from "
                msg += "the application\n"

                while True:
                    choice = readInput(msg, default="1")

                    if not choice or choice == "1":
                        pass
                    else:
                        conf.url = choice
                        return Connect.__getPageProxy(**kwargs)

                    break

                conf.redirectHandled = True

            # Reset the number of connection retries
            conf.retriesCount = 0
if not req.has_header("Accept-Encoding"):
2008-10-15 19:38:22 +04:00
requestHeaders += "\nAccept-Encoding: identity"
requestHeaders = "\n".join(["%s: %s" % (header, value) for header, value in req.header_items()])
            if not conf.dropSetCookie and conf.cj:
                for _, cookie in enumerate(conf.cj):
                    if not cookieStr:
                        cookieStr = "Cookie: "

                    cookie = str(cookie)
                    index = cookie.index(" for ")
                    cookieStr += "%s; " % cookie[8:index]

            if not req.has_header("Cookie") and cookieStr:
                requestHeaders += "\n%s" % cookieStr[:-2]

            if not req.has_header("Connection"):
                requestHeaders += "\nConnection: close"

            requestMsg += "\n%s" % requestHeaders

            if post:
                requestMsg += "\n%s" % post

            requestMsg += "\n"

            logger.log(9, requestMsg)

            # Get HTTP response
            page = conn.read()
            code = conn.code
            status = conn.msg
            responseHeaders = conn.info()
            encoding = responseHeaders.get("Content-Encoding")
            page = decodePage(page, encoding)
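
        # HTTP-level errors: 401 and (when raise404 is set) 404 are fatal; for
        # any other status code the error page itself is read and handled like
        # a regular response.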
        except urllib2.HTTPError, e:
            if e.code == 401:
                errMsg = "not authorized, try to provide the right HTTP "
                errMsg += "authentication type and valid credentials"
                raise sqlmapConnectionException, errMsg

            elif e.code == 404 and raise404:
                errMsg = "page not found"
                raise sqlmapConnectionException, errMsg

            else:
                try:
                    page = e.read()
                    responseHeaders = e.info()
                except socket.timeout:
                    warnMsg = "connection timed out while trying "
                    warnMsg += "to get error page information (%d)" % e.code
                    logger.warn(warnMsg)

                    return None, None

                code = e.code
                status = e.msg

                debugMsg = "got HTTP error code: %d" % code
                logger.debug(debugMsg)
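
        # Network-level failures (unreachable host, timeouts, bad status
        # lines) are retried up to conf.retries times; in silent mode the
        # failure is swallowed and (None, None) is returned instead.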
        except (urllib2.URLError, socket.error, socket.timeout, httplib.BadStatusLine), e:
            tbMsg = traceback.format_exc()

            if "URLError" in tbMsg or "error" in tbMsg:
                warnMsg = "unable to connect to the target url"
            elif "timeout" in tbMsg:
                warnMsg = "connection timed out to the target url"
            elif "BadStatusLine" in tbMsg:
                warnMsg = "the target url responded with an unknown HTTP "
                warnMsg += "status code, try to force the HTTP User-Agent "
                warnMsg += "header with option --user-agent or -a"
            else:
                warnMsg = "unable to connect to the target url"

            if "BadStatusLine" not in tbMsg:
                warnMsg += " or proxy"

            if silent:
                return None, None
            elif conf.retriesCount < conf.retries:
                conf.retriesCount += 1

                warnMsg += ", sqlmap is going to retry the request"
                logger.warn(warnMsg)

                time.sleep(1)

                socket.setdefaulttimeout(conf.timeout)
                return Connect.__getPageProxy(**kwargs)
            else:
                socket.setdefaulttimeout(conf.timeout)
                raise sqlmapConnectionException, warnMsg

        socket.setdefaulttimeout(conf.timeout)

        page = sanitizeAsciiString(page)
        parseResponse(page, responseHeaders)

        responseMsg += "(%s - %d):\n" % (status, code)

        if conf.verbose <= 4:
            responseMsg += str(responseHeaders)
        elif conf.verbose > 4:
            responseMsg += "%s\n%s\n" % (responseHeaders, page)

        logger.log(8, responseMsg)

        time.sleep(conf.cpuThrottleDelay)

        return page, responseHeaders

    @staticmethod
    def queryPage(value=None, place=None, content=False, getSeqMatcher=False, silent=False):
        """
        This method calls a function to get the target url page content
        and returns its page MD5 hash or a boolean value in case of
        string match check ('--string' command line parameter)
        """

        if conf.direct:
            return direct(value, content)

        get = None
        post = None
        cookie = None
        ua = None

        if not place:
            place = kb.injPlace
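
        # Only the parameters of the injection place (GET, POST, Cookie or
        # User-Agent) are replaced with the payload-bearing value; the other
        # places are sent unchanged.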
if "GET" in conf.parameters:
get = conf.parameters["GET"] if place != "GET" or not value else value
2008-10-15 19:38:22 +04:00
if "POST" in conf.parameters:
post = conf.parameters["POST"] if place != "POST" or not value else value
2008-10-15 19:38:22 +04:00
if "Cookie" in conf.parameters:
cookie = conf.parameters["Cookie"] if place != "Cookie" or not value else value
if "User-Agent" in conf.parameters:
ua = conf.parameters["User-Agent"] if place != "User-Agent" or not value else value
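
        # Every conf.saFreq requests the "safe" url (presumably set through
        # sqlmap's --safe-url/--safe-freq options) is visited to keep the web
        # application session alive.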
        if conf.safUrl and conf.saFreq > 0:
            kb.queryCounter += 1

            if kb.queryCounter % conf.saFreq == 0:
                Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua)

        page, headers = Connect.getPage(get=get, post=post, cookie=cookie, ua=ua, silent=silent)

        if content:
            return page, headers
        elif page:
            return comparison(page, headers, getSeqMatcher)
        else:
            return False