#!/usr/bin/env python

"""
$Id$

This file is part of the sqlmap project, http://sqlmap.sourceforge.net.

Copyright (c) 2006-2008 Bernardo Damele A. G. <bernardo.damele@gmail.com>
and Daniele Bellucci <daniele.bellucci@gmail.com>

sqlmap is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation version 2 of the License.

sqlmap is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.

You should have received a copy of the GNU General Public License along
with sqlmap; if not, write to the Free Software Foundation, Inc., 51
Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import hashlib
import httplib
import md5
import re
import socket
import time
import urllib2
import urlparse

from lib.contrib import multipartpost
from lib.core.convert import urlencode
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.exception import sqlmapConnectionException
from lib.request.basic import forgeHeaders
from lib.request.basic import parseResponse
class Connect:
"""
This class defines methods used to perform HTTP requests
"""
@staticmethod
def getPage(**kwargs):
"""
This method connects to the target url or proxy and returns
the target url page content
"""
url = kwargs.get('url', conf.url).replace(" ", "%20")
get = kwargs.get('get', None)
post = kwargs.get('post', None)
cookie = kwargs.get('cookie', None)
ua = kwargs.get('ua', None)
direct = kwargs.get('direct', False)
multipart = kwargs.get('multipart', False)
cookieStr = ""
requestMsg = "HTTP request:\n%s " % conf.method
responseMsg = "HTTP response "
requestHeaders = ""
responseHeaders = ""
if re.search("http[s]*://%s" % conf.hostname, url, re.I):
requestMsg += "%s" % conf.path or "/"
else:
requestMsg += "%s" % urlparse.urlsplit(url)[2] or "/"
if direct:
if "?" in url:
url, params = url.split("?")
params = urlencode(params).replace("%%", "%")
2008-10-15 19:38:22 +04:00
url = "%s?%s" % (url, params)
requestMsg += "?%s" % params
2008-10-15 19:38:22 +04:00
elif multipart:
multipartOpener = urllib2.build_opener(multipartpost.MultipartPostHandler)
conn = multipartOpener.open(url, multipart)
page = conn.read()
return page
else:
2008-10-15 19:38:22 +04:00
if conf.parameters.has_key("GET") and not get:
get = conf.parameters["GET"]
if get:
get = urlencode(get).replace("%%", "%")
2008-10-15 19:38:22 +04:00
url = "%s?%s" % (url, get)
requestMsg += "?%s" % get
if conf.method == "POST":
if conf.parameters.has_key("POST") and not post:
post = conf.parameters["POST"]
post = urlencode(post).replace("%%", "%")
2008-10-15 19:38:22 +04:00
requestMsg += " HTTP/1.1"
if cookie:
cookie = urlencode(cookie).replace("%%", "%")
2008-10-15 19:38:22 +04:00
try:
# Perform HTTP request
headers = forgeHeaders(cookie, ua)
2008-10-15 19:38:22 +04:00
req = urllib2.Request(url, post, headers)
conn = urllib2.urlopen(req)
if not req.has_header("Accept-Encoding"):
2008-10-15 19:38:22 +04:00
requestHeaders += "\nAccept-Encoding: identity"
requestHeaders = "\n".join(["%s: %s" % (header, value) for header, value in req.header_items()])
for _, cookie in enumerate(conf.cj):
if not cookieStr:
cookieStr = "Cookie: "
cookie = str(cookie)
2008-10-15 19:38:22 +04:00
index = cookie.index(" for ")
cookieStr += "%s; " % cookie[8:index]
if not req.has_header("Cookie") and cookieStr:
2008-10-15 19:38:22 +04:00
requestHeaders += "\n%s" % cookieStr[:-2]
if not req.has_header("Connection"):
2008-10-15 19:38:22 +04:00
requestHeaders += "\nConnection: close"
requestMsg += "\n%s" % requestHeaders
if post:
requestMsg += "\n%s" % post
requestMsg += "\n"
logger.log(9, requestMsg)
# Get HTTP response
page = conn.read()
code = conn.code
status = conn.msg
responseHeaders = conn.info()
except urllib2.HTTPError, e:
if e.code == 401:
2008-10-17 17:23:24 +04:00
exceptionMsg = "not authorized, try to provide right HTTP "
exceptionMsg += "authentication type and valid credentials"
2008-10-15 19:38:22 +04:00
raise sqlmapConnectionException, exceptionMsg
else:
page = e.read()
code = e.code
status = e.msg
responseHeaders = e.info()
except (urllib2.URLError, socket.error), _:
2008-10-15 19:38:22 +04:00
warnMsg = "unable to connect to the target url"
if conf.multipleTargets:
2008-10-15 19:38:22 +04:00
warnMsg += ", skipping to next url"
logger.warn(warnMsg)
return None
else:
warnMsg += " or proxy"
raise sqlmapConnectionException, warnMsg
except socket.timeout, _:
warnMsg = "connection timed out to the target url"
if conf.multipleTargets:
warnMsg += ", skipping to next url"
logger.warn(warnMsg)
return None
else:
warnMsg += " or proxy"
raise sqlmapConnectionException, warnMsg
except httplib.BadStatusLine, _:
warnMsg = "the target url responded with an unknown HTTP "
warnMsg += "status code, try to force the HTTP User-Agent "
warnMsg += "header with option --user-agent or -a"
if conf.multipleTargets:
warnMsg += ", skipping to next url"
logger.warn(warnMsg)
return None
else:
raise sqlmapConnectionException, warnMsg
parseResponse(page, responseHeaders)
2008-10-15 19:38:22 +04:00
responseMsg += "(%s - %d):\n" % (status, code)
if conf.verbose <= 4:
responseMsg += str(responseHeaders)
elif conf.verbose > 4:
responseMsg += "%s\n%s\n" % (responseHeaders, page)
logger.log(8, responseMsg)
if conf.delay != None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
time.sleep(conf.delay)
2008-10-15 19:38:22 +04:00
return page
@staticmethod
def queryPage(value=None, place=None, content=False):
"""
This method calls a function to get the target url page content
and returns its page MD5 hash or a boolean value in case of
string match check ('--string' command line parameter)
"""
get = None
post = None
cookie = None
ua = None
if not place:
place = kb.injPlace
if conf.parameters.has_key("GET"):
if place == "GET" and value:
get = value
else:
get = conf.parameters["GET"]
if conf.parameters.has_key("POST"):
if place == "POST" and value:
post = value
else:
post = conf.parameters["POST"]
if conf.parameters.has_key("Cookie"):
if place == "Cookie" and value:
cookie = value
else:
cookie = conf.parameters["Cookie"]
if conf.parameters.has_key("User-Agent"):
if place == "User-Agent" and value:
ua = value
else:
ua = conf.parameters["User-Agent"]
page = Connect.getPage(get=get, post=post, cookie=cookie, ua=ua)
2008-12-02 02:04:01 +03:00
# TODO: create a comparison library and move these checks there
2008-10-15 19:38:22 +04:00
if content:
return page
elif conf.string:
if conf.string in page:
return True
else:
return False
else:
return md5.new(page).hexdigest()