# sqlmap/lib/request/connect.py
#!/usr/bin/env python

"""
$Id$

This file is part of the sqlmap project, http://sqlmap.sourceforge.net.

Copyright (c) 2007-2009 Bernardo Damele A. G. <bernardo.damele@gmail.com>
Copyright (c) 2006 Daniele Bellucci <daniele.bellucci@gmail.com>

sqlmap is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation version 2 of the License.

sqlmap is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.

You should have received a copy of the GNU General Public License along
with sqlmap; if not, write to the Free Software Foundation, Inc., 51
Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import httplib
import re
import socket
import time
import traceback
import urllib2
import urlparse

from lib.contrib import multipartpost
from lib.core.common import sanitizeAsciiString
from lib.core.convert import urlencode
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.exception import sqlmapConnectionException
from lib.request.basic import decodePage
from lib.request.basic import forgeHeaders
from lib.request.basic import parseResponse
from lib.request.comparison import comparison
class Connect:
"""
This class defines methods used to perform HTTP requests
"""
@staticmethod
def __getPageProxy(**kwargs):
return Connect.getPage(**kwargs)
2008-10-15 19:38:22 +04:00
@staticmethod
def getPage(**kwargs):
"""
This method connects to the target url or proxy and returns
the target url page content
"""
if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
time.sleep(conf.delay)
2008-10-15 19:38:22 +04:00
url = kwargs.get('url', conf.url).replace(" ", "%20")
get = kwargs.get('get', None)
post = kwargs.get('post', None)
cookie = kwargs.get('cookie', None)
ua = kwargs.get('ua', None)
direct = kwargs.get('direct', False)
multipart = kwargs.get('multipart', False)
silent = kwargs.get('silent', False)
2008-10-15 19:38:22 +04:00
page = ""
2008-10-15 19:38:22 +04:00
cookieStr = ""
requestMsg = "HTTP request:\n%s " % conf.method
requestMsg += "%s" % urlparse.urlsplit(url)[2] or "/"
2010-01-24 02:29:34 +03:00
responseMsg = "HTTP response "
2008-10-15 19:38:22 +04:00
requestHeaders = ""
responseHeaders = ""
if silent:
socket.setdefaulttimeout(3)
2008-10-15 19:38:22 +04:00
if direct:
if "?" in url:
url, params = url.split("?")
params = urlencode(params)
2008-10-15 19:38:22 +04:00
url = "%s?%s" % (url, params)
requestMsg += "?%s" % params
2008-10-15 19:38:22 +04:00
elif multipart:
multipartOpener = urllib2.build_opener(multipartpost.MultipartPostHandler)
conn = multipartOpener.open(url, multipart)
2010-01-24 02:29:34 +03:00
page = conn.read()
responseHeaders = conn.info()
encoding = responseHeaders.get("Content-Encoding")
page = decodePage(page, encoding)
return page
else:
2008-10-15 19:38:22 +04:00
if conf.parameters.has_key("GET") and not get:
get = conf.parameters["GET"]
if get:
get = urlencode(get)
2008-10-15 19:38:22 +04:00
url = "%s?%s" % (url, get)
requestMsg += "?%s" % get
if conf.method == "POST":
if conf.parameters.has_key("POST") and not post:
post = conf.parameters["POST"]
2008-10-15 19:38:22 +04:00
requestMsg += " HTTP/1.1"
try:
# Perform HTTP request
headers = forgeHeaders(cookie, ua)
2008-10-15 19:38:22 +04:00
req = urllib2.Request(url, post, headers)
conn = urllib2.urlopen(req)
# Reset the number of connection retries
conf.retriesCount = 0
if not req.has_header("Accept-Encoding"):
2008-10-15 19:38:22 +04:00
requestHeaders += "\nAccept-Encoding: identity"
requestHeaders = "\n".join(["%s: %s" % (header, value) for header, value in req.header_items()])
if not conf.dropSetCookie:
for _, cookie in enumerate(conf.cj):
if not cookieStr:
cookieStr = "Cookie: "
cookie = str(cookie)
index = cookie.index(" for ")
cookieStr += "%s; " % cookie[8:index]
2008-10-15 19:38:22 +04:00
if not req.has_header("Cookie") and cookieStr:
2008-10-15 19:38:22 +04:00
requestHeaders += "\n%s" % cookieStr[:-2]
if not req.has_header("Connection"):
2008-10-15 19:38:22 +04:00
requestHeaders += "\nConnection: close"
requestMsg += "\n%s" % requestHeaders
if post:
requestMsg += "\n%s" % post
requestMsg += "\n"
logger.log(9, requestMsg)
# Get HTTP response
page = conn.read()
code = conn.code
status = conn.msg
2008-10-15 19:38:22 +04:00
responseHeaders = conn.info()
2010-01-24 02:29:34 +03:00
encoding = responseHeaders.get("Content-Encoding")
page = decodePage(page, encoding)
2008-10-15 19:38:22 +04:00
except urllib2.HTTPError, e:
if e.code == 401:
2008-10-17 17:23:24 +04:00
exceptionMsg = "not authorized, try to provide right HTTP "
exceptionMsg += "authentication type and valid credentials"
2008-10-15 19:38:22 +04:00
raise sqlmapConnectionException, exceptionMsg
elif e.code == 404:
exceptionMsg = "page not found"
raise sqlmapConnectionException, exceptionMsg
2008-10-15 19:38:22 +04:00
else:
page = e.read()
code = e.code
status = e.msg
responseHeaders = e.info()
debugMsg = "got HTTP error code: %d" % code
logger.debug(debugMsg)
except (urllib2.URLError, socket.error, socket.timeout, httplib.BadStatusLine), e:
tbMsg = traceback.format_exc()
2008-10-15 19:38:22 +04:00
if "URLError" in tbMsg or "error" in tbMsg:
warnMsg = "unable to connect to the target url"
elif "timeout" in tbMsg:
warnMsg = "connection timed out to the target url"
elif "BadStatusLine" in tbMsg:
warnMsg = "the target url responded with an unknown HTTP "
warnMsg += "status code, try to force the HTTP User-Agent "
warnMsg += "header with option --user-agent or -a"
2009-12-31 15:34:18 +03:00
else:
warnMsg = "unable to connect to the target url"
if "BadStatusLine" not in tbMsg:
warnMsg += " or proxy"
if conf.multipleTargets:
warnMsg += ", skipping to next url"
logger.warn(warnMsg)
return None, None
if silent:
return None, None
elif conf.retriesCount < conf.retries:
conf.retriesCount += 1
warnMsg += ", sqlmap is going to retry the request"
logger.warn(warnMsg)
time.sleep(1)
socket.setdefaulttimeout(conf.timeout)
2009-12-21 14:21:18 +03:00
return Connect.__getPageProxy(**kwargs)
else:
socket.setdefaulttimeout(conf.timeout)
raise sqlmapConnectionException, warnMsg
socket.setdefaulttimeout(conf.timeout)
2010-01-24 02:29:34 +03:00
page = sanitizeAsciiString(page)
parseResponse(page, responseHeaders)
2008-10-15 19:38:22 +04:00
responseMsg += "(%s - %d):\n" % (status, code)
2010-01-24 02:29:34 +03:00
2008-10-15 19:38:22 +04:00
if conf.verbose <= 4:
responseMsg += str(responseHeaders)
elif conf.verbose > 4:
responseMsg += "%s\n%s\n" % (responseHeaders, page)
2008-10-15 19:38:22 +04:00
logger.log(8, responseMsg)
return page, responseHeaders
2008-10-15 19:38:22 +04:00
@staticmethod
def queryPage(value=None, place=None, content=False, getSeqMatcher=False, silent=False):
2008-10-15 19:38:22 +04:00
"""
This method calls a function to get the target url page content
and returns its page MD5 hash or a boolean value in case of
string match check ('--string' command line parameter)
"""
get = None
post = None
cookie = None
ua = None
if not place:
place = kb.injPlace
if conf.parameters.has_key("GET"):
if place == "GET" and value:
get = value
else:
get = conf.parameters["GET"]
if conf.parameters.has_key("POST"):
if place == "POST" and value:
post = value
else:
post = conf.parameters["POST"]
if conf.parameters.has_key("Cookie"):
if place == "Cookie" and value:
cookie = value
else:
cookie = conf.parameters["Cookie"]
if conf.parameters.has_key("User-Agent"):
if place == "User-Agent" and value:
ua = value
else:
ua = conf.parameters["User-Agent"]
page, headers = Connect.getPage(get=get, post=post, cookie=cookie, ua=ua, silent=silent)
2008-10-15 19:38:22 +04:00
if content:
return page, headers
elif page:
return comparison(page, headers, getSeqMatcher)
else:
return False