#!/usr/bin/env python

"""
$Id$

This file is part of the sqlmap project, http://sqlmap.sourceforge.net.

Copyright (c) 2007-2010 Bernardo Damele A. G. <bernardo.damele@gmail.com>
Copyright (c) 2006 Daniele Bellucci <daniele.bellucci@gmail.com>

sqlmap is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation version 2 of the License.

sqlmap is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.

You should have received a copy of the GNU General Public License along
with sqlmap; if not, write to the Free Software Foundation, Inc., 51
Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""

import httplib
import re
import socket
import time
import urllib2
import urlparse
import traceback

from lib.contrib import multipartpost
from lib.core.convert import urlencode
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.common import sanitizeAsciiString
from lib.core.exception import sqlmapConnectionException
from lib.request.basic import decodePage
from lib.request.basic import forgeHeaders
from lib.request.basic import parseResponse
from lib.request.comparison import comparison


class Connect:
    """
    This class defines methods used to perform HTTP requests
    """

    @staticmethod
    def __getPageProxy(**kwargs):
        return Connect.getPage(**kwargs)

    @staticmethod
    def getPage(**kwargs):
        """
        This method connects to the target url or proxy and returns
        the target url page content
        """
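
        # Recognized keyword arguments: url, get, post, cookie, ua, direct,
        # multipart, silent and raise404; url defaults to conf.url, the
        # others to None/False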

        if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)

        url = kwargs.get('url', conf.url).replace(" ", "%20")
        get = kwargs.get('get', None)
        post = kwargs.get('post', None)
        cookie = kwargs.get('cookie', None)
        ua = kwargs.get('ua', None)
        direct = kwargs.get('direct', False)
        multipart = kwargs.get('multipart', False)
        silent = kwargs.get('silent', False)
        raise404 = kwargs.get('raise404', None)

        page = ""
        cookieStr = ""
        requestMsg = "HTTP request:\n%s " % conf.method
        requestMsg += "%s" % (urlparse.urlsplit(url)[2] or "/")
        responseMsg = "HTTP response "
        requestHeaders = ""
        responseHeaders = ""

        try:
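            # In silent mode fail fast: shrink the socket timeout to three
            # seconds (it is restored to conf.timeout further below)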
            if silent:
                socket.setdefaulttimeout(3)

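            # Direct requests come with a complete URL; re-encode its query
            # string, if any, before use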
            if direct:
                if "?" in url:
                    url, params = url.split("?", 1)
                    params = urlencode(params)
                    url = "%s?%s" % (url, params)
                    requestMsg += "?%s" % params

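            # Multipart requests go through a dedicated opener that wraps
            # multipartpost.MultipartPostHandler (see lib/contrib)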
            elif multipart:
                # needed in this form because of potential circular dependency
                # problem (option -> update -> connect -> option)
                from lib.core.option import proxyHandler

                multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
                conn = multipartOpener.open(url, multipart)
                page = conn.read()
                responseHeaders = conn.info()

                encoding = responseHeaders.get("Content-Encoding")
                page = decodePage(page, encoding)

                return page

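            # Plain request: rebuild the query string and POST body from the
            # parsed request parameters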
            else:
                if conf.parameters.has_key("GET") and not get:
                    get = conf.parameters["GET"]

                if get:
                    get = urlencode(get)
                    url = "%s?%s" % (url, get)
                    requestMsg += "?%s" % get

                if conf.method == "POST":
                    if conf.parameters.has_key("POST") and not post:
                        post = conf.parameters["POST"]

            requestMsg += " HTTP/1.1"

            # Perform HTTP request
            headers = forgeHeaders(cookie, ua)
            req = urllib2.Request(url, post, headers)
            conn = urllib2.urlopen(req)

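            # conn.redurl is set by the redirect handler whenever the target
            # answered with an HTTP redirect; retarget sqlmap to the new
            # address and replay the request against it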
            if hasattr(conn, "redurl"):
                infoMsg = "connection redirected, going to use "
                infoMsg += "%s as target address" % conn.redurl
                logger.info(infoMsg)

                conf.url = conn.redurl

                return Connect.__getPageProxy(**kwargs)

            # Reset the number of connection retries
            conf.retriesCount = 0

            requestHeaders = "\n".join(["%s: %s" % (header, value) for header, value in req.header_items()])

            if not req.has_header("Accept-Encoding"):
                requestHeaders += "\nAccept-Encoding: identity"

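            # Mirror the cookie jar into the logged request headers: each
            # cookie's string form is '<Cookie name=value for domain/path>',
            # so slice out just the 'name=value' part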
            if not conf.dropSetCookie:
                for cookie in conf.cj:
                    if not cookieStr:
                        cookieStr = "Cookie: "

                    cookie = str(cookie)
                    index = cookie.index(" for ")

                    cookieStr += "%s; " % cookie[8:index]

            if not req.has_header("Cookie") and cookieStr:
                requestHeaders += "\n%s" % cookieStr[:-2]

            if not req.has_header("Connection"):
                requestHeaders += "\nConnection: close"

            requestMsg += "\n%s" % requestHeaders

            if post:
                requestMsg += "\n%s" % post

            requestMsg += "\n"

            logger.log(9, requestMsg)

            # Get HTTP response
            page = conn.read()
            code = conn.code
            status = conn.msg
            responseHeaders = conn.info()

            encoding = responseHeaders.get("Content-Encoding")
            page = decodePage(page, encoding)

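        # 401 and (when enabled) 404 abort the run; any other HTTP error
        # page is kept and handled below like a regular response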
        except urllib2.HTTPError, e:
            if e.code == 401:
                errMsg = "not authorized, try to provide the right HTTP "
                errMsg += "authentication type and valid credentials"
                raise sqlmapConnectionException, errMsg
            elif e.code == 404 and (raise404 or (raise404 is None and conf.raise404)):
                errMsg = "page not found"
                raise sqlmapConnectionException, errMsg
            else:
                page = e.read()
                code = e.code
                status = e.msg
                responseHeaders = e.info()

                debugMsg = "got HTTP error code: %d" % code
                logger.debug(debugMsg)

        except (urllib2.URLError, socket.error, socket.timeout, httplib.BadStatusLine), e:
            tbMsg = traceback.format_exc()

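            # Classify the failure from the formatted traceback text rather
            # than from the exception type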
            if "URLError" in tbMsg or "error" in tbMsg:
                warnMsg = "unable to connect to the target url"
            elif "timeout" in tbMsg:
                warnMsg = "connection timed out to the target url"
            elif "BadStatusLine" in tbMsg:
                warnMsg = "the target url responded with an unknown HTTP "
                warnMsg += "status code, try to force the HTTP User-Agent "
                warnMsg += "header with option --user-agent or -a"
            else:
                warnMsg = "unable to connect to the target url"

            if "BadStatusLine" not in tbMsg:
                warnMsg += " or proxy"

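            # Silent requests give up quietly; otherwise retry up to
            # conf.retries times before raising a connection exception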
            if silent:
                return None, None
            elif conf.retriesCount < conf.retries:
                conf.retriesCount += 1

                warnMsg += ", sqlmap is going to retry the request"
                logger.warn(warnMsg)

                time.sleep(1)

                socket.setdefaulttimeout(conf.timeout)
                return Connect.__getPageProxy(**kwargs)
            else:
                socket.setdefaulttimeout(conf.timeout)
                raise sqlmapConnectionException, warnMsg

        socket.setdefaulttimeout(conf.timeout)

        page = sanitizeAsciiString(page)

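        # Parse the response for useful information (e.g. back-end DBMS
        # error messages) before logging it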
        parseResponse(page, responseHeaders)
        responseMsg += "(%s - %d):\n" % (status, code)

        if conf.verbose <= 4:
            responseMsg += str(responseHeaders)
        elif conf.verbose > 4:
            responseMsg += "%s\n%s\n" % (responseHeaders, page)

        logger.log(8, responseMsg)

        return page, responseHeaders

    @staticmethod
    def queryPage(value=None, place=None, content=False, getSeqMatcher=False, silent=False):
        """
        This method calls a function to get the target url page content
        and returns its page MD5 hash or a boolean value in case of
        string match check ('--string' command line parameter)
        """

        get = None
        post = None
        cookie = None
        ua = None

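        # Fall back to the injection place identified during the
        # detection phase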
        if not place:
            place = kb.injPlace

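        # Inject the payload only into the place being tested; all other
        # places keep their original parameter values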
        if conf.parameters.has_key("GET"):
            if place == "GET" and value:
                get = value
            else:
                get = conf.parameters["GET"]

        if conf.parameters.has_key("POST"):
            if place == "POST" and value:
                post = value
            else:
                post = conf.parameters["POST"]

        if conf.parameters.has_key("Cookie"):
            if place == "Cookie" and value:
                cookie = value
            else:
                cookie = conf.parameters["Cookie"]

        if conf.parameters.has_key("User-Agent"):
            if place == "User-Agent" and value:
                ua = value
            else:
                ua = conf.parameters["User-Agent"]

        page, headers = Connect.getPage(get=get, post=post, cookie=cookie, ua=ua, silent=silent)

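        # content=True returns the raw page and headers; otherwise the page
        # is compared against the reference response (a boolean, or a
        # similarity ratio when getSeqMatcher is set)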
        if content:
            return page, headers
        elif page:
            return comparison(page, headers, getSeqMatcher)
        else:
            return False