#!/usr/bin/env python

"""
$Id$

Copyright (c) 2006-2010 sqlmap developers (http://sqlmap.sourceforge.net/)
See the file 'doc/COPYING' for copying permission
"""
|
|
|
|
|
2008-11-15 15:25:19 +03:00
|
|
|
import httplib
|
2008-10-15 19:38:22 +04:00
|
|
|
import re
|
2008-11-09 19:57:47 +03:00
|
|
|
import socket
|
|
|
|
import time
|
2008-10-15 19:38:22 +04:00
|
|
|
import urllib2
|
|
|
|
import urlparse
|
2008-12-04 20:40:03 +03:00
|
|
|
import traceback
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
from lib.contrib import multipartpost
|
2010-10-29 20:11:50 +04:00
|
|
|
from lib.core.agent import agent
|
2010-11-16 13:42:42 +03:00
|
|
|
from lib.core.common import extractErrorMessage
|
2010-11-07 03:12:00 +03:00
|
|
|
from lib.core.common import getFilteredPageContent
|
2010-06-10 15:34:17 +04:00
|
|
|
from lib.core.common import getUnicode
|
2010-11-08 14:22:47 +03:00
|
|
|
from lib.core.common import logHTTPTraffic
|
2010-11-16 13:42:42 +03:00
|
|
|
from lib.core.common import readInput
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.convert import urlencode
|
2010-10-31 14:26:33 +03:00
|
|
|
from lib.core.common import urlEncodeCookieValues
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.data import conf
|
|
|
|
from lib.core.data import kb
|
|
|
|
from lib.core.data import logger
|
2010-01-24 02:29:34 +03:00
|
|
|
from lib.core.common import sanitizeAsciiString
|
2010-11-08 12:44:32 +03:00
|
|
|
from lib.core.enums import HTTPMETHOD
|
2010-11-08 12:49:57 +03:00
|
|
|
from lib.core.enums import NULLCONNECTION
|
2010-11-08 12:20:02 +03:00
|
|
|
from lib.core.enums import PLACE
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.exception import sqlmapConnectionException
|
2010-01-02 05:02:12 +03:00
|
|
|
from lib.request.basic import decodePage
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.request.basic import forgeHeaders
|
2008-11-17 03:00:54 +03:00
|
|
|
from lib.request.basic import parseResponse
|
2010-03-31 14:50:47 +04:00
|
|
|
from lib.request.direct import direct
|
2008-12-05 18:34:13 +03:00
|
|
|
from lib.request.comparison import comparison
|
2010-09-15 16:45:41 +04:00
|
|
|
from lib.request.methodrequest import MethodRequest
|
2010-10-25 22:38:54 +04:00
|
|
|
from lib.utils.checkpayload import checkPayload
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
|
|
|
|
class Connect:
    """
    This class defines methods used to perform HTTP requests
    """

    @staticmethod
    def __getPageProxy(**kwargs):
        # Thin indirection around getPage() so that getPage() can call
        # itself again (retries, redirect re-targeting) through a stable
        # entry point while forwarding the exact same keyword arguments.
        return Connect.getPage(**kwargs)

    @staticmethod
    def getPage(**kwargs):
        """
        This method connects to the target url or proxy and returns
        the target url page content

        Recognized keyword arguments (all optional):
          url        -- target url (defaults to conf.url)
          get        -- GET query string to append to the url
          post       -- POST body
          method     -- explicit HTTP method (uses MethodRequest)
          cookie     -- Cookie header value
          ua         -- User-Agent header value
          direct     -- request the url as-is (no injection parameters)
          multipart  -- multipart/form-data payload
          silent     -- suppress warnings and shorten the socket timeout
          raise404   -- raise on HTTP 404 responses (default True)
          auxHeaders -- dict of extra headers to set on the request
          response   -- return the raw connection object instead of the page

        Returns (page, responseHeaders) in the common case, (conn, None)
        when response=True, bare page when multipart is used, or
        (None, None) on silent/timeout failures.
        """

        # Optional pacing between requests: fixed delay takes precedence,
        # otherwise a quadratic CPU-throttle sleep is applied.
        if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)
        elif conf.cpuThrottle:
            delay = 0.00001 * (conf.cpuThrottle ** 2)
            time.sleep(delay)

        # Allocate a unique request id under a lock: getPage() can be
        # called from multiple threads sharing kb.lastRequestUID.
        kb.locks.reqLock.acquire()

        kb.lastRequestUID += 1
        requestID = kb.lastRequestUID

        kb.locks.reqLock.release()

        # Spaces are the only character pre-encoded here; everything else
        # is expected to be already url-encoded by the caller.
        url = kwargs.get('url', conf.url).replace(" ", "%20")
        get = kwargs.get('get', None)
        post = kwargs.get('post', None)
        method = kwargs.get('method', None)
        cookie = kwargs.get('cookie', None)
        ua = kwargs.get('ua', None)
        direct = kwargs.get('direct', False)
        multipart = kwargs.get('multipart', False)
        silent = kwargs.get('silent', False)
        raise404 = kwargs.get('raise404', True)
        auxHeaders = kwargs.get('auxHeaders', None)
        response = kwargs.get('response', False)

        page = ""
        cookieStr = ""
        # requestMsg/responseMsg accumulate a human-readable HTTP trace
        # that is later written to the traffic log and verbose output.
        requestMsg = "HTTP request [#%d]:\n%s " % (requestID, conf.method)
        # urlsplit()[2] is the path component; an empty path logs as "/".
        requestMsg += "%s" % urlparse.urlsplit(url)[2] or "/"
        responseMsg = "HTTP response "
        requestHeaders = ""
        responseHeaders = ""

        try:
            if silent:
                # Short process-wide timeout so silent probes fail fast;
                # restored to conf.timeout after the try block.
                socket.setdefaulttimeout(3)

            if direct:
                # Direct request: re-encode any query string present in
                # the url itself instead of merging conf.parameters.
                if "?" in url:
                    url, params = url.split("?")
                    params = urlencode(params)
                    url = "%s?%s" % (url, params)
                    requestMsg += "?%s" % params

            elif multipart:
                # Needed in this form because of potential circle dependency
                # problem (option -> update -> connect -> option)
                from lib.core.option import proxyHandler

                multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
                conn = multipartOpener.open(url, multipart)
                page = conn.read()
                responseHeaders = conn.info()
                page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))

                # Multipart requests short-circuit: no logging/parsing below.
                return page

            else:
                # Fall back to the parsed target parameters when the caller
                # did not supply explicit GET/POST data.
                if conf.parameters.has_key(PLACE.GET) and not get:
                    get = conf.parameters[PLACE.GET]

                if get:
                    url = "%s?%s" % (url, get)
                    requestMsg += "?%s" % get

                if conf.method == HTTPMETHOD.POST:
                    if conf.parameters.has_key(PLACE.POST) and not post:
                        post = conf.parameters[PLACE.POST]

            # e.g. " HTTP/1.1", taken from httplib's configured version.
            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

            # Perform HTTP request
            headers = forgeHeaders(cookie, ua)

            # Re-apply authentication headers captured from earlier
            # requests (see the kb.authHeader capture further below).
            if kb.authHeader:
                headers["Authorization"] = kb.authHeader

            if kb.proxyAuthHeader:
                headers["Proxy-authorization"] = kb.proxyAuthHeader

            if auxHeaders:
                for key, item in auxHeaders.items():
                    headers[key] = item

            if method:
                # MethodRequest allows arbitrary HTTP methods (e.g. HEAD)
                # that plain urllib2.Request cannot express.
                req = MethodRequest(url, post, headers)
                req.set_method(method)
            else:
                req = urllib2.Request(url, post, headers)

            # Build a printable "Cookie: ..." string out of the cookiejar
            # for traffic logging purposes only (NOTE: rebinds the local
            # 'cookie' variable in the process).
            if not conf.dropSetCookie and conf.cj:
                for _, cookie in enumerate(conf.cj):
                    if not cookieStr:
                        cookieStr = "Cookie: "

                    cookie = getUnicode(cookie)
                    # Parses the cookie's textual repr; assumes the format
                    # "<Cookie NAME=VALUE for domain/path>" -- slice [8:index]
                    # extracts "NAME=VALUE" (TODO confirm against cookielib).
                    index = cookie.index(" for ")

                    cookieStr += "%s; " % cookie[8:index]

            conn = urllib2.urlopen(req)

            # Reconstruct the headers actually sent, including the ones
            # urllib2 adds implicitly, for the request trace.
            if not req.has_header("Accept-Encoding"):
                requestHeaders += "Accept-Encoding: identity\n"

            requestHeaders += "\n".join(["%s: %s" % (header, value) for header, value in req.header_items()])

            if not req.has_header("Cookie") and cookieStr:
                # [:-2] drops the trailing "; " separator.
                requestHeaders += "\n%s" % cookieStr[:-2]

            if not req.has_header("Connection"):
                requestHeaders += "\nConnection: close"

            requestMsg += "\n%s" % requestHeaders

            if post:
                requestMsg += "\n%s" % post

            requestMsg += "\n"

            # Custom verbosity level 8: full request trace.
            logger.log(8, requestMsg)

            # Remember auth headers urllib2 negotiated so they can be
            # re-sent pro-actively on subsequent requests.
            if not kb.authHeader and req.has_header("Authorization"):
                kb.authHeader = req.get_header("Authorization")

            if not kb.proxyAuthHeader and req.has_header("Proxy-authorization"):
                kb.proxyAuthHeader = req.get_header("Proxy-authorization")

            # redurl/redcode are set by sqlmap's redirect handler
            # (attributes are probed with hasattr -- not standard urllib2).
            # Ask the user once whether to follow the redirect target.
            if hasattr(conn, "redurl") and hasattr(conn, "redcode") and not conf.redirectHandled:
                msg = "sqlmap got a %d redirect to " % conn.redcode
                msg += "%s - What target address do you " % conn.redurl
                msg += "want to use from now on? %s " % conf.url
                msg += "(default) or provide another target address based "
                msg += "also on the redirection got from the application\n"

                while True:
                    choice = readInput(msg, default="1")

                    if not choice or choice == "1":
                        # Keep the current conf.url.
                        pass
                    else:
                        # Re-target and redo this request with the new url.
                        conf.url = choice
                        return Connect.__getPageProxy(**kwargs)

                    # Single-pass loop: either returns above or breaks here.
                    break

                conf.redirectHandled = True

            # Reset the number of connection retries
            conf.retriesCount = 0

            # Return response object
            if response:
                return conn, None

            # Get HTTP response
            page = conn.read()
            code = conn.code
            status = conn.msg
            responseHeaders = conn.info()
            page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))

            # Surface any DBMS error message found in the response body.
            msg = extractErrorMessage(page)

            if msg and conf.parseErrors:
                logger.error("error message: '%s'" % msg)

        except urllib2.HTTPError, e:
            code = e.code
            status = e.msg

            if e.code == 401:
                errMsg = "not authorized, try to provide right HTTP "
                errMsg += "authentication type and valid credentials (%d)" % code
                raise sqlmapConnectionException, errMsg
            elif e.code == 404 and raise404:
                errMsg = "page not found (%d)" % code
                raise sqlmapConnectionException, errMsg
            else:
                # Other HTTP error codes are not fatal: keep the error
                # page body/headers and fall through to normal processing.
                try:
                    page = e.read()
                    responseHeaders = e.info()
                except socket.timeout:
                    warnMsg = "connection timed out while trying "
                    warnMsg += "to get error page information (%d)" % code
                    logger.warn(warnMsg)
                    return None, None

            debugMsg = "got HTTP error code: %d (%s)" % (code, status)
            logger.debug(debugMsg)

        except (urllib2.URLError, socket.error, socket.timeout, httplib.BadStatusLine, httplib.IncompleteRead), e:
            # Classify the failure by inspecting the traceback text rather
            # than the exception type (the exception classes overlap).
            tbMsg = traceback.format_exc()

            if "URLError" in tbMsg or "error" in tbMsg:
                warnMsg = "unable to connect to the target url"
            elif "timeout" in tbMsg:
                warnMsg = "connection timed out to the target url"
            elif "BadStatusLine" in tbMsg:
                warnMsg = "the target url responded with an unknown HTTP "
                warnMsg += "status code, try to force the HTTP User-Agent "
                warnMsg += "header with option --user-agent or -a"
            elif "IncompleteRead" in tbMsg:
                warnMsg = "there was an incomplete read error while retrieving data "
                warnMsg += "from the target url"
            else:
                warnMsg = "unable to connect to the target url"

            if "BadStatusLine" not in tbMsg:
                warnMsg += " or proxy"

            if silent:
                return None, None
            elif conf.retriesCount < conf.retries:
                conf.retriesCount += 1

                warnMsg += ", sqlmap is going to retry the request"
                logger.critical(warnMsg)

                time.sleep(1)

                # Restore the normal timeout (may have been shortened by
                # 'silent' above) before retrying the same request.
                socket.setdefaulttimeout(conf.timeout)
                return Connect.__getPageProxy(**kwargs)
            else:
                socket.setdefaulttimeout(conf.timeout)
                raise sqlmapConnectionException, warnMsg

        # Success path: restore the configured process-wide timeout.
        socket.setdefaulttimeout(conf.timeout)

        page = sanitizeAsciiString(page)
        page = getUnicode(page)
        # Feed the response into the knowledge base parsers (forms, DBMS
        # fingerprints, etc.).
        parseResponse(page, responseHeaders)

        responseMsg += "[#%d] (%d %s):\n" % (requestID, code, status)
        logHeaders = "\n".join(["%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, value) for (key, value) in responseHeaders.items()])

        logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, page))

        # Verbosity <= 5 logs headers only; > 5 also logs the full body.
        if conf.verbose <= 5:
            responseMsg += getUnicode(logHeaders)
        elif conf.verbose > 5:
            responseMsg += "%s\n%s\n" % (logHeaders, page)

        # Custom verbosity level 7: response trace.
        logger.log(7, responseMsg)

        return page, responseHeaders

    @staticmethod
    def queryPage(value=None, place=None, content=False, getSeqMatcher=False, silent=False, method=None, auxHeaders=None, response=False, raise404 = None):
        """
        This method calls a function to get the target url page content
        and returns its page MD5 hash or a boolean value in case of
        string match check ('--string' command line parameter)

        value -- payload-carrying parameter string to inject
        place -- injection place (PLACE.*); defaults to kb.injPlace
        content/response -- return (page, headers) instead of a comparison
        getSeqMatcher -- return both boolean and sequence-matcher comparisons
        """

        # Direct database connection (-d switch) bypasses HTTP entirely.
        if conf.direct:
            return direct(value, content)

        get = None
        post = None
        cookie = None
        ua = None
        page = None
        pageLength = None
        uri = None
        # URI-place requests must not 404-raise by default (the forged
        # URI itself is expected to often hit non-existent paths).
        raise404 = place != PLACE.URI if raise404 is None else raise404
        # Per-place flag controlling payload-delimiter removal encoding.
        toUrlencode = { PLACE.GET: True, PLACE.POST: True, PLACE.COOKIE: conf.cookieUrlencode, PLACE.UA: True, PLACE.URI: False }

        if not place:
            place = kb.injPlace

        payload = agent.extractPayload(value)

        if payload:
            # Run the payload through any user-selected tamper scripts,
            # then splice the tampered payload back into the value.
            if kb.tamperFunctions:
                for function in kb.tamperFunctions:
                    payload = function(payload)

                value = agent.replacePayload(value, payload)

            # Custom verbosity level 9: payload trace.
            logger.log(9, payload)

        if place == PLACE.COOKIE and conf.cookieUrlencode:
            value = agent.removePayloadDelimiters(value, False)
            value = urlEncodeCookieValues(value)
        elif place:
            value = agent.removePayloadDelimiters(value, toUrlencode[place])

        if conf.checkPayload:
            checkPayload(value)

        # For each request place, use the injected value when this is the
        # active injection place, otherwise the original parsed parameters.
        if PLACE.GET in conf.parameters:
            get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value

        if PLACE.POST in conf.parameters:
            post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value

        if PLACE.COOKIE in conf.parameters:
            cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value

        if PLACE.UA in conf.parameters:
            ua = conf.parameters[PLACE.UA] if place != PLACE.UA or not value else value

        if PLACE.URI in conf.parameters:
            uri = conf.url if place != PLACE.URI or not value else value
        else:
            uri = conf.url

        # Periodically visit the "safe" url (--safe-url/--safe-freq) to
        # avoid the target invalidating the session.
        if conf.safUrl and conf.saFreq > 0:
            kb.queryCounter += 1
            if kb.queryCounter % conf.saFreq == 0:
                Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua)

        # Null-connection optimization: fetch only the page length via
        # HEAD or a Range request instead of the whole body.
        if not content and not response and kb.nullConnection:
            if kb.nullConnection == NULLCONNECTION.HEAD:
                method = HTTPMETHOD.HEAD
            elif kb.nullConnection == NULLCONNECTION.RANGE:
                if not auxHeaders:
                    auxHeaders = {}

                # Request only the last byte; Content-Range reveals the size.
                auxHeaders["Range"] = "bytes=-1"

            _, headers = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404)

            if kb.nullConnection == NULLCONNECTION.HEAD and 'Content-Length' in headers:
                pageLength = int(headers['Content-Length'])
            elif kb.nullConnection == NULLCONNECTION.RANGE and 'Content-Range' in headers:
                # Content-Range is "bytes X-Y/TOTAL"; take TOTAL after '/'.
                pageLength = int(headers['Content-Range'][headers['Content-Range'].find('/') + 1:])

        # Null connection unavailable or unhelpful: fetch the full page.
        if not pageLength:
            page, headers = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404)

            if conf.textOnly:
                page = getFilteredPageContent(page)

        if content or response:
            return page, headers
        elif getSeqMatcher:
            return comparison(page, headers, getSeqMatcher=False, pageLength=pageLength), comparison(page, headers, getSeqMatcher=True, pageLength=pageLength)
        elif pageLength or page:
            return comparison(page, headers, getSeqMatcher, pageLength)
        else:
            return False
|