Mirror of https://github.com/sqlmapproject/sqlmap.git
Synced 2024-11-21 17:16:35 +03:00
Update for #2597
This commit is contained in:
parent aef5d6667f
commit 1678b606a2
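This change removes the --collect-requests option and its lib/utils/collect.py backend in favor of a new --har switch that logs all HTTP traffic into a HAR file (the new lib/utils/har.py below). A minimal command-line sketch; the target URL and output path are illustrative only, not part of the commit:

    python sqlmap.py -u "http://127.0.0.1/vuln.php?id=1" --har=/tmp/sqlmap.har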
@@ -117,7 +117,6 @@ def checkSqlInjection(place, parameter, value):

    while tests:
        test = tests.pop(0)
        threadData.requestCollector.reset()

        try:
            if kb.endDetection:
@@ -701,7 +700,6 @@ def checkSqlInjection(place, parameter, value):
    injection.data[stype].matchRatio = kb.matchRatio
    injection.data[stype].trueCode = trueCode
    injection.data[stype].falseCode = falseCode
    injection.data[stype].collectedRequests = threadData.requestCollector.obtain()

    injection.conf.textOnly = conf.textOnly
    injection.conf.titles = conf.titles
@@ -2601,17 +2601,15 @@ def logHTTPTraffic(requestLogMsg, responseLogMsg):
    """
    Logs HTTP traffic to the output file
    """
    threadData = getCurrentThreadData()
    assert threadData.requestCollector is not None, "Request collector should be initialized by now"
    threadData.requestCollector.collectRequest(requestLogMsg, responseLogMsg)

    if conf.harFile:
        conf.httpCollector.collectRequest(requestLogMsg, responseLogMsg)

    if not conf.trafficFile:
        return

    with kb.locks.log:
        dataToTrafficFile("%s%s" % (requestLogMsg, os.linesep))
        dataToTrafficFile("%s%s" % (responseLogMsg, os.linesep))
        dataToTrafficFile("%s%s%s%s" % (os.linesep, 76 * '#', os.linesep, os.linesep))

def getPageTemplate(payload, place): # Cross-linked function
    raise NotImplementedError
@@ -149,7 +149,7 @@ from lib.request.pkihandler import HTTPSPKIAuthHandler
from lib.request.rangehandler import HTTPRangeHandler
from lib.request.redirecthandler import SmartRedirectHandler
from lib.request.templates import getPageTemplate
from lib.utils.collect import RequestCollectorFactory
from lib.utils.har import HTTPCollectorFactory
from lib.utils.crawler import crawl
from lib.utils.deps import checkDependencies
from lib.utils.search import search
@@ -1830,6 +1830,7 @@ def _setConfAttributes():
    conf.dumpPath = None
    conf.hashDB = None
    conf.hashDBFile = None
    conf.httpCollector = None
    conf.httpHeaders = []
    conf.hostname = None
    conf.ipv6 = False
@@ -1845,7 +1846,7 @@ def _setConfAttributes():
    conf.scheme = None
    conf.tests = []
    conf.trafficFP = None
    conf.requestCollectorFactory = None
    conf.HARCollectorFactory = None
    conf.wFileType = None

def _setKnowledgeBaseAttributes(flushAll=True):
@@ -2230,10 +2231,11 @@ def _setTrafficOutputFP():

    conf.trafficFP = openFile(conf.trafficFile, "w+")

def _setupRequestCollector():
    conf.requestCollectorFactory = RequestCollectorFactory(collect=conf.collectRequests)
    threadData = getCurrentThreadData()
    threadData.requestCollector = conf.requestCollectorFactory.create()

def _setupHTTPCollector():
    if not conf.harFile:
        return

    conf.httpCollector = HTTPCollectorFactory(conf.harFile).create()

def _setDNSServer():
    if not conf.dnsDomain:
@@ -2611,7 +2613,7 @@ def init():
    _setTamperingFunctions()
    _setWafFunctions()
    _setTrafficOutputFP()
    _setupRequestCollector()
    _setupHTTPCollector()
    _resolveCrossReferences()
    _checkWebSocket()
@@ -197,7 +197,6 @@ optDict = {
    "binaryFields": "string",
    "charset": "string",
    "checkInternet": "boolean",
    "collectRequests": "string",
    "crawlDepth": "integer",
    "crawlExclude": "string",
    "csvDel": "string",
@@ -206,6 +205,7 @@ optDict = {
    "flushSession": "boolean",
    "forms": "boolean",
    "freshQueries": "boolean",
    "harFile": "string",
    "hexConvert": "boolean",
    "outputDir": "string",
    "parseErrors": "boolean",
@@ -19,7 +19,7 @@ from lib.core.enums import DBMS_DIRECTORY_NAME
from lib.core.enums import OS

# sqlmap version (<major>.<minor>.<month>.<monthly commit>)
VERSION = "1.1.7.2"
VERSION = "1.1.7.3"
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
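As a quick sanity check of the version logic above, the bumped value evaluates as follows (worked out by hand, not part of the diff):

    VERSION = "1.1.7.3"
    # VERSION.count('.') == 3 and the last component is not '0', so:
    TYPE = "dev"
    # the full VERSION string is kept because the last component is not '0', so:
    VERSION_STRING = "sqlmap/1.1.7.3#dev"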
@@ -618,9 +618,6 @@ def cmdLineParser(argv=None):
        general = OptionGroup(parser, "General", "These options can be used "
                              "to set some general working parameters")

        #general.add_option("-x", dest="xmlFile",
        #                   help="Dump the data into an XML file")

        general.add_option("-s", dest="sessionFile",
                           help="Load session from a stored (.sqlite) file")
@@ -632,10 +629,6 @@ def cmdLineParser(argv=None):
                           action="store_true",
                           help="Never ask for user input, use the default behaviour")

        general.add_option("--collect-requests", dest="collectRequests",
                           action="store_true",
                           help="Collect requests in HAR format")

        general.add_option("--binary-fields", dest="binaryFields",
                           help="Result fields having binary values (e.g. \"digest\")")
@@ -661,8 +654,7 @@ def cmdLineParser(argv=None):

        general.add_option("--eta", dest="eta",
                           action="store_true",
                           help="Display for each output the "
                                "estimated time of arrival")
                           help="Display for each output the estimated time of arrival")

        general.add_option("--flush-session", dest="flushSession",
                           action="store_true",
@@ -676,6 +668,9 @@ def cmdLineParser(argv=None):
                           action="store_true",
                           help="Ignore query results stored in session file")

        general.add_option("--har", dest="harFile",
                           help="Log all HTTP traffic into a HAR file")

        general.add_option("--hex", dest="hexConvert",
                           action="store_true",
                           help="Use DBMS hex function(s) for data retrieval")
@@ -1,309 +0,0 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

from BaseHTTPServer import BaseHTTPRequestHandler
from httplib import HTTPResponse
from StringIO import StringIO
import base64
import re

from lib.core.data import logger
from lib.core.settings import VERSION


class RequestCollectorFactory:

    def __init__(self, collect=False):
        self.collect = collect

    def create(self):
        collector = RequestCollector()

        if not self.collect:
            collector.collectRequest = self._noop
        else:
            logger.info("Request collection is enabled.")

        return collector

    @staticmethod
    def _noop(*args, **kwargs):
        pass


class RequestCollector:

    def __init__(self):
        self.reset()

    def collectRequest(self, requestMessage, responseMessage):
        self.messages.append(RawPair(requestMessage, responseMessage))

    def reset(self):
        self.messages = []

    def obtain(self):
        if self.messages:
            return {"log": {
                "version": "1.2",
                "creator": {"name": "SQLMap", "version": VERSION},
                "entries": [pair.toEntry().toDict() for pair in self.messages],
            }}


class RawPair:

    def __init__(self, request, response):
        self.request = request
        self.response = response

    def toEntry(self):
        return Entry(request=Request.parse(self.request),
                     response=Response.parse(self.response))


class Entry:

    def __init__(self, request, response):
        self.request = request
        self.response = response

    def toDict(self):
        return {
            "request": self.request.toDict(),
            "response": self.response.toDict(),
        }


class Request:

    def __init__(self, method, path, httpVersion, headers, postBody=None, raw=None, comment=None):
        self.method = method
        self.path = path
        self.httpVersion = httpVersion
        self.headers = headers or {}
        self.postBody = postBody
        self.comment = comment
        self.raw = raw

    @classmethod
    def parse(cls, raw):
        request = HTTPRequest(raw)
        return cls(method=request.command,
                   path=request.path,
                   httpVersion=request.request_version,
                   headers=request.headers,
                   postBody=request.rfile.read(),
                   comment=request.comment,
                   raw=raw)

    @property
    def url(self):
        host = self.headers.get('Host', 'unknown')
        return "http://%s%s" % (host, self.path)

    def toDict(self):
        out = {
            "httpVersion": self.httpVersion,
            "method": self.method,
            "url": self.url,
            "headers": [dict(name=key, value=value) for key, value in self.headers.items()],
            "comment": self.comment,
        }
        if self.postBody:
            contentType = self.headers.get('Content-Type')
            out["postData"] = {
                "mimeType": contentType,
                "text": self.postBody,
            }
        return out


class Response:

    extract_status = re.compile(r'\((\d{3}) (.*)\)')

    def __init__(self, httpVersion, status, statusText, headers, content, raw=None, comment=None):
        self.raw = raw
        self.httpVersion = httpVersion
        self.status = status
        self.statusText = statusText
        self.headers = headers
        self.content = content
        self.comment = comment

    @classmethod
    def parse(cls, raw):
        altered = raw
        comment = None

        if altered.startswith("HTTP response ["):
            io = StringIO(raw)
            first_line = io.readline()
            parts = cls.extract_status.search(first_line)
            status_line = "HTTP/1.0 %s %s" % (parts.group(1), parts.group(2))
            remain = io.read()
            altered = status_line + "\n" + remain
            comment = first_line

        response = HTTPResponse(FakeSocket(altered))
        response.begin()
        return cls(httpVersion="HTTP/1.1" if response.version == 11 else "HTTP/1.0",
                   status=response.status,
                   statusText=response.reason,
                   headers=response.msg,
                   content=response.read(-1),
                   comment=comment,
                   raw=raw)

    def toDict(self):
        content = {
            "mimeType": self.headers.get('Content-Type'),
            "text": self.content,
        }

        binary = set(['\0', '\1'])
        if any(c in binary for c in self.content):
            content["encoding"] = "base64"
            content["text"] = base64.b64encode(self.content)

        return {
            "httpVersion": self.httpVersion,
            "status": self.status,
            "statusText": self.statusText,
            "headers": [dict(name=key, value=value) for key, value in self.headers.items()],
            "content": content,
            "comment": self.comment,
        }


class FakeSocket:
    # Original source:
    # https://stackoverflow.com/questions/24728088/python-parse-http-response-string

    def __init__(self, response_text):
        self._file = StringIO(response_text)

    def makefile(self, *args, **kwargs):
        return self._file


class HTTPRequest(BaseHTTPRequestHandler):
    # Original source:
    # https://stackoverflow.com/questions/4685217/parse-raw-http-headers

    def __init__(self, request_text):
        self.comment = None
        self.rfile = StringIO(request_text)
        self.raw_requestline = self.rfile.readline()

        if self.raw_requestline.startswith("HTTP request ["):
            self.comment = self.raw_requestline
            self.raw_requestline = self.rfile.readline()

        self.error_code = self.error_message = None
        self.parse_request()

    def send_error(self, code, message):
        self.error_code = code
        self.error_message = message


if __name__ == '__main__':
    import unittest

    class RequestParseTest(unittest.TestCase):

        def test_basic_request(self):
            req = Request.parse("GET /test HTTP/1.0\r\n"
                                "Host: test\r\n"
                                "Connection: close")
            self.assertEqual("GET", req.method)
            self.assertEqual("/test", req.path)
            self.assertEqual("close", req.headers['Connection'])
            self.assertEqual("test", req.headers['Host'])
            self.assertEqual("HTTP/1.0", req.httpVersion)

        def test_with_request_as_logged_by_sqlmap(self):
            raw = "HTTP request [#75]:\nPOST /create.php HTTP/1.1\nHost: 127.0.0.1\nAccept-encoding: gzip,deflate\nCache-control: no-cache\nContent-type: application/x-www-form-urlencoded; charset=utf-8\nAccept: */*\nUser-agent: Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/8.0.552.215 Safari/534.10\nCookie: PHPSESSID=65c4a9cfbbe91f2d975d50ce5e8d1026\nContent-length: 138\nConnection: close\n\nname=test%27%29%3BSELECT%20LIKE%28%27ABCDEFG%27%2CUPPER%28HEX%28RANDOMBLOB%280.0.10000%2F2%29%29%29%29--&csrfmiddlewaretoken=594d26cfa3fad\n"  # noqa
            req = Request.parse(raw)
            self.assertEqual("POST", req.method)
            self.assertEqual("138", req.headers["Content-Length"])
            self.assertIn("csrfmiddlewaretoken", req.postBody)
            self.assertEqual("HTTP request [#75]:\n", req.comment)

    class RequestRenderTest(unittest.TestCase):
        def test_render_get_request(self):
            req = Request(method="GET",
                          path="/test.php",
                          headers={"Host": "example.com", "Content-Length": "0"},
                          httpVersion="HTTP/1.1",
                          comment="Hello World")
            out = req.toDict()
            self.assertEqual("GET", out["method"])
            self.assertEqual("http://example.com/test.php", out["url"])
            self.assertIn({"name": "Host", "value": "example.com"}, out["headers"])
            self.assertEqual("Hello World", out["comment"])
            self.assertEqual("HTTP/1.1", out["httpVersion"])

        def test_render_with_post_body(self):
            req = Request(method="POST",
                          path="/test.php",
                          headers={"Host": "example.com",
                                   "Content-Type": "application/x-www-form-urlencoded; charset=utf-8"},
                          httpVersion="HTTP/1.1",
                          postBody="name=test&csrfmiddlewaretoken=594d26cfa3fad\n")
            out = req.toDict()
            self.assertEqual(out["postData"], {
                "mimeType": "application/x-www-form-urlencoded; charset=utf-8",
                "text": "name=test&csrfmiddlewaretoken=594d26cfa3fad\n",
            })

    class ResponseParseTest(unittest.TestCase):
        def test_parse_standard_http_response(self):
            raw = "HTTP/1.1 404 Not Found\nContent-length: 518\nX-powered-by: PHP/5.6.30\nContent-encoding: gzip\nExpires: Thu, 19 Nov 1981 08:52:00 GMT\nVary: Accept-Encoding\nUri: http://127.0.0.1/\nServer: Apache/2.4.10 (Debian)\nConnection: close\nPragma: no-cache\nCache-control: no-store, no-cache, must-revalidate, post-check=0, pre-check=0\nDate: Fri, 23 Jun 2017 16:18:17 GMT\nContent-type: text/html; charset=UTF-8\n\n<!doctype html>\n<html>Test</html>\n"  # noqa
            resp = Response.parse(raw)
            self.assertEqual(resp.status, 404)
            self.assertEqual(resp.statusText, "Not Found")

        def test_parse_response_as_logged_by_sqlmap(self):
            raw = "HTTP response [#74] (200 OK):\nContent-length: 518\nX-powered-by: PHP/5.6.30\nContent-encoding: gzip\nExpires: Thu, 19 Nov 1981 08:52:00 GMT\nVary: Accept-Encoding\nUri: http://127.0.0.1/\nServer: Apache/2.4.10 (Debian)\nConnection: close\nPragma: no-cache\nCache-control: no-store, no-cache, must-revalidate, post-check=0, pre-check=0\nDate: Fri, 23 Jun 2017 16:18:17 GMT\nContent-type: text/html; charset=UTF-8\n\n<!doctype html>\n<html>Test</html>\n"  # noqa
            resp = Response.parse(raw)
            self.assertEqual(resp.status, 200)
            self.assertEqual(resp.statusText, "OK")
            self.assertEqual(resp.headers["Content-Length"], "518")
            self.assertIn("Test", resp.content)
            self.assertEqual("HTTP response [#74] (200 OK):\n", resp.comment)

    class ResponseRenderTest(unittest.TestCase):
        def test_simple_page_encoding(self):
            resp = Response(status=200, statusText="OK",
                            httpVersion="HTTP/1.1",
                            headers={"Content-Type": "text/html"},
                            content="<html>\n<body>Hello</body>\n</html>")
            out = resp.toDict()
            self.assertEqual(200, out["status"])
            self.assertEqual("OK", out["statusText"])
            self.assertIn({"name": "Content-Type", "value": "text/html"}, out["headers"])
            self.assertEqual(out["content"], {
                "mimeType": "text/html",
                "text": "<html>\n<body>Hello</body>\n</html>",
            })

        def test_simple_body_contains_binary_data(self):
            resp = Response(status=200, statusText="OK",
                            httpVersion="HTTP/1.1",
                            headers={"Content-Type": "application/octet-stream"},
                            content="test\0abc")
            out = resp.toDict()
            self.assertEqual(out["content"], {
                "encoding": "base64",
                "mimeType": "application/octet-stream",
                "text": "dGVzdABhYmM=",
            })

    unittest.main(buffer=False)
194  lib/utils/har.py  Normal file

@@ -0,0 +1,194 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

import base64
import BaseHTTPServer
import httplib
import re
import StringIO

from lib.core.data import logger
from lib.core.settings import VERSION

class HTTPCollectorFactory:
    def __init__(self, harFile=False):
        self.harFile = harFile

    def create(self):
        collector = HTTPCollector()

        return collector

class HTTPCollector:
    def __init__(self):
        self.messages = []

    def collectRequest(self, requestMessage, responseMessage):
        self.messages.append(RawPair(requestMessage, responseMessage))

    def obtain(self):
        return {"log": {
            "version": "1.2",
            "creator": {"name": "sqlmap", "version": VERSION},
            "entries": [pair.toEntry().toDict() for pair in self.messages],
        }}

class RawPair:
    def __init__(self, request, response):
        self.request = request
        self.response = response

    def toEntry(self):
        return Entry(request=Request.parse(self.request),
                     response=Response.parse(self.response))

class Entry:
    def __init__(self, request, response):
        self.request = request
        self.response = response

    def toDict(self):
        return {
            "request": self.request.toDict(),
            "response": self.response.toDict(),
        }

class Request:
    def __init__(self, method, path, httpVersion, headers, postBody=None, raw=None, comment=None):
        self.method = method
        self.path = path
        self.httpVersion = httpVersion
        self.headers = headers or {}
        self.postBody = postBody
        self.comment = comment
        self.raw = raw

    @classmethod
    def parse(cls, raw):
        request = HTTPRequest(raw)
        return cls(method=request.command,
                   path=request.path,
                   httpVersion=request.request_version,
                   headers=request.headers,
                   postBody=request.rfile.read(),
                   comment=request.comment,
                   raw=raw)

    @property
    def url(self):
        host = self.headers.get("Host", "unknown")
        return "http://%s%s" % (host, self.path)

    def toDict(self):
        out = {
            "httpVersion": self.httpVersion,
            "method": self.method,
            "url": self.url,
            "headers": [dict(name=key.capitalize(), value=value) for key, value in self.headers.items()],
            "comment": self.comment,
        }

        if self.postBody:
            contentType = self.headers.get("Content-Type")
            out["postData"] = {
                "mimeType": contentType,
                "text": self.postBody.rstrip("\r\n"),
            }

        return out

class Response:
    extract_status = re.compile(r'\((\d{3}) (.*)\)')

    def __init__(self, httpVersion, status, statusText, headers, content, raw=None, comment=None):
        self.raw = raw
        self.httpVersion = httpVersion
        self.status = status
        self.statusText = statusText
        self.headers = headers
        self.content = content
        self.comment = comment

    @classmethod
    def parse(cls, raw):
        altered = raw
        comment = None

        if altered.startswith("HTTP response ["):
            io = StringIO.StringIO(raw)
            first_line = io.readline()
            parts = cls.extract_status.search(first_line)
            status_line = "HTTP/1.0 %s %s" % (parts.group(1), parts.group(2))
            remain = io.read()
            altered = status_line + "\n" + remain
            comment = first_line

        response = httplib.HTTPResponse(FakeSocket(altered))
        response.begin()

        try:
            content = response.read(-1)
        except httplib.IncompleteRead:
            content = raw[raw.find("\n\n") + 2:].rstrip("\r\n")

        return cls(httpVersion="HTTP/1.1" if response.version == 11 else "HTTP/1.0",
                   status=response.status,
                   statusText=response.reason,
                   headers=response.msg,
                   content=content,
                   comment=comment,
                   raw=raw)

    def toDict(self):
        content = {
            "mimeType": self.headers.get("Content-Type"),
            "text": self.content,
        }

        binary = set(['\0', '\1'])
        if any(c in binary for c in self.content):
            content["encoding"] = "base64"
            content["text"] = base64.b64encode(self.content)

        return {
            "httpVersion": self.httpVersion,
            "status": self.status,
            "statusText": self.statusText,
            "headers": [dict(name=key.capitalize(), value=value) for key, value in self.headers.items() if key.lower() != "uri"],
            "content": content,
            "comment": self.comment,
        }

class FakeSocket:
    # Original source:
    # https://stackoverflow.com/questions/24728088/python-parse-http-response-string

    def __init__(self, response_text):
        self._file = StringIO.StringIO(response_text)

    def makefile(self, *args, **kwargs):
        return self._file

class HTTPRequest(BaseHTTPServer.BaseHTTPRequestHandler):
    # Original source:
    # https://stackoverflow.com/questions/4685217/parse-raw-http-headers

    def __init__(self, request_text):
        self.comment = None
        self.rfile = StringIO.StringIO(request_text)
        self.raw_requestline = self.rfile.readline()

        if self.raw_requestline.startswith("HTTP request ["):
            self.comment = self.raw_requestline
            self.raw_requestline = self.rfile.readline()

        self.error_code = self.error_message = None
        self.parse_request()

    def send_error(self, code, message):
        self.error_code = code
        self.error_message = message
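Taken together with the option.py and common.py hunks above, the wiring is: _setupHTTPCollector() creates the collector when --har is given, logHTTPTraffic() feeds it each request/response pair, and sqlmap.py serializes it on exit. A small sketch of that flow against the module above (Python 2, matching the codebase; the sample messages and file name are illustrative, and the lib.core imports must resolve, i.e. sqlmap's tree must be on the path):

    import json

    collector = HTTPCollectorFactory("traffic.har").create()
    collector.collectRequest("GET /test HTTP/1.0\nHost: test\nConnection: close",
                             "HTTP/1.0 200 OK\nContent-type: text/html\n\n<html>Test</html>")

    # Same serialization call as the sqlmap.py hunk below
    with open("traffic.har", "w+b") as f:
        f.write(json.dumps(collector.obtain(), indent=4, separators=(',', ': ')))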
@@ -15,6 +15,7 @@ import bdb
import distutils
import glob
import inspect
import json
import logging
import os
import re
@@ -40,6 +41,7 @@ try:
    from lib.core.common import getSafeExString
    from lib.core.common import getUnicode
    from lib.core.common import maskSensitiveData
    from lib.core.common import openFile
    from lib.core.common import setPaths
    from lib.core.common import weAreFrozen
    from lib.core.data import cmdLineOptions
@@ -327,6 +329,10 @@ def main():
    except KeyboardInterrupt:
        pass

    if conf.harFile:
        with openFile(conf.harFile, "w+b") as f:
            f.write(json.dumps(conf.httpCollector.obtain(), indent=4, separators=(',', ': ')))

    if cmdLineOptions.get("sqlmapShell"):
        cmdLineOptions.clear()
        conf.clear()
@@ -27,7 +27,7 @@ a97df93b552ee4e4ba3692eae870de7c lib/controller/handler.py
310efc965c862cfbd7b0da5150a5ad36 lib/controller/__init__.py
d58e85ffeac2471ef3af729076b3b5f7 lib/core/agent.py
6cc95a117fbd34ef31b9aa25520f0e31 lib/core/bigarray.py
d9a450dec19787649e0265b68956f020 lib/core/common.py
d40de0812b667b7be9d1755f82e18f17 lib/core/common.py
5065a4242a8cccf72f91e22e1007ae63 lib/core/convert.py
a8143dab9d3a27490f7d49b6b29ea530 lib/core/data.py
7936d78b1a7f1f008ff92bf2f88574ba lib/core/datatype.py
@@ -39,25 +39,25 @@ b9ff4e622c416116bee6024c0f050349 lib/core/enums.py
9381a0c7e8bc19986299e84f4edda1a0 lib/core/exception.py
310efc965c862cfbd7b0da5150a5ad36 lib/core/__init__.py
9ba39bf66e9ecd469446bdbbeda906c3 lib/core/log.py
f1531be15ed98555a9010e2db3c9da75 lib/core/optiondict.py
0ff0d360c02b4b92293aa7e5ee705d49 lib/core/option.py
5a34a1be62eab520cacc197b5eacda39 lib/core/optiondict.py
837f3859f007b9104b32f18e217e326a lib/core/option.py
5f2f56e6c5f274408df61943f1e080c0 lib/core/profiling.py
40be71cd774662a7b420caeb7051e7d5 lib/core/readlineng.py
d8e9250f3775119df07e9070eddccd16 lib/core/replication.py
785f86e3f963fa3798f84286a4e83ff2 lib/core/revision.py
40c80b28b3a5819b737a5a17d4565ae9 lib/core/session.py
77c9531dcb52345e86c07e1973859e79 lib/core/settings.py
191a4cb7eea0a46315c894abd9491bcf lib/core/settings.py
d91291997d2bd2f6028aaf371bf1d3b6 lib/core/shell.py
2ad85c130cc5f2b3701ea85c2f6bbf20 lib/core/subprocessng.py
baa3f47efa6701076d026e43a6874a51 lib/core/target.py
8970b88627902239d695280b1160e16c lib/core/testing.py
40881e63d516d8304fc19971049cded0 lib/core/threads.py
b8306192d980abdc8d669c024511e9a1 lib/core/threads.py
ad74fc58fc7214802fd27067bce18dd2 lib/core/unescaper.py
1f1fa616b5b19308d78c610ec8046399 lib/core/update.py
4d13ed693401a498b6d073a2a494bd83 lib/core/wordlist.py
310efc965c862cfbd7b0da5150a5ad36 lib/__init__.py
8c4b04062db2245d9e190b413985202a lib/parse/banner.py
89c837c3b2cb2853839e127978bed8a6 lib/parse/cmdline.py
0557c5fee58f2578e0dd502b1839e3a3 lib/parse/cmdline.py
3a31657bc38f277d0016ff6d50bde61f lib/parse/configfile.py
14539f1be714d4f1ed042067d63bc50a lib/parse/handler.py
64e5bb3ecbdd75144500588b437ba8da lib/parse/headers.py
@@ -103,6 +103,7 @@ a73c3ddd0de359507a8ad59b363aa963 lib/utils/api.py
ed70f1ca9113664043ec9e6778e48078 lib/utils/crawler.py
ba12c69a90061aa14d848b8396e79191 lib/utils/deps.py
3b9fd519164e0bf275d5fd361c3f11ff lib/utils/getch.py
3b93150eea78ea84fa0461a55e3e48ec lib/utils/har.py
ccfdad414ce2ec0c394c3deaa39a82bf lib/utils/hashdb.py
12e0e0ab70c6fe5786bc561c35dc067f lib/utils/hash.py
e76a08237ee6a4cd6855af79610ea8a5 lib/utils/htmlentities.py
@@ -223,7 +224,7 @@ c3cc8b7727161e64ab59f312c33b541a shell/stager.aspx_
1f7f125f30e0e800beb21e2ebbab18e1 shell/stager.jsp_
01e3505e796edf19aad6a996101c81c9 shell/stager.php_
0751a45ac4c130131f2cdb74d866b664 sqlmapapi.py
c056277de4394bed29f35b74ffc4d209 sqlmap.py
290b98e6923960e17bdef3db0a05b44c sqlmap.py
08c711a470d7e0bf705320ba3c48b886 tamper/apostrophemask.py
e8509df10d3f1c28014d7825562d32dd tamper/apostrophenullencode.py
bb27f7dc980ea07fcfedbd7da5e5e029 tamper/appendnullbyte.py