mirror of
https://github.com/sqlmapproject/sqlmap.git
synced 2024-11-22 17:46:37 +03:00
Adding initial hook to receive the request/response pairs
This commit is contained in:
parent
5ec44b8346
commit
8df4cc3983
|
@ -2599,6 +2599,9 @@ def logHTTPTraffic(requestLogMsg, responseLogMsg):
|
||||||
"""
|
"""
|
||||||
Logs HTTP traffic to the output file
|
Logs HTTP traffic to the output file
|
||||||
"""
|
"""
|
||||||
|
threadData = getCurrentThreadData()
|
||||||
|
assert threadData.requestCollector is not None, "Request collector should be initialized by now"
|
||||||
|
threadData.requestCollector.collectRequest(requestLogMsg, responseLogMsg)
|
||||||
|
|
||||||
if not conf.trafficFile:
|
if not conf.trafficFile:
|
||||||
return
|
return
|
||||||
|
|
|
@ -149,6 +149,7 @@ from lib.request.pkihandler import HTTPSPKIAuthHandler
|
||||||
from lib.request.rangehandler import HTTPRangeHandler
|
from lib.request.rangehandler import HTTPRangeHandler
|
||||||
from lib.request.redirecthandler import SmartRedirectHandler
|
from lib.request.redirecthandler import SmartRedirectHandler
|
||||||
from lib.request.templates import getPageTemplate
|
from lib.request.templates import getPageTemplate
|
||||||
|
from lib.utils.collect import RequestCollectorFactory
|
||||||
from lib.utils.crawler import crawl
|
from lib.utils.crawler import crawl
|
||||||
from lib.utils.deps import checkDependencies
|
from lib.utils.deps import checkDependencies
|
||||||
from lib.utils.search import search
|
from lib.utils.search import search
|
||||||
|
@ -1844,6 +1845,7 @@ def _setConfAttributes():
|
||||||
conf.scheme = None
|
conf.scheme = None
|
||||||
conf.tests = []
|
conf.tests = []
|
||||||
conf.trafficFP = None
|
conf.trafficFP = None
|
||||||
|
conf.requestCollectorFactory = None
|
||||||
conf.wFileType = None
|
conf.wFileType = None
|
||||||
|
|
||||||
def _setKnowledgeBaseAttributes(flushAll=True):
|
def _setKnowledgeBaseAttributes(flushAll=True):
|
||||||
|
@ -2228,6 +2230,11 @@ def _setTrafficOutputFP():
|
||||||
|
|
||||||
conf.trafficFP = openFile(conf.trafficFile, "w+")
|
conf.trafficFP = openFile(conf.trafficFile, "w+")
|
||||||
|
|
||||||
|
def _setupRequestCollector():
    """
    Wires a request collector into the current thread.

    Builds a RequestCollectorFactory driven by the --collect-requests
    option and stores both the factory (on conf) and a freshly created
    collector (on the current thread's data).
    """

    factory = RequestCollectorFactory(collect=conf.collectRequests)
    conf.requestCollectorFactory = factory

    currentThread = getCurrentThreadData()
    currentThread.requestCollector = factory.create()
|
||||||
|
|
||||||
def _setDNSServer():
|
def _setDNSServer():
|
||||||
if not conf.dnsDomain:
|
if not conf.dnsDomain:
|
||||||
return
|
return
|
||||||
|
@ -2604,6 +2611,7 @@ def init():
|
||||||
_setTamperingFunctions()
|
_setTamperingFunctions()
|
||||||
_setWafFunctions()
|
_setWafFunctions()
|
||||||
_setTrafficOutputFP()
|
_setTrafficOutputFP()
|
||||||
|
_setupRequestCollector()
|
||||||
_resolveCrossReferences()
|
_resolveCrossReferences()
|
||||||
_checkWebSocket()
|
_checkWebSocket()
|
||||||
|
|
||||||
|
|
|
@ -197,6 +197,7 @@ optDict = {
|
||||||
"binaryFields": "string",
|
"binaryFields": "string",
|
||||||
"charset": "string",
|
"charset": "string",
|
||||||
"checkInternet": "boolean",
|
"checkInternet": "boolean",
|
||||||
|
"collectRequests": "string",
|
||||||
"crawlDepth": "integer",
|
"crawlDepth": "integer",
|
||||||
"crawlExclude": "string",
|
"crawlExclude": "string",
|
||||||
"csvDel": "string",
|
"csvDel": "string",
|
||||||
|
|
|
@ -38,6 +38,8 @@ class _ThreadData(threading.local):
|
||||||
Resets thread data model
|
Resets thread data model
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
self.requestCollector = None
|
||||||
|
|
||||||
self.disableStdOut = False
|
self.disableStdOut = False
|
||||||
self.hashDBCursor = None
|
self.hashDBCursor = None
|
||||||
self.inTransaction = False
|
self.inTransaction = False
|
||||||
|
|
|
@ -631,6 +631,10 @@ def cmdLineParser(argv=None):
|
||||||
action="store_true",
|
action="store_true",
|
||||||
help="Never ask for user input, use the default behaviour")
|
help="Never ask for user input, use the default behaviour")
|
||||||
|
|
||||||
|
general.add_option("--collect-requests", dest="collectRequests",
|
||||||
|
action="store_true",
|
||||||
|
help="Collect requests in HAR format")
|
||||||
|
|
||||||
general.add_option("--binary-fields", dest="binaryFields",
|
general.add_option("--binary-fields", dest="binaryFields",
|
||||||
help="Result fields having binary values (e.g. \"digest\")")
|
help="Result fields having binary values (e.g. \"digest\")")
|
||||||
|
|
||||||
|
|
32
lib/utils/collect.py
Normal file
32
lib/utils/collect.py
Normal file
|
@ -0,0 +1,32 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
"""
|
||||||
|
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
|
||||||
|
See the file 'doc/COPYING' for copying permission
|
||||||
|
"""
|
||||||
|
|
||||||
|
from lib.core.data import logger
|
||||||
|
|
||||||
|
|
||||||
|
class RequestCollectorFactory:
    """
    Builds RequestCollector instances.

    When collection is disabled the produced collector has its
    collectRequest method stubbed out, so callers may invoke it
    unconditionally without any effect.
    """

    def __init__(self, collect=False):
        # True when --collect-requests was supplied on the command line
        self.collect = collect

    def create(self):
        """Return a new collector; a no-op one when collection is disabled."""
        instance = RequestCollector()
        if not self.collect:
            # Per-instance override: disable collection without subclassing
            instance.collectRequest = self._noop
        return instance

    @staticmethod
    def _noop(*args, **kwargs):
        pass
|
||||||
|
|
||||||
|
|
||||||
|
class RequestCollector:
    """
    Initial hook receiving request/response pairs.

    NOTE(review): currently a placeholder — it only logs the sizes of the
    two messages; actual HAR-style collection is presumably added later.
    """

    def collectRequest(self, requestMessage, responseMessage):
        # Lazy %-style args keep formatting out of the hot path
        logger.info(
            "Received request/response: %s/%s",
            len(requestMessage),
            len(responseMessage),
        )
|
Loading…
Reference in New Issue
Block a user