diff --git a/lib/core/common.py b/lib/core/common.py
index 698dfba69..709a40ee7 100644
--- a/lib/core/common.py
+++ b/lib/core/common.py
@@ -2599,6 +2599,9 @@ def logHTTPTraffic(requestLogMsg, responseLogMsg):
     """
     Logs HTTP traffic to the output file
     """
+    threadData = getCurrentThreadData()
+    assert threadData.requestCollector is not None, "Request collector should be initialized by now"
+    threadData.requestCollector.collectRequest(requestLogMsg, responseLogMsg)
 
     if not conf.trafficFile:
         return
diff --git a/lib/core/option.py b/lib/core/option.py
index 5ae0f5aca..c0c59eb05 100755
--- a/lib/core/option.py
+++ b/lib/core/option.py
@@ -149,6 +149,7 @@ from lib.request.pkihandler import HTTPSPKIAuthHandler
 from lib.request.rangehandler import HTTPRangeHandler
 from lib.request.redirecthandler import SmartRedirectHandler
 from lib.request.templates import getPageTemplate
+from lib.utils.collect import RequestCollectorFactory
 from lib.utils.crawler import crawl
 from lib.utils.deps import checkDependencies
 from lib.utils.search import search
@@ -1844,6 +1845,7 @@ def _setConfAttributes():
     conf.scheme = None
     conf.tests = []
     conf.trafficFP = None
+    conf.requestCollectorFactory = None
     conf.wFileType = None
 
 def _setKnowledgeBaseAttributes(flushAll=True):
@@ -2228,6 +2230,11 @@
 
         conf.trafficFP = openFile(conf.trafficFile, "w+")
 
+def _setupRequestCollector():
+    conf.requestCollectorFactory = RequestCollectorFactory(collect=conf.collectRequests)
+    threadData = getCurrentThreadData()
+    threadData.requestCollector = conf.requestCollectorFactory.create()
+
 def _setDNSServer():
     if not conf.dnsDomain:
         return
@@ -2604,6 +2611,7 @@ def init():
     _setTamperingFunctions()
     _setWafFunctions()
     _setTrafficOutputFP()
+    _setupRequestCollector()
     _resolveCrossReferences()
     _checkWebSocket()
 
diff --git a/lib/core/optiondict.py b/lib/core/optiondict.py
index fd85eff38..db7834cf9 100644
--- a/lib/core/optiondict.py
+++ b/lib/core/optiondict.py
@@ -197,6 +197,7 @@ optDict = {
                         "binaryFields": "string",
                         "charset": "string",
                         "checkInternet": "boolean",
+                        "collectRequests": "boolean",
                         "crawlDepth": "integer",
                         "crawlExclude": "string",
                         "csvDel": "string",
diff --git a/lib/core/threads.py b/lib/core/threads.py
index 8f89fb1b8..b3566b955 100644
--- a/lib/core/threads.py
+++ b/lib/core/threads.py
@@ -38,6 +38,8 @@ class _ThreadData(threading.local):
         Resets thread data model
         """
 
+        self.requestCollector = None
+
         self.disableStdOut = False
         self.hashDBCursor = None
         self.inTransaction = False
diff --git a/lib/parse/cmdline.py b/lib/parse/cmdline.py
index e02cac62f..3e97bfb56 100644
--- a/lib/parse/cmdline.py
+++ b/lib/parse/cmdline.py
@@ -631,6 +631,10 @@
                            action="store_true",
                            help="Never ask for user input, use the default behaviour")
 
+        general.add_option("--collect-requests", dest="collectRequests",
+                           action="store_true",
+                           help="Collect requests in HAR format")
+
         general.add_option("--binary-fields", dest="binaryFields",
                            help="Result fields having binary values (e.g. \"digest\")")
 
diff --git a/lib/utils/collect.py b/lib/utils/collect.py
new file mode 100644
index 000000000..32b42d595
--- /dev/null
+++ b/lib/utils/collect.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+"""
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+See the file 'doc/COPYING' for copying permission
+"""
+
+from lib.core.data import logger
+
+
+class RequestCollectorFactory:
+
+    def __init__(self, collect=False):
+        self.collect = collect
+
+    def create(self):
+        collector = RequestCollector()
+
+        if not self.collect:
+            collector.collectRequest = self._noop
+
+        return collector
+
+    @staticmethod
+    def _noop(*args, **kwargs):
+        pass
+
+
+class RequestCollector:
+
+    def collectRequest(self, requestMessage, responseMessage):
+        logger.info("Received request/response: %s/%s", len(requestMessage), len(responseMessage))