diff --git a/lib/core/option.py b/lib/core/option.py
index 72d345e42..822c384a1 100644
--- a/lib/core/option.py
+++ b/lib/core/option.py
@@ -1747,6 +1747,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
     kb.reduceTests = None
     kb.stickyDBMS = False
     kb.stickyLevel = None
+    kb.storeCrawlingChoice = None
     kb.storeHashesChoice = None
     kb.suppressResumeInfo = False
     kb.technique = None
diff --git a/lib/utils/crawler.py b/lib/utils/crawler.py
index cb502eca1..4dc7b9d8b 100644
--- a/lib/utils/crawler.py
+++ b/lib/utils/crawler.py
@@ -5,20 +5,26 @@ Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
+import codecs
 import httplib
+import os
 import re
 import urlparse
+import tempfile
 import time
 
 from lib.core.common import clearConsoleLine
 from lib.core.common import dataToStdout
 from lib.core.common import findPageForms
+from lib.core.common import readInput
+from lib.core.common import safeCSValue
 from lib.core.common import singleTimeWarnMessage
 from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.exception import SqlmapConnectionException
 from lib.core.settings import CRAWL_EXCLUDE_EXTENSIONS
+from lib.core.settings import UNICODE_ENCODING
 from lib.core.threads import getCurrentThreadData
 from lib.core.threads import runThreads
 from lib.request.connect import Connect as Request
@@ -143,4 +149,33 @@ def crawl(target):
             logger.warn(warnMsg)
         else:
             for url in threadData.shared.value:
-                kb.targets.add((url, None, None, None, None))
+                kb.targets.add((url, conf.method, None, None, None))
+
+        storeResultsToFile(kb.targets)
+
+def storeResultsToFile(results):
+    if not results:
+        return
+
+    if kb.storeCrawlingChoice is None:
+        message = "do you want to store crawling results to a temporary file "
+        message += "for eventual further processing with other tools [y/N] "
+        test = readInput(message, default="N")
+        kb.storeCrawlingChoice = test[0] in ("y", "Y")
+
+    if kb.storeCrawlingChoice:
+        handle, filename = tempfile.mkstemp(prefix="sqlmapcrawling-", suffix=".csv" if conf.forms else ".txt")
+        os.close(handle)
+
+        infoMsg = "writing crawling results to a temporary file '%s' " % filename
+        logger.info(infoMsg)
+
+        with codecs.open(filename, "w+b", UNICODE_ENCODING) as f:
+            if conf.forms:
+                f.write("URL,POST\n")
+
+            for url, _, data, _, _ in results:
+                if conf.forms:
+                    f.write("%s,%s\n" % (safeCSValue(url), safeCSValue(data or "")))
+                else:
+                    f.write("%s\n" % url)