Added --crawl-file option

Added --crawl-file option to specify the output file for crawler
results.
ctshanghai 2015-08-03 21:22:21 +08:00
parent 1e023c6483
commit 62e02cb730
2 changed files with 4 additions and 4 deletions
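
For reference, a crawl that writes its results to a user-chosen file via this option could be invoked roughly as follows (a minimal sketch; the target URL, crawl depth, and output path are illustrative and not taken from this commit):

    python sqlmap.py -u "http://target.example/" --crawl=2 --crawl-file=/tmp/crawled-links.txt --batch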


@@ -610,7 +610,7 @@ def cmdLineParser():
                            help="Log all HTTP traffic into a "
                                 "textual file")

-        general.add_option("--crawl-file", dest="batchCrawlFile", action="store",
+        general.add_option("--crawl-file", dest="crawlFile", action="store",
                            help="Destination file for crawled links")

         general.add_option("--batch", dest="batch",


@@ -186,9 +186,9 @@ def storeResultsToFile(results):
         test = readInput(message, default="N")
         kb.storeCrawlingChoice = test[0] in ("y", "Y")

-    if kb.storeCrawlingChoice or conf.batchCrawlFile:
-        if conf.batchCrawlFile:
-            filename = conf.batchCrawlFile
+    if kb.storeCrawlingChoice or conf.crawlFile:
+        if conf.crawlFile:
+            filename = conf.crawlFile
         else:
             handle, filename = tempfile.mkstemp(prefix="sqlmapcrawling-", suffix=".csv" if conf.forms else ".txt")
             os.close(handle)