Storing crawling results to a temporary file (for eventual further processing)

This commit is contained in:
Miroslav Stampar 2014-11-20 16:29:17 +01:00
parent d3551631c4
commit f8a8cbf9a6
2 changed files with 37 additions and 1 deletions

View File

@ -1747,6 +1747,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
kb.reduceTests = None kb.reduceTests = None
kb.stickyDBMS = False kb.stickyDBMS = False
kb.stickyLevel = None kb.stickyLevel = None
kb.storeCrawlingChoice = None
kb.storeHashesChoice = None kb.storeHashesChoice = None
kb.suppressResumeInfo = False kb.suppressResumeInfo = False
kb.technique = None kb.technique = None

View File

@ -5,20 +5,26 @@ Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission See the file 'doc/COPYING' for copying permission
""" """
import codecs
import httplib import httplib
import os
import re import re
import urlparse import urlparse
import tempfile
import time import time
from lib.core.common import clearConsoleLine from lib.core.common import clearConsoleLine
from lib.core.common import dataToStdout from lib.core.common import dataToStdout
from lib.core.common import findPageForms from lib.core.common import findPageForms
from lib.core.common import readInput
from lib.core.common import safeCSValue
from lib.core.common import singleTimeWarnMessage from lib.core.common import singleTimeWarnMessage
from lib.core.data import conf from lib.core.data import conf
from lib.core.data import kb from lib.core.data import kb
from lib.core.data import logger from lib.core.data import logger
from lib.core.exception import SqlmapConnectionException from lib.core.exception import SqlmapConnectionException
from lib.core.settings import CRAWL_EXCLUDE_EXTENSIONS from lib.core.settings import CRAWL_EXCLUDE_EXTENSIONS
from lib.core.settings import UNICODE_ENCODING
from lib.core.threads import getCurrentThreadData from lib.core.threads import getCurrentThreadData
from lib.core.threads import runThreads from lib.core.threads import runThreads
from lib.request.connect import Connect as Request from lib.request.connect import Connect as Request
@ -143,4 +149,33 @@ def crawl(target):
logger.warn(warnMsg) logger.warn(warnMsg)
else: else:
for url in threadData.shared.value: for url in threadData.shared.value:
kb.targets.add((url, None, None, None, None)) kb.targets.add((url, conf.method, None, None, None))
storeResultsToFile(kb.targets)
def storeResultsToFile(results):
    """
    Optionally dumps crawling results to a temporary file (CSV when forms
    were parsed, plain text otherwise) for further processing with other
    tools. Asks the user once per session and caches the answer in
    kb.storeCrawlingChoice.
    """

    # Nothing crawled - nothing to store
    if not results:
        return

    if kb.storeCrawlingChoice is None:
        message = "do you want to store crawling results to a temporary file "
        message += "for eventual further processing with other tools [y/N] "
        choice = readInput(message, default="N")
        kb.storeCrawlingChoice = choice[0] in ("y", "Y")

    if not kb.storeCrawlingChoice:
        return

    # mkstemp() returns an open OS-level handle we don't need - close it
    # immediately and reopen via codecs for proper Unicode output
    fd, path = tempfile.mkstemp(prefix="sqlmapcrawling-", suffix=".csv" if conf.forms else ".txt")
    os.close(fd)

    logger.info("writing crawling results to a temporary file '%s' " % path)

    with codecs.open(path, "w+b", UNICODE_ENCODING) as f:
        if conf.forms:
            # CSV header (only meaningful when forms were crawled)
            f.write("URL,POST\n")

        for url, _, data, _, _ in results:
            if conf.forms:
                f.write("%s,%s\n" % (safeCSValue(url), safeCSValue(data or "")))
            else:
                f.write("%s\n" % url)