Mirror of https://github.com/sqlmapproject/sqlmap.git (synced 2024-11-29 04:53:48 +03:00)

Commit f8a8cbf9a6 (parent d3551631c4):
Storing crawling results to a temporary file (for eventual further processing)
@@ -1747,6 +1747,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
     kb.reduceTests = None
     kb.stickyDBMS = False
     kb.stickyLevel = None
+    kb.storeCrawlingChoice = None
     kb.storeHashesChoice = None
     kb.suppressResumeInfo = False
     kb.technique = None
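For context: the new kb.storeCrawlingChoice attribute appears to follow the same ask-once convention as the neighbouring knowledge-base flags such as kb.storeHashesChoice. It starts out as None ("user not asked yet") and is set to True or False the first time the prompt is answered, so the question is raised at most once per run. A minimal illustrative sketch of that pattern in plain Python 2 (the names below are made up, not sqlmap's API):

    # ask-once tri-state flag: None = not asked yet, True/False = remembered answer
    choice = None

    def ask_to_store():
        global choice
        if choice is None:                                  # prompt only on first use
            answer = raw_input("store results? [y/N] ") or "N"
            choice = answer[0] in ("y", "Y")
        return choice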
@@ -5,20 +5,26 @@ Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
+import codecs
 import httplib
+import os
 import re
 import urlparse
+import tempfile
 import time
 
 from lib.core.common import clearConsoleLine
 from lib.core.common import dataToStdout
 from lib.core.common import findPageForms
+from lib.core.common import readInput
+from lib.core.common import safeCSValue
 from lib.core.common import singleTimeWarnMessage
 from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.exception import SqlmapConnectionException
 from lib.core.settings import CRAWL_EXCLUDE_EXTENSIONS
+from lib.core.settings import UNICODE_ENCODING
 from lib.core.threads import getCurrentThreadData
 from lib.core.threads import runThreads
 from lib.request.connect import Connect as Request
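The new imports line up with the helper introduced further down: tempfile and os create the output file, readInput and safeCSValue handle the prompt and CSV quoting, and codecs together with UNICODE_ENCODING write the file with an explicit encoding. The httplib/urlparse imports show this is Python 2 code, where codecs.open() is the usual way to get transparent unicode encoding on write. A hedged illustration (the file name, sample value and the "utf8" codec are assumptions, not taken from the diff):

    # -*- coding: utf-8 -*-
    import codecs

    url = u"http://example.com/caf\u00e9"                  # sample value with non-ASCII

    # plain open("results.csv", "w").write(url) would raise UnicodeEncodeError here;
    # codecs.open() encodes transparently on every write
    with codecs.open("results.csv", "w+b", "utf8") as f:
        f.write(u"%s\n" % url)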
@@ -143,4 +149,33 @@ def crawl(target):
             logger.warn(warnMsg)
         else:
             for url in threadData.shared.value:
-                kb.targets.add((url, None, None, None, None))
+                kb.targets.add((url, conf.method, None, None, None))
+
+            storeResultsToFile(kb.targets)
+
+def storeResultsToFile(results):
+    if not results:
+        return
+
+    if kb.storeCrawlingChoice is None:
+        message = "do you want to store crawling results to a temporary file "
+        message += "for eventual further processing with other tools [y/N] "
+        test = readInput(message, default="N")
+        kb.storeCrawlingChoice = test[0] in ("y", "Y")
+
+    if kb.storeCrawlingChoice:
+        handle, filename = tempfile.mkstemp(prefix="sqlmapcrawling-", suffix=".csv" if conf.forms else ".txt")
+        os.close(handle)
+
+        infoMsg = "writing crawling results to a temporary file '%s' " % filename
+        logger.info(infoMsg)
+
+        with codecs.open(filename, "w+b", UNICODE_ENCODING) as f:
+            if conf.forms:
+                f.write("URL,POST\n")
+
+            for url, _, data, _, _ in results:
+                if conf.forms:
+                    f.write("%s,%s\n" % (safeCSValue(url), safeCSValue(data or "")))
+                else:
+                    f.write("%s\n" % url)
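Outside of sqlmap, the file-writing flow of the new storeResultsToFile() boils down to the sketch below (Python 2, matching the imports above). The csv_escape() helper only approximates what sqlmap's safeCSValue() does, the "utf8" codec stands in for UNICODE_ENCODING, and the sample target tuple is invented:

    import codecs
    import os
    import tempfile

    def csv_escape(value):
        # rough stand-in for safeCSValue(): quote values that would break a CSV row
        if value and ("," in value or '"' in value or "\n" in value):
            return '"%s"' % value.replace('"', '""')
        return value or ""

    def store_results_to_file(results, forms=False):
        if not results:
            return None

        # mkstemp() creates the file securely and returns a low-level handle, which is
        # closed right away so the path can be reopened with an explicit encoding
        handle, filename = tempfile.mkstemp(prefix="sqlmapcrawling-",
                                            suffix=".csv" if forms else ".txt")
        os.close(handle)

        with codecs.open(filename, "w+b", "utf8") as f:
            if forms:
                f.write("URL,POST\n")
            for url, _, data, _, _ in results:              # same 5-slot tuples as kb.targets
                if forms:
                    f.write("%s,%s\n" % (csv_escape(url), csv_escape(data or "")))
                else:
                    f.write("%s\n" % url)

        return filename

    # invented example: one crawled URL, no form data
    targets = set([("http://example.com/index.php?id=1", None, None, None, None)])
    print(store_results_to_file(targets))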