In case of bulk file, crawl-scan-crawl-scan...

Miroslav Stampar 2019-11-04 23:53:35 +01:00
parent 6679d6f427
commit fdcde2a2a5
4 changed files with 39 additions and 31 deletions
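In short: when --crawl is combined with a bulk file (-m), crawling and scanning are now interleaved per target instead of crawling every entry up front. A rough, illustrative sketch of the resulting flow (plain Python, not sqlmap's actual code; crawl_links and scan_links are hypothetical stand-ins for the crawler and the scan engine):

import re

def bulk_crawl_and_scan(targets, crawl_links, scan_links):
    # Hypothetical helper mirroring the crawl-scan-crawl-scan loop added to sqlmap.py below
    for index, target in enumerate(targets, start=1):
        # Prepend a scheme when the bulk file entry has none
        if not re.search(r"(?i)\Ahttp[s]*://", target):
            target = "http://%s" % target

        print("[INFO] starting crawler for target URL '%s' (%d/%d)" % (target, index, len(targets)))

        links = crawl_links(target)   # crawl only this target
        if links:
            scan_links(links)         # scan its links before moving on to the next target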

lib/core/option.py

@@ -133,7 +133,6 @@ from lib.core.update import update
 from lib.parse.configfile import configFileParser
 from lib.parse.payloads import loadBoundaries
 from lib.parse.payloads import loadPayloads
-from lib.parse.sitemap import parseSitemap
 from lib.request.basic import checkCharEncoding
 from lib.request.basicauthhandler import SmartHTTPBasicAuthHandler
 from lib.request.chunkedhandler import ChunkedHandler
@@ -338,25 +337,6 @@ def _setCrawler():
     if not conf.bulkFile:
         crawl(conf.url)
-    else:
-        targets = getFileItems(conf.bulkFile)
-
-        for i in xrange(len(targets)):
-            try:
-                target = targets[i]
-
-                if not re.search(r"(?i)\Ahttp[s]*://", target):
-                    target = "http://%s" % target
-
-                crawl(target)
-
-                if conf.verbose in (1, 2):
-                    status = "%d/%d links visited (%d%%)" % (i + 1, len(targets), round(100.0 * (i + 1) / len(targets)))
-                    dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True)
-            except Exception as ex:
-                if not isinstance(ex, SqlmapUserQuitException):
-                    errMsg = "problem occurred while crawling at '%s' ('%s')" % (target, getSafeExString(ex))
-                    logger.error(errMsg)

 def _doSearch():
     """
@@ -1939,7 +1919,6 @@ def _setKnowledgeBaseAttributes(flushAll=True):
     kb.mergeCookies = None
     kb.multipleCtrlC = False
     kb.negativeLogic = False
-    kb.normalizeCrawlingChoice = None
     kb.nullConnection = None
     kb.oldMsf = None
     kb.orderByColumns = None
@@ -1993,7 +1972,6 @@ def _setKnowledgeBaseAttributes(flushAll=True):
     kb.reduceTests = None
     kb.tlsSNI = {}
     kb.stickyDBMS = False
-    kb.storeCrawlingChoice = None
     kb.storeHashesChoice = None
     kb.suppressResumeInfo = False
     kb.tableFrom = None
@@ -2013,11 +1991,14 @@ def _setKnowledgeBaseAttributes(flushAll=True):
     kb.xpCmdshellAvailable = False

     if flushAll:
+        kb.checkSitemap = None
         kb.headerPaths = {}
         kb.keywords = set(getFileItems(paths.SQL_KEYWORDS))
+        kb.normalizeCrawlingChoice = None
         kb.passwordMgr = None
         kb.preprocessFunctions = []
         kb.skipVulnHost = None
+        kb.storeCrawlingChoice = None
         kb.tamperFunctions = []
         kb.targets = OrderedSet()
         kb.testedParams = set()
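Side note on the knowledge-base change above: kb.normalizeCrawlingChoice and kb.storeCrawlingChoice move under the "if flushAll:" branch (and kb.checkSitemap is introduced there), so the per-target resets that happen during a bulk run no longer wipe the user's earlier answers. A minimal sketch of that pattern, with illustrative names rather than sqlmap's API:

class KnowledgeBase(object):
    def reset(self, flush_all=True):
        self.null_connection = None            # re-initialized for every target
        if flush_all:
            self.check_sitemap = None          # prompt answers survive per-target resets
            self.store_crawling_choice = None

kb = KnowledgeBase()
kb.reset(flush_all=True)     # start of the run: all prompts pending
kb.check_sitemap = True      # user answers the sitemap question once
kb.reset(flush_all=False)    # next bulk-file target: answer is kept
assert kb.check_sitemap is True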

lib/core/settings.py

@@ -18,7 +18,7 @@ from lib.core.enums import OS
 from thirdparty.six import unichr as _unichr

 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.3.11.4"
+VERSION = "1.3.11.5"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)

lib/utils/crawler.py

@@ -133,10 +133,12 @@ def crawl(target):
         threadData.shared.deeper = set()
         threadData.shared.unprocessed = set([target])

-        message = "do you want to check for the existence of "
-        message += "site's sitemap(.xml) [y/N] "
+        if kb.checkSitemap is None:
+            message = "do you want to check for the existence of "
+            message += "site's sitemap(.xml) [y/N] "
+            kb.checkSitemap = readInput(message, default='N', boolean=True)

-        if readInput(message, default='N', boolean=True):
+        if kb.checkSitemap:
             found = True
             items = None
             url = _urllib.parse.urljoin(target, "/sitemap.xml")
@@ -158,9 +160,8 @@ def crawl(target):
                         threadData.shared.unprocessed.update(items)

             logger.info("%s links found" % ("no" if not items else len(items)))

-        infoMsg = "starting crawler"
-        if conf.bulkFile:
-            infoMsg += " for target URL '%s'" % target
-        logger.info(infoMsg)
+        if not conf.bulkFile:
+            infoMsg = "starting crawler for target URL '%s'" % target
+            logger.info(infoMsg)

         for i in xrange(conf.crawlDepth):
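The crawler change above follows the same idea: the sitemap(.xml) question is asked only while kb.checkSitemap is still None, and later crawl() calls in the same bulk run reuse the stored answer. A small illustrative sketch of that prompt-caching pattern (read_input is passed in as a parameter here, since this is not sqlmap's module):

_check_sitemap = None  # stand-in for kb.checkSitemap

def should_check_sitemap(read_input):
    # Ask the yes/no question once per run, then reuse the cached answer
    global _check_sitemap
    if _check_sitemap is None:
        message = "do you want to check for the existence of "
        message += "site's sitemap(.xml) [y/N] "
        _check_sitemap = read_input(message, default='N', boolean=True)
    return _check_sitemap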

sqlmap.py

@@ -45,6 +45,7 @@ try:
     from lib.core.common import dataToStdout
     from lib.core.common import filterNone
     from lib.core.common import getDaysFromLastUpdate
+    from lib.core.common import getFileItems
     from lib.core.common import getSafeExString
     from lib.core.common import maskSensitiveData
     from lib.core.common import openFile
@@ -57,6 +58,7 @@ try:
     from lib.core.common import MKSTEMP_PREFIX
     from lib.core.common import setColor
     from lib.core.common import unhandledExceptionMessage
+    from lib.core.compat import xrange
     from lib.core.exception import SqlmapBaseException
     from lib.core.exception import SqlmapShellQuitException
     from lib.core.exception import SqlmapSilentQuitException
@@ -73,6 +75,7 @@ try:
     from lib.core.settings import UNICODE_ENCODING
     from lib.core.settings import VERSION
     from lib.parse.cmdline import cmdLineParser
+    from lib.utils.crawler import crawl
     from thirdparty import six
 except KeyboardInterrupt:
     errMsg = "user aborted"
@@ -177,6 +180,29 @@ def main():
             profile()
         else:
             try:
-                start()
+                if conf.crawlDepth and conf.bulkFile:
+                    targets = getFileItems(conf.bulkFile)
+
+                    for i in xrange(len(targets)):
+                        try:
+                            kb.targets.clear()
+                            target = targets[i]
+
+                            if not re.search(r"(?i)\Ahttp[s]*://", target):
+                                target = "http://%s" % target
+
+                            infoMsg = "starting crawler for target URL '%s' (%d/%d)" % (target, i + 1, len(targets))
+                            logger.info(infoMsg)
+
+                            crawl(target)
+                        except Exception as ex:
+                            if not isinstance(ex, SqlmapUserQuitException):
+                                errMsg = "problem occurred while crawling '%s' ('%s')" % (target, getSafeExString(ex))
+                                logger.error(errMsg)
+                        else:
+                            if kb.targets:
+                                start()
+                else:
+                    start()
             except Exception as ex:
                 os._exitcode = 1
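For context, this code path would typically be exercised with an invocation along the lines of the following (standard -m/--crawl/--batch options; exact flags may differ between versions):

python sqlmap.py -m targets.txt --crawl=2 --batch

With this commit, each entry in targets.txt is crawled and, if the crawler collected any links, scanned immediately, before the crawler moves on to the next entry.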