In case of bulk file, crawl-scan-crawl-scan...

Miroslav Stampar 2019-11-04 23:53:35 +01:00
parent 6679d6f427
commit fdcde2a2a5
4 changed files with 39 additions and 31 deletions
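
The change in a nutshell: with --crawl combined with --bulk-file, sqlmap previously crawled every bulk target up front (inside _setCrawler()) and only then started scanning; main() now crawls one target, scans whatever that crawl collected, and moves on to the next. A hedged sketch of the two control flows in plain Python (hypothetical helper names, not sqlmap code):

def run_before(targets, crawl, scan):
    found = []
    for target in targets:          # crawl, crawl, crawl, ...
        found.extend(crawl(target))
    scan(found)                     # ... then a single scan at the end

def run_after(targets, crawl, scan):
    for target in targets:          # crawl-scan-crawl-scan, per the commit title
        found = crawl(target)
        if found:                   # mirrors the new "if kb.targets: start()"
            scan(found)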

lib/core/option.py

@@ -133,7 +133,6 @@ from lib.core.update import update
 from lib.parse.configfile import configFileParser
 from lib.parse.payloads import loadBoundaries
 from lib.parse.payloads import loadPayloads
-from lib.parse.sitemap import parseSitemap
 from lib.request.basic import checkCharEncoding
 from lib.request.basicauthhandler import SmartHTTPBasicAuthHandler
 from lib.request.chunkedhandler import ChunkedHandler
@@ -338,25 +337,6 @@ def _setCrawler():
 
     if not conf.bulkFile:
         crawl(conf.url)
-    else:
-        targets = getFileItems(conf.bulkFile)
-
-        for i in xrange(len(targets)):
-            try:
-                target = targets[i]
-
-                if not re.search(r"(?i)\Ahttp[s]*://", target):
-                    target = "http://%s" % target
-
-                crawl(target)
-
-                if conf.verbose in (1, 2):
-                    status = "%d/%d links visited (%d%%)" % (i + 1, len(targets), round(100.0 * (i + 1) / len(targets)))
-                    dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True)
-            except Exception as ex:
-                if not isinstance(ex, SqlmapUserQuitException):
-                    errMsg = "problem occurred while crawling at '%s' ('%s')" % (target, getSafeExString(ex))
-                    logger.error(errMsg)
 
 def _doSearch():
     """
@@ -1939,7 +1919,6 @@ def _setKnowledgeBaseAttributes(flushAll=True):
     kb.mergeCookies = None
     kb.multipleCtrlC = False
     kb.negativeLogic = False
-    kb.normalizeCrawlingChoice = None
     kb.nullConnection = None
     kb.oldMsf = None
     kb.orderByColumns = None
@@ -1993,7 +1972,6 @@ def _setKnowledgeBaseAttributes(flushAll=True):
     kb.reduceTests = None
     kb.tlsSNI = {}
     kb.stickyDBMS = False
-    kb.storeCrawlingChoice = None
     kb.storeHashesChoice = None
     kb.suppressResumeInfo = False
     kb.tableFrom = None
@@ -2013,11 +1991,14 @@ def _setKnowledgeBaseAttributes(flushAll=True):
     kb.xpCmdshellAvailable = False
 
     if flushAll:
+        kb.checkSitemap = None
         kb.headerPaths = {}
         kb.keywords = set(getFileItems(paths.SQL_KEYWORDS))
+        kb.normalizeCrawlingChoice = None
         kb.passwordMgr = None
         kb.preprocessFunctions = []
         kb.skipVulnHost = None
+        kb.storeCrawlingChoice = None
        kb.tamperFunctions = []
         kb.targets = OrderedSet()
         kb.testedParams = set()
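
Context for the kb shuffling above: sqlmap re-initializes kb attributes between bulk targets by calling _setKnowledgeBaseAttributes() with flushAll=False, so anything reset only inside the "if flushAll:" block keeps its value for the whole run. Moving the two crawling choices (and the new kb.checkSitemap) there means each prompt is answered once per run rather than once per target. A minimal stand-in illustrating the distinction (a plain dict instead of sqlmap's kb object):

def set_kb_attributes(kb, flushAll=True):
    kb['nullConnection'] = None           # re-initialized for every target

    if flushAll:                          # re-initialized only once per run
        kb['checkSitemap'] = None
        kb['normalizeCrawlingChoice'] = None
        kb['storeCrawlingChoice'] = None

kb = {}
set_kb_attributes(kb, flushAll=True)      # fresh run
kb['checkSitemap'] = True                 # user answered the sitemap prompt
set_kb_attributes(kb, flushAll=False)     # next bulk target
print(kb['checkSitemap'])                 # True, so no repeated prompt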

lib/core/settings.py

@@ -18,7 +18,7 @@ from lib.core.enums import OS
 from thirdparty.six import unichr as _unichr
 
 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.3.11.4"
+VERSION = "1.3.11.5"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
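
For reference, how the bumped value flows through the TYPE and VERSION_STRING expressions above (a version with more than two dots and a non-zero last component is tagged as a dev build):

VERSION = "1.3.11.5"
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)

print(TYPE)            # dev
print(VERSION_STRING)  # sqlmap/1.3.11.5#dev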

lib/utils/crawler.py

@@ -133,10 +133,12 @@ def crawl(target):
         threadData.shared.deeper = set()
         threadData.shared.unprocessed = set([target])
 
-        message = "do you want to check for the existence of "
-        message += "site's sitemap(.xml) [y/N] "
-
-        if readInput(message, default='N', boolean=True):
+        if kb.checkSitemap is None:
+            message = "do you want to check for the existence of "
+            message += "site's sitemap(.xml) [y/N] "
+            kb.checkSitemap = readInput(message, default='N', boolean=True)
+
+        if kb.checkSitemap:
             found = True
             items = None
             url = _urllib.parse.urljoin(target, "/sitemap.xml")
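
The sitemap URL above is built with urljoin and a root-relative path, which keeps only the scheme and host of the crawl target; for example (sample URL is illustrative):

try:
    from urllib.parse import urljoin   # Python 3
except ImportError:
    from urlparse import urljoin       # Python 2

print(urljoin("http://www.example.com/shop/item.php?id=1", "/sitemap.xml"))
# http://www.example.com/sitemap.xml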
@@ -158,9 +160,8 @@ def crawl(target):
             threadData.shared.unprocessed.update(items)
             logger.info("%s links found" % ("no" if not items else len(items)))
 
-    infoMsg = "starting crawler"
-    if conf.bulkFile:
-        infoMsg += " for target URL '%s'" % target
-    logger.info(infoMsg)
+    if not conf.bulkFile:
+        infoMsg = "starting crawler for target URL '%s'" % target
+        logger.info(infoMsg)
 
     for i in xrange(conf.crawlDepth):

sqlmap.py

@@ -45,6 +45,7 @@ try:
     from lib.core.common import dataToStdout
     from lib.core.common import filterNone
     from lib.core.common import getDaysFromLastUpdate
+    from lib.core.common import getFileItems
     from lib.core.common import getSafeExString
     from lib.core.common import maskSensitiveData
     from lib.core.common import openFile
@@ -57,6 +58,7 @@ try:
     from lib.core.common import MKSTEMP_PREFIX
     from lib.core.common import setColor
     from lib.core.common import unhandledExceptionMessage
+    from lib.core.compat import xrange
     from lib.core.exception import SqlmapBaseException
     from lib.core.exception import SqlmapShellQuitException
     from lib.core.exception import SqlmapSilentQuitException
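
The lib.core.compat import is needed because the relocated loop uses xrange, which Python 3 removed; the shim is presumably along these lines (a sketch, not the module's verbatim contents):

import sys

if sys.version_info >= (3, 0):
    xrange = range    # Python 3's range is already lazy, like Python 2's xrange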
@@ -73,6 +75,7 @@ try:
     from lib.core.settings import UNICODE_ENCODING
     from lib.core.settings import VERSION
     from lib.parse.cmdline import cmdLineParser
+    from lib.utils.crawler import crawl
     from thirdparty import six
 except KeyboardInterrupt:
     errMsg = "user aborted"
@@ -177,6 +180,29 @@ def main():
             profile()
         else:
             try:
-                start()
+                if conf.crawlDepth and conf.bulkFile:
+                    targets = getFileItems(conf.bulkFile)
+
+                    for i in xrange(len(targets)):
+                        try:
+                            kb.targets.clear()
+                            target = targets[i]
+
+                            if not re.search(r"(?i)\Ahttp[s]*://", target):
+                                target = "http://%s" % target
+
+                            infoMsg = "starting crawler for target URL '%s' (%d/%d)" % (target, i + 1, len(targets))
+                            logger.info(infoMsg)
+
+                            crawl(target)
+                        except Exception as ex:
+                            if not isinstance(ex, SqlmapUserQuitException):
+                                errMsg = "problem occurred while crawling '%s' ('%s')" % (target, getSafeExString(ex))
+                                logger.error(errMsg)
+                        else:
+                            if kb.targets:
+                                start()
+                else:
+                    start()
             except Exception as ex:
                 os._exitcode = 1
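
The scheme check in the relocated block behaves as follows: bare entries from the bulk file get an http:// prefix, while targets that already carry a scheme (any letter case) pass through untouched. Sample targets below are illustrative:

import re

for target in ("www.example.com", "HTTPS://www.example.com/", "target.example:8080"):
    if not re.search(r"(?i)\Ahttp[s]*://", target):
        target = "http://%s" % target
    print(target)

# http://www.example.com
# HTTPS://www.example.com/
# http://target.example:8080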