changing to: --crawl=CRAWLDEPTH
parent 3717b8423f
commit eaa2a4202f

@@ -210,7 +210,7 @@ def start():
             action()
             return True

-    if conf.url and not any([conf.forms, conf.crawl]):
+    if conf.url and not any([conf.forms, conf.crawlDepth]):
         kb.targetUrls.add(( conf.url, conf.method, conf.data, conf.cookie ))

     if conf.configFile and not kb.targetUrls:

@@ -413,21 +413,11 @@ def __setRequestFromFile():
     __feedTargetsDict(conf.requestFile, addedTargetUrls)

 def __setCrawler():
-    if not conf.crawl:
+    if not conf.crawlDepth:
         return

     crawler = Crawler()
-    depth = 1
-
-    infoMsg = "setting crawling options"
-    logger.info(infoMsg)
-
-    message = "please enter maximum depth [Enter for %d (default)] " % depth
-    choice = readInput(message, default=str(depth))
-
-    if choice and choice.isdigit():
-        depth = int(choice)
-
-    crawler.getTargetUrls(depth)
+    crawler.getTargetUrls()

 def __setGoogleDorking():
     """

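The interactive "maximum depth" prompt is dropped from __setCrawler(): the depth now arrives directly in conf.crawlDepth via --crawl=CRAWLDEPTH (or the configuration file). The surrounding guards keep working because an unset depth of 0 (or None) is falsy in Python, as this standalone illustration (not sqlmap code) shows:

    # Illustrative only: 0 and None are falsy, so an integer crawlDepth can
    # replace the old boolean flag in guards such as "if not conf.crawlDepth"
    # and "any([conf.forms, conf.crawlDepth])".
    for depth in (None, 0, 1, 3):
        print(depth, "-> crawl" if depth else "-> skip crawling")
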
@@ -1319,7 +1309,7 @@ def __cleanupOptions():
     if conf.tmpPath:
         conf.tmpPath = ntToPosixSlashes(normalizePath(conf.tmpPath))

-    if conf.googleDork or conf.logFile or conf.bulkFile or conf.forms or conf.crawl:
+    if conf.googleDork or conf.logFile or conf.bulkFile or conf.forms or conf.crawlDepth:
         conf.multipleTargets = True

     if conf.optimize:

@@ -1786,7 +1776,7 @@ def __basicOptionValidation():
         errMsg = "switch --forms is compatible only with -u (--url) target switch"
         raise sqlmapSyntaxException, errMsg

-    if conf.forms and conf.crawl:
+    if conf.forms and conf.crawlDepth:
         errMsg = "switch --forms is currently not compatible with --crawl switch"
         raise sqlmapSyntaxException, errMsg

@@ -167,7 +167,7 @@ optDict = {
                      "beep": "boolean",
                      "checkPayload": "boolean",
                      "cleanup": "boolean",
-                     "crawl": "boolean",
+                     "crawlDepth": "integer",
                      "forms": "boolean",
                      "googlePage": "integer",
                      "mobile": "boolean",

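optDict maps each option name to the type expected when its value is read back (for instance from the configuration file), so the crawl entry moves from "boolean" to "integer" under its new name. A minimal sketch of how such a name-to-type map can drive value coercion; the helper and its logic are illustrative assumptions, not sqlmap's parser:

    # Hypothetical coercion helper driven by a name->type map like optDict.
    OPT_TYPES = {"crawlDepth": "integer", "forms": "boolean"}

    def coerce_option(name, raw):
        kind = OPT_TYPES.get(name, "string")
        if kind == "integer":
            return int(raw)
        if kind == "boolean":
            return raw.strip().lower() in ("1", "true", "yes")
        return raw

    print(coerce_option("crawlDepth", "3"))   # 3
    print(coerce_option("forms", "False"))    # False
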
@@ -511,8 +511,7 @@ def cmdLineParser():
                                  help="Clean up the DBMS by sqlmap specific "
                                       "UDF and tables")

-        miscellaneous.add_option("--crawl", dest="crawl",
-                                  action="store_true",
+        miscellaneous.add_option("--crawl", dest="crawlDepth", type="int",
                                   help="Crawl the website starting from the target url")

         miscellaneous.add_option("--forms", dest="forms",

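In the command-line parser, --crawl changes from a store_true flag to an option that takes an integer argument stored in crawlDepth. A self-contained sketch of the same pattern with optparse (the add_option API the parser above uses):

    # Standalone sketch: "--crawl" now consumes an integer value.
    from optparse import OptionParser

    parser = OptionParser()
    parser.add_option("--crawl", dest="crawlDepth", type="int",
                      help="Crawl the website starting from the target url")

    options, _ = parser.parse_args(["--crawl", "3"])
    print(options.crawlDepth)  # 3, an int rather than True/False
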
@@ -31,7 +31,7 @@ class Crawler:
     line option '--crawl'
     """

-    def getTargetUrls(self, depth=1):
+    def getTargetUrls(self):
         try:
             threadData = getCurrentThreadData()
             threadData.shared.outputs = oset()

@@ -98,7 +98,7 @@ class Crawler:

         logger.info("starting crawler")

-        for i in xrange(depth):
+        for i in xrange(conf.crawlDepth):
             threadData.shared.count = 0
             threadData.shared.length = len(threadData.shared.unprocessed)
             numThreads = min(conf.threads, len(threadData.shared.unprocessed))

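Inside the crawler, the level loop now runs conf.crawlDepth times instead of using the locally prompted depth. A rough standalone sketch of that depth-limited, level-by-level pattern (not sqlmap's Crawler class; get_links stands in for its page-parsing logic):

    def crawl(start_url, crawl_depth, get_links):
        # Each pass expands the URLs discovered in the previous pass,
        # for at most crawl_depth levels.
        visited, frontier = set(), {start_url}
        for _ in range(crawl_depth):
            next_frontier = set()
            for url in frontier:
                if url in visited:
                    continue
                visited.add(url)
                next_frontier.update(get_links(url))
            frontier = next_frontier - visited
        return visited

    # Example: crawl("http://target/", 3, lambda url: set())
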
@@ -544,8 +544,9 @@ checkPayload = False
 cleanup = False

 # Crawl the website starting from the target url
-# Valid: True or False
-crawl = False
+# Valid: integer
+# Default: 0
+crawlDepth = 0

 # Parse and test forms on target url
 # Valid: True or False

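The sample configuration entry changes accordingly: the old boolean crawl flag becomes the integer crawlDepth with a default of 0 (crawling disabled). A hedged sketch of reading such an integer option from an INI-style file with the standard library; sqlmap has its own configuration loader, and the section name here is only an assumption:

    import configparser, io

    sample = "[Miscellaneous]\ncrawlDepth = 2\n"
    cp = configparser.ConfigParser()
    cp.read_file(io.StringIO(sample))
    print(cp.getint("Miscellaneous", "crawlDepth"))  # 2
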