changing to: --crawl=CRAWLDEPTH

Miroslav Stampar 2011-06-24 05:40:03 +00:00
parent 3717b8423f
commit eaa2a4202f
6 changed files with 12 additions and 22 deletions

@@ -210,7 +210,7 @@ def start():
         action()
         return True

-    if conf.url and not any([conf.forms, conf.crawl]):
+    if conf.url and not any([conf.forms, conf.crawlDepth]):
         kb.targetUrls.add(( conf.url, conf.method, conf.data, conf.cookie ))

     if conf.configFile and not kb.targetUrls:
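Because the guard stays written as a truthiness test, the integer option slots in cleanly: a depth of 0 is falsy in Python, just as the old False was. A quick illustration (plain Python, not sqlmap code):

# any() treats 0 as False, so the boolean-style guard keeps working
# once conf.crawlDepth becomes an integer.
print(any([False, 0]))    # False -> the single target URL is queued directly
print(any([False, 3]))    # True  -> target collection is deferred to the crawler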

@@ -413,21 +413,11 @@ def __setRequestFromFile():
     __feedTargetsDict(conf.requestFile, addedTargetUrls)

 def __setCrawler():
-    if not conf.crawl:
+    if not conf.crawlDepth:
         return

     crawler = Crawler()
-    depth = 1
-
-    infoMsg = "setting crawling options"
-    logger.info(infoMsg)
-
-    message = "please enter maximum depth [Enter for %d (default)] " % depth
-    choice = readInput(message, default=str(depth))
-
-    if choice and choice.isdigit():
-        depth = int(choice)
-
-    crawler.getTargetUrls(depth)
+    crawler.getTargetUrls()

 def __setGoogleDorking():
     """
@@ -1319,7 +1309,7 @@ def __cleanupOptions():
     if conf.tmpPath:
         conf.tmpPath = ntToPosixSlashes(normalizePath(conf.tmpPath))

-    if conf.googleDork or conf.logFile or conf.bulkFile or conf.forms or conf.crawl:
+    if conf.googleDork or conf.logFile or conf.bulkFile or conf.forms or conf.crawlDepth:
         conf.multipleTargets = True

     if conf.optimize:
@@ -1786,7 +1776,7 @@ def __basicOptionValidation():
         errMsg = "switch --forms is compatible only with -u (--url) target switch"
         raise sqlmapSyntaxException, errMsg

-    if conf.forms and conf.crawl:
+    if conf.forms and conf.crawlDepth:
         errMsg = "switch --forms is currently not compatible with --crawl switch"
         raise sqlmapSyntaxException, errMsg

@@ -167,7 +167,7 @@ optDict = {
             "beep": "boolean",
             "checkPayload": "boolean",
             "cleanup": "boolean",
-            "crawl": "boolean",
+            "crawlDepth": "integer",
             "forms": "boolean",
             "googlePage": "integer",
             "mobile": "boolean",

@@ -511,8 +511,7 @@ def cmdLineParser():
                                  help="Clean up the DBMS by sqlmap specific "
                                       "UDF and tables")

-        miscellaneous.add_option("--crawl", dest="crawl",
-                                 action="store_true",
+        miscellaneous.add_option("--crawl", dest="crawlDepth", type="int",
                                  help="Crawl the website starting from the target url")

         miscellaneous.add_option("--forms", dest="forms",

@@ -31,7 +31,7 @@ class Crawler:
     line option '--crawl'
     """

-    def getTargetUrls(self, depth=1):
+    def getTargetUrls(self):
         try:
             threadData = getCurrentThreadData()
             threadData.shared.outputs = oset()
@@ -98,7 +98,7 @@
             logger.info("starting crawler")

-            for i in xrange(depth):
+            for i in xrange(conf.crawlDepth):
                 threadData.shared.count = 0
                 threadData.shared.length = len(threadData.shared.unprocessed)
                 numThreads = min(conf.threads, len(threadData.shared.unprocessed))
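For orientation, a simplified, self-contained sketch of the level-by-level expansion this loop performs (not sqlmap's threaded implementation; get_links is an assumed stand-in for the page-parsing step): each pass processes the URLs discovered on the previous pass, so conf.crawlDepth bounds how many link levels are followed from the target.

# Depth-bounded, breadth-first crawl sketch.
def crawl(start_url, crawl_depth, get_links):
    visited = set()
    unprocessed = set([start_url])
    for _ in range(crawl_depth):
        current, unprocessed = unprocessed, set()
        for url in current:
            if url not in visited:
                visited.add(url)
                unprocessed.update(get_links(url))  # queue next level
    return visited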

@@ -544,8 +544,9 @@ checkPayload = False
 cleanup = False

 # Crawl the website starting from the target url
-# Valid: True or False
-crawl = False
+# Valid: integer
+# Default: 0
+crawlDepth = 0

 # Parse and test forms on target url
 # Valid: True or False
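Taken together, crawling is now requested with a numeric depth, e.g. --crawl=3 on the command line or crawlDepth = 3 in sqlmap.conf; the default of 0 leaves crawling disabled, and --forms remains incompatible with it.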