This commit is contained in:
Miroslav Stampar 2015-04-06 22:07:22 +02:00
parent 26bec7219d
commit 1e7f2d6da2
5 changed files with 23 additions and 1 deletion

View File

@ -2234,6 +2234,13 @@ def _basicOptionValidation():
errMsg = "invalid regular expression '%s' ('%s')" % (conf.regexp, ex) errMsg = "invalid regular expression '%s' ('%s')" % (conf.regexp, ex)
raise SqlmapSyntaxException(errMsg) raise SqlmapSyntaxException(errMsg)
if conf.crawlExclude:
try:
re.compile(conf.crawlExclude)
except re.error, ex:
errMsg = "invalid regular expression '%s' ('%s')" % (conf.crawlExclude, ex)
raise SqlmapSyntaxException(errMsg)
if conf.dumpTable and conf.dumpAll: if conf.dumpTable and conf.dumpAll:
errMsg = "switch '--dump' is incompatible with switch '--dump-all'" errMsg = "switch '--dump' is incompatible with switch '--dump-all'"
raise SqlmapSyntaxException(errMsg) raise SqlmapSyntaxException(errMsg)
@ -2250,6 +2257,10 @@ def _basicOptionValidation():
errMsg = "switch '--forms' requires usage of option '-u' ('--url'), '-g', '-m' or '-x'" errMsg = "switch '--forms' requires usage of option '-u' ('--url'), '-g', '-m' or '-x'"
raise SqlmapSyntaxException(errMsg) raise SqlmapSyntaxException(errMsg)
if conf.crawlExclude and not conf.crawlDepth:
errMsg = "option '--crawl-exclude' requires usage of switch '--crawl'"
raise SqlmapSyntaxException(errMsg)
if conf.csrfUrl and not conf.csrfToken: if conf.csrfUrl and not conf.csrfToken:
errMsg = "option '--csrf-url' requires usage of option '--csrf-token'" errMsg = "option '--csrf-url' requires usage of option '--csrf-token'"
raise SqlmapSyntaxException(errMsg) raise SqlmapSyntaxException(errMsg)

View File

@ -188,6 +188,7 @@ optDict = {
"batch": "boolean", "batch": "boolean",
"charset": "string", "charset": "string",
"crawlDepth": "integer", "crawlDepth": "integer",
"crawlExclude": "string",
"csvDel": "string", "csvDel": "string",
"dumpFormat": "string", "dumpFormat": "string",
"eta": "boolean", "eta": "boolean",

View File

@ -606,7 +606,10 @@ def cmdLineParser():
help="Force character encoding used for data retrieval") help="Force character encoding used for data retrieval")
general.add_option("--crawl", dest="crawlDepth", type="int", general.add_option("--crawl", dest="crawlDepth", type="int",
help="Crawl the website starting from the target URL") help="Crawl the website starting from the target URL")
general.add_option("--crawl-exclude", dest="crawlExclude",
help="Regexp to exclude pages from crawling (e.g. \"logout\")")
general.add_option("--csv-del", dest="csvDel", general.add_option("--csv-del", dest="csvDel",
help="Delimiting character used in CSV output " help="Delimiting character used in CSV output "

View File

@ -48,6 +48,10 @@ def crawl(target):
current = threadData.shared.unprocessed.pop() current = threadData.shared.unprocessed.pop()
if current in visited: if current in visited:
continue continue
elif conf.crawlExclude and re.search(conf.crawlExclude, current):
dbgMsg = "skipping '%s'" % current
logger.debug(dbgMsg)
continue
else: else:
visited.add(current) visited.add(current)
else: else:

View File

@ -647,6 +647,9 @@ charset =
# Default: 0 # Default: 0
crawlDepth = 0 crawlDepth = 0
# Regexp to exclude pages from crawling (e.g. "logout").
crawlExclude =
# Delimiting character used in CSV output. # Delimiting character used in CSV output.
# Default: , # Default: ,
csvDel = , csvDel = ,