diff --git a/lib/core/option.py b/lib/core/option.py
index 0eafd1b20..49d7f2f1e 100644
--- a/lib/core/option.py
+++ b/lib/core/option.py
@@ -2234,6 +2234,13 @@ def _basicOptionValidation():
             errMsg = "invalid regular expression '%s' ('%s')" % (conf.regexp, ex)
             raise SqlmapSyntaxException(errMsg)
 
+    if conf.crawlExclude:
+        try:
+            re.compile(conf.crawlExclude)
+        except re.error, ex:
+            errMsg = "invalid regular expression '%s' ('%s')" % (conf.crawlExclude, ex)
+            raise SqlmapSyntaxException(errMsg)
+
     if conf.dumpTable and conf.dumpAll:
         errMsg = "switch '--dump' is incompatible with switch '--dump-all'"
         raise SqlmapSyntaxException(errMsg)
@@ -2250,6 +2257,10 @@ def _basicOptionValidation():
         errMsg = "switch '--forms' requires usage of option '-u' ('--url'), '-g', '-m' or '-x'"
         raise SqlmapSyntaxException(errMsg)
 
+    if conf.crawlExclude and not conf.crawlDepth:
+        errMsg = "option '--crawl-exclude' requires usage of switch '--crawl'"
+        raise SqlmapSyntaxException(errMsg)
+
     if conf.csrfUrl and not conf.csrfToken:
         errMsg = "option '--csrf-url' requires usage of option '--csrf-token'"
         raise SqlmapSyntaxException(errMsg)
diff --git a/lib/core/optiondict.py b/lib/core/optiondict.py
index 5844e87e7..355238464 100644
--- a/lib/core/optiondict.py
+++ b/lib/core/optiondict.py
@@ -188,6 +188,7 @@ optDict = {
         "batch": "boolean",
         "charset": "string",
         "crawlDepth": "integer",
+        "crawlExclude": "string",
         "csvDel": "string",
         "dumpFormat": "string",
         "eta": "boolean",
diff --git a/lib/parse/cmdline.py b/lib/parse/cmdline.py
index 1cba1d498..3c1620912 100644
--- a/lib/parse/cmdline.py
+++ b/lib/parse/cmdline.py
@@ -606,7 +606,10 @@ def cmdLineParser():
                            help="Force character encoding used for data retrieval")
 
         general.add_option("--crawl", dest="crawlDepth", type="int",
-                           help="Crawl the website starting from the target URL")
+                           help="Crawl the website starting from the target URL")
+
+        general.add_option("--crawl-exclude", dest="crawlExclude",
+                           help="Regexp to exclude pages from crawling (e.g. \"logout\")")
 
         general.add_option("--csv-del", dest="csvDel",
                            help="Delimiting character used in CSV output "
diff --git a/lib/utils/crawler.py b/lib/utils/crawler.py
index 733c305e9..e7dfc75e6 100644
--- a/lib/utils/crawler.py
+++ b/lib/utils/crawler.py
@@ -48,6 +48,10 @@ def crawl(target):
                 current = threadData.shared.unprocessed.pop()
                 if current in visited:
                     continue
+                elif conf.crawlExclude and re.search(conf.crawlExclude, current):
+                    dbgMsg = "skipping '%s'" % current
+                    logger.debug(dbgMsg)
+                    continue
                 else:
                     visited.add(current)
             else:
diff --git a/sqlmap.conf b/sqlmap.conf
index c4d2eb23c..c3bae6b57 100644
--- a/sqlmap.conf
+++ b/sqlmap.conf
@@ -647,6 +647,9 @@ charset =
 # Default: 0
 crawlDepth = 0
 
+# Regexp to exclude pages from crawling (e.g. "logout").
+crawlExclude =
+
 # Delimiting character used in CSV output.
 # Default: ,
 csvDel = ,