diff --git a/lib/core/option.py b/lib/core/option.py
index d7d718442..ce54b2cac 100644
--- a/lib/core/option.py
+++ b/lib/core/option.py
@@ -1953,6 +1953,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
     kb.mergeCookies = None
     kb.multipleCtrlC = False
     kb.negativeLogic = False
+    kb.normalizeCrawlingChoice = None
     kb.nullConnection = None
     kb.oldMsf = None
     kb.orderByColumns = None
diff --git a/lib/core/settings.py b/lib/core/settings.py
index 2140d29c3..858eaa6c8 100644
--- a/lib/core/settings.py
+++ b/lib/core/settings.py
@@ -18,7 +18,7 @@ from lib.core.enums import OS
 from thirdparty.six import unichr as _unichr
 
 # sqlmap version (...)
-VERSION = "1.3.10.38"
+VERSION = "1.3.10.39"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
diff --git a/lib/utils/crawler.py b/lib/utils/crawler.py
index 98f1cf544..6a6bf4d2e 100644
--- a/lib/utils/crawler.py
+++ b/lib/utils/crawler.py
@@ -195,6 +195,26 @@ def crawl(target):
             for url in threadData.shared.value:
                 kb.targets.add((urldecode(url, kb.pageEncoding), None, None, None, None))
 
+        if kb.normalizeCrawlingChoice is None:
+            message = "do you want to normalize "
+            message += "crawling results [Y/n] "
+
+            kb.normalizeCrawlingChoice = readInput(message, default='Y', boolean=True)
+
+        if kb.normalizeCrawlingChoice:
+            seen = set()
+            results = OrderedSet()
+
+            for target in kb.targets:
+                match = re.search(r"/[^/?]*\?.*\Z", target[0])
+                if match:
+                    key = re.sub(r"=[^=&]*", "=", match.group(0))
+                    if key not in seen:
+                        results.add(target)
+                        seen.add(key)
+
+            kb.targets = results
+
         storeResultsToFile(kb.targets)
 
 def storeResultsToFile(results):