Implementation of crawling results normalization

This commit is contained in:
Miroslav Stampar 2019-10-31 22:07:16 +01:00
parent 273004396c
commit a660828cec
3 changed files with 22 additions and 1 deletions

View File

@@ -1953,6 +1953,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
kb.mergeCookies = None
kb.multipleCtrlC = False
kb.negativeLogic = False
kb.normalizeCrawlingChoice = None
kb.nullConnection = None
kb.oldMsf = None
kb.orderByColumns = None

View File

@@ -18,7 +18,7 @@ from lib.core.enums import OS
from thirdparty.six import unichr as _unichr
# sqlmap version (<major>.<minor>.<month>.<monthly commit>)
VERSION = "1.3.10.38"
VERSION = "1.3.10.39"
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)

View File

@@ -195,6 +195,26 @@ def crawl(target):
for url in threadData.shared.value:
kb.targets.add((urldecode(url, kb.pageEncoding), None, None, None, None))
if kb.normalizeCrawlingChoice is None:
message = "do you want to normalize "
message += "crawling results [Y/n] "
kb.normalizeCrawlingChoice = readInput(message, default='Y', boolean=True)
if kb.normalizeCrawlingChoice:
seen = set()
results = OrderedSet()
for target in kb.targets:
match = re.search(r"/[^/?]*\?.*\Z", target[0])
if match:
key = re.sub(r"=[^=&]*", "=", match.group(0))
if key not in seen:
results.add(target)
seen.add(key)
kb.targets = results
storeResultsToFile(kb.targets)
def storeResultsToFile(results):