From 34aed7cde0ee3d49aa07cae9d6c269b9f3540b1d Mon Sep 17 00:00:00 2001
From: Miroslav Stampar
Date: Wed, 22 Oct 2014 13:49:29 +0200
Subject: [PATCH] Bug fix (now it's possible to use multiple parsed requests
 without mixing associated headers)

---
 lib/controller/controller.py | 7 +++++--
 lib/core/common.py           | 2 +-
 lib/core/option.py           | 7 ++++---
 lib/utils/crawler.py         | 2 +-
 4 files changed, 11 insertions(+), 7 deletions(-)

diff --git a/lib/controller/controller.py b/lib/controller/controller.py
index e57330d96..68d5eb8e8 100644
--- a/lib/controller/controller.py
+++ b/lib/controller/controller.py
@@ -251,7 +251,7 @@ def start():
             return True

     if conf.url and not any((conf.forms, conf.crawlDepth)):
-        kb.targets.add((conf.url, conf.method, conf.data, conf.cookie))
+        kb.targets.add((conf.url, conf.method, conf.data, conf.cookie, None))

     if conf.configFile and not kb.targets:
         errMsg = "you did not edit the configuration file properly, set "
@@ -264,13 +264,16 @@ def start():
         logger.info(infoMsg)

     hostCount = 0
+    initialHeaders = list(conf.httpHeaders)

-    for targetUrl, targetMethod, targetData, targetCookie in kb.targets:
+    for targetUrl, targetMethod, targetData, targetCookie, targetHeaders in kb.targets:
         try:
             conf.url = targetUrl
             conf.method = targetMethod
             conf.data = targetData
             conf.cookie = targetCookie
+            conf.httpHeaders = list(initialHeaders)
+            conf.httpHeaders.extend(targetHeaders or [])

             initTargetEnv()
             parseTargetUrl()
diff --git a/lib/core/common.py b/lib/core/common.py
index 9b8f21045..2131f695e 100755
--- a/lib/core/common.py
+++ b/lib/core/common.py
@@ -3391,7 +3391,7 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
                     logger.debug(debugMsg)
                     continue

-            target = (url, method, data, conf.cookie)
+            target = (url, method, data, conf.cookie, None)
             retVal.add(target)
     else:
         errMsg = "there were no forms found at the given target URL"
diff --git a/lib/core/option.py b/lib/core/option.py
index 921fd035e..b5da3df44 100644
--- a/lib/core/option.py
+++ b/lib/core/option.py
@@ -271,6 +271,7 @@ def _feedTargetsDict(reqFile, addedTargetUrls):
             params = False
             newline = None
             lines = request.split('\n')
+            headers = []

             for index in xrange(len(lines)):
                 line = lines[index]
@@ -320,14 +321,14 @@ def _feedTargetsDict(reqFile, addedTargetUrls):
                             port = filterStringValue(splitValue[1], "[0-9]")

                     # Avoid to add a static content length header to
-                    # conf.httpHeaders and consider the following lines as
+                    # headers and consider the following lines as
                     # POSTed data
                     if key.upper() == HTTP_HEADER.CONTENT_LENGTH.upper():
                         params = True

                     # Avoid proxy and connection type related headers
                     elif key not in (HTTP_HEADER.PROXY_CONNECTION, HTTP_HEADER.CONNECTION):
-                        conf.httpHeaders.append((getUnicode(key), getUnicode(value)))
+                        headers.append((getUnicode(key), getUnicode(value)))

                     if CUSTOM_INJECTION_MARK_CHAR in re.sub(PROBLEMATIC_CUSTOM_INJECTION_PATTERNS, "", value or ""):
                         params = True
@@ -355,7 +356,7 @@

             if not(conf.scope and not re.search(conf.scope, url, re.I)):
                 if not kb.targets or url not in addedTargetUrls:
-                    kb.targets.add((url, method, data, cookie))
+                    kb.targets.add((url, method, data, cookie, tuple(headers)))
                     addedTargetUrls.add(url)

     fp = openFile(reqFile, "rb")
diff --git a/lib/utils/crawler.py b/lib/utils/crawler.py
index caea59a85..cb502eca1 100644
--- a/lib/utils/crawler.py
+++ b/lib/utils/crawler.py
@@ -143,4 +143,4 @@ def crawl(target):
                 logger.warn(warnMsg)
             else:
                 for url in threadData.shared.value:
-                    kb.targets.add((url, None, None, None))
+                    kb.targets.add((url, None, None, None, None))
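
Note: the core of the fix is the snapshot-and-extend pattern in the controller.py hunk:
conf.httpHeaders is captured once before the target loop and rebuilt from that snapshot
on every iteration, so headers parsed from one request file entry no longer leak into
the next target. The following is a minimal standalone sketch of that pattern with
hypothetical names (initial_headers, targets, http_headers) -- it is not sqlmap code:

    # Global headers captured once, like initialHeaders in the patch.
    initial_headers = [("User-Agent", "sqlmap")]

    # Each target carries its own parsed headers (or None), mirroring the
    # new fifth element of the kb.targets tuples.
    targets = [
        ("http://example.com/a", (("X-Token", "abc"),)),
        ("http://example.com/b", None),
    ]

    snapshot = list(initial_headers)
    for url, target_headers in targets:
        # Reset to the pristine globals, then layer this target's headers on top.
        http_headers = list(snapshot)
        http_headers.extend(target_headers or [])
        print(url, http_headers)

Running the sketch shows the second target no longer inherits X-Token from the first,
which is exactly the header mixing the commit message describes.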