Bug fix (now it's possible to use multiple parsed requests without mixing associated headers)

Miroslav Stampar 2014-10-22 13:49:29 +02:00
parent 2f18df345e
commit 34aed7cde0
4 changed files with 11 additions and 7 deletions
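
The change threads per-request headers through kb.targets: every target becomes a 5-tuple (url, method, data, cookie, headers), and start() rebuilds conf.httpHeaders from a saved baseline before layering on each target's own headers, so headers parsed from one request in a log file no longer bleed into the next. A minimal sketch of the isolation pattern (the names and sample data below are illustrative, not sqlmap's):

# Minimal sketch of the per-target header isolation this commit introduces;
# base_headers and the sample targets are made up for illustration.
base_headers = [("User-Agent", "sqlmap")]

targets = set()
targets.add(("http://a.example/", "GET", None, None, (("X-Token", "aaa"),)))
targets.add(("http://b.example/", "GET", None, None, None))

for url, method, data, cookie, target_headers in targets:
    headers = list(base_headers)          # fresh copy of the baseline each time
    headers.extend(target_headers or [])  # then this target's own headers, if any
    print(url, headers)                   # b.example never sees a.example's X-Token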


@@ -251,7 +251,7 @@ def start():
         return True

     if conf.url and not any((conf.forms, conf.crawlDepth)):
-        kb.targets.add((conf.url, conf.method, conf.data, conf.cookie))
+        kb.targets.add((conf.url, conf.method, conf.data, conf.cookie, None))

     if conf.configFile and not kb.targets:
         errMsg = "you did not edit the configuration file properly, set "
@@ -264,13 +264,16 @@ def start():
         logger.info(infoMsg)

     hostCount = 0
+    initialHeaders = list(conf.httpHeaders)

-    for targetUrl, targetMethod, targetData, targetCookie in kb.targets:
+    for targetUrl, targetMethod, targetData, targetCookie, targetHeaders in kb.targets:
         try:
             conf.url = targetUrl
             conf.method = targetMethod
             conf.data = targetData
             conf.cookie = targetCookie
+            conf.httpHeaders = list(initialHeaders)
+            conf.httpHeaders.extend(targetHeaders or [])

             initTargetEnv()
             parseTargetUrl()
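
The list(initialHeaders) copy is what actually prevents the mixing: conf.httpHeaders is reassigned to a fresh list on each iteration, so the extend() call mutates only that iteration's list. A quick standalone illustration of the aliasing bug the copy avoids (not sqlmap code):

# Standalone illustration of why the copy matters.
baseline = [("Accept", "*/*")]

leaky = baseline                          # no copy: both names alias one list
leaky.extend([("X-First", "1")])
assert ("X-First", "1") in baseline       # the shared baseline got polluted

baseline = [("Accept", "*/*")]            # reset for the fixed variant
clean = list(baseline)                    # fresh copy, as in list(initialHeaders)
clean.extend([("X-Second", "2")])
assert ("X-Second", "2") not in baseline  # baseline stays clean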


@@ -3391,7 +3391,7 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
                 logger.debug(debugMsg)
                 continue

-            target = (url, method, data, conf.cookie)
+            target = (url, method, data, conf.cookie, None)
             retVal.add(target)
     else:
         errMsg = "there were no forms found at the given target URL"


@@ -271,6 +271,7 @@ def _feedTargetsDict(reqFile, addedTargetUrls):
             params = False
             newline = None
             lines = request.split('\n')
+            headers = []

             for index in xrange(len(lines)):
                 line = lines[index]
@@ -320,14 +321,14 @@ def _feedTargetsDict(reqFile, addedTargetUrls):
                         port = filterStringValue(splitValue[1], "[0-9]")

                     # Avoid to add a static content length header to
-                    # conf.httpHeaders and consider the following lines as
+                    # headers and consider the following lines as
                     # POSTed data
                     if key.upper() == HTTP_HEADER.CONTENT_LENGTH.upper():
                         params = True

                     # Avoid proxy and connection type related headers
                     elif key not in (HTTP_HEADER.PROXY_CONNECTION, HTTP_HEADER.CONNECTION):
-                        conf.httpHeaders.append((getUnicode(key), getUnicode(value)))
+                        headers.append((getUnicode(key), getUnicode(value)))

                     if CUSTOM_INJECTION_MARK_CHAR in re.sub(PROBLEMATIC_CUSTOM_INJECTION_PATTERNS, "", value or ""):
                         params = True
@@ -355,7 +356,7 @@ def _feedTargetsDict(reqFile, addedTargetUrls):
             if not(conf.scope and not re.search(conf.scope, url, re.I)):
                 if not kb.targets or url not in addedTargetUrls:
-                    kb.targets.add((url, method, data, cookie))
+                    kb.targets.add((url, method, data, cookie, tuple(headers)))
                     addedTargetUrls.add(url)

     fp = openFile(reqFile, "rb")
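
_feedTargetsDict now collects headers into a per-request list instead of appending straight to the global conf.httpHeaders, and stores them as a tuple because kb.targets is a set and set members must be hashable. A rough sketch of that collection idea (parse_request and SKIPPED are simplified stand-ins, not sqlmap functions):

# Rough stand-in for the per-request header collection; parse_request and
# SKIPPED are hypothetical simplifications of the real parsing code.
SKIPPED = ("Proxy-Connection", "Connection")

def parse_request(raw):
    headers = []                          # fresh list per parsed request
    for line in raw.split("\n")[1:]:      # skip the request line
        if not line.strip():
            break                         # blank line ends the header block
        key, _, value = line.partition(":")
        if key.strip() not in SKIPPED:
            headers.append((key.strip(), value.strip()))
    return tuple(headers)                 # hashable, so it can live in a set

print(parse_request("GET /x HTTP/1.1\nHost: a.example\nConnection: close\n\n"))
# (('Host', 'a.example'),)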


@@ -143,4 +143,4 @@ def crawl(target):
                 logger.warn(warnMsg)
     else:
         for url in threadData.shared.value:
-            kb.targets.add((url, None, None, None))
+            kb.targets.add((url, None, None, None, None))
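
The remaining call sites (findPageForms, the crawler) pad their targets with a trailing None so every member of kb.targets keeps the same five-element shape; a set mixing old 4-tuples with new 5-tuples would break the unpacking loop in start(), as this standalone snippet shows:

# Standalone demonstration of why every call site must emit 5-tuples.
targets = {("http://a.example/", None, None, None)}   # stale 4-tuple
try:
    for url, method, data, cookie, headers in targets:
        pass
except ValueError as err:
    print(err)  # e.g. "not enough values to unpack (expected 5, got 4)" on Python 3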