adding compatibility support for using --crawl and --forms together

This commit is contained in:
Miroslav Stampar 2011-10-29 09:32:20 +00:00
parent ddc4dfe5ff
commit ef987c6954
4 changed files with 16 additions and 9 deletions

View File

@@ -3363,6 +3363,9 @@ class HTMLForm:
         if self.enctype == "application/x-www-form-urlencoded":
             return (uri, urlencode(self._pairs()),
                     [("Content-Type", self.enctype)])
+        elif self.enctype == "text/plain":
+            return (uri, self._pairs(),
+                    [("Content-Type", self.enctype)])
         elif self.enctype == "multipart/form-data":
             data = StringIO()
             http_hdrs = []

View File

@@ -3073,7 +3073,7 @@ def asciifyUrl(url, forceQuote=False):
 def findPageForms(content, url, raise_=False, addToTargets=False):
     class _(StringIO):
-        def __init__(self):
+        def __init__(self, content, url):
             StringIO.__init__(self, unicodeencode(content, kb.pageEncoding) if isinstance(content, unicode) else content)
             self._url = url
         def geturl(self):
@@ -3083,17 +3083,21 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
         errMsg = "can't parse forms as the page content appears to be blank"
         raise sqlmapGenericException, errMsg

-    forms = None
     retVal = set()
-    response = _()
+    response = _(content, url)

     try:
         forms = ParseResponse(response, backwards_compat=False)
     except ParseError:
         errMsg = "badly formed HTML at the target url. will try to filter it"
         logger.error(errMsg)
         response.seek(0)
-        filtered = _("".join(re.findall(r'<form.+?</form>', response.read(), re.I | re.S)), response.geturl())
+        filtered = re.findall(r'<form.+?</form>', response.read(), re.I | re.S)
+        for i in xrange(len(filtered)):
+            filtered[i] = filtered[i][filtered[i].lower().rfind("<form"):]
+        response = _("".join(filtered), response.geturl())
         try:
-            forms = ParseResponse(filtered, backwards_compat=False)
+            forms = ParseResponse(response, backwards_compat=False)
         except ParseError:
             errMsg = "no success"
             if raise_:

View File

@@ -522,7 +522,7 @@ def __setBulkMultipleTargets():
     f.close()

 def __findPageForms():
-    if not conf.forms:
+    if not conf.forms or conf.crawlDepth:
         return

     if not checkConnection():
@@ -1796,10 +1796,6 @@ def __basicOptionValidation():
         errMsg = "switch --forms is compatible only with -u (--url) target switch"
         raise sqlmapSyntaxException, errMsg

-    if conf.forms and conf.crawlDepth:
-        errMsg = "switch --forms is currently not compatible with --crawl switch"
-        raise sqlmapSyntaxException, errMsg
-
     if conf.timeSec < 1:
         errMsg = "value for --time-sec option must be an integer greater than 0"
         raise sqlmapSyntaxException, errMsg

View File

@@ -15,6 +15,7 @@ import time
 from lib.core.common import clearConsoleLine
 from lib.core.common import dataToStdout
+from lib.core.common import findPageForms
 from lib.core.common import singleTimeWarnMessage
 from lib.core.data import conf
 from lib.core.data import kb
@@ -92,6 +93,9 @@ class Crawler:
                         threadData.shared.outputs.add(url)
                         kb.locks.outputs.release()

+                        if conf.forms:
+                            findPageForms(content, current, False, True)
+
                         if conf.verbose in (1, 2):
                             kb.locks.ioLock.acquire()
                             threadData.shared.count += 1