Initial support for automatically working around dynamic page content at each refresh

(Major refactor of the comparison algorithm (True/False response))
This commit is contained in:
Bernardo Damele 2008-12-18 20:48:23 +00:00
parent 3fe493b63d
commit d0d6632c22
6 changed files with 85 additions and 12 deletions

View File

@ -294,16 +294,63 @@ def checkStability():
infoMsg = "testing if the url is stable, wait a few seconds" infoMsg = "testing if the url is stable, wait a few seconds"
logger.info(infoMsg) logger.info(infoMsg)
firstResult = Request.queryPage() firstPage, firstHeaders = Request.queryPage(content=True)
time.sleep(0.5) time.sleep(0.5)
secondResult = Request.queryPage() secondPage, secondHeaders = Request.queryPage(content=True)
time.sleep(0.5) time.sleep(0.5)
thirdResult = Request.queryPage() thirdPage, thirdHeaders = Request.queryPage(content=True)
condition = firstResult == secondResult condition = firstPage == secondPage
condition &= secondResult == thirdResult condition &= secondPage == thirdPage
if condition == False:
contentLengths = []
requestsHeaders = ( firstHeaders, secondHeaders, thirdHeaders )
for requestHeaders in requestsHeaders:
requestHeaders = str(requestHeaders).lower()
clHeader = re.search("content-length:\s+([\d]+)", requestHeaders, re.I | re.M)
if clHeader and clHeader.group(1).isdigit():
contentLengths.append(int(clHeader.group(1)))
if contentLengths:
clSum = 0
for cl in contentLengths:
clSum += cl
clAverage = clSum / len(contentLengths)
# TODO: go ahead here with the technique to compare True/False
# based upon clAverage discard (conf.contentLengths)
counter = 0
firstLines = firstPage.split("\n")
secondLines = secondPage.split("\n")
thirdLines = thirdPage.split("\n")
for firstLine in firstLines:
if counter > len(secondLines) or counter > len(thirdLines):
break
if firstLine in secondLines and firstLine in thirdLines:
conf.equalLines.append(firstLine)
counter += 1
if conf.equalLines:
warnMsg = "url is not stable, sqlmap inspected the page "
warnMsg += "content and identified a stable lines subset "
warnMsg += "to be used in the comparison algorithm"
logger.warn(warnMsg)
kb.defaultResult = True
return True
return condition return condition
@ -325,7 +372,7 @@ def checkString():
infoMsg += "target URL page content" infoMsg += "target URL page content"
logger.info(infoMsg) logger.info(infoMsg)
page = Request.queryPage(content=True) page, _ = Request.queryPage(content=True)
if conf.string in page: if conf.string in page:
setString() setString()
@ -356,7 +403,7 @@ def checkRegexp():
infoMsg += "the target URL page content" infoMsg += "the target URL page content"
logger.info(infoMsg) logger.info(infoMsg)
page = Request.queryPage(content=True) page, _ = Request.queryPage(content=True)
if re.search(conf.regexp, page, re.I | re.M): if re.search(conf.regexp, page, re.I | re.M):
setRegexp() setRegexp()

View File

@ -570,8 +570,10 @@ def __setConfAttributes():
logger.debug(debugMsg) logger.debug(debugMsg)
conf.cj = None conf.cj = None
conf.contentLengths = []
conf.dbmsHandler = None conf.dbmsHandler = None
conf.dumpPath = None conf.dumpPath = None
conf.equalLines = []
conf.httpHeaders = [] conf.httpHeaders = []
conf.hostname = None conf.hostname = None
conf.loggedToOut = None conf.loggedToOut = None

View File

@ -68,5 +68,29 @@ def comparison(page, headers=None, content=False):
return False return False
# By default it returns the page content MD5 hash # By default it returns the page content MD5 hash
else: if not conf.equalLines and not conf.contentLengths:
return md5.new(page).hexdigest() return md5.new(page).hexdigest()
# TODO: ahead here
elif conf.equalLines:
counter = 0
trueLines = 0
pageLines = page.split("\n")
for commonLine in conf.equalLines:
if counter >= len(pageLines):
break
if commonLine in pageLines:
trueLines += 1
counter += 1
# TODO: just debug prints
print "trueLines:", trueLines, "len(conf.equalLines):", len(conf.equalLines)
print "result:", ( trueLines * 100 ) / len(conf.equalLines)
if ( trueLines * 100 ) / len(conf.equalLines) >= 98:
return True
else:
return False

View File

@ -269,7 +269,7 @@ class Connect:
page, headers = Connect.getPage(get=get, post=post, cookie=cookie, ua=ua) page, headers = Connect.getPage(get=get, post=post, cookie=cookie, ua=ua)
if content: if content:
return page return page, headers
elif page and headers: elif page and headers:
return comparison(page, headers, content) return comparison(page, headers, content)
else: else:

View File

@ -340,6 +340,6 @@ def goStacked(expression):
query = agent.prefixQuery("; %s" % expression) query = agent.prefixQuery("; %s" % expression)
query = agent.postfixQuery("%s;%s" % (query, comment)) query = agent.postfixQuery("%s;%s" % (query, comment))
payload = agent.payload(newValue=query) payload = agent.payload(newValue=query)
page = Request.queryPage(payload, content=True) page, _ = Request.queryPage(payload, content=True)
return payload, page return payload, page

View File

@ -81,7 +81,7 @@ def __unionPosition(expression, negative=False):
payload = agent.payload(newValue=query, negative=negative) payload = agent.payload(newValue=query, negative=negative)
# Perform the request # Perform the request
resultPage = Request.queryPage(payload, content=True) resultPage, _ = Request.queryPage(payload, content=True)
reqCount += 1 reqCount += 1
# We have to assure that the randQuery value is not within the # We have to assure that the randQuery value is not within the
@ -280,7 +280,7 @@ def unionUse(expression, direct=False, unescape=True, resetCounter=False):
logger.info(infoMsg) logger.info(infoMsg)
# Perform the request # Perform the request
resultPage = Request.queryPage(payload, content=True) resultPage, _ = Request.queryPage(payload, content=True)
reqCount += 1 reqCount += 1
if temp.start not in resultPage or temp.stop not in resultPage: if temp.start not in resultPage or temp.stop not in resultPage: