Mirror of https://github.com/sqlmapproject/sqlmap.git (synced 2024-11-25 11:03:47 +03:00)
Initial support for automatically working around dynamic page content at each refresh
(major refactor of the True/False response comparison algorithm)
This commit is contained in:
parent 3fe493b63d
commit d0d6632c22
@@ -294,16 +294,63 @@ def checkStability():
     infoMsg = "testing if the url is stable, wait a few seconds"
     logger.info(infoMsg)

-    firstResult = Request.queryPage()
+    firstPage, firstHeaders = Request.queryPage(content=True)
     time.sleep(0.5)

-    secondResult = Request.queryPage()
+    secondPage, secondHeaders = Request.queryPage(content=True)
     time.sleep(0.5)

-    thirdResult = Request.queryPage()
+    thirdPage, thirdHeaders = Request.queryPage(content=True)

-    condition = firstResult == secondResult
-    condition &= secondResult == thirdResult
+    condition = firstPage == secondPage
+    condition &= secondPage == thirdPage
+
+    if condition == False:
+        contentLengths = []
+        requestsHeaders = ( firstHeaders, secondHeaders, thirdHeaders )
+
+        for requestHeaders in requestsHeaders:
+            requestHeaders = str(requestHeaders).lower()
+
+            clHeader = re.search("content-length:\s+([\d]+)", requestHeaders, re.I | re.M)
+
+            if clHeader and clHeader.group(1).isdigit():
+                contentLengths.append(int(clHeader.group(1)))
+
+        if contentLengths:
+            clSum = 0
+
+            for cl in contentLengths:
+                clSum += cl
+
+            clAverage = clSum / len(contentLengths)
+
+            # TODO: go ahead here with the technique to compare True/False
+            # based upon clAverage discard (conf.contentLengths)
+
+        counter = 0
+        firstLines = firstPage.split("\n")
+        secondLines = secondPage.split("\n")
+        thirdLines = thirdPage.split("\n")
+
+        for firstLine in firstLines:
+            if counter > len(secondLines) or counter > len(thirdLines):
+                break
+
+            if firstLine in secondLines and firstLine in thirdLines:
+                conf.equalLines.append(firstLine)
+
+            counter += 1
+
+        if conf.equalLines:
+            warnMsg = "url is not stable, sqlmap inspected the page "
+            warnMsg += "content and identified a stable lines subset "
+            warnMsg += "to be used in the comparison algorithm"
+            logger.warn(warnMsg)
+
+            kb.defaultResult = True
+
+            return True

     return condition
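The hunk above is the heart of the commit: the page is requested three times and, when the raw bodies differ, the lines common to all three responses are collected into conf.equalLines for later comparisons. A minimal standalone sketch of that idea (Python 3; fetch_page is a hypothetical stand-in for Request.queryPage(content=True)):

```python
import time
import urllib.request

def fetch_page(url):
    # Hypothetical stand-in for sqlmap's Request.queryPage(content=True);
    # returns only the body here, headers are ignored for brevity
    with urllib.request.urlopen(url) as response:
        return response.read().decode("utf-8", errors="replace")

def stable_lines(url, delay=0.5):
    # Fetch the same URL three times, pausing between requests as the diff does
    pages = []
    for _ in range(3):
        pages.append(fetch_page(url))
        time.sleep(delay)

    if pages[0] == pages[1] == pages[2]:
        return None  # page is stable; no line subset is needed

    second = set(pages[1].split("\n"))
    third = set(pages[2].split("\n"))

    # Keep only the lines present in all three responses,
    # preserving the order of the first response
    return [line for line in pages[0].split("\n")
            if line in second and line in third]
```

The sketch uses set membership for the intersection, matching the `firstLine in secondLines` tests in the diff (note the `counter` guard there reads `>` where `>=` would be the safer bound). The Content-Length averaging in the same hunk is still marked TODO in this commit, so it is not modelled here.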
@@ -325,7 +372,7 @@ def checkString():
     infoMsg += "target URL page content"
     logger.info(infoMsg)

-    page = Request.queryPage(content=True)
+    page, _ = Request.queryPage(content=True)

     if conf.string in page:
         setString()
@@ -356,7 +403,7 @@ def checkRegexp():
     infoMsg += "the target URL page content"
     logger.info(infoMsg)

-    page = Request.queryPage(content=True)
+    page, _ = Request.queryPage(content=True)

     if re.search(conf.regexp, page, re.I | re.M):
         setRegexp()
@@ -570,8 +570,10 @@ def __setConfAttributes():
     logger.debug(debugMsg)

     conf.cj = None
+    conf.contentLengths = []
     conf.dbmsHandler = None
     conf.dumpPath = None
+    conf.equalLines = []
     conf.httpHeaders = []
     conf.hostname = None
     conf.loggedToOut = None
@@ -68,5 +68,29 @@ def comparison(page, headers=None, content=False):
         return False

     # By default it returns the page content MD5 hash
     else:
-        return md5.new(page).hexdigest()
+        if not conf.equalLines and not conf.contentLengths:
+            return md5.new(page).hexdigest()
+
+        # TODO: ahead here
+        elif conf.equalLines:
+            counter = 0
+            trueLines = 0
+            pageLines = page.split("\n")
+
+            for commonLine in conf.equalLines:
+                if counter >= len(pageLines):
+                    break
+
+                if commonLine in pageLines:
+                    trueLines += 1
+
+                counter += 1
+
+            # TODO: just debug prints
+            print "trueLines:", trueLines, "len(conf.equalLines):", len(conf.equalLines)
+            print "result:", ( trueLines * 100 ) / len(conf.equalLines)
+
+            if ( trueLines * 100 ) / len(conf.equalLines) >= 98:
+                return True
+            else:
+                return False
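The new branch in comparison() turns the stable-lines subset into a True/False oracle: a response counts as True when at least 98% of the recorded stable lines reappear in it, while the MD5 digest remains the default page identity. A hedged Python 3 restatement of that logic (names are illustrative, not sqlmap's own):

```python
import hashlib

MATCH_THRESHOLD = 98  # percent, the cut-off used in the diff

def compare(page, equal_lines):
    # No stable-lines subset recorded: fall back to identifying the
    # page by its MD5 digest, as the existing code does
    if not equal_lines:
        return hashlib.md5(page.encode("utf-8")).hexdigest()

    page_lines = set(page.split("\n"))
    matched = sum(1 for line in equal_lines if line in page_lines)

    # Integer percentage, mirroring the Python 2 integer division in the diff
    return (matched * 100) // len(equal_lines) >= MATCH_THRESHOLD
```

The two `print` statements in the hunk are Python 2 debug output and are flagged for removal by the commit's own TODO.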
@@ -269,7 +269,7 @@ class Connect:
         page, headers = Connect.getPage(get=get, post=post, cookie=cookie, ua=ua)

         if content:
-            return page
+            return page, headers
         elif page and headers:
             return comparison(page, headers, content)
         else:
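This hunk explains the mechanical `page, _ = ...` edits throughout the rest of the commit: with content=True, queryPage() now returns a (page, headers) pair instead of the bare body. A toy illustration of the changed contract (simplified, hypothetical names):

```python
def query_page(content=False):
    # Stand-ins for the body and headers produced by the real HTTP request
    page = "<html>example</html>"
    headers = {"Content-Length": str(len(page))}

    if content:
        return page, headers  # new behaviour: body plus headers
    return page               # the real method instead returns comparison(page, headers, content)

# Call sites that only need the body unpack the pair and discard the headers:
page, _ = query_page(content=True)
```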
@@ -340,6 +340,6 @@ def goStacked(expression):
     query = agent.prefixQuery("; %s" % expression)
     query = agent.postfixQuery("%s;%s" % (query, comment))
     payload = agent.payload(newValue=query)
-    page = Request.queryPage(payload, content=True)
+    page, _ = Request.queryPage(payload, content=True)

     return payload, page
@@ -81,7 +81,7 @@ def __unionPosition(expression, negative=False):
     payload = agent.payload(newValue=query, negative=negative)

     # Perform the request
-    resultPage = Request.queryPage(payload, content=True)
+    resultPage, _ = Request.queryPage(payload, content=True)
     reqCount += 1

     # We have to assure that the randQuery value is not within the
@@ -280,7 +280,7 @@ def unionUse(expression, direct=False, unescape=True, resetCounter=False):
     logger.info(infoMsg)

     # Perform the request
-    resultPage = Request.queryPage(payload, content=True)
+    resultPage, _ = Request.queryPage(payload, content=True)
     reqCount += 1

     if temp.start not in resultPage or temp.stop not in resultPage: