dynamicity engine update

This commit is contained in:
Miroslav Stampar 2010-11-07 00:12:00 +00:00
parent 3619fc5127
commit 508b9cc763
5 changed files with 20 additions and 8 deletions

View File

@@ -15,7 +15,6 @@ from difflib import SequenceMatcher
 from lib.core.agent import agent
 from lib.core.common import beep
-from lib.core.common import getFilteredPageContent
 from lib.core.common import getUnicode
 from lib.core.common import randomInt
 from lib.core.common import randomStr
@@ -28,6 +27,7 @@ from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.data import paths
 from lib.core.exception import sqlmapConnectionException
+from lib.core.exception import sqlmapGenericException
 from lib.core.exception import sqlmapNoneDataException
 from lib.core.exception import sqlmapUserQuitException
 from lib.core.exception import sqlmapSilentQuitException
@@ -219,6 +219,8 @@ def checkStability():
     time.sleep(1)
     secondPage, _ = Request.queryPage(content=True)

+    conf.seqMatcher.set_seq1(firstPage)
+
     kb.pageStable = (firstPage == secondPage)

     if kb.pageStable:
@@ -283,6 +285,11 @@ def checkStability():
         else:
             checkDynamicContent(firstPage, secondPage)

+            if not Request.queryPage():
+                errMsg = "target url is too dynamic. unable to continue. consider using other methods"
+                logger.error(errMsg)
+                raise sqlmapSilentQuitException
+
     return kb.pageStable

 def checkString():
def checkString(): def checkString():
@ -386,8 +393,7 @@ def checkConnection():
logger.info(infoMsg) logger.info(infoMsg)
try: try:
page, _ = Request.getPage() Request.getPage()
conf.seqMatcher.set_seq1(page if not conf.textOnly else getFilteredPageContent(page))
except sqlmapConnectionException, errMsg: except sqlmapConnectionException, errMsg:
errMsg = getUnicode(errMsg) errMsg = getUnicode(errMsg)

View File

@@ -41,6 +41,7 @@ from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.data import paths
 from lib.core.data import queries
+from lib.core.convert import htmlunescape
 from lib.core.convert import urlencode
 from lib.core.exception import sqlmapFilePathException
 from lib.core.exception import sqlmapGenericException
@@ -1102,6 +1103,8 @@ def getFilteredPageContent(page):
     while retVal.find("  ") != -1:
         retVal = retVal.replace("  ", " ")

+    retVal = htmlunescape(retVal)
+
     return retVal

 def getPageTextWordsSet(page):

View File

@@ -93,7 +93,7 @@ def utf8decode(string):
     return string.decode("utf-8")

 def htmlescape(string):
-    return string.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;').replace("'", '&#39;')
+    return string.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;').replace("'", '&#39;').replace(' ', '&nbsp;')

 def htmlunescape(string):
-    return string.replace('&amp;', '&').replace('&lt;', '<').replace('&gt;', '>').replace('&quot;', '"').replace('&#39;', "'")
+    return string.replace('&amp;', '&').replace('&lt;', '<').replace('&gt;', '>').replace('&quot;', '"').replace('&#39;', "'").replace('&nbsp;', ' ')

View File

@@ -9,7 +9,6 @@ See the file 'doc/COPYING' for copying permission
 import re

-from lib.core.common import getFilteredPageContent
 from lib.core.common import wasLastRequestError
 from lib.core.data import conf
 from lib.core.data import kb
@@ -50,7 +49,7 @@ def comparison(page, headers=None, getSeqMatcher=False, pageLength=None):
     if conf.regexp:
         return re.search(conf.regexp, page, re.I | re.M) is not None

-    # Dynamic content lines to be excluded before calculating page hash
+    # Dynamic content lines to be excluded before comparison
     if not kb.nullConnection:
         for item in kb.dynamicMarkings:
             prefix, postfix = item
@@ -72,7 +71,7 @@ def comparison(page, headers=None, getSeqMatcher=False, pageLength=None):
             if ratio > 1.:
                 ratio = 1. / ratio
     else:
-        conf.seqMatcher.set_seq2(page if not conf.textOnly else getFilteredPageContent(page))
+        conf.seqMatcher.set_seq2(page)
         ratio = round(conf.seqMatcher.ratio(), 3)

     if kb.locks.seqLock:

View File

@@ -18,6 +18,7 @@ import traceback
 from lib.contrib import multipartpost
 from lib.core.agent import agent
 from lib.core.common import readInput
+from lib.core.common import getFilteredPageContent
 from lib.core.common import getUnicode
 from lib.core.convert import urlencode
 from lib.core.common import urlEncodeCookieValues
@@ -367,6 +368,9 @@ class Connect:
         if not pageLength:
             page, headers = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404)

+        if conf.textOnly:
+            page = getFilteredPageContent(page)
+
         if content or response:
             return page, headers
         elif pageLength or page: