#!/usr/bin/env python

"""
$Id$

Copyright (c) 2006-2010 sqlmap developers (http://sqlmap.sourceforge.net/)
See the file 'doc/COPYING' for copying permission
"""

import re
import socket
import time

from difflib import SequenceMatcher

from lib.core.agent import agent
from lib.core.common import beep
from lib.core.common import getUnicode
from lib.core.common import randomInt
from lib.core.common import randomStr
from lib.core.common import readInput
from lib.core.common import showStaticWords
from lib.core.common import wasLastRequestDBMSError
from lib.core.common import DynamicContentItem
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.data import paths
from lib.core.enums import HTTPMETHOD
from lib.core.enums import NULLCONNECTION
from lib.core.exception import sqlmapConnectionException
from lib.core.exception import sqlmapGenericException
from lib.core.exception import sqlmapNoneDataException
from lib.core.exception import sqlmapSiteTooDynamic
from lib.core.exception import sqlmapUserQuitException
from lib.core.session import setString
from lib.core.session import setRegexp
from lib.request.connect import Connect as Request

def checkSqlInjection(place, parameter, value, parenthesis):
    """
    This function checks if the GET, POST, Cookie, User-Agent
    parameters are affected by a SQL injection vulnerability and
    identifies the type of SQL injection:

      * Unescaped numeric injection
      * Single quoted string injection
      * Double quoted string injection
    """

    logic = conf.logic
    randInt = randomInt()
    randStr = randomStr()
    prefix = ""
    suffix = ""
    retVal = None

    if conf.prefix or conf.suffix:
        if conf.prefix:
            prefix = conf.prefix

        if conf.suffix:
            suffix = conf.suffix
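
    # the test cases iterated below come from the injection test definitions
    # loaded into kb.injections (unescaped numeric, single quoted and double
    # quoted string cases, plus a "custom" case driven by --prefix/--suffix)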
    for case in kb.injections.root.case:
        conf.matchRatio = None

        positive = case.test.positive
        negative = case.test.negative

        if not prefix and not suffix and case.name == "custom":
            continue

        infoMsg = "testing %s (%s) injection " % (case.desc, logic)
        infoMsg += "on %s parameter '%s'" % (place, parameter)
        logger.info(infoMsg)
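
        # boolean-based check: the "negative" (false condition) payload is
        # sent first (its page is discarded and presumably only seeds the
        # comparison match ratio), followed by the "positive" (true condition)
        # payload, which should return the original page if the parameter is
        # injectable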
        payload = agent.payload(place, parameter, value, negative.format % eval(negative.params))
        _ = Request.queryPage(payload, place)

        payload = agent.payload(place, parameter, value, positive.format % eval(positive.params))
        trueResult = Request.queryPage(payload, place)

        if trueResult:
            infoMsg = "confirming %s (%s) injection " % (case.desc, logic)
            infoMsg += "on %s parameter '%s'" % (place, parameter)
            logger.info(infoMsg)
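
            # confirmation step: re-send the false condition with fresh random
            # values; the parameter is reported injectable only if the true
            # payload matched the original page and this false payload does not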
            payload = agent.payload(place, parameter, value, negative.format % eval(negative.params))

            randInt = randomInt()
            randStr = randomStr()

            falseResult = Request.queryPage(payload, place)

            if not falseResult:
                infoMsg = "%s parameter '%s' is %s (%s) injectable " % (place, parameter, case.desc, logic)
                infoMsg += "with %d parenthesis" % parenthesis
                logger.info(infoMsg)

                if conf.beep:
                    beep()

                retVal = case.name
                break

    kb.paramMatchRatio[(place, parameter)] = conf.matchRatio

    return retVal

def heuristicCheckSqlInjection(place, parameter, value):
    if kb.nullConnection:
        debugMsg = "heuristic checking skipped "
        debugMsg += "because NULL connection used"
        logger.debug(debugMsg)
        return

    prefix = ""
    suffix = ""

    if conf.prefix or conf.suffix:
        if conf.prefix:
            prefix = conf.prefix

        if conf.suffix:
            suffix = conf.suffix
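
    # append ten random quote/parenthesis characters to the original value;
    # if the response then contains a DBMS error message, the parameter is
    # likely breaking out of its SQL context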
payload = "%s%s%s%s" % (value, prefix, randomStr(length=10, alphabet=['"', '\'', ')', '(']), suffix)
|
2010-10-27 12:27:31 +04:00
|
|
|
payload = agent.payload(place, parameter, value, payload)
|
2010-11-04 00:51:36 +03:00
|
|
|
Request.queryPage(payload, place, raise404=False)
|
2010-11-16 13:42:42 +03:00
|
|
|
result = wasLastRequestDBMSError()
|
2010-10-16 19:10:48 +04:00
|
|
|
|
2010-11-16 13:52:49 +03:00
|
|
|
infoMsg = "heuristics shows that %s " % place
|
|
|
|
infoMsg += "parameter '%s' might " % parameter
|
2010-10-16 19:10:48 +04:00
|
|
|
|
2010-10-11 16:26:35 +04:00
|
|
|
if result:
|
2010-11-16 13:52:49 +03:00
|
|
|
infoMsg += "be injectable (possible DBMS: %s)" % (kb.htmlFp[-1] if kb.htmlFp else 'Unknown')
|
2010-10-11 16:26:35 +04:00
|
|
|
logger.info(infoMsg)
|
|
|
|
else:
|
2010-11-16 13:52:49 +03:00
|
|
|
infoMsg += "not be injectable"
|
2010-10-11 16:26:35 +04:00
|
|
|
logger.warning(infoMsg)
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def checkDynParam(place, parameter, value):
|
|
|
|
"""
|
|
|
|
This function checks if the url parameter is dynamic. If it is
|
|
|
|
dynamic, the content of the page differs, otherwise the
|
|
|
|
dynamicity might depend on another parameter.
|
|
|
|
"""
|
|
|
|
|
2010-11-10 01:44:23 +03:00
|
|
|
conf.matchRatio = None
|
|
|
|
|
2008-12-09 00:24:24 +03:00
|
|
|
infoMsg = "testing if %s parameter '%s' is dynamic" % (place, parameter)
|
|
|
|
logger.info(infoMsg)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
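
    # substitute a random integer for the original value; if the page does
    # not change (queryPage reports a match against the original page) the
    # parameter is considered static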
    randInt = randomInt()
    payload = agent.payload(place, parameter, value, getUnicode(randInt))
    dynResult = Request.queryPage(payload, place)

    if True == dynResult:
        return False

    infoMsg = "confirming that %s parameter '%s' is dynamic" % (place, parameter)
    logger.info(infoMsg)

    randInt = randomInt()
    payload = agent.payload(place, parameter, value, getUnicode(randInt))
    dynResult = Request.queryPage(payload, place)

    return not dynResult

def checkDynamicContent(firstPage, secondPage):
    """
    This function checks if the provided pages have dynamic content. If they
    are dynamic, proper markings will be made.
    """

    if kb.nullConnection:
        debugMsg = "dynamic content checking skipped "
        debugMsg += "because NULL connection used"
        logger.debug(debugMsg)
        return

    if conf.longestCommon:
        debugMsg = "dynamic content checking skipped "
        debugMsg += "because longest common comparison used"
        logger.debug(debugMsg)
        return

    infoMsg = "searching for dynamic content"
    logger.info(infoMsg)

    blocks = SequenceMatcher(None, firstPage, secondPage).get_matching_blocks()
    kb.dynamicMarkings = []
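
    # prune matching blocks shorter than conf.minMatchBlock so that only
    # significant static regions are kept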
    i = 0
    while i < len(blocks):
        block = blocks[i]
        (_, _, length) = block

        if length <= conf.minMatchBlock:
            blocks.remove(block)
        else:
            i += 1

    if len(blocks) > 0:
        blocks.insert(0, None)
        blocks.append(None)
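
        # for every gap between two consecutive static blocks keep up to
        # conf.dynMarkLength escaped characters from each side as a
        # (prefix, suffix) marking; the dynamic region between them can then
        # be stripped before page comparison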
        for i in xrange(len(blocks) - 1):
            prefix = firstPage[blocks[i][0]:blocks[i][0] + blocks[i][2]] if blocks[i] else None
            suffix = firstPage[blocks[i + 1][0]:blocks[i + 1][0] + blocks[i + 1][2]] if blocks[i + 1] else None

            if prefix is None and blocks[i + 1][0] == 0:
                continue

            if suffix is None and (blocks[i][0] + blocks[i][2] >= len(firstPage)):
                continue

            kb.dynamicMarkings.append((re.escape(prefix[-conf.dynMarkLength:]) if prefix else None, re.escape(suffix[:conf.dynMarkLength]) if suffix else None))

    if len(kb.dynamicMarkings) > 0:
        infoMsg = "dynamic content marked for removal (%d region%s)" % (len(kb.dynamicMarkings), 's' if len(kb.dynamicMarkings) > 1 else '')
        logger.info(infoMsg)
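
        # also strip the freshly marked dynamic regions from the stored
        # template page (conf.seqMatcher.a) so later comparisons run against
        # the cleaned version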
        if conf.seqMatcher.a:
            for item in kb.dynamicMarkings:
                prefix, suffix = item

                if prefix is None:
                    conf.seqMatcher.a = re.sub('(?s)^.+%s' % suffix, suffix, conf.seqMatcher.a)
                elif suffix is None:
                    conf.seqMatcher.a = re.sub('(?s)%s.+$' % prefix, prefix, conf.seqMatcher.a)
                else:
                    conf.seqMatcher.a = re.sub('(?s)%s.+%s' % (prefix, suffix), '%s%s' % (prefix, suffix), conf.seqMatcher.a)

def checkStability():
    """
    This function checks if the URL content is stable by requesting the
    same page twice with a small delay between the requests, to assume
    that it is stable.

    In case the content of the page differs when requesting
    the same page, the dynamicity might depend on other parameters,
    like for instance string matching (--string).
    """

    infoMsg = "testing if the url is stable, wait a few seconds"
    logger.info(infoMsg)

    firstPage = conf.seqMatcher.a # set inside checkConnection()
    time.sleep(1)
    secondPage, _ = Request.queryPage(content=True)

    kb.pageStable = (firstPage == secondPage)

    if kb.pageStable:
        if firstPage:
            logMsg = "url is stable"
            logger.info(logMsg)
        else:
            errMsg = "there was an error checking the stability of page "
            errMsg += "because of lack of content. please check the "
            errMsg += "page request results (and probable errors) by "
            errMsg += "using higher verbosity levels"
            raise sqlmapNoneDataException, errMsg

    else:
        warnMsg = "url is not stable, sqlmap will base the page "
        warnMsg += "comparison on a sequence matcher. If no dynamic nor "
        warnMsg += "injectable parameters are detected, or in case of "
        warnMsg += "junk results, refer to user's manual paragraph "
        warnMsg += "'Page comparison' and provide a string or regular "
        warnMsg += "expression to match on"
        logger.warn(warnMsg)
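
        # the page is unstable: let the user choose between continuing with
        # the sequence matcher comparison, supplying a string or regular
        # expression to match on, or quitting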
message = "how do you want to proceed? [C(ontinue)/s(tring)/r(egex)/q(uit)] "
|
|
|
|
test = readInput(message, default="C")
|
2010-10-16 19:10:48 +04:00
|
|
|
|
2010-10-12 19:49:04 +04:00
|
|
|
if test and test[0] in ("q", "Q"):
|
|
|
|
raise sqlmapUserQuitException
|
2010-10-16 19:10:48 +04:00
|
|
|
|
2010-10-12 19:49:04 +04:00
|
|
|
elif test and test[0] in ("s", "S"):
|
|
|
|
showStaticWords(firstPage, secondPage)
|
2010-10-16 19:10:48 +04:00
|
|
|
|
2010-10-12 19:49:04 +04:00
|
|
|
message = "please enter value for parameter 'string': "
|
|
|
|
test = readInput(message)
|
2010-10-16 19:10:48 +04:00
|
|
|
|
2010-10-12 19:49:04 +04:00
|
|
|
if test:
|
|
|
|
conf.string = test
|
2010-11-04 12:18:32 +03:00
|
|
|
|
|
|
|
if kb.nullConnection:
|
|
|
|
debugMsg = "turning off NULL connection "
|
|
|
|
debugMsg += "support because of string checking"
|
|
|
|
logger.debug(debugMsg)
|
|
|
|
|
|
|
|
kb.nullConnection = None
|
2010-10-12 19:49:04 +04:00
|
|
|
else:
|
2010-11-10 22:44:51 +03:00
|
|
|
errMsg = "Empty value supplied"
|
|
|
|
raise sqlmapNoneDataException, errMsg
|
2010-10-16 19:10:48 +04:00
|
|
|
|
2010-10-12 19:49:04 +04:00
|
|
|
elif test and test[0] in ("r", "R"):
|
|
|
|
message = "please enter value for parameter 'regex': "
|
|
|
|
test = readInput(message)
|
2010-10-16 19:10:48 +04:00
|
|
|
|
2010-10-12 19:49:04 +04:00
|
|
|
if test:
|
|
|
|
conf.regex = test
|
2010-11-04 12:18:32 +03:00
|
|
|
|
|
|
|
if kb.nullConnection:
|
|
|
|
debugMsg = "turning off NULL connection "
|
|
|
|
debugMsg += "support because of regex checking"
|
|
|
|
logger.debug(debugMsg)
|
|
|
|
|
|
|
|
kb.nullConnection = None
|
2010-10-12 19:49:04 +04:00
|
|
|
else:
|
2010-11-10 22:44:51 +03:00
|
|
|
errMsg = "Empty value supplied"
|
|
|
|
raise sqlmapNoneDataException, errMsg
|
2010-10-12 19:49:04 +04:00
|
|
|
else:
|
|
|
|
checkDynamicContent(firstPage, secondPage)
|
2010-09-13 17:31:01 +04:00
|
|
|
|
2010-11-07 03:12:00 +03:00
|
|
|
if not Request.queryPage():
|
2010-11-07 18:34:52 +03:00
|
|
|
errMsg = "target url is too dynamic. unable to continue. "
|
|
|
|
errMsg += "consider using other switches (e.g. "
|
|
|
|
errMsg += "--longest-common, --string, --text-only, etc.)"
|
2010-11-10 22:44:51 +03:00
|
|
|
raise sqlmapSiteTooDynamic, errMsg
|
2010-11-07 03:12:00 +03:00
|
|
|
|
2010-10-25 17:52:21 +04:00
|
|
|
return kb.pageStable
|
2010-03-12 15:23:05 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def checkString():
|
|
|
|
if not conf.string:
|
|
|
|
return True
|
|
|
|
|
|
|
|
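
    # resume support: skip the check if the session already recorded this
    # exact string for the target URL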
    condition = (
                  kb.resumedQueries.has_key(conf.url) and
                  kb.resumedQueries[conf.url].has_key("String") and
                  kb.resumedQueries[conf.url]["String"][:-1] == conf.string
                )

    if condition:
        return True

    infoMsg = "testing if the provided string is within the "
    infoMsg += "target URL page content"
    logger.info(infoMsg)

    page, _ = Request.queryPage(content=True)

    if conf.string in page:
        setString()
    else:
        warnMsg = "you provided '%s' as the string to " % conf.string
        warnMsg += "match, but such a string is not within the target "
        warnMsg += "URL page content of the original request, sqlmap "
        warnMsg += "will keep going anyway"
        logger.warn(warnMsg)

    return True

def checkRegexp():
    if not conf.regexp:
        return True

    condition = (
                  kb.resumedQueries.has_key(conf.url) and
                  kb.resumedQueries[conf.url].has_key("Regular expression") and
                  kb.resumedQueries[conf.url]["Regular expression"][:-1] == conf.regexp
                )

    if condition:
        return True

    infoMsg = "testing if the provided regular expression matches within "
    infoMsg += "the target URL page content"
    logger.info(infoMsg)

    page, _ = Request.queryPage(content=True)

    if re.search(conf.regexp, page, re.I | re.M):
        setRegexp()
    else:
        warnMsg = "you provided '%s' as the regular expression to " % conf.regexp
        warnMsg += "match, but such a regular expression does not have any "
        warnMsg += "match within the target URL page content, sqlmap "
        warnMsg += "will keep going anyway"
        logger.warn(warnMsg)

    return True

def checkNullConnection():
    """
    Reference: http://www.wisec.it/sectou.php?id=472f952d79293
    """

    infoMsg = "testing NULL connection to the target url"
    logger.info(infoMsg)

    try:
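        # first try a HEAD request: if the reply carries no body but includes
        # a Content-Length header, the page length can be obtained without
        # downloading the content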
        page, headers = Request.getPage(method=HTTPMETHOD.HEAD)

        if not page and 'Content-Length' in headers:
            kb.nullConnection = NULLCONNECTION.HEAD

            infoMsg = "NULL connection is supported with HEAD header"
            logger.info(infoMsg)
        else:
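            # otherwise try a ranged GET asking for the last byte only; a
            # Content-Range header in the reply still reveals the full size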
            page, headers = Request.getPage(auxHeaders={"Range": "bytes=-1"})

            if page and len(page) == 1 and 'Content-Range' in headers:
                kb.nullConnection = NULLCONNECTION.RANGE

                infoMsg = "NULL connection is supported with GET header "
                infoMsg += "'%s'" % kb.nullConnection
                logger.info(infoMsg)

    except sqlmapConnectionException, errMsg:
        errMsg = getUnicode(errMsg)
        raise sqlmapConnectionException, errMsg

    return kb.nullConnection is not None

def checkConnection(suppressOutput=False):
    try:
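        # resolve the target hostname first so an unknown host is reported
        # immediately with a clear error message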
        socket.gethostbyname(conf.hostname)
    except socket.gaierror:
        errMsg = "host '%s' does not exist" % conf.hostname
        raise sqlmapConnectionException, errMsg

    if not suppressOutput:
        infoMsg = "testing connection to the target url"
        logger.info(infoMsg)

    try:
        page, _ = Request.queryPage(content=True)
        conf.seqMatcher.set_seq1(page)

    except sqlmapConnectionException, errMsg:
        errMsg = getUnicode(errMsg)
        raise sqlmapConnectionException, errMsg

    return True