2008-10-15 19:38:22 +04:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
|
|
|
"""
|
2008-10-15 19:56:32 +04:00
|
|
|
$Id$
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
This file is part of the sqlmap project, http://sqlmap.sourceforge.net.
|
|
|
|
|
2010-03-03 18:26:27 +03:00
|
|
|
Copyright (c) 2007-2010 Bernardo Damele A. G. <bernardo.damele@gmail.com>
|
2009-04-22 15:48:07 +04:00
|
|
|
Copyright (c) 2006 Daniele Bellucci <daniele.bellucci@gmail.com>
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
sqlmap is free software; you can redistribute it and/or modify it under
|
|
|
|
the terms of the GNU General Public License as published by the Free
|
|
|
|
Software Foundation version 2 of the License.
|
|
|
|
|
|
|
|
sqlmap is distributed in the hope that it will be useful, but WITHOUT ANY
|
|
|
|
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
|
|
|
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
|
|
|
|
details.
|
|
|
|
|
|
|
|
You should have received a copy of the GNU General Public License along
|
|
|
|
with sqlmap; if not, write to the Free Software Foundation, Inc., 51
|
|
|
|
Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
|
|
|
|
"""
|
|
|
|
|
2008-12-12 22:06:31 +03:00
|
|
|
import re
|
2010-05-21 17:36:49 +04:00
|
|
|
import socket
|
2008-10-15 19:38:22 +04:00
|
|
|
import time
|
|
|
|
|
|
|
|
from lib.core.agent import agent
|
2010-06-02 16:45:40 +04:00
|
|
|
from lib.core.common import getUnicode
|
2010-09-13 17:31:01 +04:00
|
|
|
from lib.core.common import preparePageForLineComparison
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.common import randomInt
|
|
|
|
from lib.core.common import randomStr
|
2010-10-11 15:47:07 +04:00
|
|
|
from lib.core.common import readInput
|
2010-09-13 17:31:01 +04:00
|
|
|
from lib.core.common import DynamicContentItem
|
2009-02-09 13:28:03 +03:00
|
|
|
from lib.core.convert import md5hash
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.data import conf
|
|
|
|
from lib.core.data import kb
|
|
|
|
from lib.core.data import logger
|
2010-10-07 02:29:52 +04:00
|
|
|
from lib.core.data import paths
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.exception import sqlmapConnectionException
|
2010-02-10 12:39:36 +03:00
|
|
|
from lib.core.exception import sqlmapNoneDataException
|
2010-10-11 15:47:07 +04:00
|
|
|
from lib.core.exception import sqlmapUserQuitException
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.session import setString
|
2008-12-12 22:06:31 +03:00
|
|
|
from lib.core.session import setRegexp
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.request.connect import Connect as Request
|
|
|
|
|
|
|
|
def checkSqlInjection(place, parameter, value, parenthesis):
    """
    This function checks if the GET, POST, Cookie, User-Agent
    parameters are affected by a SQL injection vulnerability and
    identifies the type of SQL injection:

      * Unescaped numeric injection
      * Single quoted string injection
      * Double quoted string injection

    Returns the name of the matching test case on success, None
    otherwise.
    """

    logic = conf.logic

    # NOTE: randInt and randStr look unused, but the eval()'d test case
    # parameters below reference them (together with prefix/postfix)
    randInt = randomInt()
    randStr = randomStr()

    # Honour user-provided payload boundaries, if any
    prefix = conf.prefix or ""
    postfix = conf.postfix or ""

    for case in kb.injections.root.case:
        positive = case.test.positive
        negative = case.test.negative

        # The "custom" test case only makes sense when the user provided
        # a payload prefix and/or postfix
        if not prefix and not postfix and case.name == "custom":
            continue

        infoMsg = "testing %s (%s) injection " % (case.desc, logic)
        infoMsg += "on %s parameter '%s'" % (place, parameter)
        logger.info(infoMsg)

        # NOTE(security): eval() runs on sqlmap's own test case
        # definitions, not on remote/untrusted data
        payload = agent.payload(place, parameter, value, positive.format % eval(positive.params))
        trueResult = Request.queryPage(payload, place)

        if trueResult:
            payload = agent.payload(place, parameter, value, negative.format % eval(negative.params))

            falseResult = Request.queryPage(payload, place)

            # Injectable when the positive test matches the original
            # page and the negative test does not
            if not falseResult:
                infoMsg = "%s parameter '%s' is %s (%s) injectable " % (place, parameter, case.desc, logic)
                infoMsg += "with %d parenthesis" % parenthesis
                logger.info(infoMsg)
                return case.name

    return None
|
|
|
|
|
2010-10-11 16:26:35 +04:00
|
|
|
def heuristicCheckSqlInjection(place, parameter, value):
    """
    This function sends a parameter value made of SQL syntax breaking
    characters (quotes and parentheses) and checks whether the response
    was detected as a DBMS error page, a quick heuristic hint that the
    parameter might be injectable.
    """

    # Honour user-provided payload boundaries, if any
    prefix = conf.prefix or ""
    postfix = conf.postfix or ""

    # Random string made only of characters likely to break SQL syntax
    payload = "%s%s%s" % (prefix, randomStr(length=10, alphabet=['"', '\'', ')', '(']), postfix)

    Request.queryPage(payload, place)

    # Positive when the last error page detected belongs to the request
    # just performed
    result = kb.lastErrorPage and kb.lastErrorPage[0]==kb.lastRequestUID

    infoMsg = "heuristics show that %s parameter '%s' is " % (place, parameter)

    if result:
        infoMsg += "injectable"
        logger.info(infoMsg)
    else:
        infoMsg += "not injectable"
        logger.warning(infoMsg)
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def checkDynParam(place, parameter, value):
    """
    This function checks if the url parameter is dynamic. If it is
    dynamic, the content of the page differs, otherwise the
    dynamicity might depend on another parameter.
    """

    infoMsg = "testing if %s parameter '%s' is dynamic" % (place, parameter)
    logger.info(infoMsg)

    # First probe: swap the value for a random integer
    numPayload = agent.payload(place, parameter, value, getUnicode(randomInt()))
    numResult = Request.queryPage(numPayload, place)

    # Page identical to the original one: the parameter is static
    if numResult == True:
        return False

    infoMsg = "confirming that %s parameter '%s' is dynamic" % (place, parameter)
    logger.info(infoMsg)

    # Second probe: prepend a single quote to a random string
    singleQuotePayload = agent.payload(place, parameter, value, "'%s" % randomStr())
    singleQuoteResult = Request.queryPage(singleQuotePayload, place)

    # Third probe: prepend a double quote to a random string
    doubleQuotePayload = agent.payload(place, parameter, value, "\"%s" % randomStr())
    doubleQuoteResult = Request.queryPage(doubleQuotePayload, place)

    # Dynamic when at least one of the quoted probes altered the page
    return singleQuoteResult != True or doubleQuoteResult != True
|
|
|
|
|
2010-09-14 01:01:46 +04:00
|
|
|
def checkDynamicContent(*pages):
    """
    This function checks if the provided pages have dynamic content. If they
    are dynamic, their content differs at specific lines.
    """
    infoMsg = "searching for dynamic content"
    logger.info(infoMsg)

    # Compare every distinct pair of the provided pages line by line
    for i in xrange(len(pages)):
        firstPage = pages[i]
        linesFirst = preparePageForLineComparison(firstPage)
        pageLinesNumber = len(linesFirst)

        for j in xrange(i+1, len(pages)):
            secondPage = pages[j]
            linesSecond = preparePageForLineComparison(secondPage)

            # Line-wise comparison only makes sense when both pages
            # have the same number of lines
            if pageLinesNumber == len(linesSecond):
                for k in xrange(0, pageLinesNumber):
                    if (linesFirst[k] != linesSecond[k]):
                        # Record the differing line together with its
                        # stable neighbours (None at page boundaries)
                        item = DynamicContentItem(k, pageLinesNumber, \
                                linesFirst[k-1] if k > 0 else None, \
                                linesFirst[k+1] if k < pageLinesNumber - 1 else None)

                        found = None

                        # Try to merge the new item into an already known
                        # adjacent dynamic region; 'found' stays True only
                        # when one of the merge branches breaks the loop,
                        # otherwise it is reset to False at the bottom
                        for other in kb.dynamicContent:
                            found = True

                            if other.pageTotal == item.pageTotal:
                                # other.lineNumber is an int for a single
                                # dynamic line, a list for a merged region
                                if isinstance(other.lineNumber, int):
                                    if other.lineNumber == item.lineNumber - 1:
                                        # New line directly follows: grow forward
                                        other.lineNumber = [other.lineNumber, item.lineNumber]
                                        other.lineContentAfter = item.lineContentAfter
                                        break

                                    elif other.lineNumber == item.lineNumber + 1:
                                        # New line directly precedes: grow backward
                                        other.lineNumber = [item.lineNumber, other.lineNumber]
                                        other.lineContentBefore = item.lineContentBefore
                                        break

                                elif item.lineNumber - 1 == other.lineNumber[-1]:
                                    # Extend an existing region at its end
                                    other.lineNumber.append(item.lineNumber)
                                    other.lineContentAfter = item.lineContentAfter
                                    break

                                elif item.lineNumber + 1 == other.lineNumber[0]:
                                    # Extend an existing region at its start
                                    other.lineNumber.insert(0, item.lineNumber)
                                    other.lineContentBefore = item.lineContentBefore
                                    break

                            found = False

                        # No adjacent region to merge into: track it anew
                        if not found:
                            kb.dynamicContent.append(item)

    if kb.dynamicContent:
        infoMsg = "found probably removable dynamic lines"
        logger.info(infoMsg)
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def checkStability():
    """
    This function checks if the URL content is stable requesting the
    same page two times with a small delay within each request to
    assume that it is stable.

    In case the content of the page differs when requesting
    the same page, the dynamicity might depend on other parameters,
    like for instance string matching (--string).

    Returns True when the two responses are identical, False otherwise.

    Raises sqlmapNoneDataException when both responses are empty and
    sqlmapUserQuitException when the user chooses not to continue on an
    unstable url.
    """

    infoMsg = "testing if the url is stable, wait a few seconds"
    logger.info(infoMsg)

    firstPage, _ = Request.queryPage(content=True)
    time.sleep(1)
    secondPage, _ = Request.queryPage(content=True)

    condition = (firstPage == secondPage)

    if condition:
        if firstPage:
            conf.md5hash = md5hash(firstPage)
            infoMsg = "url is stable"
            logger.info(infoMsg)
        else:
            # Identical but empty responses: there is nothing to compare
            errMsg = "there was an error checking the stability of page "
            errMsg += "because of lack of content. please check the "
            errMsg += "page request results (and probable errors) by "
            errMsg += "using higher verbosity levels"
            # raise X(msg) instead of the Python 3 incompatible
            # 'raise X, msg' statement (equivalent on Python 2)
            raise sqlmapNoneDataException(errMsg)

    else:
        warnMsg = "url is not stable, sqlmap will base the page "
        warnMsg += "comparison on a sequence matcher, if no dynamic nor "
        warnMsg += "injectable parameters are detected, refer to user's "
        warnMsg += "manual paragraph 'Page comparison' and provide a "
        warnMsg += "string or regular expression to match on"
        logger.warn(warnMsg)

        message = "do you want to continue? [Y/n] "
        test = readInput(message, default="Y")

        if test and test[0] not in ("y", "Y"):
            raise sqlmapUserQuitException

        # Detect which lines of the page are dynamic so they can be
        # ignored during later page comparisons
        checkDynamicContent(firstPage, secondPage)

    return condition
|
2010-03-12 15:23:05 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def checkString():
    """
    This function checks that the string provided by the user (--string)
    is within the target URL page content, resuming the result of a
    former session when available.

    Returns True when no string was provided or the string matches,
    False otherwise.
    """

    if not conf.string:
        return True

    # Resume from the session file when the very same string was already
    # verified against this url ('in' instead of the deprecated
    # dict.has_key(), identical behaviour)
    condition = (
                  conf.url in kb.resumedQueries and
                  "String" in kb.resumedQueries[conf.url] and
                  kb.resumedQueries[conf.url]["String"][:-1] == conf.string
                )

    if condition:
        return True

    infoMsg = "testing if the provided string is within the "
    infoMsg += "target URL page content"
    logger.info(infoMsg)

    page, _ = Request.queryPage(content=True)

    if conf.string in page:
        setString()
        return True
    else:
        errMsg = "you provided '%s' as the string to " % conf.string
        errMsg += "match, but such a string is not within the target "
        errMsg += "URL page content, please provide another string."
        logger.error(errMsg)

        return False
|
|
|
|
|
2008-12-12 22:06:31 +03:00
|
|
|
def checkRegexp():
    """
    This function checks that the regular expression provided by the
    user (--regexp) matches within the target URL page content, resuming
    the result of a former session when available.

    Returns True when no regular expression was provided or it matches,
    False otherwise.
    """

    if not conf.regexp:
        return True

    # Resume from the session file when the very same regular expression
    # was already verified against this url ('in' instead of the
    # deprecated dict.has_key(), identical behaviour)
    condition = (
                  conf.url in kb.resumedQueries and
                  "Regular expression" in kb.resumedQueries[conf.url] and
                  kb.resumedQueries[conf.url]["Regular expression"][:-1] == conf.regexp
                )

    if condition:
        return True

    infoMsg = "testing if the provided regular expression matches within "
    infoMsg += "the target URL page content"
    logger.info(infoMsg)

    page, _ = Request.queryPage(content=True)

    if re.search(conf.regexp, page, re.I | re.M):
        setRegexp()
        return True
    else:
        errMsg = "you provided '%s' as the regular expression to " % conf.regexp
        errMsg += "match, but such a regular expression does not have any "
        errMsg += "match within the target URL page content, please provide "
        errMsg += "another regular expression."
        logger.error(errMsg)

        return False
|
|
|
|
|
2010-09-16 12:43:10 +04:00
|
|
|
def checkNullConnection():
|
|
|
|
infoMsg = "testing NULL connection to the target url"
|
|
|
|
logger.info(infoMsg)
|
|
|
|
|
|
|
|
try:
|
|
|
|
page, headers = Request.getPage(method="HEAD")
|
|
|
|
if not page and 'Content-Length' in headers:
|
|
|
|
kb.nullConnection = "HEAD"
|
|
|
|
else:
|
|
|
|
page, headers = Request.getPage(auxHeaders={"Range":"bytes=-1"})
|
|
|
|
if page and len(page) == 1 and 'Content-Range' in headers:
|
|
|
|
kb.nullConnection = "Range"
|
|
|
|
|
|
|
|
except sqlmapConnectionException, errMsg:
|
|
|
|
errMsg = getUnicode(errMsg)
|
|
|
|
raise sqlmapConnectionException, errMsg
|
|
|
|
|
|
|
|
if kb.nullConnection:
|
|
|
|
infoMsg = "method '%s' seems to be working" % kb.nullConnection
|
|
|
|
logger.info(infoMsg)
|
|
|
|
|
|
|
|
return kb.nullConnection is not None
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def checkConnection():
|
2010-05-21 17:36:49 +04:00
|
|
|
try:
|
|
|
|
socket.gethostbyname(conf.hostname)
|
|
|
|
except socket.gaierror:
|
2010-05-21 18:25:38 +04:00
|
|
|
errMsg = "host '%s' does not exist" % conf.hostname
|
2010-05-21 17:36:49 +04:00
|
|
|
raise sqlmapConnectionException, errMsg
|
|
|
|
|
2008-12-09 00:24:24 +03:00
|
|
|
infoMsg = "testing connection to the target url"
|
|
|
|
logger.info(infoMsg)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
try:
|
2008-12-20 04:54:08 +03:00
|
|
|
page, _ = Request.getPage()
|
|
|
|
conf.seqMatcher.set_seq1(page)
|
2008-12-20 16:21:47 +03:00
|
|
|
|
2010-03-16 15:14:02 +03:00
|
|
|
except sqlmapConnectionException, errMsg:
|
2010-06-02 16:45:40 +04:00
|
|
|
errMsg = getUnicode(errMsg)
|
2010-03-16 15:14:02 +03:00
|
|
|
raise sqlmapConnectionException, errMsg
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
return True
|