2008-10-15 19:38:22 +04:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
|
|
|
"""
|
2008-10-15 19:56:32 +04:00
|
|
|
$Id$
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2010-10-14 18:41:14 +04:00
|
|
|
Copyright (c) 2006-2010 sqlmap developers (http://sqlmap.sourceforge.net/)
|
2010-10-15 03:18:29 +04:00
|
|
|
See the file 'doc/COPYING' for copying permission
|
2008-10-15 19:38:22 +04:00
|
|
|
"""
|
|
|
|
|
2010-05-24 15:00:49 +04:00
|
|
|
import codecs
|
2010-10-28 00:39:50 +04:00
|
|
|
import ctypes
|
2010-05-26 13:48:20 +04:00
|
|
|
import inspect
|
2008-10-15 19:38:22 +04:00
|
|
|
import os
|
|
|
|
import random
|
|
|
|
import re
|
2009-04-22 15:48:07 +04:00
|
|
|
import socket
|
2008-10-15 19:38:22 +04:00
|
|
|
import string
|
|
|
|
import sys
|
|
|
|
import time
|
|
|
|
import urlparse
|
2010-01-05 14:30:33 +03:00
|
|
|
import ntpath
|
|
|
|
import posixpath
|
2010-04-16 17:40:02 +04:00
|
|
|
import subprocess
|
2011-01-02 10:37:47 +03:00
|
|
|
import httplib
|
2010-01-28 20:07:34 +03:00
|
|
|
|
2010-05-28 19:57:43 +04:00
|
|
|
from ConfigParser import DEFAULTSECT
|
|
|
|
from ConfigParser import RawConfigParser
|
2010-04-22 20:13:22 +04:00
|
|
|
from StringIO import StringIO
|
2010-10-12 19:49:04 +04:00
|
|
|
from difflib import SequenceMatcher
|
2010-11-23 16:58:01 +03:00
|
|
|
from inspect import getmembers
|
2010-12-07 19:39:31 +03:00
|
|
|
from math import sqrt
|
2010-05-21 17:03:57 +04:00
|
|
|
from subprocess import PIPE
|
|
|
|
from subprocess import Popen as execute
|
2010-01-28 19:50:34 +03:00
|
|
|
from tempfile import NamedTemporaryFile
|
2010-01-29 13:12:09 +03:00
|
|
|
from tempfile import mkstemp
|
2010-06-30 01:07:23 +04:00
|
|
|
from xml.etree import ElementTree as ET
|
2010-10-07 02:43:04 +04:00
|
|
|
from xml.dom import minidom
|
2010-04-22 20:13:22 +04:00
|
|
|
from xml.sax import parse
|
2010-01-24 02:29:34 +03:00
|
|
|
|
2010-01-28 19:50:34 +03:00
|
|
|
from extra.cloak.cloak import decloak
|
2009-04-22 15:48:07 +04:00
|
|
|
from lib.contrib import magic
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.data import conf
|
2010-12-04 01:44:29 +03:00
|
|
|
from lib.core.data import dbmsDict
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.data import kb
|
|
|
|
from lib.core.data import logger
|
2009-04-22 15:48:07 +04:00
|
|
|
from lib.core.data import paths
|
|
|
|
from lib.core.data import queries
|
2010-11-07 03:12:00 +03:00
|
|
|
from lib.core.convert import htmlunescape
|
2010-01-15 14:44:05 +03:00
|
|
|
from lib.core.convert import urlencode
|
2010-11-08 12:20:02 +03:00
|
|
|
from lib.core.enums import DBMS
|
|
|
|
from lib.core.enums import PLACE
|
2010-12-18 12:51:34 +03:00
|
|
|
from lib.core.enums import PAYLOAD
|
2011-01-13 14:24:03 +03:00
|
|
|
from lib.core.enums import SORTORDER
|
2011-01-10 13:30:17 +03:00
|
|
|
from lib.core.exception import sqlmapDataException
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.exception import sqlmapFilePathException
|
2010-07-30 16:49:25 +04:00
|
|
|
from lib.core.exception import sqlmapGenericException
|
2010-01-15 19:06:59 +03:00
|
|
|
from lib.core.exception import sqlmapNoneDataException
|
2010-03-27 02:23:25 +03:00
|
|
|
from lib.core.exception import sqlmapMissingDependence
|
2010-02-10 15:06:23 +03:00
|
|
|
from lib.core.exception import sqlmapSyntaxException
|
2010-05-27 20:45:09 +04:00
|
|
|
from lib.core.optiondict import optDict
|
2010-03-03 19:19:17 +03:00
|
|
|
from lib.core.settings import DESCRIPTION
|
2009-06-11 19:01:48 +04:00
|
|
|
from lib.core.settings import IS_WIN
|
2010-05-21 16:09:31 +04:00
|
|
|
from lib.core.settings import PLATFORM
|
2010-02-25 20:37:46 +03:00
|
|
|
from lib.core.settings import SITE
|
2010-12-25 13:16:20 +03:00
|
|
|
from lib.core.settings import ERROR_PARSING_REGEXES
|
2011-01-05 13:25:07 +03:00
|
|
|
from lib.core.settings import NON_CONTROL_CHAR_REGEX
|
2008-12-19 23:09:46 +03:00
|
|
|
from lib.core.settings import SQL_STATEMENTS
|
2010-03-27 02:23:25 +03:00
|
|
|
from lib.core.settings import SUPPORTED_DBMS
|
2010-12-18 00:29:09 +03:00
|
|
|
from lib.core.settings import UNKNOWN_DBMS_VERSION
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.settings import VERSION_STRING
|
2010-10-21 13:51:07 +04:00
|
|
|
from lib.core.settings import DUMP_NEWLINE_MARKER
|
2010-11-16 18:11:03 +03:00
|
|
|
from lib.core.settings import DUMP_CR_MARKER
|
2010-10-21 13:51:07 +04:00
|
|
|
from lib.core.settings import DUMP_DEL_MARKER
|
|
|
|
from lib.core.settings import DUMP_TAB_MARKER
|
|
|
|
from lib.core.settings import DUMP_START_MARKER
|
|
|
|
from lib.core.settings import DUMP_STOP_MARKER
|
2010-12-08 17:46:07 +03:00
|
|
|
from lib.core.settings import MIN_TIME_RESPONSES
|
2010-12-21 18:13:13 +03:00
|
|
|
from lib.core.settings import TIME_STDEV_COEFF
|
2010-12-29 22:39:32 +03:00
|
|
|
from lib.core.settings import DYNAMICITY_MARK_LENGTH
|
2010-12-24 15:13:48 +03:00
|
|
|
from lib.core.threads import getCurrentThreadData
|
2010-09-15 16:51:02 +04:00
|
|
|
|
|
|
|
class UnicodeRawConfigParser(RawConfigParser):
    """
    RawConfigParser with unicode writing support
    """

    def write(self, fp):
        """
        Write an .ini-format representation of the configuration state.
        """

        # Emit the DEFAULT section first, mirroring RawConfigParser.write()
        if self._defaults:
            fp.write("[%s]\n" % DEFAULTSECT)

            for key, value in self._defaults.items():
                fp.write("%s = %s\n" % (key, getUnicode(value, conf.dataEncoding).replace('\n', '\n\t')))

            fp.write("\n")

        for section in self._sections:
            fp.write("[%s]\n" % section)

            for key, value in self._sections[section].items():
                # "__name__" is RawConfigParser book-keeping, not an option
                if key == "__name__":
                    continue

                if value is None:
                    # Option stored without a value
                    fp.write("%s\n" % key)
                else:
                    fp.write("%s = %s\n" % (key, getUnicode(value, conf.dataEncoding).replace('\n', '\n\t')))

            fp.write("\n")
|
|
|
|
|
|
|
|
|
|
|
|
class DynamicContentItem:
    """
    Represents line in content page with dynamic properties (candidate
    for removal prior detection phase)
    """

    def __init__(self, lineNumber, pageTotal, lineContentBefore, lineContentAfter):
        # Position of the changing line, total page size, and the line's
        # content as observed in the two compared responses
        self.lineNumber, self.pageTotal = lineNumber, pageTotal
        self.lineContentBefore, self.lineContentAfter = lineContentBefore, lineContentAfter
|
|
|
|
|
2010-12-20 22:34:41 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def paramToDict(place, parameters=None):
    """
    Split the parameters into names and values, check if these parameters
    are within the testable parameters and return in a dictionary.

    @param place: where sqlmap has to work, can be GET, POST or Cookie.
    @type place: C{str}

    @param parameters: parameters string in the format for instance
    'p1=v1&p2=v2' (GET and POST) or 'p1=v1;p2=v2' (Cookie).
    @type parameters: C{str}

    @return: the parameters in a dictionary.
    @rtype: C{str}
    """

    testableParameters = {}

    if place in conf.parameters and not parameters:
        parameters = conf.parameters[place]

    # BUGFIX: original used 'place is not "POSTxml"' which tests object
    # identity, not equality, and is not guaranteed for string literals
    if place != "POSTxml":
        parameters = parameters.replace(", ", ",")

        if place == PLACE.COOKIE:
            splitParams = parameters.split(";")
        else:
            splitParams = parameters.split("&")

        for element in splitParams:
            elem = element.split("=")

            if len(elem) == 2:
                parameter = elem[0].replace(" ", "")

                condition = not conf.testParameter
                condition |= parameter in conf.testParameter

                if condition:
                    testableParameters[parameter] = elem[1]
    else:
        # POSTxml payloads are parsed as XML, one parameter per element
        root = ET.XML(parameters)
        iterator = root.getiterator()

        for child in iterator:
            parameter = child.tag

            # Strip the '{namespace}' prefix from namespaced tags before
            # matching against user-provided parameter names
            if "}" in parameter:
                testParam = parameter.split("}")[1]
            else:
                testParam = parameter

            condition = not conf.testParameter
            condition |= testParam in conf.testParameter

            if condition:
                testableParameters[parameter] = child.text

    if conf.testParameter and not testableParameters:
        paramStr = ", ".join(test for test in conf.testParameter)

        if len(conf.testParameter) > 1:
            warnMsg = "the testable parameters '%s' " % paramStr
            warnMsg += "you provided are not into the %s" % place
        else:
            parameter = conf.testParameter[0]

            warnMsg = "the testable parameter '%s' " % paramStr
            warnMsg += "you provided is not into the %s" % place

        logger.warn(warnMsg)

    elif len(conf.testParameter) != len(testableParameters.keys()):
        for parameter in conf.testParameter:
            if parameter not in testableParameters:
                warnMsg = "the testable parameter '%s' " % parameter
                warnMsg += "you provided is not into the %s" % place
                logger.warn(warnMsg)

    return testableParameters
|
|
|
|
|
2008-11-16 02:41:31 +03:00
|
|
|
def formatDBMSfp(versions=None):
    """
    This function format the back-end DBMS fingerprint value and return its
    values formatted as a human readable string.

    @return: detected back-end DBMS based upon fingerprint techniques.
    @rtype: C{str}
    """

    # Drop placeholder None entries collected during fingerprinting
    while versions and None in versions:
        versions.remove(None)

    # Fall back to the knowledge-base version when nothing usable was passed
    # (idiom fix: 'is not None' instead of '!= None')
    if not versions and kb.dbmsVersion and kb.dbmsVersion[0] != UNKNOWN_DBMS_VERSION and kb.dbmsVersion[0] is not None:
        versions = kb.dbmsVersion

    if isinstance(versions, basestring):
        return "%s %s" % (getIdentifiedDBMS(), versions)
    elif isinstance(versions, (list, set, tuple)):
        return "%s %s" % (getIdentifiedDBMS(), " and ".join(version for version in versions))
    elif not versions:
        warnMsg = "unable to extensively fingerprint the back-end "
        warnMsg += "DBMS version"
        logger.warn(warnMsg)

        return getIdentifiedDBMS()
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
def formatFingerprintString(values, chain=" or "):
    """
    Join the given fingerprint values into one human readable string
    separated by the given chain (e.g. " or ").

    @param values: iterable of fingerprint value strings.
    @param chain: separator placed between values.
    @return: joined string.
    @rtype: C{str}
    """

    # Join directly with the separator; the original two-step
    # "|".join()/replace() corrupted values that contain a '|'
    return chain.join(v for v in values)
|
2008-11-17 20:41:02 +03:00
|
|
|
|
2008-11-18 20:42:46 +03:00
|
|
|
def formatFingerprint(target, info):
    """
    This function format the back-end operating system fingerprint value
    and return its values formatted as a human readable string.

    Example of info (kb.headersFp) dictionary:

    {
      'distrib': set(['Ubuntu']),
      'type': set(['Linux']),
      'technology': set(['PHP 5.2.6', 'Apache 2.2.9']),
      'release': set(['8.10'])
    }

    Example of info (kb.bannerFp) dictionary:

    {
      'sp': set(['Service Pack 4']),
      'dbmsVersion': '8.00.194',
      'dbmsServicePack': '0',
      'distrib': set(['2000']),
      'dbmsRelease': '2000',
      'type': set(['Windows'])
    }

    @return: detected back-end operating system based upon fingerprint
    techniques.
    @rtype: C{str}
    """

    infoStr = ""

    # OS family first (e.g. "Linux"), prefixed by the target description
    if info and "type" in info:
        infoStr += "%s operating system: %s" % (target, formatFingerprintString(info["type"]))

    # Each optional detail is appended only when fingerprinted
    if "distrib" in info:
        infoStr += " %s" % formatFingerprintString(info["distrib"])

    if "release" in info:
        infoStr += " %s" % formatFingerprintString(info["release"])

    if "sp" in info:
        infoStr += " %s" % formatFingerprintString(info["sp"])

    if "codename" in info:
        infoStr += " (%s)" % formatFingerprintString(info["codename"])

    # Web technology goes on its own line, comma-separated
    if "technology" in info:
        infoStr += "\nweb application technology: %s" % formatFingerprintString(info["technology"], ", ")

    return infoStr
|
|
|
|
|
2011-01-02 19:51:21 +03:00
|
|
|
def getErrorParsedDBMSesFormatted():
    """
    This function parses the knowledge base htmlFp list and return its
    values formatted as a human readable string.

    @return: list of possible back-end DBMS based upon error messages
    parsing.
    @rtype: C{str}
    """

    # Nothing fingerprinted from error messages yet
    if not kb.htmlFp:
        return None

    if len(kb.htmlFp) == 1:
        return kb.htmlFp[0]

    # Several candidates: present them as alternatives
    return " or ".join(fp for fp in kb.htmlFp)
|
|
|
|
|
2010-02-25 18:22:41 +03:00
|
|
|
def getDocRoot(webApi=None):
    """
    Guess or ask for the web server document root.

    Walks kb.absFilePaths (absolute paths leaked by the target) looking
    for one that contains the requested page path; the prefix before the
    page path is taken as the document root. Falls back to asking the
    user, with an OS/web-API dependent default.
    """

    docRoot = None
    pagePath = directoryPath(conf.path)

    # Pick a sensible default per back-end OS and web API
    if kb.os == "Windows":
        if webApi in ("php", "jsp"):
            defaultDocRoot = "C:/xampp/htdocs/"
        else:
            defaultDocRoot = "C:/Inetpub/wwwroot/"
    else:
        defaultDocRoot = "/var/www/"

    if kb.absFilePaths:
        for absFilePath in kb.absFilePaths:
            # A bare '/' carries no document-root information
            if directoryPath(absFilePath) == '/':
                continue

            absFilePath = normalizePath(absFilePath)
            absFilePathWin = None

            if isWindowsPath(absFilePath):
                # Keep the NT-slashed original; work on the posix form
                # with the drive prefix stripped
                absFilePathWin = posixToNtSlashes(absFilePath)
                absFilePath = ntToPosixSlashes(absFilePath[2:])
            elif isWindowsDriveLetterPath(absFilePath): # E.g. C:/xampp/htdocs
                absFilePath = absFilePath[2:]

            if pagePath in absFilePath:
                # Document root is whatever precedes the page path
                index = absFilePath.index(pagePath)
                docRoot = absFilePath[:index]

                if len(docRoot) == 0:
                    docRoot = None
                    continue

                if absFilePathWin:
                    # NOTE(review): drive letter is assumed to be C: here
                    docRoot = "C:/%s" % ntToPosixSlashes(docRoot)

                docRoot = normalizePath(docRoot)
                break

    if docRoot:
        infoMsg = "retrieved the web server document root: '%s'" % docRoot
        logger.info(infoMsg)
    else:
        warnMsg = "unable to retrieve the web server document root"
        logger.warn(warnMsg)

        message = "please provide the web server document root "
        message += "[%s]: " % defaultDocRoot
        inputDocRoot = readInput(message, default=defaultDocRoot)

        if inputDocRoot:
            docRoot = inputDocRoot
        else:
            docRoot = defaultDocRoot

    return docRoot
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2010-02-25 18:22:41 +03:00
|
|
|
def getDirs(webApi=None):
    """
    Collect candidate web server directories for file upload.

    Combines directories derived from leaked absolute file paths
    (kb.absFilePaths) with OS/web-API dependent defaults and any
    additional paths supplied interactively by the user.

    @return: set of candidate directories.
    """

    directories = set()

    # OS/web-API dependent default locations
    if kb.os == "Windows":
        if webApi in ("php", "jsp"):
            defaultDirs = ["C:/xampp/htdocs/"]
        else:
            defaultDirs = ["C:/Inetpub/wwwroot/"]
    else:
        defaultDirs = ["/var/www/"]

    if kb.docRoot and kb.docRoot not in defaultDirs:
        defaultDirs.append(kb.docRoot)

    if kb.absFilePaths:
        infoMsg = "retrieved web server full paths: "
        infoMsg += "'%s'" % ", ".join(path for path in kb.absFilePaths)
        logger.info(infoMsg)

        for absFilePath in kb.absFilePaths:
            if absFilePath:
                directory = directoryPath(absFilePath)

                if isWindowsPath(directory):
                    directory = ntToPosixSlashes(directory)

                # A bare '/' is not a useful upload target
                if directory == '/':
                    continue

                directories.add(directory)
    else:
        warnMsg = "unable to retrieve any web server path"
        logger.warn(warnMsg)

    message = "please provide any additional web server full path to try "
    message += "to upload the agent [%s]: " % ",".join(directory for directory in defaultDirs)
    inputDirs = readInput(message, default=",".join(directory for directory in defaultDirs))

    if inputDirs:
        inputDirs = inputDirs.replace(", ", ",")
        inputDirs = inputDirs.split(",")

        for inputDir in inputDirs:
            if inputDir:
                directories.add(inputDir)
    else:
        # Idiom fix: was a list comprehension used purely for its
        # side effects ([directories.add(d) for d in defaultDirs])
        directories.update(defaultDirs)

    return directories
|
2010-03-27 02:23:25 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def filePathToString(filePath):
    """
    Flatten a file path into a safe single token by replacing path
    separators, spaces and drive colons with underscores.
    """

    result = filePath

    for character in ("/", "\\", " ", ":"):
        result = result.replace(character, "_")

    return result
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2010-09-30 16:35:45 +04:00
|
|
|
def dataToStdout(data, forceOutput=False):
    # Suppress all output once any worker thread has recorded an exception
    if not ('threadException' in kb and kb.threadException):
        # NOTE(review): by operator precedence this parses as
        # forceOutput or ((conf.verbose > 0) and not ...disableStdOut),
        # i.e. forceOutput bypasses the per-thread stdout switch —
        # confirm that is the intended behaviour
        if forceOutput or (conf.verbose > 0) and not getCurrentThreadData().disableStdOut:
            try:
                sys.stdout.write(data)
            except UnicodeEncodeError:
                # Terminal cannot represent the data natively; retry with
                # the detected page encoding or the configured one
                sys.stdout.write(data.encode(kb.pageEncoding or conf.dataEncoding))
            finally:
                # Flush even when the write raised, so partial output shows
                sys.stdout.flush()
|
2010-03-27 02:23:25 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def dataToSessionFile(data):
    """
    Append the given data to the session file, unless session output is
    disabled or temporarily suppressed.
    """

    if conf.sessionFile and not kb.suppressSession:
        conf.sessionFP.write(data)
        conf.sessionFP.flush()
|
2010-03-27 02:23:25 +03:00
|
|
|
|
2010-11-08 14:22:47 +03:00
|
|
|
def dataToTrafficFile(data):
    """
    Append the given data to the HTTP traffic log file, if one is in use.
    """

    if not conf.trafficFile:
        return

    fp = conf.trafficFP
    fp.write(data)
    fp.flush()
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def dataToDumpFile(dumpFile, data):
    # Write and flush immediately so partial dumps survive interruption
    dumpFile.write(data)
    dumpFile.flush()
|
2010-03-27 02:23:25 +03:00
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
def dataToOutFile(data):
    """
    Write retrieved file content into the session's files directory.

    @param data: raw content of the retrieved remote file.
    @return: local destination path, or a notice string when no data
    was retrieved.
    @rtype: C{str}
    """

    if not data:
        return "No data retrieved"

    rFile = filePathToString(conf.rFile)
    rFilePath = "%s%s%s" % (conf.filePath, os.sep, rFile)

    rFileFP = codecs.open(rFilePath, "wb")

    try:
        rFileFP.write(data)
        rFileFP.flush()
    finally:
        # Release the handle even if the write fails (original leaked
        # the file object on error)
        rFileFP.close()

    return rFilePath
|
|
|
|
|
2009-04-28 03:05:11 +04:00
|
|
|
def strToHex(inpStr):
    """
    @param inpStr: inpStr to be converted into its hexadecimal value.
    @type inpStr: C{str}

    @return: the hexadecimal converted inpStr.
    @rtype: C{str}
    """

    # "%02X" zero-pads directly, replacing the original "%2x" format plus
    # space-to-zero replace() workaround; newlines map to spaces as before
    return "".join("%02X" % ord(" " if character == "\n" else character) for character in inpStr)
|
2010-09-14 01:01:46 +04:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def readInput(message, default=None):
    """
    @param message: message to display on terminal.
    @type message: C{str}

    @return: a string read from keyboard as input.
    @rtype: C{str}
    """

    if "\n" in message:
        message += "\n> "
    elif message and message[-1] == ']':
        # 'message and' guards against IndexError on an empty prompt
        message += " "

    if conf.batch and default:
        # Batch mode: echo the prompt with its default and use it directly
        infoMsg = "%s%s" % (message, getUnicode(default, conf.dataEncoding))
        logger.info(infoMsg)

        debugMsg = "used the default behaviour, running in batch mode"
        logger.debug(debugMsg)

        data = default
    else:
        data = raw_input(message.encode(sys.stdout.encoding or conf.dataEncoding))

        # Empty reply falls back to the default
        if not data:
            data = default

    return data
|
2010-09-14 01:01:46 +04:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def randomRange(start=0, stop=1000):
    """
    @param start: starting number.
    @type start: C{int}

    @param stop: last number.
    @type stop: C{int}

    @return: a random number within the range.
    @rtype: C{int}
    """

    # randint is inclusive on both ends and already returns an int
    return random.randint(start, stop)
|
|
|
|
|
|
|
|
def randomInt(length=4):
    """
    @param length: length of the random string.
    @type length: C{int}

    @return: a random string of digits.
    @rtype: C{str}
    """

    digits = [random.choice(string.digits) for _ in range(length)]

    return int("".join(digits))
|
|
|
|
|
2010-10-11 16:26:35 +04:00
|
|
|
def randomStr(length=4, lowercase=False, alphabet=None):
    """
    @param length: length of the random string.
    @type length: C{int}

    @return: a random string of characters.
    @rtype: C{str}
    """

    # Pick the character pool first, then draw from it once
    if alphabet:
        pool = alphabet
    elif lowercase:
        pool = string.lowercase
    else:
        pool = string.letters

    return "".join(random.choice(pool) for _ in range(length))
|
2010-09-13 17:31:01 +04:00
|
|
|
|
2009-04-28 03:05:11 +04:00
|
|
|
def sanitizeStr(inpStr):
    """
    @param inpStr: inpStr to sanitize: cast to str datatype and replace
    newlines with one space and strip carriage returns.
    @type inpStr: C{str}

    @return: sanitized inpStr
    @rtype: C{str}
    """

    # Coerce to unicode first so the replace() calls are always safe
    return getUnicode(inpStr).replace("\n", " ").replace("\r", "")
|
|
|
|
|
|
|
|
def checkFile(filename):
    """
    @param filename: filename to check if it exists.
    @type filename: C{str}

    @raise sqlmapFilePathException: when the file does not exist.
    """

    if not os.path.exists(filename):
        # Call-style raise instead of the legacy 'raise E, msg' statement
        raise sqlmapFilePathException("unable to read file '%s'" % filename)
|
2010-03-22 20:38:19 +03:00
|
|
|
|
|
|
|
def replaceNewlineTabs(inpStr, stdout=False):
    """
    Replace newline/CR/tab characters either with plain spaces (for
    terminal output) or with the dump file marker placeholders.
    """

    if inpStr is None:
        return

    if stdout:
        substitutions = (("\n", " "), ("\r", " "), ("\t", " "))
    else:
        substitutions = (("\n", DUMP_NEWLINE_MARKER), ("\r", DUMP_CR_MARKER), ("\t", DUMP_TAB_MARKER))

    replacedString = inpStr

    for old, new in substitutions:
        replacedString = replacedString.replace(old, new)

    # Escape the dump delimiter itself so data cannot collide with it
    return replacedString.replace(kb.misc.delimiter, DUMP_DEL_MARKER)
|
|
|
|
|
|
|
|
def restoreDumpMarkedChars(inpStr, onlyNewlineTab=False):
    """
    Reverse the marker substitutions applied by replaceNewlineTabs().
    """

    result = inpStr

    if isinstance(result, basestring):
        for marker, plain in ((DUMP_NEWLINE_MARKER, "\n"), (DUMP_CR_MARKER, "\r"), (DUMP_TAB_MARKER, "\t")):
            result = result.replace(marker, plain)

        if not onlyNewlineTab:
            # Also strip the start/stop framing and restore delimiters
            result = result.replace(DUMP_START_MARKER, "").replace(DUMP_STOP_MARKER, "")
            result = result.replace(DUMP_DEL_MARKER, ", ")

    return result
|
|
|
|
|
|
|
|
def banner():
    """
    This function prints sqlmap banner with its version
    """

    # Banner is always shown regardless of verbosity (forceOutput=True)
    ban = """
    %s - %s
    %s\n
    """ % (VERSION_STRING, DESCRIPTION, SITE)

    dataToStdout(ban, forceOutput=True)
|
2010-09-13 17:31:01 +04:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def parsePasswordHash(password):
    """
    Pretty-print a password hash value for display.

    For Microsoft SQL Server hex hashes the value is decomposed into its
    header, salt, mixed-case and (pre-2005) uppercase components.
    """

    # Indentation used to align the decomposed hash fields
    blank = " " * 8

    if not password or password == " ":
        password = "NULL"

    if getIdentifiedDBMS() == DBMS.MSSQL and password != "NULL" and isHexEncodedString(password):
        hexPassword = password
        # Slice offsets follow the MSSQL password hash layout:
        # 6-char header, 8-char salt, 40-char SHA1 of the mixed-case pwd
        password = "%s\n" % hexPassword
        password += "%sheader: %s\n" % (blank, hexPassword[:6])
        password += "%ssalt: %s\n" % (blank, hexPassword[6:14])
        password += "%smixedcase: %s\n" % (blank, hexPassword[14:54])

        # MSSQL 2005/2008 dropped the uppercase hash portion
        if kb.dbmsVersion[0] not in ( "2005", "2008" ):
            password += "%suppercase: %s" % (blank, hexPassword[54:])

    return password
|
2010-01-05 19:15:31 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def cleanQuery(query):
    """
    Uppercase every recognized SQL statement keyword found in the query.
    """

    upperQuery = query

    for sqlStatements in SQL_STATEMENTS.values():
        for sqlStatement in sqlStatements:
            # Escape parentheses so the statement is a valid regex
            escaped = sqlStatement.replace("(", "\\(")
            match = re.search("(%s)" % escaped, query, re.I)

            if match:
                upperQuery = upperQuery.replace(match.group(1), sqlStatement.upper())

    return upperQuery
|
2010-09-13 17:31:01 +04:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def setPaths():
    """
    Populate the global 'paths' object with every sqlmap directory and
    file location, all derived from paths.SQLMAP_ROOT_PATH.
    """

    # sqlmap paths
    paths.SQLMAP_CONTRIB_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "lib", "contrib")
    paths.SQLMAP_EXTRAS_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "extra")
    paths.SQLMAP_SHELL_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "shell")
    paths.SQLMAP_TXT_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "txt")
    paths.SQLMAP_UDF_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "udf")
    paths.SQLMAP_XML_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "xml")
    paths.SQLMAP_XML_BANNER_PATH = os.path.join(paths.SQLMAP_XML_PATH, "banner")
    # Per-target output tree; "%s" is filled in with the target hostname
    paths.SQLMAP_OUTPUT_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "output")
    paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
    paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files")

    # sqlmap files
    paths.SQLMAP_HISTORY = os.path.join(paths.SQLMAP_ROOT_PATH, ".sqlmap_history")
    # Config file name is randomized to avoid collisions between runs
    paths.SQLMAP_CONFIG = os.path.join(paths.SQLMAP_ROOT_PATH, "sqlmap-%s.conf" % randomStr())
    paths.COMMON_COLUMNS = os.path.join(paths.SQLMAP_TXT_PATH, "common-columns.txt")
    paths.COMMON_TABLES = os.path.join(paths.SQLMAP_TXT_PATH, "common-tables.txt")
    paths.COMMON_OUTPUTS = os.path.join(paths.SQLMAP_TXT_PATH, 'common-outputs.txt')
    paths.SQL_KEYWORDS = os.path.join(paths.SQLMAP_TXT_PATH, "keywords.txt")
    paths.ORACLE_DEFAULT_PASSWD = os.path.join(paths.SQLMAP_TXT_PATH, "oracle-default-passwords.txt")
    paths.USER_AGENTS = os.path.join(paths.SQLMAP_TXT_PATH, "user-agents.txt")
    paths.WORDLIST = os.path.join(paths.SQLMAP_TXT_PATH, "wordlist.txt")
    paths.PHPIDS_RULES_XML = os.path.join(paths.SQLMAP_XML_PATH, "phpids_rules.xml")
    paths.ERRORS_XML = os.path.join(paths.SQLMAP_XML_PATH, "errors.xml")
    paths.PAYLOADS_XML = os.path.join(paths.SQLMAP_XML_PATH, "payloads.xml")
    paths.INJECTIONS_XML = os.path.join(paths.SQLMAP_XML_PATH, "injections.xml")
    paths.LIVE_TESTS_XML = os.path.join(paths.SQLMAP_XML_PATH, "livetests.xml")
    paths.QUERIES_XML = os.path.join(paths.SQLMAP_XML_PATH, "queries.xml")
    paths.GENERIC_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "generic.xml")
    paths.MSSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "mssql.xml")
    paths.MYSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "mysql.xml")
    paths.ORACLE_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "oracle.xml")
    paths.PGSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "postgresql.xml")
|
2010-09-13 17:31:01 +04:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def weAreFrozen():
    """
    Returns whether we are frozen via py2exe.
    This will affect how we find out where we are located.

    Reference: http://www.py2exe.org/index.cgi/WhereAmI
    """

    # py2exe (and similar freezers) inject a 'frozen' attribute into sys
    frozen = getattr(sys, "frozen", None)

    return frozen is not None
|
|
|
|
|
2010-03-27 02:23:25 +03:00
|
|
|
def parseTargetDirect():
    """
    Parse target dbms and set some attributes into the configuration singleton.

    Expects conf.direct in the form
    'dbms://user:pass@host:port/db' or 'dbms://DATABASE_FILEPATH' and
    populates conf.dbms, conf.dbmsUser, conf.dbmsPass, conf.hostname,
    conf.port and conf.dbmsDb. Raises sqlmapSyntaxException on malformed
    input and sqlmapMissingDependence when the required client library
    for the chosen DBMS cannot be imported.
    """

    if not conf.direct:
        return

    details = None
    remote = False

    # Try each supported DBMS name as the scheme of the connection string
    for dbms in SUPPORTED_DBMS:
        details = re.search("^(?P<dbms>%s)://(?P<credentials>(?P<user>.+?)\:(?P<pass>.*?)\@)?(?P<remote>(?P<hostname>.+?)\:(?P<port>[\d]+)\/)?(?P<db>[\w\d\ \:\.\_\-\/\\\\]+?)$" % dbms, conf.direct, re.I)

        if details:
            conf.dbms = details.group('dbms')

            if details.group('credentials'):
                conf.dbmsUser = details.group('user')
                conf.dbmsPass = details.group('pass')
            else:
                # No credentials supplied: default to empty strings
                conf.dbmsUser = unicode()
                conf.dbmsPass = unicode()

            # Normalize empty password to None for the downstream connectors
            if not conf.dbmsPass:
                conf.dbmsPass = None

            if details.group('remote'):
                remote = True
                conf.hostname = details.group('hostname')
                conf.port = int(details.group('port'))
            else:
                # File-based DBMS (no host:port part) is treated as local
                conf.hostname = "localhost"
                conf.port = 0

            conf.dbmsDb = details.group('db')

            conf.parameters[None] = "direct connection"

            break

    if not details:
        errMsg = "invalid target details, valid syntax is for instance "
        errMsg += "'mysql://USER:PASSWORD@DBMS_IP:DBMS_PORT/DATABASE_NAME' "
        errMsg += "or 'access://DATABASE_FILEPATH'"
        raise sqlmapSyntaxException, errMsg

    # Verify that the matching third-party client library is importable
    for dbmsName, data in dbmsDict.items():
        if conf.dbms in data[0]:
            try:
                # File-based engines cannot be reached over the network
                if dbmsName in (DBMS.ACCESS, DBMS.SQLITE, DBMS.FIREBIRD):
                    if remote:
                        warnMsg = "direct connection over the network for "
                        warnMsg += "%s DBMS is not supported" % dbmsName
                        logger.warn(warnMsg)

                        conf.hostname = "localhost"
                        conf.port = 0
                elif not remote:
                    # Network engines require explicit host:port details
                    errMsg = "missing remote connection details"
                    raise sqlmapSyntaxException, errMsg

                if dbmsName == DBMS.MSSQL:
                    import _mssql
                    import pymssql

                    if not hasattr(pymssql, "__version__") or pymssql.__version__ < "1.0.2":
                        errMsg = "pymssql library on your system must be "
                        errMsg += "version 1.0.2 to work, get it from "
                        errMsg += "http://sourceforge.net/projects/pymssql/files/pymssql/1.0.2/"
                        raise sqlmapMissingDependence, errMsg

                elif dbmsName == DBMS.MYSQL:
                    import MySQLdb
                elif dbmsName == DBMS.PGSQL:
                    import psycopg2
                elif dbmsName == DBMS.ORACLE:
                    import cx_Oracle
                elif dbmsName == DBMS.SQLITE:
                    import sqlite3
                elif dbmsName == DBMS.ACCESS:
                    import pyodbc
                elif dbmsName == DBMS.FIREBIRD:
                    import kinterbasdb
            except ImportError, _:
                errMsg = "sqlmap requires '%s' third-party library " % data[1]
                errMsg += "in order to directly connect to the database "
                errMsg += "'%s'. Download from '%s'" % (dbmsName, data[2])
                raise sqlmapMissingDependence, errMsg
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def parseTargetUrl():
    """
    Parse target url and set some attributes into the configuration singleton.

    Populates conf.scheme, conf.path, conf.hostname, conf.port and
    conf.parameters[PLACE.GET], then rebuilds conf.url in canonical
    'scheme://host:port/path' form. Raises sqlmapSyntaxException when
    the port part is not numeric.
    """

    if not conf.url:
        return

    # Prepend a scheme if the user omitted it; ':443/' hints at HTTPS
    if not re.search("^http[s]*://", conf.url):
        if ":443/" in conf.url:
            conf.url = "https://" + conf.url
        else:
            conf.url = "http://" + conf.url

    __urlSplit = urlparse.urlsplit(conf.url)
    __hostnamePort = __urlSplit[1].split(":")

    conf.scheme = __urlSplit[0]
    conf.path = __urlSplit[2]
    conf.hostname = __hostnamePort[0]

    if len(__hostnamePort) == 2:
        try:
            conf.port = int(__hostnamePort[1])
        except:
            errMsg = "invalid target url"
            raise sqlmapSyntaxException, errMsg
    elif conf.scheme == "https":
        conf.port = 443
    else:
        conf.port = 80

    # __urlSplit[3] is the query string; store it as the GET parameters
    if __urlSplit[3]:
        conf.parameters[PLACE.GET] = __urlSplit[3]

    conf.url = "%s://%s:%d%s" % (conf.scheme, conf.hostname, conf.port, conf.path)
|
2010-09-14 01:01:46 +04:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def expandAsteriskForColumns(expression):
    """
    Expands 'SELECT * FROM table' into an explicit column list.

    If the user provided an asterisk rather than the column(s) name,
    sqlmap will retrieve the columns itself and reprocess the SQL query
    string (expression). Returns the (possibly rewritten) expression.
    """

    # If the user provided an asterisk rather than the column(s)
    # name, sqlmap will retrieve the columns itself and reprocess
    # the SQL query string (expression)
    asterisk = re.search("^SELECT\s+\*\s+FROM\s+([\w\.\_]+)\s*", expression, re.I)

    if asterisk:
        infoMsg = "you did not provide the fields in your query. "
        infoMsg += "sqlmap will retrieve the column names itself"
        logger.info(infoMsg)

        dbTbl = asterisk.group(1)

        # 'db.table' notation: split into database and table names
        if dbTbl and "." in dbTbl:
            conf.db, conf.tbl = dbTbl.split(".", 1)
        else:
            conf.tbl = dbTbl

        columnsDict = conf.dbmsHandler.getColumns(onlyColNames=True)

        if columnsDict and conf.db in columnsDict and conf.tbl in columnsDict[conf.db]:
            columns = columnsDict[conf.db][conf.tbl].keys()
            columns.sort()
            columnsStr = ", ".join([column for column in columns])
            # Only the first '*' is replaced (count=1)
            expression = expression.replace("*", columnsStr, 1)

            infoMsg = "the query with column names is: "
            infoMsg += "%s" % expression
            logger.info(infoMsg)

    return expression
|
2010-01-09 02:50:06 +03:00
|
|
|
|
2008-11-25 14:33:44 +03:00
|
|
|
def getRange(count, dump=False, plusOne=False):
    """
    Builds an index range for retrieving 'count' entries.

    When dump is True the user-supplied conf.limitStart/conf.limitStop
    bounds (1-based, inclusive) narrow the range. With plusOne the range
    is 1-based [start, stop]; otherwise it is the 0-based equivalent
    [start - 1, stop).
    """

    count = int(count)
    limitStart, limitStop = 1, count

    if dump:
        # Honor user-provided dump limits when they fall inside [1, count]
        if isinstance(conf.limitStop, int) and 0 < conf.limitStop < limitStop:
            limitStop = conf.limitStop

        if isinstance(conf.limitStart, int) and 0 < conf.limitStart <= limitStop:
            limitStart = conf.limitStart

    if plusOne:
        return range(limitStart, limitStop + 1)

    return range(limitStart - 1, limitStop)
|
2010-09-14 01:01:46 +04:00
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
def parseUnionPage(output, expression, partial=False, condition=None, sort=True):
    """
    Parses UNION-query output extracted from the page into Python values.

    Entries are delimited by the kb.misc.start/stop markers (live page) or
    by DUMP_START/STOP_MARKER (session-resumed data). Returns a list of
    strings, a list of lists (multi-column rows), or a single string when
    exactly one value was retrieved. Unparseable output is returned as-is.
    """

    data = []

    # Two possible framings of the retrieved output
    outCond1 = ( output.startswith(kb.misc.start) and output.endswith(kb.misc.stop) )
    outCond2 = ( output.startswith(DUMP_START_MARKER) and output.endswith(DUMP_STOP_MARKER) )

    if outCond1 or outCond2:
        if outCond1:
            regExpr = '%s(.*?)%s' % (kb.misc.start, kb.misc.stop)
        elif outCond2:
            regExpr = '%s(.*?)%s' % (DUMP_START_MARKER, DUMP_STOP_MARKER)

        output = re.findall(regExpr, output, re.S)

        # By default, write to the session file only if this expression
        # was not already resumed for the current target url
        if condition is None:
            condition = (
                kb.resumedQueries and conf.url in kb.resumedQueries.keys()
                and expression in kb.resumedQueries[conf.url].keys()
            )

        if partial or not condition:
            logOutput = "".join(["%s%s%s" % (DUMP_START_MARKER, replaceNewlineTabs(value), DUMP_STOP_MARKER) for value in output])
            dataToSessionFile("[%s][%s][%s][%s][%s]\n" % (conf.url, kb.injection.place, conf.parameters[kb.injection.place], expression, logOutput))

        # NOTE: 'sort' actually deduplicates via set() (unordered)
        if sort:
            output = set(output)

        for entry in output:
            info = []

            # Split multi-column rows on whichever delimiter was used
            if DUMP_DEL_MARKER in entry:
                entry = entry.split(DUMP_DEL_MARKER)
            else:
                entry = entry.split(kb.misc.delimiter)

            if len(entry) == 1:
                data.append(entry[0])
            else:
                for value in entry:
                    info.append(value)

                data.append(info)
    else:
        data = output

    # Collapse a single string result to a bare string
    if len(data) == 1 and isinstance(data[0], basestring):
        data = data[0]

    return data
|
2010-01-05 14:43:16 +03:00
|
|
|
|
|
|
|
def getDelayQuery(andCond=False):
    """
    Returns the DBMS-specific time-delay SQL snippet (conf.timeSec seconds)
    used for time-based blind injection.

    For MySQL/PostgreSQL the fallback 'query2' (heavy query) is used when
    the fingerprinted banner version predates native SLEEP()/pg_sleep()
    support (MySQL < 5.0.12, PostgreSQL < 8.2). With andCond the snippet
    is reshaped so it can be glued into an AND condition.
    """

    query = None

    if getIdentifiedDBMS() in (DBMS.MYSQL, DBMS.PGSQL):
        # Need the banner to decide between native sleep and fallback query
        if not kb.data.banner:
            conf.dbmsHandler.getVersionFromBanner()

        banVer = kb.bannerFp["dbmsVersion"] if 'dbmsVersion' in kb.bannerFp else None

        if banVer is None or (getIdentifiedDBMS() == DBMS.MYSQL and banVer >= "5.0.12") or (getIdentifiedDBMS() == DBMS.PGSQL and banVer >= "8.2"):
            query = queries[getIdentifiedDBMS()].timedelay.query % conf.timeSec
        else:
            query = queries[getIdentifiedDBMS()].timedelay.query2 % conf.timeSec
    elif getIdentifiedDBMS() == DBMS.FIREBIRD:
        # Firebird delay query takes no seconds parameter
        query = queries[getIdentifiedDBMS()].timedelay.query
    else:
        query = queries[getIdentifiedDBMS()].timedelay.query % conf.timeSec

    if andCond:
        if getIdentifiedDBMS() in ( DBMS.MYSQL, DBMS.SQLITE ):
            query = query.replace("SELECT ", "")
        elif getIdentifiedDBMS() == DBMS.FIREBIRD:
            query = "(%s)>0" % query

    return query
|
2010-09-14 01:01:46 +04:00
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
def getLocalIP():
    """
    Returns the local IP address used to reach the target host, or None
    when the probe connection fails.

    Works by opening a TCP connection to (conf.hostname, conf.port) and
    reading the socket's own bound address.
    """

    retVal = None
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((conf.hostname, conf.port))
        # getsockname() -> (local_ip, local_port); only the IP is kept
        retVal, _ = s.getsockname()
        s.close()
    # Deliberately broad: this is best-effort, any failure just logs
    except:
        debugMsg = "there was an error in opening socket "
        debugMsg += "connection toward '%s'" % conf.hostname
        logger.debug(debugMsg)

    return retVal
|
2009-04-22 15:48:07 +04:00
|
|
|
|
|
|
|
def getRemoteIP():
    """
    Resolves the configured target hostname and returns its IP address.
    """

    targetHost = conf.hostname

    return socket.gethostbyname(targetHost)
|
|
|
|
|
|
|
|
def getFileType(filePath):
    """
    Classifies the file at filePath as 'text' or 'binary' via libmagic,
    or 'unknown' when the magic probe fails.
    """

    try:
        description = magic.from_file(filePath)
    except:
        return "unknown"

    isText = "ASCII" in description or "text" in description

    return "text" if isText else "binary"
|
2010-09-14 01:01:46 +04:00
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
def getCharset(charsetType=None):
    """
    Returns the ordinal character table used for inference, optionally
    narrowed by the expected value type:

      None - full 7-bit ASCII
      1    - 0 or 1
      2    - digits
      3    - hexadecimal
      4    - letters
      5    - letters and digits

    All narrowed tables also contain ordinals 0 and 1 (terminators) and
    start one position before each character class (e.g. 47 for digits).
    """

    if charsetType is None:
        return range(0, 128)

    # Ordinal spans (half-open) composing each narrowed charset
    spans = {
        1: [(47, 50)],                            # 0 or 1
        2: [(47, 58)],                            # digits
        3: [(47, 58), (64, 71), (96, 103)],       # hexadecimal
        4: [(64, 91), (96, 123)],                 # characters
        5: [(47, 58), (64, 91), (96, 123)],       # characters and digits
    }

    asciiTbl = []

    if charsetType in spans:
        asciiTbl.extend([ 0, 1 ])

        for lower, upper in spans[charsetType]:
            asciiTbl.extend(range(lower, upper))

    return asciiTbl
|
2010-09-14 01:01:46 +04:00
|
|
|
|
2009-05-13 00:24:47 +04:00
|
|
|
def searchEnvPath(fileName):
    """
    Checks the directories in the PATH environment variable for fileName.

    Returns the boolean result of the last existence check performed:
    True when the file was found, otherwise the final (False) check, or
    None when PATH yielded no directories at all.
    """

    separator = ";" if IS_WIN else ":"
    result = None

    for envPath in os.environ["PATH"].split(separator):
        # Strip any stray separators left inside the entry
        candidate = os.path.join(envPath.replace(";", ""), fileName)
        result = os.path.exists(os.path.normpath(candidate))

        if result:
            break

    return result
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2010-01-15 14:45:48 +03:00
|
|
|
def urlEncodeCookieValues(cookieStr):
    """
    Returns cookieStr with each cookie value url-encoded, preserving the
    '; name=value' layout and the bare 'secure' attribute. Parts without
    '=' (other than 'secure') are joined with an encoded '%3B' separator.
    Returns None for empty input.
    """

    if cookieStr:
        result = ""

        for part in cookieStr.split(';'):
            # index points just past '=' (0 means no '=' in this part)
            index = part.find('=') + 1

            if index > 0:
                name = part[:index - 1].strip()
                value = urlencode(part[index:], convall=True)
                result += "; %s=%s" % (name, value)
            elif part.strip().lower() != "secure":
                result += "%s%s" % ("%3B", urlencode(part, convall=True))
            else:
                result += "; secure"

        # Drop the leading separator added by the first iteration
        if result.startswith('; '):
            result = result[2:]
        elif result.startswith('%3B'):
            result = result[3:]

        return result
    else:
        return None
|
2010-01-05 14:30:33 +03:00
|
|
|
|
|
|
|
def directoryPath(path):
    """
    Returns the directory part of the given filepath, using the NT path
    module for Windows drive-letter paths and the posix one otherwise.
    """

    pathModule = ntpath if isWindowsDriveLetterPath(path) else posixpath

    return pathModule.dirname(path)
|
2010-01-15 20:42:46 +03:00
|
|
|
|
2010-01-05 14:30:33 +03:00
|
|
|
def normalizePath(path):
    """
    Normalizes the given filepath, using the NT path module for Windows
    drive-letter paths and the posix one otherwise.
    """

    pathModule = ntpath if isWindowsDriveLetterPath(path) else posixpath

    return pathModule.normpath(path)
|
2010-01-15 19:06:59 +03:00
|
|
|
|
|
|
|
def safeStringFormat(formatStr, params):
|
2010-05-07 17:40:57 +04:00
|
|
|
retVal = formatStr.replace("%d", "%s")
|
2010-01-15 20:42:46 +03:00
|
|
|
|
2010-05-25 14:09:35 +04:00
|
|
|
if isinstance(params, basestring):
|
2010-01-15 20:42:46 +03:00
|
|
|
retVal = retVal.replace("%s", params)
|
|
|
|
else:
|
|
|
|
count = 0
|
|
|
|
index = 0
|
|
|
|
|
|
|
|
while index != -1:
|
2010-05-07 17:40:57 +04:00
|
|
|
index = retVal.find("%s")
|
2010-01-15 20:42:46 +03:00
|
|
|
|
|
|
|
if index != -1:
|
|
|
|
if count < len(params):
|
2010-06-04 21:07:17 +04:00
|
|
|
retVal = retVal[:index] + getUnicode(params[count]) + retVal[index+2:]
|
2010-01-15 20:42:46 +03:00
|
|
|
else:
|
|
|
|
raise sqlmapNoneDataException, "wrong number of parameters during string formatting"
|
|
|
|
count += 1
|
|
|
|
|
2010-01-15 19:06:59 +03:00
|
|
|
return retVal
|
2010-01-24 02:29:34 +03:00
|
|
|
|
2010-03-26 20:18:02 +03:00
|
|
|
def sanitizeAsciiString(subject):
    """
    Returns subject with every non-ASCII character (ordinal >= 128)
    replaced by '?'. Empty or None input yields None.
    """

    if not subject:
        return None

    return "".join(ch if ord(ch) < 128 else '?' for ch in subject)
|
2010-01-28 19:50:34 +03:00
|
|
|
|
2011-01-01 22:07:40 +03:00
|
|
|
def getFilteredPageContent(page, onlyText=True):
    """
    Returns filtered page content: scripts and styles stripped, and (when
    onlyText) all remaining tags and tab/newline whitespace replaced by
    spaces; runs of spaces are collapsed and HTML entities unescaped.
    Non-string input is returned unchanged.
    """

    if not isinstance(page, basestring):
        return page

    extra = r"|<[^>]+>|\t|\n|\r" if onlyText else ""
    retVal = re.sub(r"(?s)<script.+?</script>|<style.+?</style>%s" % extra, " ", page)

    # Collapse consecutive spaces introduced by the substitutions
    while "  " in retVal:
        retVal = retVal.replace("  ", " ")

    return htmlunescape(retVal)
|
|
|
|
|
|
|
|
def getPageTextWordsSet(page):
    """
    Returns the set of words occurring in the filtered textual content of
    the given page, or None when page is not a string.
    """

    if not isinstance(page, basestring):
        return None

    filtered = getFilteredPageContent(page)

    return set(re.findall(r"\w+", filtered))
|
|
|
|
|
|
|
|
def showStaticWords(firstPage, secondPage):
    """
    Logs words (longer than 2 characters) found in the longest common
    part of two renderings of a dynamic page - candidates for stable
    string matching.
    """

    infoMsg = "finding static words in longest matching part of dynamic page content"
    logger.info(infoMsg)

    firstPage = getFilteredPageContent(firstPage)
    secondPage = getFilteredPageContent(secondPage)
    # Longest contiguous match between the two filtered pages
    match = SequenceMatcher(None, firstPage, secondPage).find_longest_match(0, len(firstPage), 0, len(secondPage))
    commonText = firstPage[match[0]:match[0]+match[2]]
    commonWords = getPageTextWordsSet(commonText)

    infoMsg = "static words: "

    if commonWords:
        commonWords = list(commonWords)
        # Case-insensitive alphabetical order for readable output
        commonWords.sort(lambda a, b: cmp(a.lower(), b.lower()))

        for word in commonWords:
            # Skip very short words; unlikely to be useful as markers
            if len(word) > 2:
                infoMsg += "'%s', " % word

    infoMsg = infoMsg.rstrip(", ")
    logger.info(infoMsg)
|
|
|
|
|
2010-01-28 19:50:34 +03:00
|
|
|
def decloakToNamedTemporaryFile(filepath, name=None):
    """
    Decloaks the given file into a NamedTemporaryFile and returns the
    open (rewound) file object. When 'name' is provided the visible
    .name attribute is overridden with it, the original being kept in
    .old_name so cleanup can still unlink the real file.
    """

    retVal = NamedTemporaryFile()

    def __del__():
        try:
            if hasattr(retVal, 'old_name'):
                # Restore the real filesystem name before closing so the
                # temporary file gets unlinked properly.
                # BUGFIX: was 'retVal.name = old_name' which raised
                # NameError ('old_name' undefined in this scope)
                retVal.name = retVal.old_name
            retVal.close()
        except OSError:
            pass

    retVal.__del__ = __del__
    retVal.write(decloak(filepath))
    retVal.seek(0)

    if name:
        retVal.old_name = retVal.name
        retVal.name = name

    return retVal
|
2010-01-29 13:12:09 +03:00
|
|
|
|
|
|
|
def decloakToMkstemp(filepath, **kwargs):
    """
    Decloaks the given file into a freshly created mkstemp() temporary
    file and returns the open (rewound) file object. Extra keyword
    arguments are forwarded to mkstemp().
    """

    tempPath = mkstemp(**kwargs)[1]
    handle = open(tempPath, 'w+b')

    handle.write(decloak(filepath))
    handle.seek(0)

    return handle
|
2010-02-04 12:49:31 +03:00
|
|
|
|
|
|
|
def isWindowsPath(filepath):
    """
    Checks if the given filepath is an absolute Windows path
    (drive letter, colon, backslash - e.g. 'C:\\Windows').
    """

    return bool(re.search(r"\A[\w]\:\\", filepath))
|
2010-02-04 17:37:00 +03:00
|
|
|
|
2010-04-22 14:31:33 +04:00
|
|
|
def isWindowsDriveLetterPath(filepath):
    """
    Checks if the given filepath starts with a Windows drive letter
    (e.g. 'C:').
    """

    return bool(re.search(r"\A[\w]\:", filepath))
|
|
|
|
|
2010-02-04 17:37:00 +03:00
|
|
|
def posixToNtSlashes(filepath):
    """
    Replaces all occurrences of Posix slashes (/) in the provided
    filepath with NT ones (\\)

    >>> posixToNtSlashes('C:/Windows')
    'C:\\\\Windows'
    """

    return '\\'.join(filepath.split('/'))
|
|
|
|
|
|
|
|
def ntToPosixSlashes(filepath):
    """
    Replaces all occurrences of NT slashes (\\) in the provided
    filepath with Posix ones (/)

    >>> ntToPosixSlashes('C:\\Windows')
    'C:/Windows'
    """

    return '/'.join(filepath.split('\\'))
|
2010-03-26 20:18:02 +03:00
|
|
|
|
|
|
|
def isBase64EncodedString(subject):
    """
    Checks if the provided string looks like valid Base64 data
    (length a multiple of four, with correct '=' padding).

    >>> isBase64EncodedString('dGVzdA==')
    True
    >>> isBase64EncodedString('123456')
    False
    """

    pattern = r"\A(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?\Z"

    return bool(re.match(pattern, subject))
|
2010-09-14 01:01:46 +04:00
|
|
|
|
2010-03-26 20:18:02 +03:00
|
|
|
def isHexEncodedString(subject):
    """
    Checks if the provided string consists solely of hexadecimal digits
    (the 'x' of a '0x' prefix is tolerated).

    >>> isHexEncodedString('DEADBEEF')
    True
    >>> isHexEncodedString('test')
    False
    """

    return bool(re.match(r"\A[0-9a-fA-Fx]+\Z", subject))
|
2010-04-16 17:40:02 +04:00
|
|
|
|
|
|
|
def getConsoleWidth(default=80):
    """
    Returns the current console width in characters, probing in order:
    the COLUMNS environment variable, 'stty size' output, and curses.
    Falls back to 'default' when no probe succeeds.
    """

    width = None

    if 'COLUMNS' in os.environ and os.environ['COLUMNS'].isdigit():
        width = int(os.environ['COLUMNS'])
    else:
        # 'stty size' prints "rows cols"; the second token is the width
        output=subprocess.Popen('stty size', shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).stdout.read()
        items = output.split()

        if len(items) == 2 and items[1].isdigit():
            width = int(items[1])

    if width is None:
        try:
            import curses

            stdscr = curses.initscr()
            _, width = stdscr.getmaxyx()
            curses.endwin()
        # Broad on purpose: curses may fail on non-tty/unsupported terminals
        except:
            pass

    return width if width else default
|
|
|
|
|
2010-11-24 00:00:42 +03:00
|
|
|
def clearConsoleLine(forceOutput=False):
    """
    Erases the current console line by overwriting it with blanks and
    returning the cursor to the line start.
    """

    blanks = " " * (getConsoleWidth() - 1)
    dataToStdout("\r%s\r" % blanks, forceOutput)
|
2010-11-23 23:54:40 +03:00
|
|
|
|
2010-04-16 23:57:00 +04:00
|
|
|
def parseXmlFile(xmlFile, handler):
    """
    SAX-parses the given XML file with the provided handler, reading the
    (cached) file content through an in-memory stream.
    """

    content = readCachedFileContent(xmlFile)
    stream = StringIO(content)
    parse(stream, handler)
    stream.close()
|
2010-05-13 15:05:35 +04:00
|
|
|
|
2010-11-02 10:32:08 +03:00
|
|
|
def readCachedFileContent(filename, mode='rb'):
    """
    Lazily reads and caches file content (keyed by filename) so repeated
    lookups avoid disk access. Thread-safe via kb.locks.cacheLock with a
    double-checked pattern.
    """

    if filename not in kb.cache.content:
        kb.locks.cacheLock.acquire()

        # BUGFIX: release the lock in a finally block - previously an
        # exception from checkFile()/open()/read() left cacheLock held
        # forever, deadlocking every other thread
        try:
            # Double-check: another thread may have populated the cache
            # while we were waiting for the lock
            if filename not in kb.cache.content:
                checkFile(filename)
                xfile = codecs.open(filename, mode, conf.dataEncoding)
                try:
                    kb.cache.content[filename] = xfile.read()
                finally:
                    xfile.close()
        finally:
            kb.locks.cacheLock.release()

    return kb.cache.content[filename]
|
|
|
|
|
2010-10-07 02:43:04 +04:00
|
|
|
def readXmlFile(xmlFile):
    """
    Parses the given XML file and returns its document (root) element.
    """

    checkFile(xmlFile)

    xfile = codecs.open(xmlFile, 'r', conf.dataEncoding)
    rootElement = minidom.parse(xfile).documentElement
    xfile.close()

    return rootElement
|
|
|
|
|
2010-12-07 19:39:31 +03:00
|
|
|
def stdev(values):
    """
    Computes sample standard deviation of a list of numbers, caching the
    result keyed by (first, last, length) of the input.

    Reference: http://www.goldb.org/corestats.html
    """

    if not values or len(values) < 2:
        return None

    key = (values[0], values[-1], len(values))

    if key in kb.cache.stdev:
        return kb.cache.stdev[key]

    avg = average(values)
    # Start value 0.0 keeps the accumulation in float arithmetic
    summa = sum((pow(value - avg, 2) for value in values), 0.0)

    retVal = sqrt(summa/(len(values) - 1))
    kb.cache.stdev[key] = retVal

    return retVal
|
2010-12-07 19:39:31 +03:00
|
|
|
|
2010-12-07 19:04:53 +03:00
|
|
|
def average(values):
    """
    Computes the arithmetic mean of a list of numbers; returns None for
    empty input.
    """

    if not values:
        return None

    return sum(values) / len(values)
|
2010-12-07 19:04:53 +03:00
|
|
|
|
|
|
|
def calculateDeltaSeconds(start):
    """
    Returns elapsed time in seconds from the given time.time() timestamp
    until now.
    """

    now = time.time()

    return now - start
|
2010-05-21 13:35:36 +04:00
|
|
|
|
2010-05-21 16:19:20 +04:00
|
|
|
def initCommonOutputs():
    """
    Loads txt/common-outputs.txt into kb.commonOutputs as a mapping of
    section name ('[Section]' headers) to a set of known output values.
    Inline '#' comments and blank/one-char lines are ignored.
    """

    kb.commonOutputs = {}
    key = None

    cfile = codecs.open(paths.COMMON_OUTPUTS, 'r', conf.dataEncoding)

    for line in cfile.readlines(): # xreadlines doesn't return unicode strings when codec.open() is used
        # Strip inline comments
        if line.find('#') != -1:
            line = line[:line.find('#')]

        line = line.strip()

        if len(line) > 1:
            # '[Section]' line switches the current key
            if line.startswith('[') and line.endswith(']'):
                key = line[1:-1]
            elif key:
                if key not in kb.commonOutputs:
                    kb.commonOutputs[key] = set()

                if line not in kb.commonOutputs[key]:
                    kb.commonOutputs[key].add(line)

    cfile.close()
|
2010-05-21 16:19:20 +04:00
|
|
|
|
2010-12-26 14:15:02 +03:00
|
|
|
def getFileItems(filename, commentPrefix='#', unicode_=True, lowercase=False, unique=False):
    """
    Returns a list of stripped, non-empty lines from the given file.

    commentPrefix text (and anything after it) is removed from each line;
    lowercase forces lower-casing; unique skips duplicates (O(n) lookup
    per line). With unicode_ the file is decoded using conf.dataEncoding.
    """

    retVal = []

    checkFile(filename)

    if unicode_:
        ifile = codecs.open(filename, 'r', conf.dataEncoding)
    else:
        ifile = open(filename, 'r')

    for line in ifile.readlines(): # xreadlines doesn't return unicode strings when codec.open() is used
        if commentPrefix:
            if line.find(commentPrefix) != -1:
                line = line[:line.find(commentPrefix)]

        line = line.strip()

        if not unicode_:
            try:
                # NOTE(review): 'str.encode(line)' looks like it should be
                # 'line.encode(...)'; in Python 2 this works only when
                # line is already a str - confirm intent
                line = str.encode(line)
            except UnicodeDecodeError:
                continue

        if line:
            if lowercase:
                line = line.lower()

            if unique and line in retVal:
                continue

            retVal.append(line)

    return retVal
|
|
|
|
|
2010-06-21 18:40:12 +04:00
|
|
|
def goGoodSamaritan(prevValue, originalCharset):
    """
    Function for retrieving parameters needed for common prediction (good
    samaritan) feature.

    prevValue: retrieved query output so far (e.g. 'i').

    Returns commonValue if there is a complete single match (in kb.partRun
    of txt/common-outputs.txt under kb.partRun) regarding parameter
    prevValue. If there is no single value match, but multiple, commonCharset is
    returned containing more probable characters (retrieved from matched
    values in txt/common-outputs.txt) together with the rest of charset as
    otherCharset.

    NOTE(review): despite the description above, the 4th element returned
    is originalCharset, not otherCharset - confirm against callers.
    """

    # Lazily load txt/common-outputs.txt on first use
    if kb.commonOutputs is None:
        initCommonOutputs()

    predictionSet = set()
    commonValue = None
    commonPattern = None
    countCommonValue = 0

    # If the header (e.g. Databases) we are looking for has common
    # outputs defined
    if kb.partRun in kb.commonOutputs:
        commonPartOutputs = kb.commonOutputs[kb.partRun]
        commonPattern = commonFinderOnly(prevValue, commonPartOutputs)

        # If the longest common prefix is the same as previous value then
        # do not consider it
        if commonPattern and commonPattern == prevValue:
            commonPattern = None

        # For each common output
        for item in commonPartOutputs:
            # Check if the common output (item) starts with prevValue
            # where prevValue is the enumerated character(s) so far
            if item.startswith(prevValue):
                commonValue = item
                countCommonValue += 1

                # Remember the character that would come next
                if len(item) > len(prevValue):
                    char = item[len(prevValue)]
                    predictionSet.add(char)

        # Reset single value if there is more than one possible common
        # output
        if countCommonValue > 1:
            commonValue = None

        commonCharset = []
        otherCharset = []

        # Split the original charset into common chars (commonCharset)
        # and other chars (otherCharset)
        for ordChar in originalCharset:
            if chr(ordChar) not in predictionSet:
                otherCharset.append(ordChar)
            else:
                commonCharset.append(ordChar)

        commonCharset.sort()

        return commonValue, commonPattern, commonCharset, originalCharset
    else:
        return None, None, None, originalCharset
|
2010-05-21 18:42:59 +04:00
|
|
|
|
2010-11-24 17:20:43 +03:00
|
|
|
def getCompiledRegex(regex, flags=0):
    """
    Returns compiled regular expression, caching it (keyed on pattern and
    flags) so repeated callers do not pay re.compile() again

    >>> getCompiledRegex('test') # doctest: +ELLIPSIS
    <_sre.SRE_Pattern object at...
    """

    cacheKey = (regex, flags)

    if cacheKey not in kb.cache.regex:
        kb.cache.regex[cacheKey] = re.compile(regex, flags)

    return kb.cache.regex[cacheKey]
|
2010-05-26 13:48:20 +04:00
|
|
|
|
|
|
|
def getPartRun():
    """
    Goes through call stack and finds constructs matching conf.dbmsHandler.*.
    Returns it or its alias used in txt/common-outputs.txt
    """

    retVal = None
    commonPartsDict = optDict["Enumeration"]

    # One textual code-context line per stack frame ('' when unavailable)
    frames = [item[4][0] if isinstance(item[4], list) else '' for item in inspect.stack()]

    handlerRegex = getCompiledRegex('conf\.dbmsHandler\.([^(]+)\(\)')
    selfRegex = getCompiledRegex('self\.(get[^(]+)\(\)')

    # Walk the stack (last frame excluded) looking for the calling
    # conf.dbmsHandler or self get* method (e.g. 'getDbms')
    for frame in frames[:-1]:
        for regex in (selfRegex, handlerRegex):
            match = regex.search(frame)

            if match:
                retVal = match.groups()[0]
                break

        if retVal is not None:
            break

    # Return the INI tag to consider for common outputs (e.g. 'Databases')
    return commonPartsDict[retVal][1] if retVal in commonPartsDict else retVal
|
2010-05-28 13:13:50 +04:00
|
|
|
|
2010-09-07 14:21:42 +04:00
|
|
|
def getUnicode(value, encoding=None):
    """
    Return the unicode representation of the supplied value:

    >>> getUnicode(u'test')
    u'test'
    >>> getUnicode('test')
    u'test'
    >>> getUnicode(1)
    u'1'
    """

    if not isinstance(value, basestring):
        # encoding argument is irrelevant for non-string objects
        return unicode(value)

    if isinstance(value, unicode):
        return value

    # Byte string: decode with the first known encoding, replacing
    # undecodable bytes rather than raising
    return unicode(value, encoding or kb.pageEncoding or conf.dataEncoding, errors='replace')
|
2010-06-02 16:31:36 +04:00
|
|
|
|
2010-06-17 15:38:32 +04:00
|
|
|
# http://boredzo.org/blog/archives/2007-01-06/longest-common-prefix-in-python-2
|
2010-06-30 15:22:25 +04:00
|
|
|
def longestCommonPrefix(*sequences):
    """
    Returns the longest prefix shared by all supplied sequences
    (reference: http://boredzo.org/blog/archives/2007-01-06/longest-common-prefix-in-python-2)
    """

    if len(sequences) == 1:
        return sequences[0]

    # Order by length so the shortest sequence bounds the scan below
    ordered = sorted(sequences, key=len)

    if not ordered:
        return None

    shortest = ordered[0]

    for index in range(len(shortest)):
        expected = shortest[index]

        for candidate in ordered[1:]:
            if candidate[index] != expected:
                # All positions before index matched across every sequence
                return candidate[:index]

    # The shortest sequence is itself a prefix of all the others
    return shortest
|
|
|
|
|
2010-06-30 15:22:25 +04:00
|
|
|
def commonFinderOnly(initial, sequence):
    """
    Returns the longest common prefix among sequence items that start
    with the given initial value
    """
    matching = [item for item in sequence if item.startswith(initial)]
    return longestCommonPrefix(*matching)
|
2010-09-25 01:59:03 +04:00
|
|
|
|
2010-12-21 01:45:01 +03:00
|
|
|
def pushValue(value):
    """
    Push value to the stack (thread dependent)
    """

    threadData = getCurrentThreadData()
    threadData.valueStack.append(value)
|
2010-09-30 16:35:45 +04:00
|
|
|
|
|
|
|
def popValue():
    """
    Pop value from the stack (thread dependent)
    """

    threadData = getCurrentThreadData()
    return threadData.valueStack.pop()
|
2010-10-25 18:06:56 +04:00
|
|
|
|
2010-11-16 13:42:42 +03:00
|
|
|
def wasLastRequestDBMSError():
    """
    Returns True if the last web request resulted in a (recognized) DBMS error page
    """

    threadData = getCurrentThreadData()
    lastErrorPage = threadData.lastErrorPage

    # The error page record is only relevant if it belongs to the very
    # last issued request (matched by request UID)
    return lastErrorPage and lastErrorPage[0] == threadData.lastRequestUID
|
2010-12-08 17:26:40 +03:00
|
|
|
|
2010-12-26 16:20:52 +03:00
|
|
|
def wasLastRequestHTTPError():
    """
    Returns True if the last web request resulted in an erroneous HTTP code (like 500)
    """

    threadData = getCurrentThreadData()
    lastHTTPError = threadData.lastHTTPError

    # Only meaningful when the recorded error belongs to the last request
    return lastHTTPError and lastHTTPError[0] == threadData.lastRequestUID
|
|
|
|
|
2010-12-08 17:26:40 +03:00
|
|
|
def wasLastRequestDelayed():
    """
    Returns True if the last web request resulted in a time-delay
    """

    # 99.9999999997440% of all non time-based sql injection
    # affected response times should be inside +-7*stdev([normal response times])
    # (Math reference: http://www.answers.com/topic/standard-deviation)
    deviation = stdev(kb.responseTimes)
    threadData = getCurrentThreadData()

    if deviation:
        # Too few baseline samples make the deviation model unreliable;
        # warn the user but proceed anyway
        if len(kb.responseTimes) < MIN_TIME_RESPONSES:
            warnMsg = "time-based standard deviation method used on a model "
            warnMsg += "with less than %d response times" % MIN_TIME_RESPONSES
            logger.warn(warnMsg)

        # Delayed when the last query exceeds mean + TIME_STDEV_COEFF
        # standard deviations of the baseline response times
        return (threadData.lastQueryDuration >= average(kb.responseTimes) + TIME_STDEV_COEFF * deviation)
    else:
        # NOTE(review): this branch returns a numeric difference, not a
        # bool -- callers appear to rely on truthiness (non-zero duration
        # beyond conf.timeSec => delayed); confirm before changing
        return threadData.lastQueryDuration - conf.timeSec
|
2010-10-25 23:16:42 +04:00
|
|
|
|
2010-11-16 13:42:42 +03:00
|
|
|
def extractErrorMessage(page):
    """
    Extracts a DBMS-reported error message from the given page, if one
    of the known error-parsing regular expressions matches
    """

    if not isinstance(page, basestring):
        return None

    for regex in ERROR_PARSING_REGEXES:
        match = re.search(regex, page, re.DOTALL | re.IGNORECASE)

        if match:
            return htmlunescape(match.group("result")).replace("<br>", "\n").strip()

    return None
|
|
|
|
|
2010-10-25 23:16:42 +04:00
|
|
|
def beep():
    """
    Does an audible beep sound

    Reference: http://de3.aminet.net/dev/src/clr.py.txt
    """

    # Non-Linux platforms: fall back to the terminal bell character
    if sys.platform != 'linux2':
        dataToStdout('\a', True)
        return

    # First attempt: write a short square wave directly to an audio device
    for dev in ('/dev/audio', '/dev/oss', '/dev/dsp', '/dev/sound'):
        if not os.path.exists(dev):
            continue

        try:
            audio = file(dev, 'wb')

            for _ in xrange(250):
                audio.write(chr(32) * 4)
                audio.write(chr(0) * 4)

            audio.close()
            return
        except:
            # Best-effort: try the next device
            pass

    # Second attempt: curses terminal beep/flash
    try:
        import curses
        curses.initscr()
        curses.beep()
        curses.flash()
        curses.endwin()
        return
    except:
        # Last resort: terminal bell character
        dataToStdout('\a', True)
|
2010-10-28 00:39:50 +04:00
|
|
|
|
|
|
|
def runningAsAdmin():
    """
    Returns True if the current process runs with administrative
    privileges (root on posix/mac, Administrator on Windows). On
    unsupported platforms an error is logged and True is assumed.
    """

    isAdmin = False

    if PLATFORM in ( "posix", "mac" ):
        # Bug fix: the original kept the raw euid as the return value, so
        # any non-zero euid (e.g. 1000) was truthy and read as "admin";
        # only euid 0 (root) means administrative privileges
        isAdmin = os.geteuid() == 0
    elif IS_WIN:
        # IsUserAnAdmin() returns a BOOL (1 for admin, 0 otherwise)
        isAdmin = ctypes.windll.shell32.IsUserAnAdmin() == 1
    else:
        errMsg = "sqlmap is not able to check if you are running it "
        errMsg += "as an administrator account on this platform. "
        errMsg += "sqlmap will assume that you are an administrator "
        errMsg += "which is mandatory for the requested takeover attack "
        errMsg += "to work properly"
        logger.error(errMsg)

        # Optimistic fallback: assume admin so the takeover attempt proceeds
        isAdmin = True

    return isAdmin
|
2010-11-08 14:22:47 +03:00
|
|
|
|
|
|
|
def logHTTPTraffic(requestLogMsg, responseLogMsg):
    """
    Logs HTTP traffic to the output file

    Acquires the shared traffic-log lock so concurrent threads do not
    interleave their request/response pairs.
    """

    kb.locks.logLock.acquire()

    try:
        dataToTrafficFile("%s\n" % requestLogMsg)
        dataToTrafficFile("%s\n" % responseLogMsg)
        dataToTrafficFile("\n%s\n\n" % (76 * '#'))
    finally:
        # Bug fix: release the lock even if a write fails, otherwise every
        # other thread logging traffic would deadlock on acquire()
        kb.locks.logLock.release()
|
2010-11-23 16:58:01 +03:00
|
|
|
|
2010-12-25 13:16:20 +03:00
|
|
|
# cross-linked method
|
2010-12-18 12:51:34 +03:00
|
|
|
def getPageTemplate(payload, place):
    """
    Cross-linked stub (see the '# cross-linked method' note above): the
    real implementation is bound at runtime from another module --
    presumably one that would otherwise create a circular import. This
    placeholder intentionally does nothing.
    """
    pass
|
|
|
|
|
2010-12-15 14:21:47 +03:00
|
|
|
def getPublicTypeMembers(type_, onlyValues=False):
    """
    Useful for getting members from types (e.g. in enums)

    Returns (name, value) pairs for all non-dunder members, or just the
    values when onlyValues is True.
    """

    retVal = []

    for name, value in getmembers(type_):
        if name.startswith('__'):
            continue

        retVal.append(value if onlyValues else (name, value))

    return retVal
|
2010-11-24 14:38:27 +03:00
|
|
|
|
2010-12-18 12:51:34 +03:00
|
|
|
def enumValueToNameLookup(type_, value_):
    """
    Returns name of a enum member with a given value (None when no
    member has that value)
    """

    for name, value in getPublicTypeMembers(type_):
        if value == value_:
            return name

    return None
|
|
|
|
|
2010-11-24 17:20:43 +03:00
|
|
|
def extractRegexResult(regex, content, flags=0):
    """
    Returns 'result' group value from a possible match with regex on a given
    content (None when the regex lacks a 'result' group, inputs are empty,
    or nothing matches)
    """

    if not (regex and content and '?P<result>' in regex):
        return None

    match = re.search(regex, content, flags)

    return match.group("result") if match else None
|
2010-11-29 18:14:49 +03:00
|
|
|
|
|
|
|
def trimAlphaNum(value):
    """
    Trims alpha numeric characters from start and ending of a given value
    """

    if not value:
        # Preserve falsy inputs (None, empty string) unchanged
        return value

    # Scan from the right for the first non-alphanumeric character
    right = len(value)
    while right and value[right - 1].isalnum():
        right -= 1

    # Scan from the left, bounded by the right edge found above
    left = 0
    while left < right and value[left].isalnum():
        left += 1

    return value[left:right]
|
2010-12-02 21:57:43 +03:00
|
|
|
|
|
|
|
def isNumPosStrValue(value):
    """
    Returns True if value is a string with a positive integer representation

    Note: preserves the original short-circuit semantics -- a falsy value
    is returned as-is, a truthy non-string yields False.
    """

    retVal = value

    if retVal and isinstance(retVal, basestring):
        retVal = value.isdigit() and value != "0"
    elif retVal:
        retVal = False

    return retVal
|
2010-12-04 01:44:29 +03:00
|
|
|
|
|
|
|
def aliasToDbmsEnum(value):
    """
    Returns major DBMS name from a given alias (None when the alias is
    unknown or no value is provided)
    """

    retVal = None

    # Bug fix: callers (e.g. getIdentifiedDBMS()) can pass None when no
    # DBMS has been identified yet; value.lower() would raise AttributeError
    if value is None:
        return retVal

    for key, item in dbmsDict.items():
        # item[0] holds the known aliases for the DBMS keyed by 'key'
        if value.lower() in item[0]:
            retVal = key
            break

    return retVal
|
|
|
|
|
2010-12-29 22:39:32 +03:00
|
|
|
def findDynamicContent(firstPage, secondPage):
    """
    This function checks if the provided pages have dynamic content. If they
    are dynamic, proper markings will be made

    Matching regions between the two pages are computed with difflib; the
    boundaries of the non-matching (dynamic) regions are stored as
    (prefix, suffix) regex pairs in kb.dynamicMarkings, which
    removeDynamicContent() later uses to strip them.
    """

    infoMsg = "searching for dynamic content"
    logger.info(infoMsg)

    # Matching blocks are (i, j, size) triples; difflib's trailing dummy
    # block has size 0 and is removed by the filter below
    blocks = SequenceMatcher(None, firstPage, secondPage).get_matching_blocks()
    kb.dynamicMarkings = []

    # Removing too small matching blocks
    i = 0
    while i < len(blocks):
        block = blocks[i]
        (_, _, length) = block

        if length <= DYNAMICITY_MARK_LENGTH:
            blocks.remove(block)

        else:
            i += 1

    # Making of dynamic markings based on prefix/suffix principle
    if len(blocks) > 0:
        # None sentinels so the first dynamic region has no prefix and the
        # last one has no suffix
        blocks.insert(0, None)
        blocks.append(None)

        for i in xrange(len(blocks) - 1):
            prefix = firstPage[blocks[i][0]:blocks[i][0] + blocks[i][2]] if blocks[i] else None
            suffix = firstPage[blocks[i + 1][0]:blocks[i + 1][0] + blocks[i + 1][2]] if blocks[i + 1] else None

            # Nothing dynamic before a matching block anchored at page start
            if prefix is None and blocks[i + 1][0] == 0:
                continue

            # Nothing dynamic after a matching block that reaches page end
            if suffix is None and (blocks[i][0] + blocks[i][2] >= len(firstPage)):
                continue

            # Strip alphanumeric edges -- presumably to anchor the markers
            # on stable punctuation/markup rather than page text
            prefix = trimAlphaNum(prefix)
            suffix = trimAlphaNum(suffix)

            kb.dynamicMarkings.append((re.escape(prefix[-DYNAMICITY_MARK_LENGTH/2:]) if prefix else None, re.escape(suffix[:DYNAMICITY_MARK_LENGTH/2]) if suffix else None))

    if len(kb.dynamicMarkings) > 0:
        infoMsg = "dynamic content marked for removal (%d region%s)" % (len(kb.dynamicMarkings), 's' if len(kb.dynamicMarkings) > 1 else '')
        logger.info(infoMsg)
|
|
|
|
|
2010-12-04 13:13:18 +03:00
|
|
|
def removeDynamicContent(page):
    """
    Removes dynamic content from the supplied page basing removal on
    the precalculated dynamic markings (kb.dynamicMarkings)
    """

    if not page:
        return page

    for prefix, suffix in kb.dynamicMarkings:
        if prefix is None and suffix is None:
            continue
        elif prefix is None:
            # Unbounded on the left: strip everything up to the suffix
            page = getCompiledRegex('(?s)^.+%s' % suffix).sub(suffix, page)
        elif suffix is None:
            # Unbounded on the right: strip everything after the prefix
            page = getCompiledRegex('(?s)%s.+$' % prefix).sub(prefix, page)
        else:
            # Bounded region: collapse content between prefix and suffix
            page = getCompiledRegex('(?s)%s.+%s' % (prefix, suffix)).sub('%s%s' % (prefix, suffix), page)

    return page
|
2010-12-10 13:54:17 +03:00
|
|
|
|
2011-01-05 00:56:37 +03:00
|
|
|
def filterStringValue(value, regex, replace=None):
    """
    Returns string value consisting only of chars satisfying supplied
    regular expression; non-matching chars are substituted with replace
    when it is provided (and truthy)
    """

    if not value:
        return ""

    pieces = []

    for char in value:
        if re.search(regex, char):
            pieces.append(char)
        elif replace:
            pieces.append(replace)

    return "".join(pieces)
|
2010-12-10 13:54:17 +03:00
|
|
|
|
2011-01-05 13:25:07 +03:00
|
|
|
def filterControlChars(value):
    """
    Returns string value with control chars being substituted with ' '
    """

    retVal = filterStringValue(value, NON_CONTROL_CHAR_REGEX, ' ')
    return retVal
|
2011-01-05 13:25:07 +03:00
|
|
|
|
2010-12-10 13:54:17 +03:00
|
|
|
def isDBMSVersionAtLeast(version):
    """
    Checks if the recognized DBMS version is at least the version
    specified (None when no usable version has been recognized)
    """

    retVal = None

    if kb.dbmsVersion and kb.dbmsVersion[0] != UNKNOWN_DBMS_VERSION and kb.dbmsVersion[0] != None:
        value = kb.dbmsVersion[0].replace(" ", "").rstrip('.')

        # Keep only the first dot so the version can be parsed as a float
        # (e.g. "5.0.51" -> "5.051")
        while True:
            index = value.find('.', value.find('.') + 1)

            if index > -1:
                value = value[0:index] + value[index + 1:]
            else:
                break

        value = filterStringValue(value, '[0-9.><=]')

        if isinstance(value, basestring):
            if value.startswith(">="):
                value = float(value.replace(">=", ""))
            elif value.startswith(">"):
                value = float(value.replace(">", "")) + 0.01
            elif value.startswith("<="):
                value = float(value.replace("<=", ""))
            # Bug fix: this branch originally tested startswith(">") --
            # unreachable (already handled above) -- while its body strips
            # "<"; it clearly targets upper-bounded versions
            elif value.startswith("<"):
                value = float(value.replace("<", "")) - 0.01

        # NOTE(review): lexicographic unicode comparison -- may misorder
        # versions of different digit counts (e.g. "9" vs "10"); confirm
        retVal = getUnicode(value) >= getUnicode(version)

    return retVal
|
2010-12-12 01:00:16 +03:00
|
|
|
|
|
|
|
def parseSqliteTableSchema(value):
    """
    Parses table column names and types from specified SQLite table schema
    and caches them under kb.data.cachedColumns for the current db/table
    """

    if not value:
        return

    columns = {}

    # Each "<name> <TYPE>," (or newline-terminated) pair is one column
    for match in getCompiledRegex(r"(\w+) ([A-Z]+)[,\r\n]").finditer(value):
        columns[match.group(1)] = match.group(2)

    kb.data.cachedColumns[conf.db] = { conf.tbl: columns }
|
2010-12-15 14:21:47 +03:00
|
|
|
|
|
|
|
def getTechniqueData(technique=None):
    """
    Returns injection data for technique specified (None when the
    technique is not set or has no recorded data)
    """

    if technique and technique in kb.injection.data:
        return kb.injection.data[technique]

    return None
|
2010-12-15 14:46:28 +03:00
|
|
|
|
|
|
|
def isTechniqueAvailable(technique=None):
    """
    Returns True if there is injection data which sqlmap could use for
    technique specified
    """

    data = getTechniqueData(technique)
    return data is not None
|
2010-12-18 12:51:34 +03:00
|
|
|
|
|
|
|
def initTechnique(technique=None):
    """
    Prepares proper page template and match ratio for technique specified
    """
    try:
        data = getTechniqueData(technique)

        if data:
            # getPageTemplate() is a cross-linked stub bound at runtime
            # (see its definition); it yields the reference page and the
            # error flag used for later response comparisons
            kb.pageTemplate, kb.errorIsNone = getPageTemplate(data.templatePayload, kb.injection.place)
            kb.matchRatio = data.matchRatio
        else:
            warnMsg = "there is no injection data available for technique "
            warnMsg += "'%s'" % enumValueToNameLookup(PAYLOAD.TECHNIQUE, technique)
            logger.warn(warnMsg)

    except sqlmapDataException, _:
        # Session files written by older versions may lack the expected
        # attributes; translate into an actionable message for the user
        errMsg = "missing data in old session file(s). "
        errMsg += "please use '--flush-session' to deal "
        errMsg += "with this error"
        raise sqlmapNoneDataException, errMsg
|
2010-12-22 21:55:50 +03:00
|
|
|
|
|
|
|
def arrayizeValue(value):
    """
    Makes a list out of value if it is not already a list, tuple or set
    itself
    """

    return value if isinstance(value, (list, tuple, set)) else [ value ]
|
2010-12-24 13:55:41 +03:00
|
|
|
|
|
|
|
def getInjectionTests():
    """
    Returns prioritized test list by eventually detected DBMS from error
    messages
    """

    retVal = conf.tests

    # Sort key for sorted() below -- presumably SORTORDER.FIRST < SECOND <
    # THIRD < LAST so lower values run earlier (confirm against SORTORDER
    # definition). UNION tests go last; tests whose DBMS was seen in error
    # messages come before tests bound to other DBMSes
    def priorityFunction(test):
        retVal = SORTORDER.FIRST

        if test.stype == PAYLOAD.TECHNIQUE.UNION:
            retVal = SORTORDER.LAST

        elif 'details' in test and 'dbms' in test.details:
            if test.details.dbms in getErrorParsedDBMSes():
                retVal = SORTORDER.SECOND
            else:
                retVal = SORTORDER.THIRD

        return retVal

    # Only reorder when error messages have already revealed DBMS candidates
    if getErrorParsedDBMSes():
        retVal = sorted(retVal, key=priorityFunction)

    return retVal
|
2010-12-27 02:50:16 +03:00
|
|
|
|
|
|
|
def filterListValue(value, regex):
    """
    Returns list with items that have parts satisfying given regular
    expression (case-insensitive); without a regex the list is returned
    unchanged
    """

    if not regex:
        return value

    filt = getCompiledRegex(regex, re.I)
    return [word for word in value if filt.search(word)]
|
2010-12-28 17:40:34 +03:00
|
|
|
|
|
|
|
def unicodeToSafeHTMLValue(value):
    """
    Returns HTML representation of unicode string value safe for sending
    over HTTP(s): non-ASCII characters become numeric entity references
    """

    if not value:
        return value

    return "".join(char if ord(char) <= 127 else "&#%d;" % ord(char) for char in value)
|
2011-01-02 02:57:27 +03:00
|
|
|
|
2011-01-02 19:51:21 +03:00
|
|
|
def getErrorParsedDBMSes():
    """
    Returns array with parsed DBMS names till now

    This functions is called to:

    1. Sort the tests, getInjectionTests() - detection phase.
    2. Ask user whether or not skip specific DBMS tests in detection phase,
       lib/controller/checks.py - detection phase.
    3. Sort the fingerprint of the DBMS, lib/controller/handler.py -
       fingerprint phase.
    """

    retVal = kb.htmlFp
    return retVal
|
2011-01-02 10:37:47 +03:00
|
|
|
|
2011-01-13 20:36:54 +03:00
|
|
|
def getIdentifiedDBMS():
    """
    Returns the identified DBMS (normalized via aliasToDbmsEnum), taking
    the first available of: knowledge-base value, user-supplied option,
    error-message-parsed candidates. None when nothing is identified yet.
    """

    dbms = None

    if kb.dbms is not None:
        dbms = kb.dbms
    elif conf.dbms is not None:
        dbms = conf.dbms
    # Bug fix: the original tested 'is not None' and then indexed [0],
    # raising IndexError when the (existing but empty) list held no
    # candidates; test truthiness instead
    elif getErrorParsedDBMSes():
        dbms = getErrorParsedDBMSes()[0]

    # Bug fix: guard the None case -- aliasToDbmsEnum() calls value.lower()
    return aliasToDbmsEnum(dbms) if dbms is not None else None
|
|
|
|
|
2011-01-02 10:37:47 +03:00
|
|
|
def showHttpErrorCodes():
    """
    Shows all HTTP error codes raised till now
    """

    if not kb.httpErrorCodes:
        return

    details = []

    for code, count in kb.httpErrorCodes.items():
        # Unknown codes get a '?' placeholder instead of a reason phrase
        reason = httplib.responses[code] if code in httplib.responses else '?'
        details.append("%d (%s) - %d times" % (code, reason, count))

    warnMsg = "HTTP error codes detected during testing:\n"
    warnMsg += ", ".join(details)
    logger.warn(warnMsg)
|
2011-01-03 11:32:06 +03:00
|
|
|
|
2011-01-03 11:46:20 +03:00
|
|
|
def getComparePageRatio(firstPage, secondPage, filtered=False):
    """
    Returns comparison ratio between two given pages, optionally after
    stripping them down to filtered page content
    """

    if filtered:
        firstPage, secondPage = getFilteredPageContent(firstPage), getFilteredPageContent(secondPage)

    matcher = conf.seqMatcher
    matcher.set_seq1(firstPage)
    matcher.set_seq2(secondPage)

    return matcher.quick_ratio()
|
2011-01-08 12:30:10 +03:00
|
|
|
|
|
|
|
def openFile(filename, mode='r'):
|
|
|
|
"""
|
|
|
|
Returns file handle of a given filename
|
|
|
|
"""
|
|
|
|
|
|
|
|
try:
|
|
|
|
return codecs.open(filename, mode, conf.dataEncoding)
|
|
|
|
except IOError, e:
|
|
|
|
errMsg = "there has been a file opening error for filename '%s'. " % filename
|
|
|
|
errMsg += "Please check %s permissions on a file " % ("write" if mode and\
|
|
|
|
('w' in mode or 'a' in mode or '+' in mode) else "read")
|
|
|
|
errMsg += "and that it's not locked by another process."
|
|
|
|
raise sqlmapFilePathException, errMsg
|
2011-01-12 01:18:47 +03:00
|
|
|
|
2011-01-12 01:56:21 +03:00
|
|
|
def __configUnionChar(char):
    """
    Normalizes and stores the UNION query character into conf.uChar
    """

    if char.isdigit() or char == "NULL":
        conf.uChar = char
    elif not (char.startswith("'") and char.endswith("'")):
        # NOTE(review): a char already wrapped in single quotes leaves
        # conf.uChar untouched here -- confirm that is intended
        conf.uChar = "'%s'" % char
|
2011-01-12 01:18:47 +03:00
|
|
|
|
2011-01-12 01:56:21 +03:00
|
|
|
def __configUnionCols(columns):
|
|
|
|
if "-" not in columns or len(columns.split("-")) != 2:
|
|
|
|
raise sqlmapSyntaxException, "--union-cols must be a range with hyphon (e.g. 1-10)"
|
2011-01-12 01:18:47 +03:00
|
|
|
|
2011-01-12 01:56:21 +03:00
|
|
|
columns = columns.replace(" ", "")
|
|
|
|
conf.uColsStart, conf.uColsStop = columns.split("-")
|
2011-01-12 01:18:47 +03:00
|
|
|
|
2011-01-12 01:56:21 +03:00
|
|
|
if not conf.uColsStart.isdigit() or not conf.uColsStop.isdigit():
|
|
|
|
raise sqlmapSyntaxException, "--union-cols must be a range of integers"
|
2011-01-12 01:18:47 +03:00
|
|
|
|
2011-01-12 01:56:21 +03:00
|
|
|
conf.uColsStart = int(conf.uColsStart)
|
|
|
|
conf.uColsStop = int(conf.uColsStop)
|
2011-01-12 01:18:47 +03:00
|
|
|
|
2011-01-12 01:56:21 +03:00
|
|
|
if conf.uColsStart > conf.uColsStop:
|
|
|
|
errMsg = "--union-cols range has to be from lower to "
|
|
|
|
errMsg += "higher number of columns"
|
|
|
|
raise sqlmapSyntaxException, errMsg
|
2011-01-12 01:18:47 +03:00
|
|
|
|
2011-01-12 01:56:21 +03:00
|
|
|
def configUnion(char, columns):
    """
    Configures the UNION query character and column range, preferring
    already-set configuration values over the supplied arguments
    """

    for candidate in (conf.uChar, char):
        if isinstance(candidate, basestring):
            __configUnionChar(candidate)
            break

    for candidate in (conf.uCols, columns):
        if isinstance(candidate, basestring):
            __configUnionCols(candidate)
            break
|