2008-10-15 19:38:22 +04:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
|
|
|
"""
|
2008-10-15 19:56:32 +04:00
|
|
|
$Id$
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
This file is part of the sqlmap project, http://sqlmap.sourceforge.net.
|
|
|
|
|
2010-03-03 18:26:27 +03:00
|
|
|
Copyright (c) 2007-2010 Bernardo Damele A. G. <bernardo.damele@gmail.com>
|
2009-04-22 15:48:07 +04:00
|
|
|
Copyright (c) 2006 Daniele Bellucci <daniele.bellucci@gmail.com>
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
sqlmap is free software; you can redistribute it and/or modify it under
|
|
|
|
the terms of the GNU General Public License as published by the Free
|
|
|
|
Software Foundation version 2 of the License.
|
|
|
|
|
|
|
|
sqlmap is distributed in the hope that it will be useful, but WITHOUT ANY
|
|
|
|
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
|
|
|
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
|
|
|
|
details.
|
|
|
|
|
|
|
|
You should have received a copy of the GNU General Public License along
|
|
|
|
with sqlmap; if not, write to the Free Software Foundation, Inc., 51
|
|
|
|
Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
|
|
|
|
"""
|
|
|
|
|
2010-05-21 16:09:31 +04:00
|
|
|
import cProfile
|
2008-10-15 19:38:22 +04:00
|
|
|
import os
|
|
|
|
import random
|
|
|
|
import re
|
2009-04-22 15:48:07 +04:00
|
|
|
import socket
|
2008-10-15 19:38:22 +04:00
|
|
|
import string
|
|
|
|
import sys
|
|
|
|
import time
|
|
|
|
import urlparse
|
2010-01-05 14:30:33 +03:00
|
|
|
import ntpath
|
|
|
|
import posixpath
|
2010-04-16 17:40:02 +04:00
|
|
|
import subprocess
|
2010-01-28 20:07:34 +03:00
|
|
|
|
2010-04-22 20:13:22 +04:00
|
|
|
from StringIO import StringIO
|
2010-01-28 19:50:34 +03:00
|
|
|
from tempfile import NamedTemporaryFile
|
2010-01-29 13:12:09 +03:00
|
|
|
from tempfile import mkstemp
|
2010-04-22 20:13:22 +04:00
|
|
|
from xml.sax import parse
|
2010-01-24 02:29:34 +03:00
|
|
|
|
2010-01-28 19:50:34 +03:00
|
|
|
from extra.cloak.cloak import decloak
|
2009-04-22 15:48:07 +04:00
|
|
|
from lib.contrib import magic
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.data import conf
|
|
|
|
from lib.core.data import kb
|
|
|
|
from lib.core.data import logger
|
2009-04-22 15:48:07 +04:00
|
|
|
from lib.core.data import paths
|
|
|
|
from lib.core.data import queries
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.data import temp
|
2010-01-15 14:44:05 +03:00
|
|
|
from lib.core.convert import urlencode
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.exception import sqlmapFilePathException
|
2010-01-15 19:06:59 +03:00
|
|
|
from lib.core.exception import sqlmapNoneDataException
|
2010-03-27 02:23:25 +03:00
|
|
|
from lib.core.exception import sqlmapMissingDependence
|
2010-02-10 15:06:23 +03:00
|
|
|
from lib.core.exception import sqlmapSyntaxException
|
2010-03-03 19:19:17 +03:00
|
|
|
from lib.core.settings import DESCRIPTION
|
2009-06-11 19:01:48 +04:00
|
|
|
from lib.core.settings import IS_WIN
|
2010-05-21 16:09:31 +04:00
|
|
|
from lib.core.settings import PLATFORM
|
2010-02-25 20:37:46 +03:00
|
|
|
from lib.core.settings import SITE
|
2008-12-19 23:09:46 +03:00
|
|
|
from lib.core.settings import SQL_STATEMENTS
|
2010-03-27 02:23:25 +03:00
|
|
|
from lib.core.settings import SUPPORTED_DBMS
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.settings import VERSION_STRING
|
2010-03-27 02:23:25 +03:00
|
|
|
from lib.core.settings import MSSQL_ALIASES
|
|
|
|
from lib.core.settings import MYSQL_ALIASES
|
|
|
|
from lib.core.settings import PGSQL_ALIASES
|
|
|
|
from lib.core.settings import ORACLE_ALIASES
|
|
|
|
from lib.core.settings import SQLITE_ALIASES
|
|
|
|
from lib.core.settings import ACCESS_ALIASES
|
|
|
|
from lib.core.settings import FIREBIRD_ALIASES
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
def paramToDict(place, parameters=None):
    """
    Split the parameters into names and values, check if these parameters
    are within the testable parameters and return in a dictionary.

    @param place: where sqlmap has to work, can be GET, POST or Cookie.
    @type place: C{str}

    @param parameters: parameters string in the format for instance
    'p1=v1&p2=v2' (GET and POST) or 'p1=v1;p2=v2' (Cookie).
    @type parameters: C{str}

    @return: the parameters in a dictionary.
    @rtype: C{str}
    """

    testableParameters = {}

    # Fall back to the already-parsed parameter string stored in the
    # configuration singleton when no explicit string was passed in.
    if conf.parameters.has_key(place) and not parameters:
        parameters = conf.parameters[place]

    # Normalize optional whitespace after separators ("a=1, b=2" -> "a=1,b=2").
    parameters = parameters.replace(", ", ",")

    # Cookies use ';' as pair separator, GET/POST use '&'.
    if place == "Cookie":
        splitParams = parameters.split(";")
    else:
        splitParams = parameters.split("&")

    for element in splitParams:
        elem = element.split("=")

        # Only well-formed "name=value" pairs are considered.
        if len(elem) == 2:
            parameter = elem[0].replace(" ", "")

            # Keep the pair when no -p filter was given, or when the
            # parameter is one of the user-selected testable parameters.
            condition = not conf.testParameter
            condition |= parameter in conf.testParameter

            if condition:
                value = elem[1]
                testableParameters[parameter] = value

    # Warn when the user asked for parameters that do not appear at all
    # in this place (GET/POST/Cookie).
    if conf.testParameter and not testableParameters:
        paramStr = ", ".join(test for test in conf.testParameter)

        if len(conf.testParameter) > 1:
            warnMsg = "the testable parameters '%s' " % paramStr
            warnMsg += "you provided are not into the %s" % place
        else:
            parameter = conf.testParameter[0]

            warnMsg = "the testable parameter '%s' " % paramStr
            warnMsg += "you provided is not into the %s" % place

        logger.warn(warnMsg)

    # Some (but not all) requested parameters matched: warn about each
    # requested parameter that was not found in this place.
    elif len(conf.testParameter) != len(testableParameters.keys()):
        for parameter in conf.testParameter:
            if not testableParameters.has_key(parameter):
                warnMsg = "the testable parameter '%s' " % parameter
                warnMsg += "you provided is not into the %s" % place
                logger.warn(warnMsg)

    return testableParameters
|
|
|
|
|
2008-11-16 02:41:31 +03:00
|
|
|
def formatDBMSfp(versions=None):
    """
    This function format the back-end DBMS fingerprint value and return its
    values formatted as a human readable string.

    @param versions: version string, or collection of version strings,
    to combine with the detected DBMS name; defaults to kb.dbmsVersion.
    @type versions: C{str} or C{list}

    @return: detected back-end DBMS based upon fingerprint techniques.
    @rtype: C{str}
    """

    # Fall back to the knowledge-base version unless it is missing/unknown.
    if ( not versions or versions == [None] ) and kb.dbmsVersion and kb.dbmsVersion[0] != "Unknown":
        versions = kb.dbmsVersion

    if isinstance(versions, str):
        return "%s %s" % (kb.dbms, versions)
    elif isinstance(versions, (list, set, tuple)):
        # Multiple candidate versions are chained with "and".
        return "%s %s" % (kb.dbms, " and ".join([version for version in versions]))
    elif not versions:
        warnMsg = "unable to extensively fingerprint the back-end "
        warnMsg += "DBMS version"
        logger.warn(warnMsg)

        # Version unknown: return the bare DBMS name.
        return kb.dbms
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
def formatFingerprintString(values, chain=" or "):
    """
    Join fingerprint values into a single human readable string.

    @param values: iterable of fingerprint value strings.
    @type values: C{list}

    @param chain: separator to place between the values.
    @type chain: C{str}

    @return: the values joined by the separator.
    @rtype: C{str}
    """

    # Join directly on the separator. The previous implementation joined
    # on "|" and then replaced every "|" with the separator, which also
    # corrupted any value that itself contained a "|" character.
    return chain.join([v for v in values])
|
2008-11-17 20:41:02 +03:00
|
|
|
|
2008-11-18 20:42:46 +03:00
|
|
|
def formatFingerprint(target, info):
    """
    This function format the back-end operating system fingerprint value
    and return its values formatted as a human readable string.

    Example of info (kb.headersFp) dictionary:

    {
      'distrib': set(['Ubuntu']),
      'type': set(['Linux']),
      'technology': set(['PHP 5.2.6', 'Apache 2.2.9']),
      'release': set(['8.10'])
    }

    Example of info (kb.bannerFp) dictionary:

    {
      'sp': set(['Service Pack 4']),
      'dbmsVersion': '8.00.194',
      'dbmsServicePack': '0',
      'distrib': set(['2000']),
      'dbmsRelease': '2000',
      'type': set(['Windows'])
    }

    @return: detected back-end operating system based upon fingerprint
    techniques.
    @rtype: C{str}
    """

    infoStr = ""

    # The OS line is emitted only when the 'type' key is present; the
    # remaining OS details are appended to it in a fixed order.
    if info and "type" in info:
        infoStr += "%s operating system: %s" % (target, formatFingerprintString(info["type"]))

        if "distrib" in info:
            infoStr += " %s" % formatFingerprintString(info["distrib"])

        if "release" in info:
            infoStr += " %s" % formatFingerprintString(info["release"])

        if "sp" in info:
            infoStr += " %s" % formatFingerprintString(info["sp"])

        if "codename" in info:
            infoStr += " (%s)" % formatFingerprintString(info["codename"])

    # Web application technology (e.g. Apache, PHP) goes on its own line.
    if "technology" in info:
        infoStr += "\nweb application technology: %s" % formatFingerprintString(info["technology"], ", ")

    return infoStr
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def getHtmlErrorFp():
    """
    This function parses the knowledge base htmlFp list and return its
    values formatted as a human readable string.

    @return: list of possible back-end DBMS based upon error messages
    parsing.
    @rtype: C{str}
    """

    # Nothing was fingerprinted from HTML error messages.
    if not kb.htmlFp:
        return None

    # A single candidate is returned verbatim.
    if len(kb.htmlFp) == 1:
        return kb.htmlFp[0]

    # Several candidates are chained with "or".
    return " or ".join(kb.htmlFp)
|
|
|
|
|
2010-02-25 18:22:41 +03:00
|
|
|
def getDocRoot(webApi=None):
    """
    Infer the web server document root from absolute file paths collected
    in the knowledge base (kb.absFilePaths), falling back to asking the
    user with an OS/web-API dependent default.

    @param webApi: detected web application API (e.g. "php") used only to
    pick a sensible Windows default.
    @type webApi: C{str}

    @return: the document root path.
    @rtype: C{str}
    """

    docRoot = None
    pagePath = directoryPath(conf.path)

    # Choose the fallback default depending on target OS / web API.
    if kb.os == "Windows":
        if webApi == "php":
            defaultDocRoot = "C:/xampp/htdocs/"
        else:
            defaultDocRoot = "C:/Inetpub/wwwroot/"
    else:
        defaultDocRoot = "/var/www/"

    if kb.absFilePaths:
        for absFilePath in kb.absFilePaths:
            # A bare "/" carries no document-root information.
            if directoryPath(absFilePath) == '/':
                continue

            absFilePath = normalizePath(absFilePath)
            absFilePathWin = None

            # Remember the original Windows form; strip the drive letter
            # so the URL page path can be searched inside it.
            if isWindowsPath(absFilePath):
                absFilePathWin = posixToNtSlashes(absFilePath)
                absFilePath = ntToPosixSlashes(absFilePath[2:])
            elif isWindowsDriveLetterPath(absFilePath): # E.g. C:/xampp/htdocs
                absFilePath = absFilePath[2:]

            # The document root is whatever precedes the URL page path
            # inside the absolute file system path.
            if pagePath in absFilePath:
                index = absFilePath.index(pagePath)
                docRoot = absFilePath[:index]

                if len(docRoot) == 0:
                    docRoot = None
                    continue

                # NOTE(review): the drive letter is assumed to be C: here
                # (the original prefix was stripped above).
                if absFilePathWin:
                    docRoot = "C:/%s" % ntToPosixSlashes(docRoot)

                docRoot = normalizePath(docRoot)
                break

    if docRoot:
        infoMsg = "retrieved the web server document root: '%s'" % docRoot
        logger.info(infoMsg)
    else:
        warnMsg = "unable to retrieve the web server document root"
        logger.warn(warnMsg)

        # Interactive fallback: ask the user, offering the default.
        message = "please provide the web server document root "
        message += "[%s]: " % defaultDocRoot
        inputDocRoot = readInput(message, default=defaultDocRoot)

        if inputDocRoot:
            docRoot = inputDocRoot
        else:
            docRoot = defaultDocRoot

    return docRoot
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2010-02-25 18:22:41 +03:00
|
|
|
def getDirs(webApi=None):
    """
    Build the set of web server directories to try when uploading the
    file-access agent: directories derived from kb.absFilePaths plus any
    additional paths provided interactively by the user.

    @param webApi: detected web application API (e.g. "php") used only to
    pick a sensible Windows default.
    @type webApi: C{str}

    @return: candidate directories.
    @rtype: C{set}
    """

    directories = set()

    # OS / web-API dependent defaults, mirroring getDocRoot().
    if kb.os == "Windows":
        if webApi == "php":
            defaultDirs = ["C:/xampp/htdocs/"]
        else:
            defaultDirs = ["C:/Inetpub/wwwroot/"]
    else:
        defaultDirs = ["/var/www/"]

    if kb.absFilePaths:
        infoMsg = "retrieved web server full paths: "
        infoMsg += "'%s'" % ", ".join(path for path in kb.absFilePaths)
        logger.info(infoMsg)

        for absFilePath in kb.absFilePaths:
            if absFilePath:
                directory = directoryPath(absFilePath)

                # Normalize Windows back-slashes to forward slashes.
                if isWindowsPath(directory):
                    directory = ntToPosixSlashes(directory)

                # The file system root is not a useful upload target.
                if directory == '/':
                    continue

                directories.add(directory)
    else:
        warnMsg = "unable to retrieve any web server path"
        logger.warn(warnMsg)

    # Always offer the user a chance to add comma-separated extra paths.
    message = "please provide any additional web server full path to try "
    message += "to upload the agent [%s]: " % ",".join(directory for directory in defaultDirs)
    inputDirs = readInput(message, default=",".join(directory for directory in defaultDirs))

    if inputDirs:
        inputDirs = inputDirs.replace(", ", ",")
        inputDirs = inputDirs.split(",")

        for inputDir in inputDirs:
            if inputDir:
                directories.add(inputDir)
    else:
        # No input at all: fall back to the defaults.
        [directories.add(directory) for directory in defaultDirs]

    return directories
|
2010-03-27 02:23:25 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def filePathToString(filePath):
    """
    Flatten a file system path into a single token usable as a local
    file name: slashes, backslashes, spaces and colons all become
    underscores.

    @param filePath: path to flatten.
    @type filePath: C{str}

    @return: the flattened path.
    @rtype: C{str}
    """

    # One regex pass instead of four chained str.replace() calls.
    return re.sub(r"[/\\ :]", "_", filePath)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
def dataToStdout(data):
    """Write *data* to standard output and flush it right away."""

    # Flushing immediately keeps progress output visible even when
    # stdout is block-buffered (e.g. redirected to a file).
    out = sys.stdout
    out.write(data)
    out.flush()
|
2010-03-27 02:23:25 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def dataToSessionFile(data):
    """Append *data* to the session file, when session output is enabled."""

    # Without a configured session file there is nowhere to write to.
    if conf.sessionFile:
        sessionFP = conf.sessionFP
        sessionFP.write(data)
        sessionFP.flush()
|
2010-03-27 02:23:25 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def dataToDumpFile(dumpFile, data):
    """Append *data* to the given dump file object and flush immediately."""

    # Flush per write so a partial table dump survives an interrupted run.
    out = dumpFile
    out.write(data)
    out.flush()
|
2010-03-27 02:23:25 +03:00
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
def dataToOutFile(data):
    """
    Save file content retrieved from the back-end DBMS file system into
    the sqlmap files output directory.

    @param data: retrieved file content.
    @type data: C{str}

    @return: path of the local file the content was written to, or a
    short message when there was nothing to save.
    @rtype: C{str}
    """

    if not data:
        return "No data retrieved"

    rFile = filePathToString(conf.rFile)
    rFilePath = "%s%s%s" % (conf.filePath, os.sep, rFile)

    # Binary mode: the retrieved content may be a binary file.
    rFileFP = open(rFilePath, "wb")

    # try/finally guarantees the descriptor is released even if the
    # write fails (the original leaked the handle on error).
    try:
        rFileFP.write(data)
        rFileFP.flush()
    finally:
        rFileFP.close()

    return rFilePath
|
|
|
|
|
2009-04-28 03:05:11 +04:00
|
|
|
def strToHex(inpStr):
    """
    @param inpStr: inpStr to be converted into its hexadecimal value.
    @type inpStr: C{str}

    @return: the hexadecimal converted inpStr.
    @rtype: C{str}
    """

    # Newlines are mapped to spaces first; each character then becomes a
    # zero-padded uppercase hexadecimal pair ("%02X" is equivalent to the
    # original "%2x" + space-to-zero replace + upper() sequence).
    return "".join("%02X" % ord(" " if character == "\n" else character) for character in inpStr)
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def fileToStr(fileName):
    """
    @param fileName: file path to read the content and return as a no
    NEWLINE string.
    @type fileName: C{file.open}

    @return: the file content as a string without TAB and NEWLINE.
    @rtype: C{str}
    """

    filePointer = open(fileName, "r")

    # try/finally guarantees the file descriptor is closed; the original
    # implementation left the file object open.
    try:
        fileText = filePointer.read()
    finally:
        filePointer.close()

    # Spaces, tabs and carriage returns are dropped; newlines collapse
    # to a single space each.
    return fileText.replace(" ", "").replace("\t", "").replace("\r", "").replace("\n", " ")
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
def fileToHex(fileName):
    """
    @param fileName: file path to read the content and return as an
    hexadecimal string.
    @type fileName: C{file.open}

    @return: the file content as an hexadecimal string.
    @rtype: C{str}
    """

    # Chain the two sibling helpers: normalize the file content to a
    # single line first, then hex-encode it character by character.
    return strToHex(fileToStr(fileName))
|
|
|
|
|
|
|
|
def readInput(message, default=None):
    """
    @param message: message to display on terminal.
    @type message: C{str}

    @param default: value returned automatically in batch mode, and used
    as fallback when the user enters nothing.
    @type default: C{str}

    @return: a string read from keyboard as input.
    @rtype: C{str}
    """

    # Multi-line prompts get a trailing "> " cursor on their own line.
    if "\n" in message:
        message += "\n> "

    # In batch (non-interactive) mode the default answer is taken
    # automatically and logged, without blocking on stdin.
    if conf.batch and default:
        infoMsg = "%s%s" % (message, str(default))
        logger.info(infoMsg)

        debugMsg = "used the default behaviour, running in batch mode"
        logger.debug(debugMsg)

        data = default
    else:
        data = raw_input(message)

        # An empty answer falls back to the default (possibly None).
        if not data:
            data = default

    return data
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def randomRange(start=0, stop=1000):
    """
    @param start: starting number.
    @type start: C{int}

    @param stop: last number.
    @type stop: C{int}

    @return: a random number within the range.
    @rtype: C{int}
    """

    # random.randint already returns an int in [start, stop] inclusive;
    # the extra int() cast of the original was redundant.
    return random.randint(start, stop)
|
|
|
|
|
|
|
|
def randomInt(length=4):
    """
    @param length: length of the random string.
    @type length: C{int}

    @return: a random string of digits.
    @rtype: C{str}
    """

    # Draw `length` random decimal digits and parse them as one integer
    # (leading zeros therefore shorten the resulting number).
    digits = [random.choice(string.digits) for _ in range(length)]
    return int("".join(digits))
|
|
|
|
|
2010-02-26 16:13:50 +03:00
|
|
|
def randomStr(length=4, lowercase=False):
    """
    @param length: length of the random string.
    @type length: C{int}

    @param lowercase: whether to draw only lowercase letters.
    @type lowercase: C{bool}

    @return: a random string of characters.
    @rtype: C{str}
    """

    # string.ascii_letters/ascii_lowercase are locale-independent, unlike
    # the deprecated string.letters/string.lowercase, which could inject
    # non-ASCII characters under non-C locales and break generated
    # identifiers/payload tokens.
    if lowercase:
        alphabet = string.ascii_lowercase
    else:
        alphabet = string.ascii_letters

    return "".join(random.choice(alphabet) for _ in range(length))
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2009-04-28 03:05:11 +04:00
|
|
|
def sanitizeStr(inpStr):
    """
    @param inpStr: inpStr to sanitize: cast to str datatype and replace
    newlines with one space and strip carriage returns.
    @type inpStr: C{str}

    @return: sanitized inpStr
    @rtype: C{str}
    """

    # Cast first so non-string values (e.g. ints) are accepted too, then
    # collapse newlines into spaces and drop carriage returns.
    return str(inpStr).replace("\n", " ").replace("\r", "")
|
|
|
|
|
|
|
|
def checkFile(filename):
    """
    @param filename: filename to check if it exists.
    @type filename: C{str}
    """

    # Abort with a sqlmap-specific exception so the caller can present a
    # clean error message instead of an I/O traceback later on.
    if not os.path.exists(filename):
        raise sqlmapFilePathException, "unable to read file '%s'" % filename
|
2010-03-22 20:38:19 +03:00
|
|
|
|
|
|
|
def replaceNewlineTabs(inpStr, stdout=False):
    """
    Encode newlines, tabs and the session delimiter inside *inpStr* so
    the value can be stored on a single session-file line (or shown on
    one terminal line when *stdout* is True).
    """

    # On the terminal both characters simply become spaces; in the
    # session file they get reversible placeholder tokens.
    if stdout:
        newlineRepl, tabRepl = " ", " "
    else:
        newlineRepl, tabRepl = "__NEWLINE__", "__TAB__"

    result = inpStr.replace("\n", newlineRepl).replace("\t", tabRepl)

    # temp.delimiter is used as a field separator, so any occurrence
    # inside the data itself has to be escaped as well.
    return result.replace(temp.delimiter, "__DEL__")
|
|
|
|
|
|
|
|
def banner():
    """
    This function prints sqlmap banner with its version
    """

    # NOTE(review): Python 2 print statement; the banner is composed from
    # the VERSION_STRING, DESCRIPTION and SITE constants imported from
    # lib.core.settings.
    print """
%s - %s
%s
""" % (VERSION_STRING, DESCRIPTION, SITE)
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def parsePasswordHash(password):
    """
    Format a retrieved password hash for display; for Microsoft SQL
    Server hashes the header/salt/case-sensitive parts are broken out
    onto separate, indented lines.
    """

    # Indentation used to align the hash components under the label.
    blank = " " * 8

    # Empty or blank hashes are shown as NULL.
    if not password or password == " ":
        password = "NULL"

    if kb.dbms == "Microsoft SQL Server" and password != "NULL":
        # Slice offsets follow the MSSQL pwdencrypt() hash layout:
        # 6 hex chars header, 8 salt, 40 mixed-case SHA1 digest.
        hexPassword = password
        password = "%s\n" % hexPassword
        password += "%sheader: %s\n" % (blank, hexPassword[:6])
        password += "%ssalt: %s\n" % (blank, hexPassword[6:14])
        password += "%smixedcase: %s\n" % (blank, hexPassword[14:54])

        # MSSQL 2005/2008 dropped the additional uppercase digest.
        if kb.dbmsVersion[0] not in ( "2005", "2008" ):
            password += "%suppercase: %s" % (blank, hexPassword[54:])

    return password
|
2010-01-05 19:15:31 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def cleanQuery(query):
    """
    Uppercase every recognized SQL statement keyword found inside
    *query* (e.g. "select ... from" -> "SELECT ... FROM").

    @param query: user-provided SQL statement.
    @type query: C{str}

    @return: the statement with recognized keywords uppercased.
    @rtype: C{str}
    """

    upperQuery = query

    for sqlStatements in SQL_STATEMENTS.values():
        for sqlStatement in sqlStatements:
            # re.escape() protects every regex metacharacter; the
            # original escaped only the opening parenthesis by hand,
            # so statements containing other metacharacters could
            # build a broken or over-matching pattern.
            queryMatch = re.search("(%s)" % re.escape(sqlStatement), query, re.I)

            if queryMatch:
                upperQuery = upperQuery.replace(queryMatch.group(1), sqlStatement.upper())

    return upperQuery
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def setPaths():
    """
    Populate the global `paths` singleton with every directory and data
    file sqlmap needs, all derived from paths.SQLMAP_ROOT_PATH.
    """

    # sqlmap paths
    paths.SQLMAP_CONTRIB_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "lib", "contrib")
    paths.SQLMAP_EXTRAS_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "extra")
    paths.SQLMAP_SHELL_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "shell")
    paths.SQLMAP_TXT_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "txt")
    paths.SQLMAP_UDF_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "udf")
    paths.SQLMAP_XML_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "xml")
    paths.SQLMAP_XML_BANNER_PATH = os.path.join(paths.SQLMAP_XML_PATH, "banner")
    paths.SQLMAP_OUTPUT_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "output")
    # "%s" placeholders are filled in later with the target host name.
    paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
    paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files")

    # sqlmap files
    paths.SQLMAP_HISTORY = os.path.join(paths.SQLMAP_ROOT_PATH, ".sqlmap_history")
    # Random suffix avoids clobbering a previously saved configuration.
    paths.SQLMAP_CONFIG = os.path.join(paths.SQLMAP_ROOT_PATH, "sqlmap-%s.conf" % randomStr())
    paths.FUZZ_VECTORS = os.path.join(paths.SQLMAP_TXT_PATH, "fuzz_vectors.txt")
    paths.DETECTION_RULES_XML = os.path.join(paths.SQLMAP_XML_PATH, "detection.xml")
    paths.ERRORS_XML = os.path.join(paths.SQLMAP_XML_PATH, "errors.xml")
    paths.QUERIES_XML = os.path.join(paths.SQLMAP_XML_PATH, "queries.xml")
    paths.GENERIC_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "generic.xml")
    paths.MSSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "mssql.xml")
    paths.MYSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "mysql.xml")
    paths.ORACLE_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "oracle.xml")
    paths.PGSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "postgresql.xml")
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def weAreFrozen():
    """
    Returns whether we are frozen via py2exe.
    This will affect how we find out where we are located.
    Reference: http://www.py2exe.org/index.cgi/WhereAmI
    """

    # py2exe marks a frozen interpreter by setting sys.frozen.
    frozen = hasattr(sys, "frozen")
    return frozen
|
|
|
|
|
2010-03-27 02:23:25 +03:00
|
|
|
def parseTargetDirect():
    """
    Parse target dbms and set some attributes into the configuration singleton.
    """

    if not conf.direct:
        return

    details = None
    remote = False

    # Match the -d connection string, e.g.
    # mysql://user:pass@host:port/db or access://db_file_path.
    for dbms in SUPPORTED_DBMS:
        details = re.search("^(?P<dbms>%s)://(?P<credentials>(?P<user>.+?)\:(?P<pass>.*?)\@)?(?P<remote>(?P<hostname>.+?)\:(?P<port>[\d]+)\/)?(?P<db>[\w\d\.\_\-\/]+?)$" % dbms, conf.direct, re.I)

        if details:
            conf.dbms = details.group('dbms')

            if details.group('credentials'):
                conf.dbmsUser = details.group('user')
                conf.dbmsPass = details.group('pass')
            else:
                conf.dbmsUser = str()
                conf.dbmsPass = str()

            # Empty password is normalized to None.
            if not conf.dbmsPass:
                conf.dbmsPass = None

            if details.group('remote'):
                remote = True
                conf.hostname = details.group('hostname')
                conf.port = int(details.group('port'))
            else:
                conf.hostname = "localhost"
                conf.port = 0

            conf.dbmsDb = details.group('db')

            conf.parameters[None] = "direct connection"

            break

    if not details:
        errMsg = "invalid target details, valid syntax is for instance "
        errMsg += "'mysql://USER:PASSWORD@DBMS_IP:DBMS_PORT/DATABASE_NAME' "
        errMsg += "or 'access://DATABASE_FILEPATH'"
        raise sqlmapSyntaxException, errMsg

    # Per-DBMS: [alias list, required Python driver package, download URL].
    dbmsDict = { "Microsoft SQL Server": [MSSQL_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/"],
                 "MySQL": [MYSQL_ALIASES, "python-mysqldb", "http://mysql-python.sourceforge.net/"],
                 "PostgreSQL": [PGSQL_ALIASES, "python-psycopg2", "http://initd.org/psycopg/"],
                 "Oracle": [ORACLE_ALIASES, "python cx_Oracle", "http://cx-oracle.sourceforge.net/"],
                 "SQLite": [SQLITE_ALIASES, "python-pysqlite2 and python-sqlite", "http://pysqlite.googlecode.com/"],
                 "Access": [ACCESS_ALIASES, "python-pyodbc", "http://pyodbc.googlecode.com/"],
                 "Firebird": [FIREBIRD_ALIASES, "python-kinterbasdb", "http://kinterbasdb.sourceforge.net/"] }

    for dbmsName, data in dbmsDict.items():
        if conf.dbms in data[0]:
            try:
                # File-based DBMSes are local only; everything else
                # requires explicit host:port details.
                if dbmsName in ('Access', 'SQLite'):
                    if remote:
                        warnMsg = "direct connection over the network for "
                        warnMsg += "%s DBMS is not supported" % dbmsName
                        logger.warn(warnMsg)

                        conf.hostname = "localhost"
                        conf.port = 0
                elif not remote:
                    errMsg = "missing remote connection details"
                    raise sqlmapSyntaxException, errMsg

                # Probe for the matching driver; ImportError below turns
                # a missing driver into a user-friendly message.
                if dbmsName == "Microsoft SQL Server":
                    import _mssql
                    import pymssql

                    if not hasattr(pymssql, "__version__") or pymssql.__version__ < "1.0.2":
                        errMsg = "pymssql library on your system must be "
                        errMsg += "version 1.0.2 to work, get it from "
                        errMsg += "http://sourceforge.net/projects/pymssql/files/pymssql/1.0.2/"
                        raise sqlmapMissingDependence, errMsg

                elif dbmsName == "MySQL":
                    import MySQLdb
                elif dbmsName == "PostgreSQL":
                    import psycopg2
                elif dbmsName == "Oracle":
                    import cx_Oracle
                elif dbmsName == "SQLite":
                    import sqlite
                    import sqlite3
                elif dbmsName == "Access":
                    import pyodbc
                elif dbmsName == "Firebird":
                    import kinterbasdb
            except ImportError, _:
                errMsg = "sqlmap requires %s third-party library " % data[1]
                errMsg += "in order to directly connect to the database "
                errMsg += "%s. Download from %s" % (dbmsName, data[2])
                raise sqlmapMissingDependence, errMsg
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def parseTargetUrl():
    """
    Parse target url and set some attributes into the configuration singleton.
    """

    if not conf.url:
        return

    # Prepend a scheme if the user omitted it; port 443 in the URL is
    # taken as a hint for HTTPS.
    if not re.search("^http[s]*://", conf.url):
        if ":443/" in conf.url:
            conf.url = "https://" + conf.url
        else:
            conf.url = "http://" + conf.url

    __urlSplit = urlparse.urlsplit(conf.url)
    __hostnamePort = __urlSplit[1].split(":")

    conf.scheme = __urlSplit[0]
    conf.path = __urlSplit[2]
    conf.hostname = __hostnamePort[0]

    # Explicit port in the URL wins; otherwise derive it from the scheme.
    if len(__hostnamePort) == 2:
        try:
            conf.port = int(__hostnamePort[1])
        except:
            errMsg = "invalid target url"
            raise sqlmapSyntaxException, errMsg
    elif conf.scheme == "https":
        conf.port = 443
    else:
        conf.port = 80

    # The query string becomes the testable GET parameters.
    if __urlSplit[3]:
        conf.parameters["GET"] = __urlSplit[3]

    # Rebuild a canonical URL with the explicit port.
    conf.url = "%s://%s:%d%s" % (conf.scheme, conf.hostname, conf.port, conf.path)
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def expandAsteriskForColumns(expression):
    """
    If the user provided an asterisk rather than the column(s) name,
    sqlmap will retrieve the columns itself and reprocess the SQL
    query string (expression).
    """
    match = re.search("^SELECT\s+\*\s+FROM\s+([\w\.\_]+)\s*", expression, re.I)

    if match:
        infoMsg = "you did not provide the fields in your query. "
        infoMsg += "sqlmap will retrieve the column names itself"
        logger.info(infoMsg)

        dbTbl = match.group(1)

        # A dotted identifier carries both the database and table name
        if dbTbl and "." in dbTbl:
            conf.db, conf.tbl = dbTbl.split(".")
        else:
            conf.tbl = dbTbl

        columnsDict = conf.dbmsHandler.getColumns(onlyColNames=True)

        if columnsDict and conf.db in columnsDict and conf.tbl in columnsDict[conf.db]:
            columnsStr = ", ".join(sorted(columnsDict[conf.db][conf.tbl].keys()))

            # Only the first asterisk is the column placeholder
            expression = expression.replace("*", columnsStr, 1)

            infoMsg = "the query with column names is: "
            infoMsg += "%s" % expression
            logger.info(infoMsg)

    return expression
|
2010-01-09 02:50:06 +03:00
|
|
|
|
2008-11-25 14:33:44 +03:00
|
|
|
def getRange(count, dump=False, plusOne=False):
    """
    Return the iteration range for `count` entries, honouring the
    user-supplied start/stop limits when dumping table entries.
    """
    stop = int(count)
    start = 1

    if dump:
        # Clamp to the user's limits only when they are sane integers
        if isinstance(conf.limitStop, int) and 0 < conf.limitStop < stop:
            stop = conf.limitStop

        if isinstance(conf.limitStart, int) and 0 < conf.limitStart <= stop:
            start = conf.limitStart

    # plusOne keeps the 1-based bounds, otherwise shift to 0-based
    return range(start, stop + 1) if plusOne else range(start - 1, stop)
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
def parseUnionPage(output, expression, partial=False, condition=None, sort=True):
    """
    Parse the in-band (UNION) query output extracted from the page
    content into a list of entries (nested lists for multi-column
    rows), logging it to the session file when appropriate.
    """
    data = []

    # Delimited output: either the randomized run-time markers
    # (temp.start/temp.stop) or the static session-file markers
    outCond1 = ( output.startswith(temp.start) and output.endswith(temp.stop) )
    outCond2 = ( output.startswith("__START__") and output.endswith("__STOP__") )

    if outCond1 or outCond2:
        if outCond1:
            regExpr = '%s(.*?)%s' % (temp.start, temp.stop)
        elif outCond2:
            regExpr = '__START__(.*?)__STOP__'

        # One match per delimited entry; re.S lets entries span newlines
        output = re.findall(regExpr, output, re.S)

        # By default only write to the session file when this query is
        # not being resumed from a previous session
        if condition is None:
            condition = (
                kb.resumedQueries and conf.url in kb.resumedQueries.keys()
                and expression in kb.resumedQueries[conf.url].keys()
            )

        if partial or not condition:
            logOutput = "".join(["__START__%s__STOP__" % replaceNewlineTabs(value) for value in output])
            dataToSessionFile("[%s][%s][%s][%s][%s]\n" % (conf.url, kb.injPlace, conf.parameters[kb.injPlace], expression, logOutput))

        if sort:
            # NOTE: de-duplicates as a side effect and loses ordering
            output = set(output)

        for entry in output:
            info = []

            # Multi-column rows carry an inner column delimiter
            if "__DEL__" in entry:
                entry = entry.split("__DEL__")
            else:
                entry = entry.split(temp.delimiter)

            if len(entry) == 1:
                data.append(entry[0])
            else:
                for value in entry:
                    info.append(value)

                data.append(info)
    else:
        # Undelimited output is passed through as-is
        data = output

    # Collapse a single scalar result to a plain string
    if len(data) == 1 and isinstance(data[0], str):
        data = data[0]

    return data
|
2010-01-05 14:43:16 +03:00
|
|
|
|
|
|
|
def getDelayQuery(andCond=False):
    """
    Return the DBMS-specific time-delay query used for time-based
    blind injection, formatted with conf.timeSec where applicable.
    """
    query = None

    if kb.dbms in ("MySQL", "PostgreSQL"):
        # Native sleep primitives only exist from MySQL 5.0.12 and
        # PostgreSQL 8.2 onwards; older versions get the fallback
        # heavy query (timedelay2)
        if not kb.data.banner:
            conf.dbmsHandler.getVersionFromBanner()

        banVer = kb.bannerFp["dbmsVersion"]

        # NOTE(review): this is a lexicographic string comparison, not
        # a semantic version compare -- verify it holds for all banner
        # formats (e.g. double-digit version components)
        if (kb.dbms == "MySQL" and banVer >= "5.0.12") or (kb.dbms == "PostgreSQL" and banVer >= "8.2"):
            query = queries[kb.dbms].timedelay % conf.timeSec
        else:
            query = queries[kb.dbms].timedelay2 % conf.timeSec
    elif kb.dbms == "Firebird":
        # Firebird's delay query takes no seconds parameter
        query = queries[kb.dbms].timedelay
    else:
        query = queries[kb.dbms].timedelay % conf.timeSec

    if andCond:
        # Reshape the query so it can be appended to an AND condition
        if kb.dbms in ( "MySQL", "SQLite" ):
            query = query.replace("SELECT ", "")
        elif kb.dbms == "Firebird":
            query = "(%s)>0" % query

    return query
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
def getLocalIP():
    """
    Return the local IP address of the interface used to reach the
    target host (determined by opening a TCP connection to it).
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((conf.hostname, conf.port))

    localIP, _ = sock.getsockname()
    sock.close()

    return localIP
|
|
|
|
|
|
|
|
def getRemoteIP():
    """
    Resolve the target hostname and return its IP address.
    """
    address = socket.gethostbyname(conf.hostname)

    return address
|
|
|
|
|
|
|
|
def getFileType(filePath):
    """
    Classify a local file as 'text' or 'binary' using libmagic,
    returning 'unknown' when the probe fails.
    """
    try:
        magicFileType = magic.from_file(filePath)
    # Best-effort probe: keep swallowing libmagic failures, but the
    # original bare except also trapped SystemExit/KeyboardInterrupt
    except Exception:
        return "unknown"

    if "ASCII" in magicFileType or "text" in magicFileType:
        return "text"
    else:
        return "binary"
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
def pollProcess(process):
    """
    Print a progress dot every second until the given subprocess
    terminates, then report how it ended.
    """
    returncode = None

    while returncode is None:
        dataToStdout(".")
        time.sleep(1)

        returncode = process.poll()

    if returncode == 0:
        dataToStdout(" done\n")
    elif returncode < 0:
        # Negative return codes mean the process was killed by a signal
        dataToStdout(" process terminated by signal %d\n" % returncode)
    elif returncode > 0:
        dataToStdout(" quit unexpectedly with return code %d\n" % returncode)
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
def getCharset(charsetType=None):
    """
    Return the ordinal table used by the inference algorithm for the
    given charset type (None means the whole 7-bit ASCII table).
    """
    if charsetType is None:
        return range(0, 128)

    # Ordinal sub-ranges per specialised charset type:
    # 1: 0 or 1, 2: digits, 3: hexadecimal, 4: characters,
    # 5: characters and digits
    subRanges = {
        1: [ (47, 50) ],
        2: [ (47, 58) ],
        3: [ (47, 58), (64, 71), (96, 103) ],
        4: [ (64, 91), (96, 123) ],
        5: [ (47, 58), (64, 91), (96, 123) ],
    }

    asciiTbl = []

    if charsetType in subRanges:
        # Every specialised table keeps 0 and 1 for boundary markers
        asciiTbl.extend([ 0, 1 ])

        for lower, upper in subRanges[charsetType]:
            asciiTbl.extend(range(lower, upper))

    return asciiTbl
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2009-05-13 00:24:47 +04:00
|
|
|
def searchEnvPath(fileName):
    """
    Check whether `fileName` exists inside any directory listed in the
    PATH environment variable.

    NOTE: returns the boolean of the last existence check (True when
    found), not the matching path itself.
    """
    separator = ";" if IS_WIN else ":"
    result = None

    for directory in os.environ["PATH"].split(separator):
        directory = directory.replace(";", "")
        result = os.path.exists(os.path.normpath(os.path.join(directory, fileName)))

        if result:
            break

    return result
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2010-01-15 14:45:48 +03:00
|
|
|
def urlEncodeCookieValues(cookieStr):
    """
    URL-encode the value of each name=value pair of a Cookie header
    string, leaving names and the 'secure' flag untouched; return None
    when no cookie string is given.
    """
    if not cookieStr:
        return None

    result = ""

    for part in cookieStr.split(';'):
        index = part.find('=') + 1

        if index > 0:
            # name=value pair: encode only the value portion
            name = part[:index - 1].strip()
            value = urlencode(part[index:], convall=True)
            result += "; %s=%s" % (name, value)
        elif part.strip().lower() != "secure":
            # Valueless attribute: keep it, encoded, behind an encoded
            # separator
            result += "%s%s" % ("%3B", urlencode(part, convall=True))
        else:
            result += "; secure"

    # Strip the separator prepended before the very first part
    if result.startswith('; '):
        result = result[2:]
    elif result.startswith('%3B'):
        result = result[3:]

    return result
|
2010-01-05 14:30:33 +03:00
|
|
|
|
|
|
|
def directoryPath(path):
    """
    Return the directory portion of a path, honouring Windows
    drive-letter paths as well as POSIX ones.
    """
    if isWindowsDriveLetterPath(path):
        return ntpath.dirname(path)

    return posixpath.dirname(path)
|
2010-01-15 20:42:46 +03:00
|
|
|
|
2010-01-05 14:30:33 +03:00
|
|
|
def normalizePath(path):
    """
    Normalize a path, honouring Windows drive-letter paths as well as
    POSIX ones.
    """
    if isWindowsDriveLetterPath(path):
        return ntpath.normpath(path)

    return posixpath.normpath(path)
|
2010-01-15 19:06:59 +03:00
|
|
|
|
|
|
|
def safeStringFormat(formatStr, params):
|
2010-05-07 17:40:57 +04:00
|
|
|
retVal = formatStr.replace("%d", "%s")
|
2010-01-15 20:42:46 +03:00
|
|
|
|
|
|
|
if isinstance(params, str):
|
|
|
|
retVal = retVal.replace("%s", params)
|
|
|
|
else:
|
|
|
|
count = 0
|
|
|
|
index = 0
|
|
|
|
|
|
|
|
while index != -1:
|
2010-05-07 17:40:57 +04:00
|
|
|
index = retVal.find("%s")
|
2010-01-15 20:42:46 +03:00
|
|
|
|
|
|
|
if index != -1:
|
|
|
|
if count < len(params):
|
|
|
|
retVal = retVal[:index] + str(params[count]) + retVal[index+2:]
|
|
|
|
else:
|
|
|
|
raise sqlmapNoneDataException, "wrong number of parameters during string formatting"
|
|
|
|
count += 1
|
|
|
|
|
2010-01-15 19:06:59 +03:00
|
|
|
return retVal
|
2010-01-24 02:29:34 +03:00
|
|
|
|
2010-03-26 20:18:02 +03:00
|
|
|
def sanitizeAsciiString(subject):
    """
    Return `subject` with every non-ASCII character (ord >= 128)
    replaced by '?'; None or empty input yields None.
    """
    if subject:
        index = None

        # Locate the first non-ASCII character, if any
        for i in range(len(subject)):
            if ord(subject[i]) >= 128:
                index = i
                break

        # BUGFIX: the original 'if not index' also matched index == 0,
        # returning strings that START with a non-ASCII character
        # unsanitized
        if index is None:
            return subject
        else:
            # Keep the clean prefix, rewrite the rest in one pass
            return subject[:index] + "".join(subject[i] if ord(subject[i]) < 128 else '?' for i in range(index, len(subject)))
    else:
        return None
|
2010-01-28 19:50:34 +03:00
|
|
|
|
|
|
|
def decloakToNamedTemporaryFile(filepath, name=None):
    """
    Decloak the given file into a NamedTemporaryFile, optionally
    masquerading it under a different name; the real on-disk name is
    kept in 'old_name' so the finalizer can restore it before closing.
    """
    retVal = NamedTemporaryFile()

    def __del__():
        try:
            if hasattr(retVal, 'old_name'):
                # BUGFIX: restore from the stored attribute; the bare
                # name 'old_name' was unbound here and raised NameError
                retVal.name = retVal.old_name
            retVal.close()
        except OSError:
            pass

    # NOTE(review): assigning __del__ on the instance does not hook
    # garbage collection for new-style classes -- presumably this is
    # meant to be invoked explicitly by the caller; verify
    retVal.__del__ = __del__

    retVal.write(decloak(filepath))
    retVal.seek(0)

    if name:
        retVal.old_name = retVal.name
        retVal.name = name

    return retVal
|
2010-01-29 13:12:09 +03:00
|
|
|
|
|
|
|
def decloakToMkstemp(filepath, **kwargs):
    """
    Decloak the given file into a freshly created mkstemp() file and
    return it opened read/write (binary) with the content rewound.
    """
    fd, name = mkstemp(**kwargs)

    # BUGFIX: close the raw descriptor returned by mkstemp(); the
    # original discarded it, leaking one file descriptor per call
    os.close(fd)

    retVal = open(name, 'w+b')

    retVal.write(decloak(filepath))
    retVal.seek(0)

    return retVal
|
2010-02-04 12:49:31 +03:00
|
|
|
|
|
|
|
def isWindowsPath(filepath):
    """
    Check whether the given path is an absolute Windows one,
    e.g. 'C:\\Windows'.
    """
    # Drive letter, colon, then a literal backslash
    return re.match(r"[\w]\:\\", filepath) is not None
|
2010-02-04 17:37:00 +03:00
|
|
|
|
2010-04-22 14:31:33 +04:00
|
|
|
def isWindowsDriveLetterPath(filepath):
    """
    Check whether the given path starts with a Windows drive letter,
    e.g. 'C:' (either slash style).
    """
    return re.match(r"[\w]\:", filepath) is not None
|
|
|
|
|
2010-02-04 17:37:00 +03:00
|
|
|
def posixToNtSlashes(filepath):
    """
    Replace all POSIX slashes in the path with Windows backslashes.
    """
    return '\\'.join(filepath.split('/'))
|
|
|
|
|
|
|
|
def ntToPosixSlashes(filepath):
    """
    Replace all Windows backslashes in the path with POSIX slashes.
    """
    return '/'.join(filepath.split('\\'))
|
2010-03-26 20:18:02 +03:00
|
|
|
|
|
|
|
def isBase64EncodedString(subject):
    """
    Check whether the given string looks like valid Base64 data
    (groups of four alphabet characters, optionally padded).
    """
    base64Regex = r"\A(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?\Z"

    return re.match(base64Regex, subject) is not None
|
|
|
|
|
|
|
|
def isHexEncodedString(subject):
    """
    Check whether the given string consists solely of hexadecimal
    digits.
    """
    return re.match(r"\A[0-9a-fA-F]+\Z", subject) is not None
|
2010-04-16 17:40:02 +04:00
|
|
|
|
2010-05-21 16:09:31 +04:00
|
|
|
def profile(profileOutputFile=None, imageOutputFile=None):
    """
    Profile a sqlmap run with cProfile, convert the collected
    statistics into a PNG call graph via gprof2dot piped into
    graphviz's dot, then try to open the resulting image.
    """
    if profileOutputFile is None:
        profileOutputFile = os.path.join(paths.SQLMAP_OUTPUT_PATH, "sqlmap_profile.raw")

    if imageOutputFile is None:
        imageOutputFile = os.path.join(paths.SQLMAP_OUTPUT_PATH, "sqlmap_profile.png")

    # Start from a clean slate so stale artifacts from a previous run
    # cannot be mistaken for this run's output
    if os.path.exists(profileOutputFile):
        os.remove(profileOutputFile)

    if os.path.exists(imageOutputFile):
        os.remove(imageOutputFile)

    infoMsg = "profiling the execution into file %s" % profileOutputFile
    logger.info(infoMsg)

    cProfile.run("start()", profileOutputFile)

    infoMsg = "converting profile data into a graph image, %s" % imageOutputFile
    logger.info(infoMsg)

    graphScriptPath = os.path.join(paths.SQLMAP_EXTRAS_PATH, 'gprof2dot', 'gprof2dot.py')

    stderr = subprocess.Popen('%s %s -f pstats %s | dot -Tpng -o %s' % (sys.executable, graphScriptPath, profileOutputFile, imageOutputFile), shell=True, stderr=subprocess.PIPE).stderr.read()

    if stderr or not os.path.exists(imageOutputFile):
        # BUGFIX: added the missing separator so the message no longer
        # reads "('...')but you can still find..."
        errMsg = "there was an error while converting ('%s'), " % stderr.strip()
        errMsg += "but you can still find raw profile data "
        errMsg += "inside file '%s'" % profileOutputFile
        logger.error(errMsg)

        return

    # Best-effort attempt to open the image with the platform's viewer
    try:
        if PLATFORM == 'mac':
            subprocess.call(('open', imageOutputFile))
        elif PLATFORM == 'posix':
            subprocess.call(('xdg-open', imageOutputFile))
        elif PLATFORM == 'nt':
            subprocess.call(('start', imageOutputFile))
    except:
        pass
|
|
|
|
|
2010-04-16 17:40:02 +04:00
|
|
|
def getConsoleWidth(default=80):
    """
    Detect the terminal width: first via the COLUMNS environment
    variable, then 'stty size', then curses; fall back to `default`.
    """
    width = None
    columns = os.environ.get('COLUMNS')

    if columns is not None and columns.isdigit():
        width = int(columns)
    else:
        process = subprocess.Popen('stty size', shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        fields = process.stdout.read().split()

        # 'stty size' reports "<rows> <cols>"
        if len(fields) == 2 and fields[1].isdigit():
            width = int(fields[1])

    if width is None:
        try:
            import curses

            screen = curses.initscr()
            _, width = screen.getmaxyx()
            curses.endwin()
        except:
            pass

    return width if width else default
|
|
|
|
|
|
|
|
def parseXmlFile(xmlFile, handler):
    """
    Parse the given XML file with the provided SAX handler.
    """
    xfile = open(xmlFile)

    try:
        # Let the SAX parser stream the file itself instead of first
        # copying the whole content into an in-memory StringIO buffer
        parse(xfile, handler)
    finally:
        # try/finally also fixes the descriptor leak on parse errors
        xfile.close()
|
2010-05-13 15:05:35 +04:00
|
|
|
|
|
|
|
def calculateDeltaSeconds(start, epsilon=0.05):
    """
    Return the whole number of seconds elapsed since `start`, with a
    small epsilon compensating for floating point jitter.
    """
    delta = time.time() - start

    return int(delta + epsilon)
|
2010-05-21 13:35:36 +04:00
|
|
|
|
2010-05-21 16:19:20 +04:00
|
|
|
def initCommonOutputs():
    """
    Load txt/common-outputs.txt into kb.commonOutputs, mapping each
    '[Section]' header to the list of lines that follow it.
    """
    kb.commonOutputs = {}
    section = None

    fp = open(os.path.join(paths.SQLMAP_TXT_PATH, 'common-outputs.txt'), 'r')

    for line in fp:
        line = line.strip()

        # Skip blanks and single-character noise
        if len(line) <= 1:
            continue

        if line[0] == '[' and line[-1] == ']':
            # A '[Section]' header starts a new bucket
            section = line[1:-1]
        elif section:
            if section not in kb.commonOutputs:
                kb.commonOutputs[section] = []

            kb.commonOutputs[section].append(line)

    fp.close()
|
2010-05-21 16:19:20 +04:00
|
|
|
|
2010-05-21 16:44:09 +04:00
|
|
|
def getGoodSamaritanCharsets(part, prevValue, originalCharset):
    """
    Split `originalCharset` into (predictedCharset, otherCharset)
    based on entries of the common-outputs knowledge base that match
    the partially inferred value.

    Wild card . (dot) is supported in `prevValue` for compatibility
    with threading.
    """
    if not kb.commonOutputs:
        initCommonOutputs()

    predictionSet = set()
    wildIndexes = []

    # A terminating dot marks the position of the next character to infer
    if prevValue[-1] != '.':
        prevValue += '.'

    # Collect the positions of all wild card characters.
    # BUGFIX: advance past the found dot; stepping charIndex by one
    # re-found the same dot and filled wildIndexes with duplicates
    findIndex = prevValue.find('.')
    while findIndex != -1:
        wildIndexes.append(findIndex)
        findIndex = prevValue.find('.', findIndex + 1)

    if part in kb.commonOutputs:
        # BUGFIX: iterate the entries of the requested part; the
        # original indexed kb.commonOutputs with kb.dbms despite
        # having just tested membership of `part`
        for item in kb.commonOutputs[part]:
            if re.search('\A%s' % prevValue, item):
                for index in wildIndexes:
                    predictionSet.add(item[index])

        predictedCharset = []
        otherCharset = []

        # BUGFIX: the parameter is named originalCharset; the body
        # referenced the undefined name 'originalTable' (NameError)
        for ordChar in originalCharset:
            if chr(ordChar) not in predictionSet:
                otherCharset.append(ordChar)
            else:
                predictedCharset.append(ordChar)

        predictedCharset.sort()

        return predictedCharset, otherCharset
    else:
        return None, originalCharset
|