#!/usr/bin/env python
"""
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import codecs
import contextlib
import cookielib
import copy
import getpass
import hashlib
import httplib
import inspect
import json
import locale
import logging
import ntpath
import os
import posixpath
import random
import re
import socket
import string
import sys
import tempfile
import time
import urllib
import urllib2
import urlparse
import unicodedata
from ConfigParser import DEFAULTSECT
from ConfigParser import RawConfigParser
from StringIO import StringIO
from difflib import SequenceMatcher
from math import sqrt
from optparse import OptionValueError
from subprocess import PIPE
from subprocess import Popen as execute
from xml.dom import minidom
from xml.sax import parse
from xml.sax import SAXParseException
from extra.cloak.cloak import decloak
from extra.safe2bin.safe2bin import safecharencode
from lib.core.bigarray import BigArray
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.data import paths
from lib.core.convert import base64pickle
from lib.core.convert import base64unpickle
from lib.core.convert import hexdecode
from lib.core.convert import htmlunescape
from lib.core.convert import stdoutencode
from lib.core.convert import unicodeencode
from lib.core.convert import utf8encode
from lib.core.decorators import cachedmethod
from lib.core.defaults import defaults
from lib.core.dicts import DBMS_DICT
from lib.core.dicts import DEFAULT_DOC_ROOTS
from lib.core.dicts import DEPRECATED_OPTIONS
from lib.core.dicts import SQL_STATEMENTS
from lib.core.enums import ADJUST_TIME_DELAY
from lib.core.enums import CONTENT_STATUS
from lib.core.enums import CHARSET_TYPE
from lib.core.enums import DBMS
from lib.core.enums import EXPECTED
from lib.core.enums import HEURISTIC_TEST
from lib.core.enums import HTTP_HEADER
from lib.core.enums import HTTPMETHOD
from lib.core.enums import OS
from lib.core.enums import PLACE
from lib.core.enums import PAYLOAD
from lib.core.enums import REFLECTIVE_COUNTER
from lib.core.enums import SORT_ORDER
from lib.core.exception import SqlmapDataException
from lib.core.exception import SqlmapGenericException
from lib.core.exception import SqlmapNoneDataException
from lib.core.exception import SqlmapInstallationException
from lib.core.exception import SqlmapMissingDependence
from lib.core.exception import SqlmapSilentQuitException
from lib.core.exception import SqlmapSyntaxException
from lib.core.exception import SqlmapSystemException
from lib.core.exception import SqlmapUserQuitException
from lib.core.log import LOGGER_HANDLER
from lib.core.optiondict import optDict
from lib.core.settings import BANNER
from lib.core.settings import BOLD_PATTERNS
from lib.core.settings import BRUTE_DOC_ROOT_PREFIXES
from lib.core.settings import BRUTE_DOC_ROOT_SUFFIXES
from lib.core.settings import BRUTE_DOC_ROOT_TARGET_MARK
from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
from lib.core.settings import DBMS_DIRECTORY_DICT
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
from lib.core.settings import DEFAULT_MSSQL_SCHEMA
from lib.core.settings import DESCRIPTION
from lib.core.settings import DUMMY_USER_INJECTION
from lib.core.settings import DYNAMICITY_MARK_LENGTH
from lib.core.settings import ERROR_PARSING_REGEXES
from lib.core.settings import FORCE_COOKIE_EXPIRATION_TIME
from lib.core.settings import FORM_SEARCH_REGEX
from lib.core.settings import GENERIC_DOC_ROOT_DIRECTORY_NAMES
from lib.core.settings import GIT_PAGE
from lib.core.settings import GITHUB_REPORT_OAUTH_TOKEN
from lib.core.settings import GOOGLE_ANALYTICS_COOKIE_PREFIX
from lib.core.settings import HASHDB_MILESTONE_VALUE
from lib.core.settings import HOST_ALIASES
from lib.core.settings import INFERENCE_UNKNOWN_CHAR
from lib.core.settings import INVALID_UNICODE_CHAR_FORMAT
from lib.core.settings import IP_ADDRESS_REGEX
from lib.core.settings import ISSUES_PAGE
from lib.core.settings import IS_WIN
from lib.core.settings import LARGE_OUTPUT_THRESHOLD
from lib.core.settings import MIN_ENCODED_LEN_CHECK
from lib.core.settings import MIN_TIME_RESPONSES
from lib.core.settings import MIN_VALID_DELAYED_RESPONSE
from lib.core.settings import NETSCAPE_FORMAT_HEADER_COOKIES
from lib.core.settings import NULL
from lib.core.settings import PARAMETER_AMP_MARKER
from lib.core.settings import PARAMETER_SEMICOLON_MARKER
from lib.core.settings import PARTIAL_HEX_VALUE_MARKER
from lib.core.settings import PARTIAL_VALUE_MARKER
from lib.core.settings import PAYLOAD_DELIMITER
from lib.core.settings import PLATFORM
from lib.core.settings import PRINTABLE_CHAR_REGEX
from lib.core.settings import PYVERSION
from lib.core.settings import REFERER_ALIASES
from lib.core.settings import REFLECTED_BORDER_REGEX
from lib.core.settings import REFLECTED_MAX_REGEX_PARTS
from lib.core.settings import REFLECTED_REPLACEMENT_REGEX
from lib.core.settings import REFLECTED_VALUE_MARKER
from lib.core.settings import REFLECTIVE_MISS_THRESHOLD
from lib.core.settings import REVISION
from lib.core.settings import SENSITIVE_DATA_REGEX
from lib.core.settings import SITE
from lib.core.settings import SUPPORTED_DBMS
from lib.core.settings import TEXT_TAG_REGEX
from lib.core.settings import TIME_STDEV_COEFF
from lib.core.settings import UNICODE_ENCODING
from lib.core.settings import UNKNOWN_DBMS_VERSION
from lib.core.settings import URI_QUESTION_MARKER
from lib.core.settings import URLENCODE_CHAR_LIMIT
from lib.core.settings import URLENCODE_FAILSAFE_CHARS
from lib.core.settings import USER_AGENT_ALIASES
from lib.core.settings import VERSION
from lib.core.settings import VERSION_STRING
from lib.core.threads import getCurrentThreadData
from lib.utils.sqlalchemy import _sqlalchemy
from thirdparty.clientform.clientform import ParseResponse
from thirdparty.clientform.clientform import ParseError
from thirdparty.magic import magic
from thirdparty.odict.odict import OrderedDict
from thirdparty.termcolor.termcolor import colored
class UnicodeRawConfigParser(RawConfigParser):
"""
RawConfigParser with unicode writing support
"""
def write(self, fp):
"""
Write an .ini-format representation of the configuration state.
"""
if self._defaults:
fp.write("[%s]\n" % DEFAULTSECT)
for (key, value) in self._defaults.items():
fp.write("%s = %s\n" % (key, getUnicode(value, UNICODE_ENCODING).replace('\n', '\n\t')))
fp.write("\n")
for section in self._sections:
fp.write("[%s]\n" % section)
for (key, value) in self._sections[section].items():
if key != "__name__":
if value is None:
fp.write("%s\n" % (key))
else:
fp.write("%s = %s\n" % (key, getUnicode(value, UNICODE_ENCODING).replace('\n', '\n\t')))
fp.write("\n")
class Format(object):
@staticmethod
def humanize(values, chain=" or "):
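"""
Returns a human readable string built from the given list of values
(illustrative: Format.humanize(("MySQL", "Oracle")) gives 'MySQL or Oracle')
"""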
return chain.join(values)
# Get methods
@staticmethod
def getDbms(versions=None):
"""
Formats the back-end DBMS fingerprint value and returns it
as a human readable string.
@return: detected back-end DBMS based upon fingerprint techniques.
@rtype: C{str}
"""
if versions is None and Backend.getVersionList():
versions = Backend.getVersionList()
return Backend.getDbms() if versions is None else "%s %s" % (Backend.getDbms(), " and ".join(v for v in versions))
@staticmethod
def getErrorParsedDBMSes():
"""
Parses the knowledge base htmlFp list and returns its values
formatted as a human readable string.
@return: list of possible back-end DBMS based upon error messages
parsing.
@rtype: C{str}
"""
htmlParsed = None
if len(kb.htmlFp) == 0 or kb.heuristicTest != HEURISTIC_TEST.POSITIVE:
pass
elif len(kb.htmlFp) == 1:
htmlParsed = kb.htmlFp[0]
elif len(kb.htmlFp) > 1:
htmlParsed = " or ".join(kb.htmlFp)
return htmlParsed
@staticmethod
def getOs(target, info):
"""
Formats the back-end operating system fingerprint value
and returns it as a human readable string.
Example of info (kb.headersFp) dictionary:
{
'distrib': set(['Ubuntu']),
'type': set(['Linux']),
'technology': set(['PHP 5.2.6', 'Apache 2.2.9']),
'release': set(['8.10'])
}
Example of info (kb.bannerFp) dictionary:
{
'sp': set(['Service Pack 4']),
'dbmsVersion': '8.00.194',
'dbmsServicePack': '0',
'distrib': set(['2000']),
'dbmsRelease': '2000',
'type': set(['Windows'])
}
@return: detected back-end operating system based upon fingerprint
techniques.
@rtype: C{str}
"""
infoStr = ""
infoApi = {}
if info and "type" in info:
if hasattr(conf, "api"):
infoApi["%s operating system" % target] = info
else:
infoStr += "%s operating system: %s" % (target, Format.humanize(info["type"]))
if "distrib" in info:
infoStr += " %s" % Format.humanize(info["distrib"])
if "release" in info:
infoStr += " %s" % Format.humanize(info["release"])
if "sp" in info:
infoStr += " %s" % Format.humanize(info["sp"])
if "codename" in info:
infoStr += " (%s)" % Format.humanize(info["codename"])
if "technology" in info:
if hasattr(conf, "api"):
infoApi["web application technology"] = Format.humanize(info["technology"], ", ")
else:
infoStr += "\nweb application technology: %s" % Format.humanize(info["technology"], ", ")
if hasattr(conf, "api"):
return infoApi
else:
return infoStr.lstrip()
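# Illustrative output for the kb.bannerFp example above (target "back-end DBMS",
# non-API mode): "back-end DBMS operating system: Windows 2000 Service Pack 4"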
class Backend:
# Set methods
@staticmethod
def setDbms(dbms):
dbms = aliasToDbmsEnum(dbms)
if dbms is None:
return None
# Little precaution, in theory this condition should always be false
elif kb.dbms is not None and kb.dbms != dbms:
warnMsg = "there seems to be a high probability that "
warnMsg += "this could be a false positive case"
logger.warn(warnMsg)
msg = "sqlmap previously fingerprinted back-end DBMS as "
msg += "%s. However now it has been fingerprinted " % kb.dbms
msg += "as %s. " % dbms
msg += "Please, specify which DBMS should be "
msg += "correct [%s (default)/%s] " % (kb.dbms, dbms)
while True:
_ = readInput(msg, default=kb.dbms)
if aliasToDbmsEnum(_) == kb.dbms:
break
elif aliasToDbmsEnum(_) == dbms:
kb.dbms = aliasToDbmsEnum(_)
break
else:
warnMsg = "invalid value"
logger.warn(warnMsg)
elif kb.dbms is None:
kb.dbms = aliasToDbmsEnum(dbms)
return kb.dbms
@staticmethod
def setVersion(version):
if isinstance(version, basestring):
kb.dbmsVersion = [version]
return kb.dbmsVersion
@staticmethod
def setVersionList(versionsList):
if isinstance(versionsList, list):
kb.dbmsVersion = versionsList
elif isinstance(versionsList, basestring):
Backend.setVersion(versionsList)
else:
logger.error("invalid format of versionsList")
@staticmethod
def forceDbms(dbms, sticky=False):
if not kb.stickyDBMS:
kb.forcedDbms = aliasToDbmsEnum(dbms)
kb.stickyDBMS = sticky
@staticmethod
def flushForcedDbms(force=False):
if not kb.stickyDBMS or force:
kb.forcedDbms = None
kb.stickyDBMS = False
@staticmethod
def setOs(os):
if os is None:
return None
# Little precaution, in theory this condition should always be false
elif kb.os is not None and isinstance(os, basestring) and kb.os.lower() != os.lower():
msg = "sqlmap previously fingerprinted back-end DBMS "
msg += "operating system %s. However now it has " % kb.os
msg += "been fingerprinted to be %s. " % os
msg += "Please, specify which OS is "
msg += "correct [%s (default)/%s] " % (kb.os, os)
while True:
_ = readInput(msg, default=kb.os)
if _ == kb.os:
break
elif _ == os:
kb.os = _.capitalize()
break
else:
warnMsg = "invalid value"
logger.warn(warnMsg)
elif kb.os is None and isinstance(os, basestring):
kb.os = os.capitalize()
return kb.os
@staticmethod
def setOsVersion(version):
if version is None:
return None
elif kb.osVersion is None and isinstance(version, basestring):
kb.osVersion = version
@staticmethod
def setOsServicePack(sp):
if sp is None:
return None
elif kb.osSP is None and isinstance(sp, int):
kb.osSP = sp
@staticmethod
def setArch():
msg = "what is the back-end database management system architecture?"
msg += "\n[1] 32-bit (default)"
msg += "\n[2] 64-bit"
while True:
_ = readInput(msg, default='1')
if isinstance(_, basestring) and _.isdigit() and int(_) in (1, 2):
kb.arch = 32 if int(_) == 1 else 64
break
else:
warnMsg = "invalid value. Valid values are 1 and 2"
logger.warn(warnMsg)
return kb.arch
# Get methods
@staticmethod
def getForcedDbms():
return aliasToDbmsEnum(kb.get("forcedDbms"))
@staticmethod
def getDbms():
return aliasToDbmsEnum(kb.get("dbms"))
@staticmethod
def getErrorParsedDBMSes():
"""
Returns an array of DBMS names parsed so far
This function is called to:
1. Sort the tests, getSortedInjectionTests() - detection phase.
2. Ask user whether or not skip specific DBMS tests in detection phase,
lib/controller/checks.py - detection phase.
3. Sort the fingerprint of the DBMS, lib/controller/handler.py -
fingerprint phase.
"""
return kb.htmlFp if kb.get("heuristicTest") == HEURISTIC_TEST.POSITIVE else []
@staticmethod
def getIdentifiedDbms():
dbms = None
if not kb:
pass
elif Backend.getForcedDbms() is not None:
dbms = Backend.getForcedDbms()
elif Backend.getDbms() is not None:
dbms = kb.dbms
elif conf.get("dbms"):
dbms = conf.dbms
elif Backend.getErrorParsedDBMSes():
dbms = unArrayizeValue(Backend.getErrorParsedDBMSes())
elif kb.get("injection") and kb.injection.dbms:
dbms = unArrayizeValue(kb.injection.dbms)
return aliasToDbmsEnum(dbms)
@staticmethod
def getVersion():
if len(kb.dbmsVersion) > 0:
return kb.dbmsVersion[0]
else:
return None
@staticmethod
def getVersionList():
if len(kb.dbmsVersion) > 0:
return kb.dbmsVersion
else:
return None
@staticmethod
def getOs():
return kb.os
@staticmethod
def getOsVersion():
return kb.osVersion
@staticmethod
def getOsServicePack():
return kb.osSP
@staticmethod
def getArch():
if kb.arch is None:
Backend.setArch()
return kb.arch
# Comparison methods
@staticmethod
def isDbms(dbms):
if Backend.getDbms() is not None:
return Backend.getDbms() == aliasToDbmsEnum(dbms)
else:
return Backend.getIdentifiedDbms() == aliasToDbmsEnum(dbms)
@staticmethod
def isDbmsWithin(aliases):
return Backend.getDbms() is not None and Backend.getDbms().lower() in aliases
@staticmethod
def isVersion(version):
return Backend.getVersion() is not None and Backend.getVersion() == version
@staticmethod
def isVersionWithin(versionList):
if Backend.getVersionList() is None:
return False
for _ in Backend.getVersionList():
if _ != UNKNOWN_DBMS_VERSION and _ in versionList:
return True
return False
@staticmethod
def isVersionGreaterOrEqualThan(version):
return Backend.getVersion() is not None and str(Backend.getVersion()) >= str(version)
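# Note: this is a plain string comparison - fine for same-length version strings
# (e.g. "9.1" >= "9.0") but it can misorder version strings of different lengths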
@staticmethod
def isOs(os):
return Backend.getOs() is not None and Backend.getOs().lower() == os.lower()
def paramToDict(place, parameters=None):
"""
Split the parameters into names and values, check if these parameters
are within the testable parameters and return in a dictionary.
"""
testableParameters = OrderedDict()
if place in conf.parameters and not parameters:
2008-10-15 19:38:22 +04:00
parameters = conf.parameters[place]
parameters = parameters.replace(", ", ",")
parameters = re.sub(r"&(\w{1,4});", r"%s\g<1>%s" % (PARAMETER_AMP_MARKER, PARAMETER_SEMICOLON_MARKER), parameters)
if place == PLACE.COOKIE:
splitParams = parameters.split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER)
else:
splitParams = parameters.split(conf.paramDel or DEFAULT_GET_POST_DELIMITER)
for element in splitParams:
element = re.sub(r"%s(.+?)%s" % (PARAMETER_AMP_MARKER, PARAMETER_SEMICOLON_MARKER), r"&\g<1>;", element)
parts = element.split("=")
if len(parts) >= 2:
parameter = urldecode(parts[0].replace(" ", ""))
if not parameter:
continue
if conf.paramDel and conf.paramDel == '\n':
parts[-1] = parts[-1].rstrip()
condition = not conf.testParameter
condition |= conf.testParameter is not None and parameter in conf.testParameter
condition |= place == PLACE.COOKIE and len(intersect((PLACE.COOKIE,), conf.testParameter, True)) > 0
if condition:
testableParameters[parameter] = "=".join(parts[1:])
if not conf.multipleTargets and not (conf.csrfToken and parameter == conf.csrfToken):
_ = urldecode(testableParameters[parameter], convall=True)
if (_.endswith("'") and _.count("'") == 1
or re.search(r'\A9{3,}', _) or re.search(DUMMY_USER_INJECTION, _))\
and not parameter.upper().startswith(GOOGLE_ANALYTICS_COOKIE_PREFIX):
warnMsg = "it appears that you have provided tainted parameter values "
warnMsg += "('%s') with most probably leftover " % element
2012-12-13 13:03:21 +04:00
warnMsg += "chars/statements from manual SQL injection test(s). "
2012-10-04 13:25:44 +04:00
warnMsg += "Please, always use only valid parameter values "
2013-08-22 12:44:21 +04:00
warnMsg += "so sqlmap could be able to run properly"
logger.warn(warnMsg)
message = "are you really sure that you want to continue (sqlmap could have problems)? [y/N] "
test = readInput(message, default="N")
if test[0] not in ("y", "Y"):
raise SqlmapSilentQuitException
else:
original = [_ for _ in string.ascii_letters + string.digits]
shuffled = list(original)
random.shuffle(shuffled)
kb.easterEgg = dict(_ for _ in zip(original, shuffled))
elif not _:
warnMsg = "provided value for parameter '%s' is empty. " % parameter
warnMsg += "Please, always use only valid parameter values "
warnMsg += "so sqlmap could be able to run properly"
logger.warn(warnMsg)
if conf.testParameter and not testableParameters:
paramStr = ", ".join(test for test in conf.testParameter)
if len(conf.testParameter) > 1:
warnMsg = "provided parameters '%s' " % paramStr
warnMsg += "are not inside the %s" % place
logger.warn(warnMsg)
else:
parameter = conf.testParameter[0]
if not intersect(USER_AGENT_ALIASES + REFERER_ALIASES + HOST_ALIASES, parameter, True):
debugMsg = "provided parameter '%s' " % paramStr
debugMsg += "is not inside the %s" % place
logger.debug(debugMsg)
elif len(conf.testParameter) != len(testableParameters.keys()):
for parameter in conf.testParameter:
if parameter not in testableParameters:
debugMsg = "provided parameter '%s' " % parameter
debugMsg += "is not inside the %s" % place
logger.debug(debugMsg)
if testableParameters:
for parameter, value in testableParameters.items():
if value and not value.isdigit():
for encoding in ("hex", "base64"):
try:
decoded = value.decode(encoding)
if len(decoded) > MIN_ENCODED_LEN_CHECK and all(_ in string.printable for _ in decoded):
warnMsg = "provided parameter '%s' " % parameter
warnMsg += "seems to be '%s' encoded" % encoding
logger.warn(warnMsg)
break
except:
pass
return testableParameters
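# Illustrative call (assuming no -p/--skip filtering, so every parameter is testable):
#   paramToDict(PLACE.GET, "id=1&cat=2") -> OrderedDict([('id', '1'), ('cat', '2')])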
def getManualDirectories():
directories = None
pagePath = directoryPath(conf.path)
defaultDocRoot = DEFAULT_DOC_ROOTS.get(Backend.getOs(), DEFAULT_DOC_ROOTS[OS.LINUX])
if kb.absFilePaths:
for absFilePath in kb.absFilePaths:
if directories:
break
if directoryPath(absFilePath) == '/':
continue
absFilePath = normalizePath(absFilePath)
windowsDriveLetter = None
if isWindowsDriveLetterPath(absFilePath):
windowsDriveLetter, absFilePath = absFilePath[:2], absFilePath[2:]
absFilePath = ntToPosixSlashes(posixToNtSlashes(absFilePath))
if any("/%s/" % _ in absFilePath for _ in GENERIC_DOC_ROOT_DIRECTORY_NAMES):
for _ in GENERIC_DOC_ROOT_DIRECTORY_NAMES:
_ = "/%s/" % _
if _ in absFilePath:
directories = "%s%s" % (absFilePath.split(_)[0], _)
break
if pagePath and pagePath in absFilePath:
directories = absFilePath.split(pagePath)[0]
if windowsDriveLetter:
directories = "%s/%s" % (windowsDriveLetter, ntToPosixSlashes(directories))
directories = normalizePath(directories)
if directories:
infoMsg = "retrieved the web server document root: '%s'" % directories
logger.info(infoMsg)
else:
warnMsg = "unable to retrieve automatically the web server "
warnMsg += "document root"
logger.warn(warnMsg)
directories = []
message = "what do you want to use for writable directory?\n"
message += "[1] common location(s) ('%s') (default)\n" % ", ".join(root for root in defaultDocRoot)
message += "[2] custom location(s)\n"
message += "[3] custom directory list file\n"
message += "[4] brute force search"
choice = readInput(message, default="1").strip()
if choice == "2":
message = "please provide a comma separate list of absolute directory paths: "
directories = readInput(message, default="").split(',')
elif choice == "3":
message = "what's the list file location?\n"
listPath = readInput(message, default="")
checkFile(listPath)
directories = getFileItems(listPath)
elif choice == "4":
targets = set([conf.hostname])
_ = conf.hostname.split('.')
if _[0] == "www":
targets.add('.'.join(_[1:]))
targets.add('.'.join(_[1:-1]))
else:
targets.add('.'.join(_[:-1]))
targets = filter(None, targets)
for prefix in BRUTE_DOC_ROOT_PREFIXES.get(Backend.getOs(), DEFAULT_DOC_ROOTS[OS.LINUX]):
if BRUTE_DOC_ROOT_TARGET_MARK in prefix and re.match(IP_ADDRESS_REGEX, conf.hostname):
continue
for suffix in BRUTE_DOC_ROOT_SUFFIXES:
for target in targets:
item = "%s/%s" % (prefix, suffix)
item = item.replace(BRUTE_DOC_ROOT_TARGET_MARK, target).replace("//", '/').rstrip('/')
directories.append(item)
if BRUTE_DOC_ROOT_TARGET_MARK not in prefix:
break
infoMsg = "using generated directory list: %s" % ','.join(directories)
logger.info(infoMsg)
msg = "use any additional custom directories [Enter for None]: "
answer = readInput(msg)
if answer:
directories.extend(answer.split(','))
else:
directories = defaultDocRoot
return directories
def getAutoDirectories():
retVal = set()
if kb.absFilePaths:
infoMsg = "retrieved web server absolute paths: "
infoMsg += "'%s'" % ", ".join(ntToPosixSlashes(path) for path in kb.absFilePaths)
logger.info(infoMsg)
for absFilePath in kb.absFilePaths:
if absFilePath:
directory = directoryPath(absFilePath)
directory = ntToPosixSlashes(directory)
retVal.add(directory)
else:
warnMsg = "unable to automatically parse any web server path"
logger.warn(warnMsg)
_ = extractRegexResult(r"//[^/]+?(?P<result>/.*)/", conf.url) # web directory
if _:
retVal.add(_)
return list(retVal)
def filePathToSafeString(filePath):
"""
Returns string representation of a given filepath safe for a single filename usage
>>> filePathToSafeString('C:/Windows/system32')
'C__Windows_system32'
"""
retVal = filePath.replace("/", "_").replace("\\", "_")
retVal = retVal.replace(" ", "_").replace(":", "_")
return retVal
def singleTimeDebugMessage(message):
singleTimeLogMessage(message, logging.DEBUG)
def singleTimeWarnMessage(message):
singleTimeLogMessage(message, logging.WARN)
def singleTimeLogMessage(message, level=logging.INFO, flag=None):
if flag is None:
flag = hash(message)
if not conf.smokeTest and flag not in kb.singleLogFlags:
kb.singleLogFlags.add(flag)
logger.log(level, message)
def boldifyMessage(message):
retVal = message
if any(_ in message for _ in BOLD_PATTERNS):
retVal = setColor(message, True)
return retVal
def setColor(message, bold=False):
retVal = message
level = extractRegexResult(r"\[(?P<result>[A-Z ]+)\]", message) or kb.get("stickyLevel")
if message and getattr(LOGGER_HANDLER, "is_tty", False): # colorizing handler
if bold:
retVal = colored(message, color=None, on_color=None, attrs=("bold",))
elif level:
level = getattr(logging, level, None) if isinstance(level, basestring) else level
_ = LOGGER_HANDLER.level_map.get(level)
if _:
background, foreground, bold = _
retVal = colored(message, color=foreground, on_color="on_%s" % background if background else None, attrs=("bold",) if bold else None)
kb.stickyLevel = level if message and message[-1] != "\n" else None
return retVal
def dataToStdout(data, forceOutput=False, bold=False, content_type=None, status=CONTENT_STATUS.IN_PROGRESS):
"""
Writes text to the stdout (console) stream
"""
message = ""
if not kb.get("threadException"):
if forceOutput or not getCurrentThreadData().disableStdOut:
if kb.get("multiThreadMode"):
logging._acquireLock()
if isinstance(data, unicode):
message = stdoutencode(data)
else:
message = data
if kb.easterEgg:
message = "".join(kb.easterEgg.get(_, _) for _ in message)
if hasattr(conf, "api"):
sys.stdout.write(message, status, content_type)
else:
sys.stdout.write(setColor(message, bold))
try:
sys.stdout.flush()
except IOError:
pass
if kb.get("multiThreadMode"):
logging._releaseLock()
kb.prependFlag = isinstance(data, basestring) and (len(data) == 1 and data not in ('\n', '\r') or len(data) > 2 and data[0] == '\r' and data[-1] != '\n')
def dataToTrafficFile(data):
if not conf.trafficFile:
return
try:
conf.trafficFP.write(data)
conf.trafficFP.flush()
except IOError, ex:
errMsg = "something went wrong while trying "
errMsg += "to write to the traffic file '%s' ('%s')" % (conf.trafficFile, ex)
raise SqlmapSystemException(errMsg)
def dataToDumpFile(dumpFile, data):
try:
dumpFile.write(data)
dumpFile.flush()
except IOError, ex:
if "No space left" in getUnicode(ex):
errMsg = "no space left on output device"
logger.error(errMsg)
else:
raise
def dataToOutFile(filename, data):
retVal = None
if data:
retVal = os.path.join(conf.filePath, filePathToSafeString(filename))
try:
with openFile(retVal, "wb") as f:
f.write(data)
except IOError, ex:
errMsg = "something went wrong while trying to write "
errMsg += "to the output file ('%s')" % ex
raise SqlmapGenericException(errMsg)
return retVal
def readInput(message, default=None, checkBatch=True):
"""
Reads input from terminal
"""
retVal = None
kb.stickyLevel = None
message = getUnicode(message)
if "\n" in message:
message += "%s> " % ("\n" if message.count("\n") > 1 else "")
elif message[-1] == ']':
message += " "
if kb.get("prependFlag"):
message = "\n%s" % message
kb.prependFlag = False
if conf.get("answers"):
for item in conf.answers.split(','):
question = item.split('=')[0].strip()
answer = item.split('=')[1] if len(item.split('=')) > 1 else None
if answer and question.lower() in message.lower():
retVal = getUnicode(answer, UNICODE_ENCODING)
infoMsg = "%s%s" % (message, retVal)
logger.info(infoMsg)
debugMsg = "used the given answer"
logger.debug(debugMsg)
break
if retVal is None:
if checkBatch and conf.get("batch"):
if isListLike(default):
options = ",".join(getUnicode(opt, UNICODE_ENCODING) for opt in default)
elif default:
options = getUnicode(default, UNICODE_ENCODING)
else:
options = unicode()
dataToStdout("\r%s%s\n" % (message, options), forceOutput=True, bold=True)
debugMsg = "used the default behaviour, running in batch mode"
logger.debug(debugMsg)
retVal = default
else:
logging._acquireLock()
dataToStdout("\r%s" % message, forceOutput=True, bold=True)
kb.prependFlag = False
try:
retVal = raw_input() or default
retVal = getUnicode(retVal, encoding=sys.stdin.encoding) if retVal else retVal
except:
time.sleep(0.05) # Reference: http://www.gossamer-threads.com/lists/python/python/781893
kb.prependFlag = True
raise SqlmapUserQuitException
finally:
logging._releaseLock()
return retVal
def randomRange(start=0, stop=1000, seed=None):
"""
Returns random integer value in given range
>>> random.seed(0)
>>> randomRange(1, 500)
423
"""
randint = random.WichmannHill(seed).randint if seed is not None else random.randint
return int(randint(start, stop))
def randomInt(length=4, seed=None):
"""
Returns random integer value with provided number of digits
>>> random.seed(0)
>>> randomInt(6)
874254
"""
choice = random.WichmannHill(seed).choice if seed is not None else random.choice
return int("".join(choice(string.digits if _ != 0 else string.digits.replace('0', '')) for _ in xrange(0, length)))
def randomStr(length=4, lowercase=False, alphabet=None, seed=None):
"""
Returns random string value with provided number of characters
>>> random.seed(0)
>>> randomStr(6)
'RNvnAv'
"""
choice = random.WichmannHill(seed).choice if seed is not None else random.choice
if alphabet:
retVal = "".join(choice(alphabet) for _ in xrange(0, length))
elif lowercase:
retVal = "".join(choice(string.ascii_lowercase) for _ in xrange(0, length))
else:
retVal = "".join(choice(string.ascii_letters) for _ in xrange(0, length))
return retVal
def sanitizeStr(value):
"""
Sanitizes string value with respect to newline and carriage return characters
>>> sanitizeStr('foo\\n\\rbar')
u'foo bar'
"""
return getUnicode(value).replace("\n", " ").replace("\r", "")
def checkFile(filename):
"""
Checks for file existence and readability
"""
valid = True
if filename is None or not os.path.isfile(filename):
valid = False
if valid:
try:
with open(filename, "rb") as f:
pass
except:
valid = False
if not valid:
raise SqlmapSystemException("unable to read file '%s'" % filename)
def banner():
"""
This function prints sqlmap banner with its version
"""
_ = BANNER
if not getattr(LOGGER_HANDLER, "is_tty", False):
_ = re.sub("\033.+?m", "", _)
dataToStdout(_, forceOutput=True)
def parsePasswordHash(password):
"""
In case of Microsoft SQL Server the password hash value is expanded into its components
"""
blank = " " * 8
if not password or password == " ":
password = NULL
if Backend.isDbms(DBMS.MSSQL) and password != NULL and isHexEncodedString(password):
hexPassword = password
password = "%s\n" % hexPassword
password += "%sheader: %s\n" % (blank, hexPassword[:6])
password += "%ssalt: %s\n" % (blank, hexPassword[6:14])
password += "%smixedcase: %s\n" % (blank, hexPassword[14:54])
if not Backend.isVersionWithin(("2005", "2008")):
password += "%suppercase: %s" % (blank, hexPassword[54:])
return password
def cleanQuery(query):
"""
Switch all SQL statement (alike) keywords to upper case
"""
retVal = query
for sqlStatements in SQL_STATEMENTS.values():
for sqlStatement in sqlStatements:
sqlStatementEsc = sqlStatement.replace("(", "\\(")
queryMatch = re.search("(%s)" % sqlStatementEsc, query, re.I)
if queryMatch and "sys_exec" not in query:
retVal = retVal.replace(queryMatch.group(1), sqlStatement.upper())
return retVal
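# Illustrative (assuming "select " and "from " are among the SQL_STATEMENTS keywords):
#   cleanQuery("select id from users") -> "SELECT id FROM users"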
def setPaths():
"""
Sets absolute paths for project directories and files
"""
# sqlmap paths
paths.SQLMAP_EXTRAS_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "extra")
paths.SQLMAP_PROCS_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "procs")
paths.SQLMAP_SHELL_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "shell")
paths.SQLMAP_TAMPER_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "tamper")
paths.SQLMAP_WAF_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "waf")
paths.SQLMAP_TXT_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "txt")
paths.SQLMAP_UDF_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "udf")
paths.SQLMAP_XML_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "xml")
paths.SQLMAP_XML_BANNER_PATH = os.path.join(paths.SQLMAP_XML_PATH, "banner")
_ = os.path.join(os.path.expanduser("~"), ".sqlmap")
paths.SQLMAP_OUTPUT_PATH = getUnicode(paths.get("SQLMAP_OUTPUT_PATH", os.path.join(_, "output")), encoding=sys.getfilesystemencoding())
paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files")
# sqlmap files
paths.OS_SHELL_HISTORY = os.path.join(_, "os.hst")
paths.SQL_SHELL_HISTORY = os.path.join(_, "sql.hst")
paths.SQLMAP_SHELL_HISTORY = os.path.join(_, "sqlmap.hst")
paths.GITHUB_HISTORY = os.path.join(_, "github.hst")
paths.SQLMAP_CONFIG = os.path.join(paths.SQLMAP_ROOT_PATH, "sqlmap-%s.conf" % randomStr())
paths.COMMON_COLUMNS = os.path.join(paths.SQLMAP_TXT_PATH, "common-columns.txt")
paths.COMMON_TABLES = os.path.join(paths.SQLMAP_TXT_PATH, "common-tables.txt")
paths.COMMON_OUTPUTS = os.path.join(paths.SQLMAP_TXT_PATH, 'common-outputs.txt')
paths.SQL_KEYWORDS = os.path.join(paths.SQLMAP_TXT_PATH, "keywords.txt")
paths.SMALL_DICT = os.path.join(paths.SQLMAP_TXT_PATH, "smalldict.txt")
paths.USER_AGENTS = os.path.join(paths.SQLMAP_TXT_PATH, "user-agents.txt")
paths.WORDLIST = os.path.join(paths.SQLMAP_TXT_PATH, "wordlist.zip")
paths.ERRORS_XML = os.path.join(paths.SQLMAP_XML_PATH, "errors.xml")
paths.PAYLOADS_XML = os.path.join(paths.SQLMAP_XML_PATH, "payloads.xml")
paths.LIVE_TESTS_XML = os.path.join(paths.SQLMAP_XML_PATH, "livetests.xml")
paths.QUERIES_XML = os.path.join(paths.SQLMAP_XML_PATH, "queries.xml")
paths.GENERIC_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "generic.xml")
paths.MSSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "mssql.xml")
paths.MYSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "mysql.xml")
paths.ORACLE_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "oracle.xml")
paths.PGSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "postgresql.xml")
for path in paths.values():
if any(path.endswith(_) for _ in (".txt", ".xml", ".zip")):
checkFile(path)
def weAreFrozen():
"""
Returns whether we are frozen via py2exe.
This will affect how we find out where we are located.
Reference: http://www.py2exe.org/index.cgi/WhereAmI
"""
return hasattr(sys, "frozen")
def parseTargetDirect():
"""
Parse target dbms and set some attributes into the configuration singleton.
"""
if not conf.direct:
return
details = None
remote = False
for dbms in SUPPORTED_DBMS:
details = re.search("^(?P<dbms>%s)://(?P<credentials>(?P<user>.+?)\:(?P<pass>.*)\@)?(?P<remote>(?P<hostname>.+?)\:(?P<port>[\d]+)\/)?(?P<db>[\w\d\ \:\.\_\-\/\\\\]+?)$" % dbms, conf.direct, re.I)
if details:
conf.dbms = details.group("dbms")
if details.group('credentials'):
conf.dbmsUser = details.group("user")
conf.dbmsPass = details.group("pass")
else:
if conf.dbmsCred:
conf.dbmsUser, conf.dbmsPass = conf.dbmsCred.split(':')
else:
conf.dbmsUser = unicode()
conf.dbmsPass = unicode()
if not conf.dbmsPass:
conf.dbmsPass = None
if details.group("remote"):
remote = True
conf.hostname = details.group("hostname").strip()
conf.port = int(details.group("port"))
else:
conf.hostname = "localhost"
conf.port = 0
conf.dbmsDb = details.group("db")
conf.parameters[None] = "direct connection"
break
if not details:
errMsg = "invalid target details, valid syntax is for instance "
errMsg += "'mysql://USER:PASSWORD@DBMS_IP:DBMS_PORT/DATABASE_NAME' "
errMsg += "or 'access://DATABASE_FILEPATH'"
raise SqlmapSyntaxException(errMsg)
for dbmsName, data in DBMS_DICT.items():
if dbmsName == conf.dbms or conf.dbms.lower() in data[0]:
try:
if dbmsName in (DBMS.ACCESS, DBMS.SQLITE, DBMS.FIREBIRD):
if remote:
warnMsg = "direct connection over the network for "
warnMsg += "%s DBMS is not supported" % dbmsName
logger.warn(warnMsg)
conf.hostname = "localhost"
conf.port = 0
elif not remote:
errMsg = "missing remote connection details (e.g. "
errMsg += "'mysql://USER:PASSWORD@DBMS_IP:DBMS_PORT/DATABASE_NAME' "
errMsg += "or 'access://DATABASE_FILEPATH')"
raise SqlmapSyntaxException(errMsg)
if dbmsName in (DBMS.MSSQL, DBMS.SYBASE):
import _mssql
import pymssql
if not hasattr(pymssql, "__version__") or pymssql.__version__ < "1.0.2":
errMsg = "'%s' third-party library must be " % data[1]
errMsg += "version >= 1.0.2 to work properly. "
errMsg += "Download from '%s'" % data[2]
raise SqlmapMissingDependence(errMsg)
elif dbmsName == DBMS.MYSQL:
import pymysql
elif dbmsName == DBMS.PGSQL:
import psycopg2
elif dbmsName == DBMS.ORACLE:
import cx_Oracle
elif dbmsName == DBMS.SQLITE:
import sqlite3
elif dbmsName == DBMS.ACCESS:
import pyodbc
elif dbmsName == DBMS.FIREBIRD:
import kinterbasdb
except ImportError:
if _sqlalchemy and data[3] in _sqlalchemy.dialects.__all__:
pass
else:
errMsg = "sqlmap requires '%s' third-party library " % data[1]
errMsg += "in order to directly connect to the DBMS "
errMsg += "%s. You can download it from '%s'" % (dbmsName, data[2])
errMsg += ". Alternative is to use a package 'python-sqlalchemy' "
errMsg += "with support for dialect '%s' installed" % data[3]
raise SqlmapMissingDependence(errMsg)
def parseTargetUrl():
"""
Parse target URL and set some attributes into the configuration singleton.
"""
if not conf.url:
return
originalUrl = conf.url
if re.search("\[.+\]", conf.url) and not socket.has_ipv6:
errMsg = "IPv6 addressing is not supported "
errMsg += "on this platform"
raise SqlmapGenericException(errMsg)
if not re.search("^http[s]*://", conf.url, re.I):
if ":443/" in conf.url:
conf.url = "https://" + conf.url
else:
conf.url = "http://" + conf.url
if CUSTOM_INJECTION_MARK_CHAR in conf.url:
conf.url = conf.url.replace('?', URI_QUESTION_MARKER)
try:
urlSplit = urlparse.urlsplit(conf.url)
except ValueError, ex:
errMsg = "invalid URL '%s' has been given ('%s'). " % (conf.url, ex)
errMsg += "Please be sure that you don't have any leftover characters (e.g. '[' or ']') "
errMsg += "in the hostname part"
raise SqlmapGenericException(errMsg)
hostnamePort = urlSplit.netloc.split(":") if not re.search("\[.+\]", urlSplit.netloc) else filter(None, (re.search("\[.+\]", urlSplit.netloc).group(0), re.search("\](:(?P<port>\d+))?", urlSplit.netloc).group("port")))
conf.scheme = urlSplit.scheme.strip().lower() if not conf.forceSSL else "https"
conf.path = urlSplit.path.strip()
conf.hostname = hostnamePort[0].strip()
conf.ipv6 = conf.hostname != conf.hostname.strip("[]")
conf.hostname = conf.hostname.strip("[]").replace(CUSTOM_INJECTION_MARK_CHAR, "")
try:
_ = conf.hostname.encode("idna")
except LookupError:
_ = conf.hostname.encode(UNICODE_ENCODING)
except UnicodeError:
_ = None
if any((_ is None, re.search(r'\s', conf.hostname), '..' in conf.hostname, conf.hostname.startswith('.'))):
errMsg = "invalid target URL"
raise SqlmapSyntaxException(errMsg)
if len(hostnamePort) == 2:
try:
conf.port = int(hostnamePort[1])
except:
errMsg = "invalid target URL"
raise SqlmapSyntaxException(errMsg)
elif conf.scheme == "https":
conf.port = 443
else:
conf.port = 80
if urlSplit.query:
conf.parameters[PLACE.GET] = urldecode(urlSplit.query) if urlSplit.query and urlencode(DEFAULT_GET_POST_DELIMITER, None) not in urlSplit.query else urlSplit.query
conf.url = getUnicode("%s://%s:%d%s" % (conf.scheme, ("[%s]" % conf.hostname) if conf.ipv6 else conf.hostname, conf.port, conf.path))
conf.url = conf.url.replace(URI_QUESTION_MARKER, '?')
if not conf.referer and intersect(REFERER_ALIASES, conf.testParameter, True):
debugMsg = "setting the HTTP Referer header to the target URL"
logger.debug(debugMsg)
conf.httpHeaders = filter(lambda (key, value): key != HTTP_HEADER.REFERER, conf.httpHeaders)
conf.httpHeaders.append((HTTP_HEADER.REFERER, conf.url))
if not conf.host and intersect(HOST_ALIASES, conf.testParameter, True):
debugMsg = "setting the HTTP Host header to the target URL"
logger.debug(debugMsg)
conf.httpHeaders = filter(lambda (key, value): key != HTTP_HEADER.HOST, conf.httpHeaders)
conf.httpHeaders.append((HTTP_HEADER.HOST, getHostHeader(conf.url)))
if conf.url != originalUrl:
kb.originalUrls[conf.url] = originalUrl
def expandAsteriskForColumns(expression):
"""
If the user provided an asterisk rather than the column(s)
name, sqlmap will retrieve the columns itself and reprocess
the SQL query string (expression)
"""
asterisk = re.search("^SELECT(\s+TOP\s+[\d]+)?\s+\*\s+FROM\s+`?([^`\s()]+)", expression, re.I)
if asterisk:
infoMsg = "you did not provide the fields in your query. "
infoMsg += "sqlmap will retrieve the column names itself"
logger.info(infoMsg)
_ = asterisk.group(2).replace("..", ".").replace(".dbo.", ".")
db, conf.tbl = _.split(".", 1) if '.' in _ else (None, _)
if db is None:
if expression != conf.query:
conf.db = db
else:
expression = re.sub(r"([^\w])%s" % re.escape(conf.tbl), "\g<1>%s.%s" % (conf.db, conf.tbl), expression)
else:
conf.db = db
conf.db = safeSQLIdentificatorNaming(conf.db)
conf.tbl = safeSQLIdentificatorNaming(conf.tbl, True)
columnsDict = conf.dbmsHandler.getColumns(onlyColNames=True)
if columnsDict and conf.db in columnsDict and conf.tbl in columnsDict[conf.db]:
columns = columnsDict[conf.db][conf.tbl].keys()
columns.sort()
columnsStr = ", ".join(column for column in columns)
expression = expression.replace("*", columnsStr, 1)
infoMsg = "the query with expanded column name(s) is: "
infoMsg += "%s" % expression
logger.info(infoMsg)
return expression
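# Illustrative run (assuming the back-end table "users" with columns "id" and "name"
# is enumerated): "SELECT * FROM users" gets rewritten to "SELECT id, name FROM users"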
def getLimitRange(count, dump=False, plusOne=False):
"""
Returns range of values used in limit/offset constructs
>>> [_ for _ in getLimitRange(10)]
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
"""
retVal = None
count = int(count)
limitStart, limitStop = 1, count
if dump:
if isinstance(conf.limitStop, int) and conf.limitStop > 0 and conf.limitStop < limitStop:
limitStop = conf.limitStop
if isinstance(conf.limitStart, int) and conf.limitStart > 0 and conf.limitStart <= limitStop:
limitStart = conf.limitStart
retVal = xrange(limitStart, limitStop + 1) if plusOne else xrange(limitStart - 1, limitStop)
return retVal
def parseUnionPage(page):
"""
Returns resulting items from UNION query inside provided page content
2012-02-16 18:42:28 +04:00
"""
if page is None:
return None
if re.search("(?si)\A%s.*%s\Z" % (kb.chars.start, kb.chars.stop), page):
if len(page) > LARGE_OUTPUT_THRESHOLD:
warnMsg = "large output detected. This might take a while"
logger.warn(warnMsg)
data = BigArray()
keys = set()
for match in re.finditer("%s(.*?)%s" % (kb.chars.start, kb.chars.stop), page, re.DOTALL | re.IGNORECASE):
entry = match.group(1)
if kb.chars.start in entry:
entry = entry.split(kb.chars.start)[-1]
if kb.unionDuplicates:
key = entry.lower()
if key not in keys:
keys.add(key)
else:
continue
entry = entry.split(kb.chars.delimiter)
if conf.hexConvert:
entry = applyFunctionRecursively(entry, decodeHexValue)
if kb.safeCharEncode:
entry = applyFunctionRecursively(entry, safecharencode)
data.append(entry[0] if len(entry) == 1 else entry)
else:
data = page
if len(data) == 1 and isinstance(data[0], basestring):
data = data[0]
return data
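# Illustrative result shape (assuming two rows with two columns each were found
# between the kb.chars.start/kb.chars.stop markers): [['1', 'admin'], ['2', 'guest']]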
def parseFilePaths(page):
"""
Detects (possible) absolute system paths inside the provided page content
"""
if page:
for regex in (r" in <b>(?P<result>.*?)</b> on line", r"(?:>|\s)(?P<result>[A-Za-z]:[\\/][\w.\\/]*)", r"(?:>|\s)(?P<result>/\w[/\w.]+)"):
for match in re.finditer(regex, page):
absFilePath = match.group("result").strip()
page = page.replace(absFilePath, "")
if isWindowsDriveLetterPath(absFilePath):
absFilePath = posixToNtSlashes(absFilePath)
if absFilePath not in kb.absFilePaths:
kb.absFilePaths.add(absFilePath)
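# Illustrative match for the first regex above: a page fragment like
# "... in <b>/var/www/html/index.php</b> on line 12" registers
# "/var/www/html/index.php" into kb.absFilePaths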
def getLocalIP():
"""
Get local IP address (exposed to the remote/target)
"""
retVal = None
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((conf.hostname, conf.port))
retVal, _ = s.getsockname()
s.close()
except:
debugMsg = "there was an error in opening socket "
debugMsg += "connection toward '%s'" % conf.hostname
logger.debug(debugMsg)
return retVal
def getRemoteIP():
"""
Get remote/target IP address
"""
retVal = None
try:
retVal = socket.gethostbyname(conf.hostname)
except socket.gaierror:
errMsg = "address resolution problem "
errMsg += "occurred for hostname '%s'" % conf.hostname
singleTimeLogMessage(errMsg, logging.ERROR)
return retVal
def getFileType(filePath):
try:
_ = magic.from_file(filePath)
except:
return "unknown"
return "text" if "ASCII" in _ or "text" in _ else "binary"
def getCharset(charsetType=None):
"""
Returns list with integers representing characters of a given
charset type appropriate for inference techniques
>>> getCharset(CHARSET_TYPE.BINARY)
[0, 1, 47, 48, 49]
"""
asciiTbl = []
if charsetType is None:
asciiTbl.extend(xrange(0, 128))
# 0 or 1
elif charsetType == CHARSET_TYPE.BINARY:
asciiTbl.extend([0, 1])
asciiTbl.extend(xrange(47, 50))
# Digits
elif charsetType == CHARSET_TYPE.DIGITS:
asciiTbl.extend([0, 1])
asciiTbl.extend(xrange(47, 58))
# Hexadecimal
elif charsetType == CHARSET_TYPE.HEXADECIMAL:
asciiTbl.extend([0, 1])
asciiTbl.extend(xrange(47, 58))
asciiTbl.extend(xrange(64, 71))
asciiTbl.extend([87, 88]) # X
asciiTbl.extend(xrange(96, 103))
asciiTbl.extend([119, 120]) # x
# Characters
elif charsetType == CHARSET_TYPE.ALPHA:
asciiTbl.extend([0, 1])
asciiTbl.extend(xrange(64, 91))
asciiTbl.extend(xrange(96, 123))
# Characters and digits
elif charsetType == CHARSET_TYPE.ALPHANUM:
asciiTbl.extend([0, 1])
asciiTbl.extend(xrange(47, 58))
asciiTbl.extend(xrange(64, 91))
asciiTbl.extend(xrange(96, 123))
return asciiTbl
def directoryPath(filepath):
"""
Returns directory path for a given filepath
>>> directoryPath('/var/log/apache.log')
'/var/log'
2012-02-16 18:42:28 +04:00
"""
retVal = filepath
if filepath:
retVal = ntpath.dirname(filepath) if isWindowsDriveLetterPath(filepath) else posixpath.dirname(filepath)
return retVal
def normalizePath(filepath):
"""
Returns normalized string representation of a given filepath
>>> normalizePath('//var///log/apache.log')
'//var/log/apache.log'
"""
retVal = filepath
if retVal:
retVal = retVal.strip("\r\n")
retVal = ntpath.normpath(retVal) if isWindowsDriveLetterPath(retVal) else posixpath.normpath(retVal)
return retVal
def safeExpandUser(filepath):
"""
Patch for a Python Issue18171 (http://bugs.python.org/issue18171)
"""
retVal = filepath
try:
retVal = os.path.expanduser(filepath)
except UnicodeDecodeError:
_ = locale.getdefaultlocale()
retVal = getUnicode(os.path.expanduser(filepath.encode(_[1] if _ and len(_) > 1 else UNICODE_ENCODING)))
return retVal
def safeStringFormat(format_, params):
"""
Avoids problems with inappropriate string format strings
>>> safeStringFormat('SELECT foo FROM %s LIMIT %d', ('bar', '1'))
u'SELECT foo FROM bar LIMIT 1'
"""
if format_.count(PAYLOAD_DELIMITER) == 2:
_ = format_.split(PAYLOAD_DELIMITER)
_[1] = re.sub(r"(\A|[^A-Za-z0-9])(%d)([^A-Za-z0-9]|\Z)", r"\g<1>%s\g<3>", _[1])
retVal = PAYLOAD_DELIMITER.join(_)
else:
retVal = re.sub(r"(\A|[^A-Za-z0-9])(%d)([^A-Za-z0-9]|\Z)", r"\g<1>%s\g<3>", format_)
if isinstance(params, basestring):
retVal = retVal.replace("%s", params, 1)
elif not isListLike(params):
retVal = retVal.replace("%s", str(params), 1)
else:
start, end = 0, len(retVal)
match = re.search(r"%s(.+)%s" % (PAYLOAD_DELIMITER, PAYLOAD_DELIMITER), retVal)
if match and PAYLOAD_DELIMITER not in match.group(1):
start, end = match.start(), match.end()
if retVal.count("%s", start, end) == len(params):
for param in params:
index = retVal.find("%s", start)
retVal = retVal[:index] + getUnicode(param) + retVal[index + 2:]
else:
count = 0
while True:
match = re.search(r"(\A|[^A-Za-z0-9])(%s)([^A-Za-z0-9]|\Z)", retVal)
if match:
if count >= len(params):
raise Exception("wrong number of parameters during string formatting")
else:
retVal = re.sub(r"(\A|[^A-Za-z0-9])(%s)([^A-Za-z0-9]|\Z)", r"\g<1>%s\g<3>" % params[count], retVal, 1)
count += 1
else:
break
return retVal
def getFilteredPageContent(page, onlyText=True):
"""
Returns filtered page content without script, style and/or comments
or all HTML tags
>>> getFilteredPageContent(u'<html><title>foobar</title><body>test</body></html>')
u'foobar test'
"""
retVal = page
# only if the page's charset has been successfully identified
if isinstance(page, unicode):
retVal = re.sub(r"(?si)<script.+?</script>|<!--.+?-->|<style.+?</style>%s" % (r"|<[^>]+>|\t|\n|\r" if onlyText else ""), " ", page)
while retVal.find(" ") != -1:
retVal = retVal.replace(" ", " ")
retVal = htmlunescape(retVal.strip())
return retVal
def getPageWordSet(page):
"""
Returns word set used in page content
>>> sorted(getPageWordSet(u'<html><title>foobar</title><body>test</body></html>'))
[u'foobar', u'test']
"""
retVal = set()
# only if the page's charset has been successfully identified
if isinstance(page, unicode):
_ = getFilteredPageContent(page)
retVal = set(re.findall(r"\w+", _))
return retVal
def showStaticWords(firstPage, secondPage):
"""
Prints words appearing in two different response pages
"""
infoMsg = "finding static words in longest matching part of dynamic page content"
logger.info(infoMsg)
firstPage = getFilteredPageContent(firstPage)
secondPage = getFilteredPageContent(secondPage)
infoMsg = "static words: "
if firstPage and secondPage:
match = SequenceMatcher(None, firstPage, secondPage).find_longest_match(0, len(firstPage), 0, len(secondPage))
commonText = firstPage[match[0]:match[0] + match[2]]
commonWords = getPageWordSet(commonText)
else:
commonWords = None
if commonWords:
commonWords = list(commonWords)
commonWords.sort(lambda a, b: cmp(a.lower(), b.lower()))
for word in commonWords:
if len(word) > 2:
infoMsg += "'%s', " % word
infoMsg = infoMsg.rstrip(", ")
else:
infoMsg += "None"
logger.info(infoMsg)
def isWindowsDriveLetterPath(filepath):
"""
Returns True if given filepath starts with a Windows drive letter
>>> isWindowsDriveLetterPath('C:\\boot.ini')
True
>>> isWindowsDriveLetterPath('/var/log/apache.log')
False
"""
return re.search("\A[\w]\:", filepath) is not None
def posixToNtSlashes(filepath):
"""
    Replaces all occurrences of Posix slashes (/) in provided
filepath with NT ones (\)
>>> posixToNtSlashes('C:/Windows')
'C:\\\\Windows'
"""
return filepath.replace('/', '\\')
def ntToPosixSlashes(filepath):
"""
    Replaces all occurrences of NT slashes (\) in provided
filepath with Posix ones (/)
>>> ntToPosixSlashes('C:\\Windows')
'C:/Windows'
"""
return filepath.replace('\\', '/')
def isHexEncodedString(subject):
"""
Checks if the provided string is hex encoded
>>> isHexEncodedString('DEADBEEF')
True
>>> isHexEncodedString('test')
False
"""
return re.match(r"\A[0-9a-fA-Fx]+\Z", subject) is not None
@cachedmethod
def getConsoleWidth(default=80):
"""
Returns console width
"""
width = None
if os.getenv("COLUMNS", "").isdigit():
width = int(os.getenv("COLUMNS"))
else:
try:
try:
FNULL = open(os.devnull, 'w')
except IOError:
FNULL = None
process = execute("stty size", shell=True, stdout=PIPE, stderr=FNULL or PIPE)
stdout, _ = process.communicate()
items = stdout.split()
if len(items) == 2 and items[1].isdigit():
width = int(items[1])
except (OSError, MemoryError):
pass
if width is None:
try:
import curses
stdscr = curses.initscr()
_, width = stdscr.getmaxyx()
curses.endwin()
except:
pass
return width or default
def clearConsoleLine(forceOutput=False):
"""
Clears current console line
"""
if getattr(LOGGER_HANDLER, "is_tty", False):
dataToStdout("\r%s\r" % (" " * (getConsoleWidth() - 1)), forceOutput)
kb.prependFlag = False
kb.stickyLevel = None
def parseXmlFile(xmlFile, handler):
"""
Parses XML file by a given handler
"""
try:
with contextlib.closing(StringIO(readCachedFileContent(xmlFile))) as stream:
parse(stream, handler)
except (SAXParseException, UnicodeError), ex:
errMsg = "something seems to be wrong with "
errMsg += "the file '%s' ('%s'). Please make " % (xmlFile, ex)
errMsg += "sure that you haven't made any changes to it"
raise SqlmapInstallationException, errMsg
def getSQLSnippet(dbms, sfile, **variables):
"""
Returns content of SQL snippet located inside 'procs/' directory
"""
if sfile.endswith('.sql') and os.path.exists(sfile):
filename = sfile
elif not sfile.endswith('.sql') and os.path.exists("%s.sql" % sfile):
filename = "%s.sql" % sfile
else:
filename = os.path.join(paths.SQLMAP_PROCS_PATH, DBMS_DIRECTORY_DICT[dbms], sfile if sfile.endswith('.sql') else "%s.sql" % sfile)
checkFile(filename)
retVal = readCachedFileContent(filename)
retVal = re.sub(r"#.+", "", retVal)
retVal = re.sub(r"(?s);\s+", "; ", retVal).strip("\r\n")
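    # Passed keyword arguments are substituted into matching %NAME% placeholders
    # below, e.g. a (hypothetical) call getSQLSnippet(DBMS.MSSQL, "some_proc",
    # USERNAME="foo") would replace every %USERNAME% occurrence with "foo", while
    # %RANDSTR<N>% and %RANDINT<N>% markers are filled with fresh random values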
for _ in variables.keys():
retVal = re.sub(r"%%%s%%" % _, variables[_], retVal)
for _ in re.findall(r"%RANDSTR\d+%", retVal, re.I):
retVal = retVal.replace(_, randomStr())
for _ in re.findall(r"%RANDINT\d+%", retVal, re.I):
        retVal = retVal.replace(_, str(randomInt()))  # str() needed as randomInt() returns an integer
variables = re.findall(r"(?<!\bLIKE ')%(\w+)%", retVal, re.I)
if variables:
errMsg = "unresolved variable%s '%s' in SQL file '%s'" % ("s" if len(variables) > 1 else "", ", ".join(variables), sfile)
logger.error(errMsg)
msg = "do you want to provide the substitution values? [y/N] "
choice = readInput(msg, default="N")
if choice and choice[0].lower() == "y":
for var in variables:
msg = "insert value for variable '%s': " % var
val = readInput(msg, default="")
retVal = retVal.replace(r"%%%s%%" % var, val)
return retVal
def readCachedFileContent(filename, mode='rb'):
"""
Cached reading of file content (avoiding multiple same file reading)
"""
if filename not in kb.cache.content:
with kb.locks.cache:
if filename not in kb.cache.content:
checkFile(filename)
with openFile(filename, mode) as f:
kb.cache.content[filename] = f.read()
return kb.cache.content[filename]
def readXmlFile(xmlFile):
"""
Reads XML file content and returns its DOM representation
"""
checkFile(xmlFile)
retVal = minidom.parse(xmlFile).documentElement
return retVal
def stdev(values):
"""
Computes standard deviation of a list of numbers.
Reference: http://www.goldb.org/corestats.html
>>> stdev([0.9, 0.9, 0.9, 1.0, 0.8, 0.9])
0.06324555320336757
"""
if not values or len(values) < 2:
return None
key = (values[0], values[-1], len(values))
if kb.get("cache") and key in kb.cache.stdev:
retVal = kb.cache.stdev[key]
else:
avg = average(values)
_ = reduce(lambda x, y: x + pow((y or 0) - avg, 2), values, 0.0)
retVal = sqrt(_ / (len(values) - 1))
if kb.get("cache"):
kb.cache.stdev[key] = retVal
return retVal
def average(values):
"""
Computes the arithmetic mean of a list of numbers.
>>> average([0.9, 0.9, 0.9, 1.0, 0.8, 0.9])
0.9
"""
return (sum(values) / len(values)) if values else None
def calculateDeltaSeconds(start):
"""
Returns elapsed time from start till now
"""
return time.time() - start
def initCommonOutputs():
"""
Initializes dictionary containing common output values used by "good samaritan" feature
"""
kb.commonOutputs = {}
key = None
with openFile(paths.COMMON_OUTPUTS, 'r') as f:
for line in f.readlines(): # xreadlines doesn't return unicode strings when codec.open() is used
if line.find('#') != -1:
line = line[:line.find('#')]
line = line.strip()
if len(line) > 1:
if line.startswith('[') and line.endswith(']'):
key = line[1:-1]
elif key:
if key not in kb.commonOutputs:
kb.commonOutputs[key] = set()
if line not in kb.commonOutputs[key]:
kb.commonOutputs[key].add(line)
def getFileItems(filename, commentPrefix='#', unicode_=True, lowercase=False, unique=False):
"""
Returns newline delimited items contained inside file
"""
retVal = list() if not unique else OrderedDict()
checkFile(filename)
try:
with openFile(filename, 'r', errors="ignore") if unicode_ else open(filename, 'r') as f:
for line in (f.readlines() if unicode_ else f.xreadlines()): # xreadlines doesn't return unicode strings when codec.open() is used
if commentPrefix:
if line.find(commentPrefix) != -1:
line = line[:line.find(commentPrefix)]
line = line.strip()
if not unicode_:
try:
line = str.encode(line)
except UnicodeDecodeError:
continue
if line:
if lowercase:
line = line.lower()
if unique and line in retVal:
continue
if unique:
retVal[line] = True
else:
retVal.append(line)
except (IOError, OSError, MemoryError), ex:
errMsg = "something went wrong while trying "
errMsg += "to read the content of file '%s' ('%s')" % (filename, ex)
raise SqlmapSystemException(errMsg)
return retVal if not unique else retVal.keys()
def goGoodSamaritan(prevValue, originalCharset):
"""
Function for retrieving parameters needed for common prediction (good
samaritan) feature.
prevValue: retrieved query output so far (e.g. 'i').
Returns commonValue if there is a complete single match (in kb.partRun
of txt/common-outputs.txt under kb.partRun) regarding parameter
prevValue. If there is no single value match, but multiple, commonCharset is
returned containing more probable characters (retrieved from matched
values in txt/common-outputs.txt) together with the rest of charset as
otherCharset.
"""
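    # Illustrative example: with prevValue "in" and common outputs
    # ("information_schema", "interbase") there is no single full match
    # (commonValue ends up None), but 'f' and 't' get promoted through
    # commonCharset as the more probable next characters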
if kb.commonOutputs is None:
initCommonOutputs()
predictionSet = set()
commonValue = None
commonPattern = None
countCommonValue = 0
# If the header (e.g. Databases) we are looking for has common
# outputs defined
if kb.partRun in kb.commonOutputs:
commonPartOutputs = kb.commonOutputs[kb.partRun]
commonPattern = commonFinderOnly(prevValue, commonPartOutputs)
# If the longest common prefix is the same as previous value then
# do not consider it
if commonPattern and commonPattern == prevValue:
commonPattern = None
# For each common output
for item in commonPartOutputs:
# Check if the common output (item) starts with prevValue
# where prevValue is the enumerated character(s) so far
if item.startswith(prevValue):
commonValue = item
countCommonValue += 1
if len(item) > len(prevValue):
char = item[len(prevValue)]
predictionSet.add(char)
# Reset single value if there is more than one possible common
# output
if countCommonValue > 1:
commonValue = None
commonCharset = []
otherCharset = []
# Split the original charset into common chars (commonCharset)
# and other chars (otherCharset)
for ordChar in originalCharset:
if chr(ordChar) not in predictionSet:
otherCharset.append(ordChar)
else:
commonCharset.append(ordChar)
commonCharset.sort()
return commonValue, commonPattern, commonCharset, originalCharset
else:
return None, None, None, originalCharset
def getPartRun(alias=True):
"""
Goes through call stack and finds constructs matching conf.dbmsHandler.*.
Returns it or its alias used in txt/common-outputs.txt
"""
retVal = None
commonPartsDict = optDict["Enumeration"]
try:
stack = [item[4][0] if isinstance(item[4], list) else '' for item in inspect.stack()]
# Goes backwards through the stack to find the conf.dbmsHandler method
# calling this function
for i in xrange(0, len(stack) - 1):
for regex in (r"self\.(get[^(]+)\(\)", r"conf\.dbmsHandler\.([^(]+)\(\)"):
match = re.search(regex, stack[i])
if match:
# This is the calling conf.dbmsHandler or self method
# (e.g. 'getDbms')
retVal = match.groups()[0]
break
if retVal is not None:
break
# Reference: http://coding.derkeiler.com/Archive/Python/comp.lang.python/2004-06/2267.html
except TypeError:
pass
# Return the INI tag to consider for common outputs (e.g. 'Databases')
if alias:
return commonPartsDict[retVal][1] if isinstance(commonPartsDict.get(retVal), tuple) else retVal
else:
return retVal
def getUnicode(value, encoding=None, noneToNull=False):
"""
Return the unicode representation of the supplied value:
>>> getUnicode(u'test')
u'test'
>>> getUnicode('test')
u'test'
>>> getUnicode(1)
u'1'
"""
if noneToNull and value is None:
return NULL
if isListLike(value):
value = list(getUnicode(_, encoding, noneToNull) for _ in value)
return value
if isinstance(value, unicode):
return value
elif isinstance(value, basestring):
while True:
try:
return unicode(value, encoding or kb.get("pageEncoding") or UNICODE_ENCODING)
except UnicodeDecodeError, ex:
try:
return unicode(value, UNICODE_ENCODING)
except:
value = value[:ex.start] + "".join(INVALID_UNICODE_CHAR_FORMAT % ord(_) for _ in value[ex.start:ex.end]) + value[ex.end:]
else:
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(str(value), errors="ignore") # encoding ignored for non-basestring instances
def longestCommonPrefix(*sequences):
"""
    Returns longest common prefix occurring in given sequences
Reference: http://boredzo.org/blog/archives/2007-01-06/longest-common-prefix-in-python-2
>>> longestCommonPrefix('foobar', 'fobar')
'fo'
"""
if len(sequences) == 1:
return sequences[0]
sequences = [pair[1] for pair in sorted((len(fi), fi) for fi in sequences)]
if not sequences:
return None
for i, comparison_ch in enumerate(sequences[0]):
for fi in sequences[1:]:
ch = fi[i]
if ch != comparison_ch:
return fi[:i]
return sequences[0]
def commonFinderOnly(initial, sequence):
return longestCommonPrefix(*filter(lambda x: x.startswith(initial), sequence))
def pushValue(value):
"""
Push value to the stack (thread dependent)
"""
getCurrentThreadData().valueStack.append(copy.deepcopy(value))
def popValue():
"""
Pop value from the stack (thread dependent)
>>> pushValue('foobar')
>>> popValue()
'foobar'
"""
return getCurrentThreadData().valueStack.pop()
def wasLastResponseDBMSError():
"""
Returns True if the last web request resulted in a (recognized) DBMS error page
"""
threadData = getCurrentThreadData()
return threadData.lastErrorPage and threadData.lastErrorPage[0] == threadData.lastRequestUID
def wasLastResponseHTTPError():
"""
    Returns True if the last web request resulted in an erroneous HTTP code (like 500)
"""
threadData = getCurrentThreadData()
return threadData.lastHTTPError and threadData.lastHTTPError[0] == threadData.lastRequestUID
def wasLastResponseDelayed():
"""
Returns True if the last web request resulted in a time-delay
"""
# 99.9999999997440% of all non time-based SQL injection affected
# response times should be inside +-7*stdev([normal response times])
# Math reference: http://www.answers.com/topic/standard-deviation
deviation = stdev(kb.responseTimes)
threadData = getCurrentThreadData()
if deviation and not conf.direct:
if len(kb.responseTimes) < MIN_TIME_RESPONSES:
warnMsg = "time-based standard deviation method used on a model "
warnMsg += "with less than %d response times" % MIN_TIME_RESPONSES
logger.warn(warnMsg)
lowerStdLimit = average(kb.responseTimes) + TIME_STDEV_COEFF * deviation
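        # e.g. with an average response time of 0.5s, a deviation of 0.1s and a
        # TIME_STDEV_COEFF of 7, the resulting threshold would be 0.5 + 7 * 0.1 = 1.2s
        # (illustrative numbers only)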
retVal = (threadData.lastQueryDuration >= max(MIN_VALID_DELAYED_RESPONSE, lowerStdLimit))
if not kb.testMode and retVal:
if kb.adjustTimeDelay is None:
msg = "do you want sqlmap to try to optimize value(s) "
msg += "for DBMS delay responses (option '--time-sec')? [Y/n] "
choice = readInput(msg, default='Y')
kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE if choice.upper() == 'N' else ADJUST_TIME_DELAY.YES
if kb.adjustTimeDelay is ADJUST_TIME_DELAY.YES:
adjustTimeDelay(threadData.lastQueryDuration, lowerStdLimit)
return retVal
else:
return (threadData.lastQueryDuration - conf.timeSec) >= 0
def adjustTimeDelay(lastQueryDuration, lowerStdLimit):
"""
Provides tip for adjusting time delay in time-based data retrieval
"""
candidate = 1 + int(round(lowerStdLimit))
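    # e.g. a lowerStdLimit of 1.3 seconds gives a candidate delay of 1 + round(1.3) = 2 seconds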
if candidate:
kb.delayCandidates = [candidate] + kb.delayCandidates[:-1]
if all((x == candidate for x in kb.delayCandidates)) and candidate < conf.timeSec:
conf.timeSec = candidate
infoMsg = "adjusting time delay to "
infoMsg += "%d second%s due to good response times" % (conf.timeSec, 's' if conf.timeSec > 1 else '')
logger.info(infoMsg)
def getLastRequestHTTPError():
"""
Returns last HTTP error code
"""
threadData = getCurrentThreadData()
return threadData.lastHTTPError[1] if threadData.lastHTTPError else None
def extractErrorMessage(page):
"""
    Returns reported error message from page if it finds one
>>> extractErrorMessage(u'<html><title>Test</title>\\n<b>Warning</b>: oci_parse() [function.oci-parse]: ORA-01756: quoted string not properly terminated<br><p>Only a test page</p></html>')
u'oci_parse() [function.oci-parse]: ORA-01756: quoted string not properly terminated'
"""
retVal = None
if isinstance(page, basestring):
for regex in ERROR_PARSING_REGEXES:
match = re.search(regex, page, re.DOTALL | re.IGNORECASE)
if match:
retVal = htmlunescape(match.group("result")).replace("<br>", "\n").strip()
break
return retVal
def findMultipartPostBoundary(post):
"""
Finds value for a boundary parameter in given multipart POST body
"""
retVal = None
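    # Illustrative example: for a body containing the lines "--a1b2c3" and
    # "--a1b2c3--" the returned boundary value would be "a1b2c3"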
done = set()
candidates = []
for match in re.finditer(r"(?m)^--(.+?)(--)?$", post or ""):
_ = match.group(1)
if _ in done:
continue
else:
candidates.append((post.count(_), _))
done.add(_)
if candidates:
candidates.sort(key=lambda _: _[0], reverse=True)
retVal = candidates[0][1]
return retVal
def urldecode(value, encoding=None, unsafe="%%&=;+%s" % CUSTOM_INJECTION_MARK_CHAR, convall=False, plusspace=True):
"""
URL decodes given value
>>> urldecode('AND%201%3E%282%2B3%29%23', convall=True)
u'AND 1>(2+3)#'
"""
result = value
if value:
try:
# for cases like T%C3%BCrk%C3%A7e
value = str(value)
except ValueError:
pass
finally:
if convall:
result = urllib.unquote_plus(value) if plusspace else urllib.unquote(value)
else:
def _(match):
charset = reduce(lambda x, y: x.replace(y, ""), unsafe, string.printable)
char = chr(ord(match.group(1).decode("hex")))
return char if char in charset else match.group(0)
result = value
if plusspace:
result = result.replace("+", " ") # plus sign has a special meaning in URL encoded data (hence the usage of urllib.unquote_plus in convall case)
result = re.sub("%([0-9a-fA-F]{2})", _, result)
if isinstance(result, str):
result = unicode(result, encoding or UNICODE_ENCODING, "replace")
return result
def urlencode(value, safe="%&=-_", convall=False, limit=False, spaceplus=False):
"""
URL encodes given value
>>> urlencode('AND 1>(2+3)#')
'AND%201%3E%282%2B3%29%23'
"""
if conf.get("direct"):
return value
count = 0
result = None if value is None else ""
if value:
if Backend.isDbms(DBMS.MSSQL) and not kb.tamperFunctions and any(ord(_) > 255 for _ in value):
warnMsg = "if you experience problems with "
warnMsg += "non-ASCII identifier names "
warnMsg += "you are advised to rerun with '--tamper=charunicodeencode'"
singleTimeWarnMessage(warnMsg)
if convall or safe is None:
safe = ""
# corner case when character % really needs to be
# encoded (when not representing URL encoded char)
# except in cases when tampering scripts are used
if all(map(lambda x: '%' in x, [safe, value])) and not kb.tamperFunctions:
value = re.sub("%(?![0-9a-fA-F]{2})", "%25", value)
while True:
result = urllib.quote(utf8encode(value), safe)
if limit and len(result) > URLENCODE_CHAR_LIMIT:
if count >= len(URLENCODE_FAILSAFE_CHARS):
break
while count < len(URLENCODE_FAILSAFE_CHARS):
safe += URLENCODE_FAILSAFE_CHARS[count]
count += 1
if safe[-1] in value:
break
else:
break
if spaceplus:
result = result.replace(urllib.quote(' '), '+')
return result
def runningAsAdmin():
"""
Returns True if the current process is run under admin privileges
"""
isAdmin = None
if PLATFORM in ("posix", "mac"):
_ = os.geteuid()
isAdmin = isinstance(_, (int, float, long)) and _ == 0
elif IS_WIN:
import ctypes
_ = ctypes.windll.shell32.IsUserAnAdmin()
isAdmin = isinstance(_, (int, float, long)) and _ == 1
else:
errMsg = "sqlmap is not able to check if you are running it "
errMsg += "as an administrator account on this platform. "
errMsg += "sqlmap will assume that you are an administrator "
errMsg += "which is mandatory for the requested takeover attack "
errMsg += "to work properly"
logger.error(errMsg)
isAdmin = True
return isAdmin
def logHTTPTraffic(requestLogMsg, responseLogMsg):
"""
Logs HTTP traffic to the output file
"""
if not conf.trafficFile:
return
with kb.locks.log:
dataToTrafficFile("%s%s" % (requestLogMsg, os.linesep))
dataToTrafficFile("%s%s" % (responseLogMsg, os.linesep))
dataToTrafficFile("%s%s%s%s" % (os.linesep, 76 * '#', os.linesep, os.linesep))
def getPageTemplate(payload, place): # Cross-linked function
raise NotImplementedError
def getPublicTypeMembers(type_, onlyValues=False):
"""
Useful for getting members from types (e.g. in enums)
2013-01-30 13:38:11 +04:00
>>> [_ for _ in getPublicTypeMembers(OS, True)]
['Linux', 'Windows']
2010-11-23 17:50:47 +03:00
"""
for name, value in inspect.getmembers(type_):
if not name.startswith('__'):
if not onlyValues:
yield (name, value)
else:
yield value
def enumValueToNameLookup(type_, value_):
"""
    Returns name of an enum member with a given value
>>> enumValueToNameLookup(SORT_ORDER, 100)
'LAST'
"""
retVal = None
for name, value in getPublicTypeMembers(type_):
if value == value_:
retVal = name
break
return retVal
def extractRegexResult(regex, content, flags=0):
"""
Returns 'result' group value from a possible match with regex on a given
content
2013-01-30 13:38:11 +04:00
>>> extractRegexResult(r'a(?P<result>[^g]+)g', 'abcdefg')
'bcdef'
"""
retVal = None
if regex and content and "?P<result>" in regex:
match = re.search(regex, content, flags)
if match:
retVal = match.group("result")
return retVal
def extractTextTagContent(page):
"""
Returns list containing content from "textual" tags
2013-01-30 13:38:11 +04:00
>>> extractTextTagContent(u'<html><head><title>Title</title></head><body><pre>foobar</pre><a href="#link">Link</a></body></html>')
[u'Title', u'foobar']
"""
page = re.sub(r"(?si)[^\s>]*%s[^<]*" % REFLECTED_VALUE_MARKER, "", page or "")
return filter(None, (_.group('result').strip() for _ in re.finditer(TEXT_TAG_REGEX, page)))
def trimAlphaNum(value):
"""
    Trims alphanumeric characters from the start and end of a given value
2013-01-30 13:38:11 +04:00
>>> trimAlphaNum(u'AND 1>(2+3)-- foobar')
u' 1>(2+3)-- '
2010-11-29 18:14:49 +03:00
"""
2010-11-29 18:14:49 +03:00
while value and value[-1].isalnum():
value = value[:-1]
while value and value[0].isalnum():
value = value[1:]
return value
def isNumPosStrValue(value):
"""
Returns True if value is a string (or integer) with a positive integer representation
>>> isNumPosStrValue(1)
True
>>> isNumPosStrValue('1')
True
>>> isNumPosStrValue(0)
False
>>> isNumPosStrValue('-2')
False
"""
return (value and isinstance(value, basestring) and value.isdigit() and int(value) > 0) or (isinstance(value, int) and value > 0)
@cachedmethod
def aliasToDbmsEnum(dbms):
"""
Returns major DBMS name from a given alias
2013-01-30 13:38:11 +04:00
>>> aliasToDbmsEnum('mssql')
'Microsoft SQL Server'
"""
retVal = None
if dbms:
for key, item in DBMS_DICT.items():
if dbms.lower() in item[0] or dbms.lower() == key.lower():
retVal = key
break
return retVal
def findDynamicContent(firstPage, secondPage):
"""
This function checks if the provided pages have dynamic content. If they
are dynamic, proper markings will be made
"""
if not firstPage or not secondPage:
return
infoMsg = "searching for dynamic content"
logger.info(infoMsg)
blocks = SequenceMatcher(None, firstPage, secondPage).get_matching_blocks()
kb.dynamicMarkings = []
# Removing too small matching blocks
for block in blocks[:]:
(_, _, length) = block
if length <= DYNAMICITY_MARK_LENGTH:
blocks.remove(block)
# Making of dynamic markings based on prefix/suffix principle
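    # Illustrative example: for pages "...<div>1234</div>..." and "...<div>5678</div>..."
    # the static content around the changing part is stored as a (prefix, suffix) pair,
    # so that removeDynamicContent() can later strip whatever appears between them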
if len(blocks) > 0:
blocks.insert(0, None)
blocks.append(None)
for i in xrange(len(blocks) - 1):
prefix = firstPage[blocks[i][0]:blocks[i][0] + blocks[i][2]] if blocks[i] else None
suffix = firstPage[blocks[i + 1][0]:blocks[i + 1][0] + blocks[i + 1][2]] if blocks[i + 1] else None
if prefix is None and blocks[i + 1][0] == 0:
continue
if suffix is None and (blocks[i][0] + blocks[i][2] >= len(firstPage)):
continue
prefix = trimAlphaNum(prefix)
suffix = trimAlphaNum(suffix)
kb.dynamicMarkings.append((re.escape(prefix[-DYNAMICITY_MARK_LENGTH / 2:]) if prefix else None, re.escape(suffix[:DYNAMICITY_MARK_LENGTH / 2]) if suffix else None))
if len(kb.dynamicMarkings) > 0:
infoMsg = "dynamic content marked for removal (%d region%s)" % (len(kb.dynamicMarkings), 's' if len(kb.dynamicMarkings) > 1 else '')
logger.info(infoMsg)
def removeDynamicContent(page):
"""
    Removes dynamic content from the supplied page based on
precalculated dynamic markings
"""
if page:
for item in kb.dynamicMarkings:
prefix, suffix = item
if prefix is None and suffix is None:
continue
elif prefix is None:
page = re.sub(r'(?s)^.+%s' % re.escape(suffix), suffix, page)
elif suffix is None:
page = re.sub(r'(?s)%s.+$' % re.escape(prefix), prefix, page)
else:
page = re.sub(r'(?s)%s.+%s' % (re.escape(prefix), re.escape(suffix)), '%s%s' % (prefix, suffix), page)
return page
def filterStringValue(value, charRegex, replacement=""):
"""
Returns string value consisting only of chars satisfying supplied
regular expression (note: it has to be in form [...])
>>> filterStringValue(u'wzydeadbeef0123#', r'[0-9a-f]')
u'deadbeef0123'
"""
retVal = value
if value:
retVal = re.sub(charRegex.replace("[", "[^") if "[^" not in charRegex else charRegex.replace("[^", "["), replacement, value)
return retVal
def filterControlChars(value):
"""
    Returns string value with control chars being substituted with ' '
>>> filterControlChars(u'AND 1>(2+3)\\n--')
u'AND 1>(2+3) --'
"""
return filterStringValue(value, PRINTABLE_CHAR_REGEX, ' ')
def isDBMSVersionAtLeast(version):
"""
Checks if the recognized DBMS version is at least the version
specified
"""
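    # Note: the detected version string is flattened before comparison,
    # e.g. a banner value like "5.0.38" is reduced to "5.038" and only then
    # compared against the given version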
retVal = None
if Backend.getVersion() and Backend.getVersion() != UNKNOWN_DBMS_VERSION:
value = Backend.getVersion().replace(" ", "").rstrip('.')
while True:
index = value.find('.', value.find('.') + 1)
if index > -1:
value = value[0:index] + value[index + 1:]
else:
break
value = filterStringValue(value, '[0-9.><=]')
if isinstance(value, basestring):
if value.startswith(">="):
value = float(value.replace(">=", ""))
elif value.startswith(">"):
value = float(value.replace(">", "")) + 0.01
elif value.startswith("<="):
value = float(value.replace("<=", ""))
            elif value.startswith("<"):
                value = float(value.replace("<", "")) - 0.01
retVal = getUnicode(value) >= getUnicode(version)
return retVal
def parseSqliteTableSchema(value):
"""
Parses table column names and types from specified SQLite table schema
"""
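    # Illustrative example: for a schema value like
    # "CREATE TABLE users(id INTEGER PRIMARY KEY, name TEXT)" the parsed
    # columns would be {'id': 'INTEGER', 'name': 'TEXT'}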
if value:
table = {}
columns = {}
for match in re.finditer(r"(\w+)\s+(TEXT|NUMERIC|INTEGER|REAL|NONE)\b", value, re.I):
columns[match.group(1)] = match.group(2)
table[conf.tbl] = columns
kb.data.cachedColumns[conf.db] = table
def getTechniqueData(technique=None):
"""
Returns injection data for technique specified
"""
return kb.injection.data.get(technique)
def isTechniqueAvailable(technique):
"""
Returns True if there is injection data which sqlmap could use for
technique specified
"""
if conf.tech and isinstance(conf.tech, list) and technique not in conf.tech:
return False
else:
return getTechniqueData(technique) is not None
def isStackingAvailable():
"""
    Returns True if techniques using stacked statements are available
"""
retVal = False
if PAYLOAD.TECHNIQUE.STACKED in kb.injection.data:
retVal = True
else:
for technique in getPublicTypeMembers(PAYLOAD.TECHNIQUE, True):
_ = getTechniqueData(technique)
if _ and "stacked" in _["title"].lower():
retVal = True
break
return retVal
def isInferenceAvailable():
"""
    Returns True if inference-based techniques are available
"""
return any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.BOOLEAN, PAYLOAD.TECHNIQUE.STACKED, PAYLOAD.TECHNIQUE.TIME))
def setOptimize():
"""
Sets options turned on by switch '-o'
"""
#conf.predictOutput = True
conf.keepAlive = True
conf.threads = 3 if conf.threads < 3 else conf.threads
conf.nullConnection = not any((conf.data, conf.textOnly, conf.titles, conf.string, conf.notString, conf.regexp, conf.tor))
if not conf.nullConnection:
debugMsg = "turning off --null-connection switch used indirectly by switch -o"
logger.debug(debugMsg)
def initTechnique(technique=None):
"""
Prepares data for technique specified
"""
try:
data = getTechniqueData(technique)
resetCounter(technique)
if data:
kb.pageTemplate, kb.errorIsNone = getPageTemplate(data.templatePayload, kb.injection.place)
kb.matchRatio = data.matchRatio
kb.negativeLogic = (technique == PAYLOAD.TECHNIQUE.BOOLEAN) and (data.where == PAYLOAD.WHERE.NEGATIVE)
# Restoring stored conf options
for key, value in kb.injection.conf.items():
if value and (not hasattr(conf, key) or (hasattr(conf, key) and not getattr(conf, key))):
setattr(conf, key, value)
debugMsg = "resuming configuration option '%s' (%s)" % (key, value)
logger.debug(debugMsg)
if value and key == "optimize":
setOptimize()
else:
warnMsg = "there is no injection data available for technique "
warnMsg += "'%s'" % enumValueToNameLookup(PAYLOAD.TECHNIQUE, technique)
logger.warn(warnMsg)
except SqlmapDataException:
errMsg = "missing data in old session file(s). "
errMsg += "Please use '--flush-session' to deal "
errMsg += "with this error"
raise SqlmapNoneDataException(errMsg)
def arrayizeValue(value):
"""
Makes a list out of value if it is not already a list or tuple itself
>>> arrayizeValue(u'1')
[u'1']
"""
if not isListLike(value):
value = [value]
return value
def unArrayizeValue(value):
"""
Makes a value out of iterable if it is a list or tuple itself
>>> unArrayizeValue([u'1'])
u'1'
"""
if isListLike(value):
value = value[0] if len(value) > 0 else None
return value
def flattenValue(value):
"""
Returns an iterator representing flat representation of a given value
>>> [_ for _ in flattenValue([[u'1'], [[u'2'], u'3']])]
[u'1', u'2', u'3']
"""
for i in iter(value):
if isListLike(i):
for j in flattenValue(i):
yield j
else:
yield i
def isListLike(value):
"""
Returns True if the given value is a list-like instance
>>> isListLike([1, 2, 3])
True
>>> isListLike(u'2')
False
"""
return isinstance(value, (list, tuple, set, BigArray))
def getSortedInjectionTests():
"""
    Returns test list prioritized by the DBMS possibly detected from error
messages
"""
retVal = copy.deepcopy(conf.tests)
def priorityFunction(test):
retVal = SORT_ORDER.FIRST
if test.stype == PAYLOAD.TECHNIQUE.UNION:
retVal = SORT_ORDER.LAST
elif 'details' in test and 'dbms' in test.details:
if test.details.dbms in Backend.getErrorParsedDBMSes():
retVal = SORT_ORDER.SECOND
else:
retVal = SORT_ORDER.THIRD
return retVal
if Backend.getErrorParsedDBMSes():
retVal = sorted(retVal, key=priorityFunction)
return retVal
def filterListValue(value, regex):
"""
Returns list with items that have parts satisfying given regular
expression
>>> filterListValue(['users', 'admins', 'logs'], r'(users|admins)')
['users', 'admins']
"""
if isinstance(value, list) and regex:
retVal = filter(lambda _: re.search(regex, _, re.I), value)
else:
retVal = value
return retVal
def showHttpErrorCodes():
"""
Shows all HTTP error codes raised till now
"""
if kb.httpErrorCodes:
warnMsg = "HTTP error codes detected during run:\n"
warnMsg += ", ".join("%d (%s) - %d times" % (code, httplib.responses[code] \
if code in httplib.responses else '?', count) \
for code, count in kb.httpErrorCodes.items())
logger.warn(warnMsg)
if any((str(_).startswith('4') or str(_).startswith('5')) and _ != httplib.INTERNAL_SERVER_ERROR and _ != kb.originalCode for _ in kb.httpErrorCodes.keys()):
msg = "too many 4xx and/or 5xx HTTP error codes "
msg += "could mean that some kind of protection is involved (e.g. WAF)"
logger.debug(msg)
def openFile(filename, mode='r', encoding=UNICODE_ENCODING, errors="replace", buffering=1):
"""
Returns file handle of a given filename
"""
try:
return codecs.open(filename, mode, encoding, errors, buffering)
except IOError:
errMsg = "there has been a file opening error for filename '%s'. " % filename
errMsg += "Please check %s permissions on a file " % ("write" if \
mode and ('w' in mode or 'a' in mode or '+' in mode) else "read")
errMsg += "and that it's not locked by another process."
raise SqlmapSystemException(errMsg)
def decodeIntToUnicode(value):
"""
    Decodes inferred integer value to a unicode character
>>> decodeIntToUnicode(35)
u'#'
>>> decodeIntToUnicode(64)
u'@'
"""
retVal = value
if isinstance(value, int):
try:
if value > 255:
_ = "%x" % value
if len(_) % 2 == 1:
_ = "0%s" % _
retVal = getUnicode(hexdecode(_), encoding="UTF-16" if Backend.isDbms(DBMS.MSSQL) else None)
else:
retVal = getUnicode(chr(value))
except:
retVal = INFERENCE_UNKNOWN_CHAR
return retVal
def unhandledExceptionMessage():
"""
    Returns a detailed message about the unhandled exception that occurred
"""
errMsg = "unhandled exception occurred in %s. It is recommended to retry your " % VERSION_STRING
errMsg += "run with the latest development version from official GitHub "
errMsg += "repository at '%s'. If the exception persists, please open a new issue " % GIT_PAGE
errMsg += "at '%s' " % ISSUES_PAGE
errMsg += "with the following text and any other information required to "
errMsg += "reproduce the bug. The "
errMsg += "developers will try to reproduce the bug, fix it accordingly "
errMsg += "and get back to you\n"
errMsg += "sqlmap version: %s\n" % VERSION_STRING[VERSION_STRING.find('/') + 1:]
errMsg += "Python version: %s\n" % PYVERSION
errMsg += "Operating system: %s\n" % PLATFORM
errMsg += "Command line: %s\n" % re.sub(r".+?\bsqlmap.py\b", "sqlmap.py", " ".join(sys.argv))
errMsg += "Technique: %s\n" % (enumValueToNameLookup(PAYLOAD.TECHNIQUE, kb.technique) if kb.get("technique") else ("DIRECT" if conf.get("direct") else None))
errMsg += "Back-end DBMS: %s" % ("%s (fingerprinted)" % Backend.getDbms() if Backend.getDbms() is not None else "%s (identified)" % Backend.getIdentifiedDbms())
return errMsg
def createGithubIssue(errMsg, excMsg):
"""
Automatically create a Github issue with unhandled exception information
"""
issues = []
try:
issues = getFileItems(paths.GITHUB_HISTORY, unique=True)
except:
pass
finally:
issues = set(issues)
_ = re.sub(r"'[^']+'", "''", excMsg)
_ = re.sub(r"\s+line \d+", "", _)
_ = re.sub(r'File ".+?/(\w+\.py)', "\g<1>", _)
_ = re.sub(r".+\Z", "", _)
key = hashlib.md5(_).hexdigest()[:8]
if key in issues:
return
msg = "\ndo you want to automatically create a new (anonymized) issue "
msg += "with the unhandled exception information at "
msg += "the official Github repository? [y/N] "
try:
test = readInput(msg, default="N")
except:
test = None
if test and test[0] in ("y", "Y"):
ex = None
errMsg = errMsg[errMsg.find("\n"):]
data = {"title": "Unhandled exception (#%s)" % key, "body": "```%s\n```\n```\n%s```" % (errMsg, excMsg)}
req = urllib2.Request(url="https://api.github.com/repos/sqlmapproject/sqlmap/issues", data=json.dumps(data), headers={"Authorization": "token %s" % GITHUB_REPORT_OAUTH_TOKEN})
try:
f = urllib2.urlopen(req)
content = f.read()
except Exception, ex:
content = None
issueUrl = re.search(r"https://github.com/sqlmapproject/sqlmap/issues/\d+", content or "")
if issueUrl:
            infoMsg = "created Github issue can be found at the address '%s'" % issueUrl.group(0)
logger.info(infoMsg)
try:
with open(paths.GITHUB_HISTORY, "a+b") as f:
f.write("%s\n" % key)
except:
pass
else:
warnMsg = "something went wrong while creating a Github issue"
if ex:
warnMsg += " ('%s')" % ex
logger.warn(warnMsg)
def maskSensitiveData(msg):
"""
Masks sensitive data in the supplied message
"""
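    # e.g. values coming from options like --url, --cookie or --auth-cred are
    # replaced with asterisks of the same length before the message is written out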
retVal = getUnicode(msg)
for item in filter(None, map(lambda x: conf.get(x), ("hostname", "googleDork", "authCred", "proxyCred", "tbl", "db", "col", "user", "cookie", "proxy"))):
regex = SENSITIVE_DATA_REGEX % re.sub("(\W)", r"\\\1", getUnicode(item))
while extractRegexResult(regex, retVal):
value = extractRegexResult(regex, retVal)
retVal = retVal.replace(value, '*' * len(value))
if not conf.get("hostname"):
match = re.search(r"(?i)sqlmap.+(-u|--url)\s+([^ ]+)", retVal)
if match:
retVal = retVal.replace(match.group(2), '*' * len(match.group(2)))
if getpass.getuser():
retVal = re.sub(r"(?i)\b%s\b" % re.escape(getpass.getuser()), "*" * len(getpass.getuser()), retVal)
return retVal
def listToStrValue(value):
"""
Flattens list to a string value
>>> listToStrValue([1,2,3])
'1, 2, 3'
"""
if isinstance(value, (set, tuple)):
value = list(value)
if isinstance(value, list):
retVal = value.__str__().lstrip('[').rstrip(']')
else:
retVal = value
return retVal
def getExceptionFrameLocals():
"""
Returns dictionary with local variable content from frame
where exception has been raised
"""
retVal = {}
if sys.exc_info():
trace = sys.exc_info()[2]
while trace.tb_next:
trace = trace.tb_next
retVal = trace.tb_frame.f_locals
return retVal
def intersect(valueA, valueB, lowerCase=False):
"""
Returns intersection of the array-ized values
>>> intersect([1, 2, 3], set([1,3]))
[1, 3]
"""
retVal = []
if valueA and valueB:
valueA = arrayizeValue(valueA)
valueB = arrayizeValue(valueB)
if lowerCase:
valueA = [val.lower() if isinstance(val, basestring) else val for val in valueA]
valueB = [val.lower() if isinstance(val, basestring) else val for val in valueB]
retVal = [val for val in valueA if val in valueB]
return retVal
def cpuThrottle(value):
"""
    Does CPU throttling to reduce CPU consumption
"""
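    # e.g. a value of 10 results in a 1 ms sleep, while a value of 50 gives 25 ms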
delay = 0.00001 * (value ** 2)
time.sleep(delay)
def removeReflectiveValues(content, payload, suppressWarning=False):
"""
Neutralizes reflective values in a given content based on a payload
(e.g. ..search.php?q=1 AND 1=2 --> "...searching for <b>1%20AND%201%3D2</b>..." --> "...searching for <b>__REFLECTED_VALUE__</b>...")
"""
retVal = content
if all([content, payload]) and isinstance(content, unicode) and kb.reflectiveMechanism and not kb.heuristicMode:
def _(value):
while 2 * REFLECTED_REPLACEMENT_REGEX in value:
value = value.replace(2 * REFLECTED_REPLACEMENT_REGEX, REFLECTED_REPLACEMENT_REGEX)
return value
payload = getUnicode(urldecode(payload.replace(PAYLOAD_DELIMITER, ''), convall=True))
regex = _(filterStringValue(payload, r"[A-Za-z0-9]", REFLECTED_REPLACEMENT_REGEX.encode("string-escape")))
if regex != payload:
if all(part.lower() in content.lower() for part in filter(None, regex.split(REFLECTED_REPLACEMENT_REGEX))[1:]): # fast optimization check
parts = regex.split(REFLECTED_REPLACEMENT_REGEX)
retVal = content.replace(payload, REFLECTED_VALUE_MARKER) # dummy approach
if len(parts) > REFLECTED_MAX_REGEX_PARTS: # preventing CPU hogs
regex = _("%s%s%s" % (REFLECTED_REPLACEMENT_REGEX.join(parts[:REFLECTED_MAX_REGEX_PARTS / 2]), REFLECTED_REPLACEMENT_REGEX, REFLECTED_REPLACEMENT_REGEX.join(parts[-REFLECTED_MAX_REGEX_PARTS / 2:])))
parts = filter(None, regex.split(REFLECTED_REPLACEMENT_REGEX))
if regex.startswith(REFLECTED_REPLACEMENT_REGEX):
regex = r"%s%s" % (REFLECTED_BORDER_REGEX, regex[len(REFLECTED_REPLACEMENT_REGEX):])
else:
regex = r"\b%s" % regex
if regex.endswith(REFLECTED_REPLACEMENT_REGEX):
regex = r"%s%s" % (regex[:-len(REFLECTED_REPLACEMENT_REGEX)], REFLECTED_BORDER_REGEX)
else:
regex = r"%s\b" % regex
retVal = re.sub(r"(?i)%s" % regex, REFLECTED_VALUE_MARKER, retVal)
if len(parts) > 2:
regex = REFLECTED_REPLACEMENT_REGEX.join(parts[1:])
retVal = re.sub(r"(?i)\b%s\b" % regex, REFLECTED_VALUE_MARKER, retVal)
if retVal != content:
kb.reflectiveCounters[REFLECTIVE_COUNTER.HIT] += 1
if not suppressWarning:
warnMsg = "reflective value(s) found and filtering out"
singleTimeWarnMessage(warnMsg)
if re.search(r"FRAME[^>]+src=[^>]*%s" % REFLECTED_VALUE_MARKER, retVal, re.I):
warnMsg = "frames detected containing attacked parameter values. Please be sure to "
warnMsg += "test those separately in case that attack on this page fails"
singleTimeWarnMessage(warnMsg)
elif not kb.testMode and not kb.reflectiveCounters[REFLECTIVE_COUNTER.HIT]:
kb.reflectiveCounters[REFLECTIVE_COUNTER.MISS] += 1
if kb.reflectiveCounters[REFLECTIVE_COUNTER.MISS] > REFLECTIVE_MISS_THRESHOLD:
kb.reflectiveMechanism = False
if not suppressWarning:
debugMsg = "turning off reflection removal mechanism (for optimization purposes)"
logger.debug(debugMsg)
return retVal
def normalizeUnicode(value):
"""
Does an ASCII normalization of unicode strings
Reference: http://www.peterbe.com/plog/unicode-to-ascii
>>> normalizeUnicode(u'\u0161u\u0107uraj')
'sucuraj'
"""
return unicodedata.normalize('NFKD', value).encode('ascii', 'ignore') if isinstance(value, unicode) else value
def safeSQLIdentificatorNaming(name, isTable=False):
"""
Returns a safe representation of SQL identificator name (internal data format)
Reference: http://stackoverflow.com/questions/954884/what-special-characters-are-allowed-in-t-sql-column-retVal
"""
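    # Illustrative example: on MySQL a table named "select" (a keyword) is
    # returned as "`select`", while on Microsoft SQL Server a table named
    # "foo bar" becomes "dbo.[foo bar]" (default schema prepended)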
retVal = name
if isinstance(name, basestring):
retVal = getUnicode(name)
_ = isTable and Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE)
if _:
retVal = re.sub(r"(?i)\A%s\." % DEFAULT_MSSQL_SCHEMA, "", retVal)
if retVal.upper() in kb.keywords or (retVal or " ")[0].isdigit() or not re.match(r"\A[A-Za-z0-9_@%s\$]+\Z" % ("." if _ else ""), retVal): # MsSQL is the only DBMS where we automatically prepend schema to table name (dot is normal)
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.ACCESS):
retVal = "`%s`" % retVal.strip("`")
elif Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.DB2):
retVal = "\"%s\"" % retVal.strip("\"")
elif Backend.getIdentifiedDbms() in (DBMS.ORACLE,):
retVal = "\"%s\"" % retVal.strip("\"").upper()
elif Backend.getIdentifiedDbms() in (DBMS.MSSQL,) and not re.match(r"\A\w+\Z", retVal, re.U):
retVal = "[%s]" % retVal.strip("[]")
if _ and DEFAULT_MSSQL_SCHEMA not in retVal and '.' not in re.sub(r"\[[^]]+\]", "", retVal):
retVal = "%s.%s" % (DEFAULT_MSSQL_SCHEMA, retVal)
return retVal
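
# Illustrative usage (a sketch, not an original doctest): assuming a MySQL back-end has
# been identified and "USER NAME" is not among kb.keywords, an identifier that fails the
# plain-name check gets quoted with backticks:
#
#   >>> safeSQLIdentificatorNaming("user name")
#   u'`user name`'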
def unsafeSQLIdentificatorNaming(name):
"""
Extracts identificator's name from its safe SQL representation
"""
retVal = name
if isinstance(name, basestring):
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.ACCESS):
retVal = name.replace("`", "")
elif Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.DB2):
retVal = name.replace("\"", "")
elif Backend.getIdentifiedDbms() in (DBMS.ORACLE,):
retVal = name.replace("\"", "").upper()
elif Backend.getIdentifiedDbms() in (DBMS.MSSQL,):
retVal = name.replace("[", "").replace("]", "")
if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE):
prefix = "%s." % DEFAULT_MSSQL_SCHEMA
if retVal.startswith(prefix):
retVal = retVal[len(prefix):]
return retVal
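
# Round-trip sketch with the function above (again assuming MySQL): the quoting added by
# safeSQLIdentificatorNaming() is stripped back off, e.g. u'`user name`' -> u'user name'.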
def isNoneValue(value):
"""
Returns whether the value is unusable (None or '')
>>> isNoneValue(None)
True
>>> isNoneValue('None')
True
>>> isNoneValue('')
True
>>> isNoneValue([])
True
>>> isNoneValue([2])
False
"""
if isinstance(value, basestring):
return value in ("None", "")
elif isListLike(value):
return all(isNoneValue(_) for _ in value)
elif isinstance(value, dict):
return not any(value)
else:
return value is None
def isNullValue(value):
"""
Returns whether the value contains explicit 'NULL' value
>>> isNullValue(u'NULL')
True
>>> isNullValue(u'foobar')
False
"""
return isinstance(value, basestring) and value.upper() == NULL
def expandMnemonics(mnemonics, parser, args):
"""
Expands mnemonic options
"""
class MnemonicNode(object):
def __init__(self):
self.next = {}
self.current = []
head = MnemonicNode()
pointer = None
for group in parser.option_groups:
for option in group.option_list:
for opt in option._long_opts + option._short_opts:
pointer = head
for char in opt:
if char == "-":
continue
elif char not in pointer.next:
pointer.next[char] = MnemonicNode()
pointer = pointer.next[char]
pointer.current.append(option)
for mnemonic in mnemonics.split(','):
found = None
name = mnemonic.split('=')[0].replace("-", "").strip()
value = mnemonic.split('=')[1] if len(mnemonic.split('=')) > 1 else None
pointer = head
for char in name:
if char in pointer.next:
pointer = pointer.next[char]
else:
pointer = None
break
if pointer in (None, head):
errMsg = "mnemonic '%s' can't be resolved to any parameter name" % name
raise SqlmapSyntaxException(errMsg)
elif len(pointer.current) > 1:
options = {}
for option in pointer.current:
for opt in option._long_opts + option._short_opts:
opt = opt.strip('-')
if opt.startswith(name):
options[opt] = option
if not options:
warnMsg = "mnemonic '%s' can't be resolved" % name
logger.warn(warnMsg)
elif name in options:
found = name
debugMsg = "mnemonic '%s' resolved to %s). " % (name, found)
logger.debug(debugMsg)
else:
found = sorted(options.keys(), key=lambda x: len(x))[0]
warnMsg = "detected ambiguity (mnemonic '%s' can be resolved to: %s). " % (name, ", ".join("'%s'" % key for key in options.keys()))
warnMsg += "Resolved to shortest of those ('%s')" % found
logger.warn(warnMsg)
if found:
found = options[found]
else:
found = pointer.current[0]
debugMsg = "mnemonic '%s' resolved to %s). " % (name, found)
logger.debug(debugMsg)
if found:
try:
value = found.convert_value(found, value)
except OptionValueError:
value = None
if value is not None:
setattr(args, found.dest, value)
elif not found.type: # boolean
setattr(args, found.dest, True)
else:
errMsg = "mnemonic '%s' requires value of type '%s'" % (name, found.type)
raise SqlmapSyntaxException(errMsg)
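
# Usage sketch (values borrowed from the -z option's help example, hence illustrative):
# expandMnemonics("flu,bat,ban,tec=EU", parser, args) resolves each prefix against the
# registered optparse options, roughly --flush-session --batch --banner --technique=EU.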
def safeCSValue(value):
"""
Returns value safe for CSV dumping
Reference: http://tools.ietf.org/html/rfc4180
>>> safeCSValue(u'foo, bar')
u'"foo, bar"'
>>> safeCSValue(u'foobar')
u'foobar'
"""
retVal = value
if retVal and isinstance(retVal, basestring):
if not (retVal[0] == retVal[-1] == '"'):
if any(_ in retVal for _ in (conf.get("csvDel", defaults.csvDel), '"', '\n')):
retVal = '"%s"' % retVal.replace('"', '""')
return retVal
def filterPairValues(values):
"""
Returns only list-like values with length 2
>>> filterPairValues([[1, 2], [3], 1, [4, 5]])
[[1, 2], [4, 5]]
"""
retVal = []
if not isNoneValue(values) and hasattr(values, '__iter__'):
retVal = filter(lambda x: isinstance(x, (tuple, list, set)) and len(x) == 2, values)
return retVal
def randomizeParameterValue(value):
"""
    Randomize a parameter value based on occurrences of alphanumeric characters
>>> random.seed(0)
>>> randomizeParameterValue('foobar')
'rnvnav'
>>> randomizeParameterValue('17')
'83'
"""
retVal = value
value = re.sub(r"%[0-9a-fA-F]{2}", "", value)
for match in re.finditer('[A-Z]+', value):
retVal = retVal.replace(match.group(), randomStr(len(match.group())).upper())
for match in re.finditer('[a-z]+', value):
retVal = retVal.replace(match.group(), randomStr(len(match.group())).lower())
for match in re.finditer('[0-9]+', value):
retVal = retVal.replace(match.group(), str(randomInt(len(match.group()))))
return retVal
def asciifyUrl(url, forceQuote=False):
"""
    Attempts to make a unicode URL usable with ``urllib/urllib2``.
    More specifically, it attempts to convert the unicode object ``url``,
    which is meant to represent an IRI, to a unicode object that,
    containing only ASCII characters, is a valid URI. This involves:
* IDNA/Puny-encoding the domain name.
* UTF8-quoting the path and querystring parts.
See also RFC 3987.
Reference: http://blog.elsdoerfer.name/2008/12/12/opening-iris-in-python/
>>> asciifyUrl(u'http://www.\u0161u\u0107uraj.com')
u'http://www.xn--uuraj-gxa24d.com'
"""
parts = urlparse.urlsplit(url)
if not parts.scheme or not parts.netloc:
        # apparently not a URL
return url
if all(char in string.printable for char in url):
return url
# idna-encode domain
try:
hostname = parts.hostname.encode("idna")
except LookupError:
hostname = parts.hostname.encode(UNICODE_ENCODING)
# UTF8-quote the other parts. We check each part individually if
    # it needs to be quoted - that should catch some additional user
# errors, say for example an umlaut in the username even though
# the path *is* already quoted.
def quote(s, safe):
s = s or ''
# Triggers on non-ascii characters - another option would be:
# urllib.quote(s.replace('%', '')) != s.replace('%', '')
# which would trigger on all %-characters, e.g. "&".
if s.encode("ascii", "replace") != s or forceQuote:
return urllib.quote(s.encode(UNICODE_ENCODING), safe=safe)
return s
username = quote(parts.username, '')
password = quote(parts.password, safe='')
path = quote(parts.path, safe='/')
query = quote(parts.query, safe="&=")
# put everything back together
netloc = hostname
if username or password:
netloc = '@' + netloc
if password:
netloc = ':' + password + netloc
netloc = username + netloc
if parts.port:
netloc += ':' + str(parts.port)
return urlparse.urlunsplit([parts.scheme, netloc, path, query, parts.fragment])
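
# Additional sketch for the path/query quoting branch (illustrative, complements the
# IDNA doctest above):
#
#   >>> asciifyUrl(u'http://www.example.com/\u0161u\u0107uraj?q=\u0107')
#   u'http://www.example.com/%C5%A1u%C4%87uraj?q=%C4%87'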
def isAdminFromPrivileges(privileges):
"""
    Inspects privileges to see if those are coming from an admin user
"""
# In PostgreSQL the usesuper privilege means that the
# user is DBA
retVal = (Backend.isDbms(DBMS.PGSQL) and "super" in privileges)
# In Oracle the DBA privilege means that the
# user is DBA
retVal |= (Backend.isDbms(DBMS.ORACLE) and "DBA" in privileges)
# In MySQL >= 5.0 the SUPER privilege means
# that the user is DBA
retVal |= (Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema and "SUPER" in privileges)
# In MySQL < 5.0 the super_priv privilege means
# that the user is DBA
retVal |= (Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema and "super_priv" in privileges)
# In Firebird there is no specific privilege that means
# that the user is DBA
# TODO: confirm
retVal |= (Backend.isDbms(DBMS.FIREBIRD) and all(_ in privileges for _ in ("SELECT", "INSERT", "UPDATE", "DELETE", "REFERENCES", "EXECUTE")))
return retVal
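
# For instance (sketch): with a PostgreSQL back-end a fetched privilege set such as
# set(["super", "createdb"]) yields True here, while set(["createdb"]) alone does not.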
def findPageForms(content, url, raise_=False, addToTargets=False):
"""
Parses given page content for possible forms
"""
class _(StringIO):
def __init__(self, content, url):
StringIO.__init__(self, unicodeencode(content, kb.pageEncoding) if isinstance(content, unicode) else content)
self._url = url
def geturl(self):
return self._url
if not content:
errMsg = "can't parse forms as the page content appears to be blank"
if raise_:
raise SqlmapGenericException(errMsg)
else:
logger.debug(errMsg)
forms = None
retVal = set()
response = _(content, url)
try:
forms = ParseResponse(response, backwards_compat=False)
except UnicodeError:
pass
except ParseError:
if "<html" in (content or ""):
warnMsg = "badly formed HTML at the given URL ('%s'). Going to filter it" % url
logger.warning(warnMsg)
filtered = _("".join(re.findall(FORM_SEARCH_REGEX, content)), url)
try:
forms = ParseResponse(filtered, backwards_compat=False)
except ParseError:
errMsg = "no success"
if raise_:
raise SqlmapGenericException(errMsg)
else:
logger.debug(errMsg)
if forms:
for form in forms:
try:
for control in form.controls:
if hasattr(control, "items") and not control.disabled:
# if control has selectable items select first non-disabled
for item in control.items:
if not item.disabled:
if not item.selected:
item.selected = True
break
request = form.click()
except (ValueError, TypeError), ex:
errMsg = "there has been a problem while "
errMsg += "processing page forms ('%s')" % ex
if raise_:
raise SqlmapGenericException(errMsg)
else:
logger.debug(errMsg)
else:
url = urldecode(request.get_full_url(), kb.pageEncoding)
method = request.get_method()
data = request.get_data() if request.has_data() else None
data = urldecode(data, kb.pageEncoding, plusspace=False)
if not data and method and method.upper() == HTTPMETHOD.POST:
debugMsg = "invalid POST form with blank data detected"
logger.debug(debugMsg)
continue
# flag to know if we are dealing with the same target host
_ = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], (response.geturl(), url)))
if conf.scope:
if not re.search(conf.scope, url, re.I):
continue
elif not _:
continue
else:
target = (url, method, data, conf.cookie, None)
retVal.add(target)
else:
errMsg = "there were no forms found at the given target URL"
if raise_:
raise SqlmapGenericException(errMsg)
else:
logger.debug(errMsg)
if addToTargets and retVal:
for target in retVal:
kb.targets.add(target)
return retVal
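
# Each entry added to the returned set (and optionally to kb.targets) is a 5-tuple of
# (url, method, POST data or None, conf.cookie, None), as built above.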
def getHostHeader(url):
"""
Returns proper Host header value for a given target URL
>>> getHostHeader('http://www.target.com/vuln.php?id=1')
'www.target.com'
"""
retVal = url
if url:
retVal = urlparse.urlparse(url).netloc
if re.search("http(s)?://\[.+\]", url, re.I):
retVal = extractRegexResult("http(s)?://\[(?P<result>.+)\]", url)
elif any(retVal.endswith(':%d' % _) for _ in (80, 443)):
retVal = retVal.split(':')[0]
return retVal
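
# Sketch of the bracketed (IPv6) branch above, with illustrative values:
#
#   >>> getHostHeader('http://[2001:db8::1]/vuln.php?id=1')
#   '2001:db8::1'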
def checkDeprecatedOptions(args):
"""
Checks for deprecated options
"""
for _ in args:
if _ in DEPRECATED_OPTIONS:
errMsg = "switch/option '%s' is deprecated" % _
if DEPRECATED_OPTIONS[_]:
errMsg += " (hint: %s)" % DEPRECATED_OPTIONS[_]
raise SqlmapSyntaxException(errMsg)
def checkSystemEncoding():
"""
Checks for problematic encodings
"""
if sys.getdefaultencoding() == "cp720":
try:
codecs.lookup("cp720")
except LookupError:
errMsg = "there is a known Python issue (#1616979) related "
errMsg += "to support for charset 'cp720'. Please visit "
errMsg += "'http://blog.oneortheother.info/tip/python-fix-cp720-encoding/index.html' "
errMsg += "and follow the instructions to be able to fix it"
logger.critical(errMsg)
warnMsg = "temporary switching to charset 'cp1256'"
logger.warn(warnMsg)
reload(sys)
sys.setdefaultencoding("cp1256")
def evaluateCode(code, variables=None):
"""
    Executes the given Python code (provided in string form)
"""
try:
exec(code, variables)
except KeyboardInterrupt:
raise
except Exception, ex:
errMsg = "an error occurred while evaluating provided code ('%s'). " % ex
raise SqlmapGenericException(errMsg)
def serializeObject(object_):
"""
Serializes given object
"""
return base64pickle(object_)
def unserializeObject(value):
"""
Unserializes object from given serialized form
>>> unserializeObject(serializeObject([1, 2, 3])) == [1, 2, 3]
True
"""
return base64unpickle(value) if value else None
def resetCounter(technique):
"""
Resets query counter for a given technique
"""
kb.counters[technique] = 0
def incrementCounter(technique):
"""
Increments query counter for a given technique
"""
kb.counters[technique] = getCounter(technique) + 1
def getCounter(technique):
"""
Returns query counter for a given technique
"""
return kb.counters.get(technique, 0)
def applyFunctionRecursively(value, function):
"""
Applies function recursively through list-like structures
>>> applyFunctionRecursively([1, 2, [3, 4, [19]], -9], lambda _: _ > 0)
[True, True, [True, True, [True]], False]
"""
if isListLike(value):
retVal = [applyFunctionRecursively(_, function) for _ in value]
else:
retVal = function(value)
return retVal
def decodeHexValue(value):
"""
    Returns value decoded from DBMS-specific hexadecimal representation
>>> decodeHexValue('3132332031')
u'123 1'
"""
retVal = value
def _(value):
retVal = value
if value and isinstance(value, basestring) and len(value) % 2 == 0:
retVal = hexdecode(retVal)
if not kb.binaryField:
if Backend.isDbms(DBMS.MSSQL) and value.startswith("0x"):
try:
retVal = retVal.decode("utf-16-le")
except UnicodeDecodeError:
pass
elif Backend.isDbms(DBMS.HSQLDB):
try:
retVal = retVal.decode("utf-16-be")
except UnicodeDecodeError:
pass
if not isinstance(retVal, unicode):
retVal = getUnicode(retVal, "utf8")
return retVal
try:
retVal = applyFunctionRecursively(value, _)
except:
singleTimeWarnMessage("there was a problem decoding value '%s' from expected hexadecimal form" % value)
return retVal
def extractExpectedValue(value, expected):
"""
Extracts and returns expected value by a given type
>>> extractExpectedValue(['1'], EXPECTED.BOOL)
True
>>> extractExpectedValue('1', EXPECTED.INT)
1
"""
if expected:
value = unArrayizeValue(value)
if isNoneValue(value):
value = None
elif expected == EXPECTED.BOOL:
if isinstance(value, int):
value = bool(value)
elif isinstance(value, basestring):
value = value.strip().lower()
if value in ("true", "false"):
value = value == "true"
elif value in ("1", "-1"):
value = True
elif value == "0":
value = False
else:
value = None
elif expected == EXPECTED.INT:
if isinstance(value, basestring):
value = int(value) if value.isdigit() else None
return value
def hashDBWrite(key, value, serialize=False):
"""
Helper function for writing session data to HashDB
"""
_ = "%s%s%s" % (conf.url or "%s%s" % (conf.hostname, conf.port), key, HASHDB_MILESTONE_VALUE)
conf.hashDB.write(_, value, serialize)
def hashDBRetrieve(key, unserialize=False, checkConf=False):
"""
Helper function for restoring session data from HashDB
"""
_ = "%s%s%s" % (conf.url or "%s%s" % (conf.hostname, conf.port), key, HASHDB_MILESTONE_VALUE)
retVal = conf.hashDB.retrieve(_, unserialize) if kb.resumeValues and not (checkConf and any((conf.flushSession, conf.freshQueries))) else None
if not kb.inferenceMode and not kb.fileReadMode and any(_ in (retVal or "") for _ in (PARTIAL_VALUE_MARKER, PARTIAL_HEX_VALUE_MARKER)):
retVal = None
return retVal
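
# Both helpers above derive the HashDB key from the target (URL, or hostname and port),
# the caller-supplied key and HASHDB_MILESTONE_VALUE, so previously stored session data
# effectively gets invalidated whenever that milestone constant changes.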
def resetCookieJar(cookieJar):
"""
Cleans cookies from a given cookie jar
"""
if not conf.loadCookies:
cookieJar.clear()
else:
try:
if not cookieJar.filename:
infoMsg = "loading cookies from '%s'" % conf.loadCookies
logger.info(infoMsg)
content = readCachedFileContent(conf.loadCookies)
lines = filter(None, (line.strip() for line in content.split("\n") if not line.startswith('#')))
handle, filename = tempfile.mkstemp(prefix="sqlmapcj-")
os.close(handle)
# Reference: http://www.hashbangcode.com/blog/netscape-http-cooke-file-parser-php-584.html
with open(filename, "w+b") as f:
f.write("%s\n" % NETSCAPE_FORMAT_HEADER_COOKIES)
for line in lines:
_ = line.split()
if len(_) == 7:
_[4] = FORCE_COOKIE_EXPIRATION_TIME
f.write("\n%s" % "\t".join(_))
cookieJar.filename = filename
cookieJar.load(cookieJar.filename, ignore_expires=True)
for cookie in cookieJar:
if cookie.expires < time.time():
warnMsg = "cookie '%s' has expired" % cookie
singleTimeWarnMessage(warnMsg)
cookieJar.clear_expired_cookies()
if not cookieJar._cookies:
errMsg = "no valid cookies found"
raise SqlmapGenericException(errMsg)
except cookielib.LoadError, msg:
errMsg = "there was a problem loading "
errMsg += "cookies file ('%s')" % re.sub(r"(cookies) file '[^']+'", "\g<1>", str(msg))
raise SqlmapGenericException(errMsg)
def decloakToTemp(filename):
"""
Decloaks content of a given file to a temporary file with similar name and extension
"""
content = decloak(filename)
_ = os.path.split(filename[:-1])[-1]
prefix, suffix = os.path.splitext(_)
prefix = prefix.split(os.extsep)[0]
handle, filename = tempfile.mkstemp(prefix=prefix, suffix=suffix)
os.close(handle)
with open(filename, "w+b") as f:
f.write(content)
return filename
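
# Usage sketch (assuming cloaked payload files carry an extra trailing character in their
# name, e.g. "backdoor.asp_"): decloakToTemp("/path/backdoor.asp_") writes the decloaked
# content to something like "/tmp/backdoorXXXXXX.asp", tempfile choosing the random part.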
def prioritySortColumns(columns):
"""
Sorts given column names by length in ascending order while those containing
string 'id' go first
>>> prioritySortColumns(['password', 'userid', 'name'])
['userid', 'name', 'password']
"""
_ = lambda x: x and "id" in x.lower()
return sorted(sorted(columns, key=len), lambda x, y: -1 if _(x) and not _(y) else 1 if not _(x) and _(y) else 0)
def getRequestHeader(request, name):
"""
    Solves an issue with urllib2 Request header case sensitivity
Reference: http://bugs.python.org/issue2275
"""
retVal = None
if request and name:
retVal = max(value if name.upper() == key.upper() else None for key, value in request.header_items())
return retVal
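
# Illustrative behaviour (sketch): header names are matched case-insensitively, so asking
# for "Accept-Encoding" also finds a header stored by urllib2 as "Accept-encoding".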
def isNumber(value):
"""
Returns True if the given value is a number-like object
>>> isNumber(1)
True
>>> isNumber('0')
True
>>> isNumber('foobar')
False
"""
try:
float(value)
except:
return False
else:
return True
def zeroDepthSearch(expression, value):
"""
    Searches for occurrences of value inside expression at zero-depth level
    with regard to the parentheses
"""
retVal = []
depth = 0
for index in xrange(len(expression)):
if expression[index] == '(':
depth += 1
elif expression[index] == ')':
depth -= 1
elif depth == 0 and expression[index:index + len(value)] == value:
retVal.append(index)
return retVal
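
# Illustrative sketch:
#
#   >>> zeroDepthSearch("MAX(a,b),MIN(c,d)", ',')
#   [8]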
def splitFields(fields, delimiter=','):
"""
    Returns a list of (zero-depth) fields split by the delimiter
>>> splitFields('foo, bar, max(foo, bar)')
['foo', 'bar', 'max(foo,bar)']
"""
fields = fields.replace("%s " % delimiter, delimiter)
commas = [-1, len(fields)]
commas.extend(zeroDepthSearch(fields, ','))
commas = sorted(commas)
return [fields[x + 1:y] for (x, y) in zip(commas, commas[1:])]
def pollProcess(process, suppress_errors=False):
"""
Checks for process status (prints . if still running)
"""
while True:
dataToStdout(".")
time.sleep(1)
returncode = process.poll()
if returncode is not None:
if not suppress_errors:
if returncode == 0:
dataToStdout(" done\n")
elif returncode < 0:
dataToStdout(" process terminated by signal %d\n" % returncode)
elif returncode > 0:
dataToStdout(" quit unexpectedly with return code %d\n" % returncode)
break