2008-10-15 19:38:22 +04:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
|
|
|
"""
|
2008-10-15 19:56:32 +04:00
|
|
|
$Id$
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2012-01-11 18:59:46 +04:00
|
|
|
Copyright (c) 2006-2012 sqlmap developers (http://www.sqlmap.org/)
|
2010-10-15 03:18:29 +04:00
|
|
|
See the file 'doc/COPYING' for copying permission
|
2008-10-15 19:38:22 +04:00
|
|
|
"""
|
|
|
|
|
2010-05-24 15:00:49 +04:00
|
|
|
import codecs
|
2012-03-08 14:19:34 +04:00
|
|
|
import cookielib
|
2011-07-06 09:44:47 +04:00
|
|
|
import copy
|
2010-10-28 00:39:50 +04:00
|
|
|
import ctypes
|
2011-07-23 23:04:59 +04:00
|
|
|
import httplib
|
2010-05-26 13:48:20 +04:00
|
|
|
import inspect
|
2011-04-20 02:54:13 +04:00
|
|
|
import logging
|
2011-07-23 23:04:59 +04:00
|
|
|
import ntpath
|
2008-10-15 19:38:22 +04:00
|
|
|
import os
|
2011-07-23 23:04:59 +04:00
|
|
|
import pickle
|
|
|
|
import posixpath
|
2008-10-15 19:38:22 +04:00
|
|
|
import random
|
|
|
|
import re
|
2009-04-22 15:48:07 +04:00
|
|
|
import socket
|
2008-10-15 19:38:22 +04:00
|
|
|
import string
|
2011-07-23 23:04:59 +04:00
|
|
|
import struct
|
2008-10-15 19:38:22 +04:00
|
|
|
import sys
|
|
|
|
import time
|
2011-10-24 22:11:34 +04:00
|
|
|
import urllib
|
2008-10-15 19:38:22 +04:00
|
|
|
import urlparse
|
2011-03-24 23:04:20 +03:00
|
|
|
import unicodedata
|
2010-01-28 20:07:34 +03:00
|
|
|
|
2010-05-28 19:57:43 +04:00
|
|
|
from ConfigParser import DEFAULTSECT
|
|
|
|
from ConfigParser import RawConfigParser
|
2010-04-22 20:13:22 +04:00
|
|
|
from StringIO import StringIO
|
2010-10-12 19:49:04 +04:00
|
|
|
from difflib import SequenceMatcher
|
2011-01-16 23:55:07 +03:00
|
|
|
from math import sqrt
|
2011-12-26 18:08:25 +04:00
|
|
|
from optparse import OptionValueError
|
2010-05-21 17:03:57 +04:00
|
|
|
from subprocess import PIPE
|
|
|
|
from subprocess import Popen as execute
|
2010-01-28 19:50:34 +03:00
|
|
|
from tempfile import NamedTemporaryFile
|
2010-01-29 13:12:09 +03:00
|
|
|
from tempfile import mkstemp
|
2010-06-30 01:07:23 +04:00
|
|
|
from xml.etree import ElementTree as ET
|
2010-10-07 02:43:04 +04:00
|
|
|
from xml.dom import minidom
|
2010-04-22 20:13:22 +04:00
|
|
|
from xml.sax import parse
|
2010-01-24 02:29:34 +03:00
|
|
|
|
2011-10-29 12:32:24 +04:00
|
|
|
from extra.clientform.clientform import ParseResponse
|
|
|
|
from extra.clientform.clientform import ParseError
|
2010-01-28 19:50:34 +03:00
|
|
|
from extra.cloak.cloak import decloak
|
2011-02-08 03:13:39 +03:00
|
|
|
from extra.magic import magic
|
2011-02-04 21:07:21 +03:00
|
|
|
from extra.odict.odict import OrderedDict
|
2012-02-22 14:40:11 +04:00
|
|
|
from extra.safe2bin.safe2bin import safecharencode
|
2012-02-16 13:46:41 +04:00
|
|
|
from lib.core.bigarray import BigArray
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.data import conf
|
|
|
|
from lib.core.data import kb
|
|
|
|
from lib.core.data import logger
|
2009-04-22 15:48:07 +04:00
|
|
|
from lib.core.data import paths
|
2012-03-08 14:57:47 +04:00
|
|
|
from lib.core.convert import base64pickle
|
|
|
|
from lib.core.convert import base64unpickle
|
2010-11-07 03:12:00 +03:00
|
|
|
from lib.core.convert import htmlunescape
|
2011-10-29 12:32:24 +04:00
|
|
|
from lib.core.convert import unicodeencode
|
2011-01-27 21:36:28 +03:00
|
|
|
from lib.core.convert import urldecode
|
2010-01-15 14:44:05 +03:00
|
|
|
from lib.core.convert import urlencode
|
2012-02-29 18:36:23 +04:00
|
|
|
from lib.core.enums import CHARSET_TYPE
|
2010-11-08 12:20:02 +03:00
|
|
|
from lib.core.enums import DBMS
|
2012-02-17 18:22:48 +04:00
|
|
|
from lib.core.enums import EXPECTED
|
2011-04-07 17:57:07 +04:00
|
|
|
from lib.core.enums import HTTPHEADER
|
2011-10-29 12:32:24 +04:00
|
|
|
from lib.core.enums import HTTPMETHOD
|
2011-04-23 20:25:09 +04:00
|
|
|
from lib.core.enums import OS
|
2010-11-08 12:20:02 +03:00
|
|
|
from lib.core.enums import PLACE
|
2010-12-18 12:51:34 +03:00
|
|
|
from lib.core.enums import PAYLOAD
|
2011-05-30 13:46:32 +04:00
|
|
|
from lib.core.enums import REFLECTIVE_COUNTER
|
2011-12-21 23:40:42 +04:00
|
|
|
from lib.core.enums import SORT_ORDER
|
2011-01-10 13:30:17 +03:00
|
|
|
from lib.core.exception import sqlmapDataException
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.exception import sqlmapFilePathException
|
2010-07-30 16:49:25 +04:00
|
|
|
from lib.core.exception import sqlmapGenericException
|
2010-01-15 19:06:59 +03:00
|
|
|
from lib.core.exception import sqlmapNoneDataException
|
2010-03-27 02:23:25 +03:00
|
|
|
from lib.core.exception import sqlmapMissingDependence
|
2012-02-14 13:26:52 +04:00
|
|
|
from lib.core.exception import sqlmapSilentQuitException
|
2010-02-10 15:06:23 +03:00
|
|
|
from lib.core.exception import sqlmapSyntaxException
|
2010-05-27 20:45:09 +04:00
|
|
|
from lib.core.optiondict import optDict
|
2012-04-17 12:41:19 +04:00
|
|
|
from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
|
2011-11-22 01:31:08 +04:00
|
|
|
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
|
|
|
|
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
|
2012-01-07 21:16:14 +04:00
|
|
|
from lib.core.settings import DUMMY_USER_INJECTION
|
2011-01-19 18:26:57 +03:00
|
|
|
from lib.core.settings import INFERENCE_UNKNOWN_CHAR
|
2011-01-30 14:36:03 +03:00
|
|
|
from lib.core.settings import UNICODE_ENCODING
|
2011-04-23 20:25:09 +04:00
|
|
|
from lib.core.settings import DBMS_DICT
|
2012-02-16 13:32:47 +04:00
|
|
|
from lib.core.settings import DBMS_DIRECTORY_DICT
|
2010-03-03 19:19:17 +03:00
|
|
|
from lib.core.settings import DESCRIPTION
|
2011-06-18 02:04:25 +04:00
|
|
|
from lib.core.settings import DUMMY_SQL_INJECTION_CHARS
|
2012-02-24 14:48:19 +04:00
|
|
|
from lib.core.settings import FORMATTER
|
2012-02-07 14:46:55 +04:00
|
|
|
from lib.core.settings import NULL
|
2012-03-13 02:55:57 +04:00
|
|
|
from lib.core.settings import HASHDB_MILESTONE_VALUE
|
2009-06-11 19:01:48 +04:00
|
|
|
from lib.core.settings import IS_WIN
|
2010-05-21 16:09:31 +04:00
|
|
|
from lib.core.settings import PLATFORM
|
2011-01-28 19:15:45 +03:00
|
|
|
from lib.core.settings import PYVERSION
|
|
|
|
from lib.core.settings import VERSION
|
|
|
|
from lib.core.settings import REVISION
|
|
|
|
from lib.core.settings import VERSION_STRING
|
2010-02-25 20:37:46 +03:00
|
|
|
from lib.core.settings import SITE
|
2011-12-20 16:52:41 +04:00
|
|
|
from lib.core.settings import HOST_ALIASES
|
|
|
|
from lib.core.settings import REFERER_ALIASES
|
|
|
|
from lib.core.settings import USER_AGENT_ALIASES
|
2012-05-10 18:22:34 +04:00
|
|
|
from lib.core.settings import PARTIAL_VALUE_MARKER
|
2010-12-25 13:16:20 +03:00
|
|
|
from lib.core.settings import ERROR_PARSING_REGEXES
|
2011-04-14 13:43:36 +04:00
|
|
|
from lib.core.settings import PRINTABLE_CHAR_REGEX
|
2011-12-22 14:59:28 +04:00
|
|
|
from lib.core.settings import DUMP_DEL_MARKER
|
2008-12-19 23:09:46 +03:00
|
|
|
from lib.core.settings import SQL_STATEMENTS
|
2010-03-27 02:23:25 +03:00
|
|
|
from lib.core.settings import SUPPORTED_DBMS
|
2010-12-18 00:29:09 +03:00
|
|
|
from lib.core.settings import UNKNOWN_DBMS_VERSION
|
2011-03-31 12:43:17 +04:00
|
|
|
from lib.core.settings import DEFAULT_MSSQL_SCHEMA
|
2010-10-21 13:51:07 +04:00
|
|
|
from lib.core.settings import DUMP_NEWLINE_MARKER
|
2010-11-16 18:11:03 +03:00
|
|
|
from lib.core.settings import DUMP_CR_MARKER
|
2010-10-21 13:51:07 +04:00
|
|
|
from lib.core.settings import DUMP_TAB_MARKER
|
2012-02-14 18:08:10 +04:00
|
|
|
from lib.core.settings import PARAMETER_AMP_MARKER
|
|
|
|
from lib.core.settings import PARAMETER_SEMICOLON_MARKER
|
2011-12-23 00:14:56 +04:00
|
|
|
from lib.core.settings import LARGE_OUTPUT_THRESHOLD
|
2011-04-01 20:40:28 +04:00
|
|
|
from lib.core.settings import ML
|
2010-12-08 17:46:07 +03:00
|
|
|
from lib.core.settings import MIN_TIME_RESPONSES
|
2011-02-25 12:22:44 +03:00
|
|
|
from lib.core.settings import PAYLOAD_DELIMITER
|
2012-04-12 01:26:00 +04:00
|
|
|
from lib.core.settings import REFLECTED_BORDER_REGEX
|
2012-03-28 23:27:12 +04:00
|
|
|
from lib.core.settings import REFLECTED_REPLACEMENT_REGEX
|
2011-07-13 03:21:15 +04:00
|
|
|
from lib.core.settings import REFLECTED_MAX_REGEX_PARTS
|
2011-02-25 12:22:44 +03:00
|
|
|
from lib.core.settings import REFLECTED_VALUE_MARKER
|
2010-12-21 18:13:13 +03:00
|
|
|
from lib.core.settings import TIME_STDEV_COEFF
|
2010-12-29 22:39:32 +03:00
|
|
|
from lib.core.settings import DYNAMICITY_MARK_LENGTH
|
2011-05-30 13:46:32 +04:00
|
|
|
from lib.core.settings import REFLECTIVE_MISS_THRESHOLD
|
2011-02-02 17:25:16 +03:00
|
|
|
from lib.core.settings import SENSITIVE_DATA_REGEX
|
2012-04-11 01:48:34 +04:00
|
|
|
from lib.core.settings import TEXT_TAG_REGEX
|
2011-12-22 19:42:21 +04:00
|
|
|
from lib.core.settings import UNION_UNIQUE_FIFO_LENGTH
|
2011-02-04 15:43:18 +03:00
|
|
|
from lib.core.settings import URI_QUESTION_MARKER
|
2010-12-24 15:13:48 +03:00
|
|
|
from lib.core.threads import getCurrentThreadData
|
2010-09-15 16:51:02 +04:00
|
|
|
|
|
|
|
class UnicodeRawConfigParser(RawConfigParser):
    """
    RawConfigParser with unicode writing support
    """

    def write(self, fp):
        """
        Write an .ini-format representation of the configuration state.
        """

        def _entry(key, value):
            # Tab-indent continuation lines so multi-line values remain
            # parseable when the file is read back
            return "%s = %s\n" % (key, getUnicode(value, UNICODE_ENCODING).replace('\n', '\n\t'))

        if self._defaults:
            fp.write("[%s]\n" % DEFAULTSECT)

            for key, value in self._defaults.items():
                fp.write(_entry(key, value))

            fp.write("\n")

        for section in self._sections:
            fp.write("[%s]\n" % section)

            for key, value in self._sections[section].items():
                # "__name__" is ConfigParser's internal bookkeeping entry
                if key == "__name__":
                    continue

                if value is None:
                    # Valueless option - emit the bare key
                    fp.write("%s\n" % key)
                else:
                    fp.write(_entry(key, value))

            fp.write("\n")
|
|
|
|
|
2011-01-28 19:36:09 +03:00
|
|
|
class Format:
    @staticmethod
    def humanize(values, chain=" or "):
        """
        Join the given values into a single human readable string
        using the provided chaining separator.
        """
        return chain.join(values)

    # Get methods
    @staticmethod
    def getDbms(versions=None):
        """
        Format the back-end DBMS fingerprint value and return its
        values formatted as a human readable string.

        @return: detected back-end DBMS based upon fingerprint techniques.
        @rtype: C{str}
        """

        if versions is None and Backend.getVersionList():
            versions = Backend.getVersionList()

        if versions is None:
            return Backend.getDbms()

        return "%s %s" % (Backend.getDbms(), " and ".join(versions))

    @staticmethod
    def getErrorParsedDBMSes():
        """
        Parses the knowledge base htmlFp list and return its values
        formatted as a human readable string.

        @return: list of possible back-end DBMS based upon error messages
        parsing.
        @rtype: C{str}
        """

        retVal = None

        # Only meaningful once the heuristic test has run and parsed
        # at least one DBMS name out of error messages
        if kb.htmlFp and kb.heuristicTest is not None:
            retVal = kb.htmlFp[0] if len(kb.htmlFp) == 1 else " or ".join(kb.htmlFp)

        return retVal

    @staticmethod
    def getOs(target, info):
        """
        Formats the back-end operating system fingerprint value
        and return its values formatted as a human readable string.

        Example of info (kb.headersFp) dictionary:

        {
          'distrib': set(['Ubuntu']),
          'type': set(['Linux']),
          'technology': set(['PHP 5.2.6', 'Apache 2.2.9']),
          'release': set(['8.10'])
        }

        Example of info (kb.bannerFp) dictionary:

        {
          'sp': set(['Service Pack 4']),
          'dbmsVersion': '8.00.194',
          'dbmsServicePack': '0',
          'distrib': set(['2000']),
          'dbmsRelease': '2000',
          'type': set(['Windows'])
        }

        @return: detected back-end operating system based upon fingerprint
        techniques.
        @rtype: C{str}
        """

        infoStr = ""

        # Without the OS type nothing else in the fingerprint is reported
        if info and "type" in info:
            infoStr += "%s operating system: %s" % (target, Format.humanize(info["type"]))

            # Optional details appended in fixed order after the type
            for detail in ("distrib", "release", "sp"):
                if detail in info:
                    infoStr += " %s" % Format.humanize(info[detail])

            if "codename" in info:
                infoStr += " (%s)" % Format.humanize(info["codename"])

            if "technology" in info:
                infoStr += "\nweb application technology: %s" % Format.humanize(info["technology"], ", ")

        return infoStr
|
|
|
|
|
|
|
|
class Backend:
    # Registry of everything fingerprinted about the back-end: DBMS name,
    # version(s), OS, service pack and architecture. All state lives in the
    # global knowledge base (kb); this class only wraps access to it.

    # Set methods
    @staticmethod
    def setDbms(dbms):
        # Normalize any user/scanner supplied alias (e.g. "mysql") to the
        # canonical DBMS enum value before storing it
        dbms = aliasToDbmsEnum(dbms)

        if dbms is None:
            return None

        # Little precaution, in theory this condition should always be false
        elif kb.dbms is not None and kb.dbms != dbms:
            msg = "sqlmap previously fingerprinted back-end DBMS "
            msg += "%s. However now it has been fingerprinted " % kb.dbms
            msg += "to be %s. " % dbms
            msg += "Please, specify which DBMS is "
            msg += "correct [%s (default)/%s] " % (kb.dbms, dbms)

            # Interactive conflict resolution: loop until the user picks
            # one of the two candidate DBMSes
            while True:
                _ = readInput(msg, default=kb.dbms)

                if aliasToDbmsEnum(_) == kb.dbms:
                    # Keep the previously fingerprinted value
                    break
                elif aliasToDbmsEnum(_) == dbms:
                    kb.dbms = aliasToDbmsEnum(_)
                    break
                else:
                    warnMsg = "invalid value"
                    logger.warn(warnMsg)

        elif kb.dbms is None:
            kb.dbms = aliasToDbmsEnum(dbms)

        return kb.dbms

    @staticmethod
    def setVersion(version):
        # Versions are always stored as a list; a plain string replaces
        # any previously stored value(s). Non-string input is ignored.
        if isinstance(version, basestring):
            kb.dbmsVersion = [version]

        return kb.dbmsVersion

    @staticmethod
    def setVersionList(versionsList):
        # Accepts either a ready-made list or a single version string
        if isinstance(versionsList, list):
            kb.dbmsVersion = versionsList
        elif isinstance(versionsList, basestring):
            Backend.setVersion(versionsList)
        else:
            logger.error("invalid format of versionsList")

    @staticmethod
    def forceDbms(dbms, sticky=False):
        # Temporarily override the detected DBMS; a "sticky" override
        # survives subsequent (non-forced) flush calls
        if not kb.stickyFlag:
            kb.forcedDbms = aliasToDbmsEnum(dbms)
            kb.stickyFlag = sticky

    @staticmethod
    def flushForcedDbms(force=False):
        # Clear the override unless it was set sticky (use force=True to
        # clear even a sticky override)
        if not kb.stickyFlag or force:
            kb.forcedDbms = None
            kb.stickyFlag = False

    @staticmethod
    def setOs(os):
        if os is None:
            return None

        # Little precaution, in theory this condition should always be false
        elif kb.os is not None and isinstance(os, basestring) and kb.os.lower() != os.lower():
            msg = "sqlmap previously fingerprinted back-end DBMS "
            msg += "operating system %s. However now it has " % kb.os
            msg += "been fingerprinted to be %s. " % os
            msg += "Please, specify which OS is "
            msg += "correct [%s (default)/%s] " % (kb.os, os)

            # Interactive conflict resolution, same pattern as setDbms()
            while True:
                _ = readInput(msg, default=kb.os)

                if _ == kb.os:
                    break
                elif _ == os:
                    kb.os = _.capitalize()
                    break
                else:
                    warnMsg = "invalid value"
                    logger.warn(warnMsg)

        elif kb.os is None and isinstance(os, basestring):
            kb.os = os.capitalize()

        return kb.os

    @staticmethod
    def setOsVersion(version):
        # Write-once: only set when nothing has been stored yet
        if version is None:
            return None

        elif kb.osVersion is None and isinstance(version, basestring):
            kb.osVersion = version

    @staticmethod
    def setOsServicePack(sp):
        # Write-once: only set when nothing has been stored yet.
        # Note that the service pack is expected as an int here.
        if sp is None:
            return None

        elif kb.osSP is None and isinstance(sp, int):
            kb.osSP = sp

    @staticmethod
    def setArch():
        # Interactively ask the user for the back-end architecture
        # (32-bit is the default answer)
        msg = "what is the back-end database management system architecture?"
        msg += "\n[1] 32-bit (default)"
        msg += "\n[2] 64-bit"

        while True:
            _ = readInput(msg, default='1')

            if isinstance(_, basestring) and _.isdigit() and int(_) in (1, 2):
                kb.arch = 32 if int(_) == 1 else 64
                break
            else:
                warnMsg = "invalid value. Valid values are 1 and 2"
                logger.warn(warnMsg)

        return kb.arch

    # Get methods
    @staticmethod
    def getForcedDbms():
        return aliasToDbmsEnum(kb.forcedDbms)

    @staticmethod
    def getDbms():
        # kb.get() guard: kb may not be initialized yet during early startup
        return aliasToDbmsEnum(kb.dbms) if kb.get('dbms') else None

    @staticmethod
    def getErrorParsedDBMSes():
        """
        Returns array with parsed DBMS names till now

        This functions is called to:

        1. Sort the tests, getSortedInjectionTests() - detection phase.
        2. Ask user whether or not skip specific DBMS tests in detection phase,
           lib/controller/checks.py - detection phase.
        3. Sort the fingerprint of the DBMS, lib/controller/handler.py -
           fingerprint phase.
        """

        # Error-message parsing results are only trusted once the
        # heuristic test has actually run
        return kb.htmlFp if kb.heuristicTest is not None else []

    @staticmethod
    def getIdentifiedDbms():
        # Best-effort DBMS resolution, in decreasing order of confidence:
        # forced > fingerprinted > user-supplied (--dbms) > error-parsed
        dbms = None

        if not kb:
            pass
        elif Backend.getForcedDbms() is not None:
            dbms = Backend.getForcedDbms()
        elif Backend.getDbms() is not None:
            dbms = kb.dbms
        elif conf.get('dbms'):
            dbms = conf.dbms
        elif len(Backend.getErrorParsedDBMSes()) > 0:
            dbms = Backend.getErrorParsedDBMSes()[0]

        return aliasToDbmsEnum(dbms)

    @staticmethod
    def getVersion():
        # First (i.e. primary) detected version, or None
        if len(kb.dbmsVersion) > 0:
            return kb.dbmsVersion[0]
        else:
            return None

    @staticmethod
    def getVersionList():
        if len(kb.dbmsVersion) > 0:
            return kb.dbmsVersion
        else:
            return None

    @staticmethod
    def getOs():
        return kb.os

    @staticmethod
    def getOsVersion():
        return kb.osVersion

    @staticmethod
    def getOsServicePack():
        return kb.osSP

    @staticmethod
    def getArch():
        # Lazily prompts the user on first access
        if kb.arch is None:
            Backend.setArch()
        return kb.arch

    # Comparison methods
    @staticmethod
    def isDbms(dbms):
        # Prefer the hard-fingerprinted value; fall back to the
        # best-effort identification otherwise
        if Backend.getDbms() is not None:
            return Backend.getDbms() == aliasToDbmsEnum(dbms)
        else:
            return Backend.getIdentifiedDbms() == aliasToDbmsEnum(dbms)

    @staticmethod
    def isDbmsWithin(aliases):
        return Backend.getDbms() is not None and Backend.getDbms().lower() in aliases

    @staticmethod
    def isVersion(version):
        return Backend.getVersion() is not None and Backend.getVersion() == version

    @staticmethod
    def isVersionWithin(versionList):
        if Backend.getVersionList() is None:
            return False

        for _ in Backend.getVersionList():
            if _ != UNKNOWN_DBMS_VERSION and _ in versionList:
                return True

        return False

    @staticmethod
    def isVersionGreaterOrEqualThan(version):
        # NOTE(review): this is a lexicographic string comparison, so e.g.
        # "10" >= "9" is False - adequate only for same-width version strings
        return Backend.getVersion() is not None and str(Backend.getVersion()) >= str(version)

    @staticmethod
    def isOs(os):
        return Backend.getOs() is not None and Backend.getOs().lower() == os.lower()
|
2011-02-07 03:21:17 +03:00
|
|
|
|
2011-07-24 13:19:33 +04:00
|
|
|
# Reference: http://code.activestate.com/recipes/325205-cache-decorator-in-python-24/
|
|
|
|
def cachedmethod(f, cache={}):
    """
    Return a memoizing wrapper around f.

    Results are keyed by the wrapped function together with its
    positional and keyword arguments (so the shared module-level
    cache can safely serve several decorated functions at once).
    """

    def g(*args, **kwargs):
        key = (f, tuple(args), frozenset(kwargs.items()))

        try:
            return cache[key]
        except KeyError:
            cache[key] = f(*args, **kwargs)
            return cache[key]

    return g
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def paramToDict(place, parameters=None):
    """
    Split the parameters into names and values, check if these parameters
    are within the testable parameters and return in a dictionary.

    @param place: where sqlmap has to work, can be GET, POST or Cookie.
    @type place: C{str}

    @param parameters: parameters string in the format for instance
    'p1=v1&p2=v2' (GET and POST) or 'p1=v1;p2=v2' (Cookie).
    @type parameters: C{str}

    @return: the parameters in a dictionary.
    @rtype: C{str}
    """

    testableParameters = OrderedDict()

    # Fall back to the already-parsed parameter string for this place
    if place in conf.parameters and not parameters:
        parameters = conf.parameters[place]

    if place != PLACE.SOAP:
        parameters = parameters.replace(", ", ",")
        # Shield HTML entities (e.g. "&amp;") so their '&'/';' characters
        # survive the delimiter split below; restored per-element later
        parameters = re.sub(r"&(\w{1,4});", r"%s\g<1>%s" % (PARAMETER_AMP_MARKER, PARAMETER_SEMICOLON_MARKER), parameters)
        splitParams = parameters.split(conf.pDel or (DEFAULT_COOKIE_DELIMITER if place == PLACE.COOKIE else DEFAULT_GET_POST_DELIMITER))

        for element in splitParams:
            # Restore the HTML entities shielded above
            element = re.sub(r"%s(.+?)%s" % (PARAMETER_AMP_MARKER, PARAMETER_SEMICOLON_MARKER), r"&\g<1>;", element)
            elem = element.split("=")

            if len(elem) >= 2:
                parameter = elem[0].replace(" ", "")

                # Testable when no explicit -p was given, or this name was listed
                condition = not conf.testParameter
                condition |= parameter in conf.testParameter

                if condition:
                    # Re-join in case the value itself contained '=' characters
                    testableParameters[parameter] = "=".join(elem[1:])
                    # Heuristic check for leftover manual-injection chars or
                    # obviously bogus numeric values (e.g. "999...")
                    if testableParameters[parameter].strip(DUMMY_SQL_INJECTION_CHARS) != testableParameters[parameter]\
                        or re.search(r'\A9{3,}', testableParameters[parameter]) or re.search(DUMMY_USER_INJECTION, testableParameters[parameter]):
                        warnMsg = "it appears that you have provided tainted parameter values "
                        warnMsg += "('%s') with most probably leftover " % element
                        warnMsg += "chars from manual sql injection "
                        warnMsg += "tests (%s) or non-valid numerical value. " % DUMMY_SQL_INJECTION_CHARS
                        warnMsg += "Please, always use only valid parameter values "
                        warnMsg += "so sqlmap could be able to properly run "
                        logger.warn(warnMsg)

                        message = "Are you sure you want to continue? [y/N] "
                        test = readInput(message, default="N")
                        if test[0] not in ("y", "Y"):
                            raise sqlmapSilentQuitException

    else:
        # SOAP: parameters arrive as an XML document; each leaf tag is a
        # potential parameter
        root = ET.XML(parameters)
        iterator = root.getiterator()

        for child in iterator:
            parameter = child.tag

            # Strip any XML namespace prefix ("{ns}tag") for -p matching
            if "}" in parameter:
                testParam = parameter.split("}")[1]
            else:
                testParam = parameter

            condition = not conf.testParameter
            condition |= testParam in conf.testParameter

            if condition:
                testableParameters[parameter] = child.text

    # Warn about user-requested (-p) parameters that were not found here
    if conf.testParameter and not testableParameters:
        paramStr = ", ".join(test for test in conf.testParameter)

        if len(conf.testParameter) > 1:
            warnMsg = "provided parameters '%s' " % paramStr
            warnMsg += "are not inside the %s" % place
            logger.warn(warnMsg)
        else:
            parameter = conf.testParameter[0]

            # Header-alias parameters (User-Agent/Referer/Host) legitimately
            # live outside this place, hence no warning for them
            if not intersect(USER_AGENT_ALIASES + REFERER_ALIASES + HOST_ALIASES, parameter, True):
                warnMsg = "provided parameter '%s' " % paramStr
                warnMsg += "is not inside the %s" % place
                logger.warn(warnMsg)

    elif len(conf.testParameter) != len(testableParameters.keys()):
        for parameter in conf.testParameter:
            if parameter not in testableParameters:
                warnMsg = "provided parameter '%s' " % parameter
                warnMsg += "is not inside the %s" % place
                logger.warn(warnMsg)

    return testableParameters
|
|
|
|
|
2011-01-23 23:47:06 +03:00
|
|
|
def getDocRoot():
    """
    Try to infer the web server document root from absolute file paths
    collected so far (kb.absFilePaths); fall back to asking the user,
    with OS-dependent defaults.

    @return: document root path(s) - a string when inferred, or a list
    when supplied by the user / taken from the defaults.
    """

    docRoot = None
    pagePath = directoryPath(conf.path)

    if Backend.isOs(OS.WINDOWS):
        defaultDocRoot = ["C:/xampp/htdocs/", "C:/Inetpub/wwwroot/"]
    else:
        defaultDocRoot = ["/var/www/"]

    if kb.absFilePaths:
        for absFilePath in kb.absFilePaths:
            # A bare root directory carries no document-root information
            if directoryPath(absFilePath) == '/':
                continue

            absFilePath = normalizePath(absFilePath)
            absFilePathWin = None

            if isWindowsPath(absFilePath):
                # Keep the Windows form aside; work on a drive-letter-less
                # posix form so the URL path can be matched inside it
                absFilePathWin = posixToNtSlashes(absFilePath)
                absFilePath = ntToPosixSlashes(absFilePath[2:])
            elif isWindowsDriveLetterPath(absFilePath):
                absFilePath = absFilePath[2:]

            if pagePath in absFilePath:
                # Everything before the URL path inside the absolute file
                # path is the candidate document root
                index = absFilePath.index(pagePath)
                docRoot = absFilePath[:index]

                if len(docRoot) == 0:
                    docRoot = None
                    continue

                if absFilePathWin:
                    # NOTE(review): drive letter is assumed to be C: here
                    docRoot = "C:/%s" % ntToPosixSlashes(docRoot)

                docRoot = normalizePath(docRoot)
                break

    if docRoot:
        infoMsg = "retrieved the web server document root: '%s'" % docRoot
        logger.info(infoMsg)
    else:
        warnMsg = "unable to retrieve the web server document root"
        logger.warn(warnMsg)

        message = "please provide the web server document root "
        message += "[%s]: " % ",".join(root for root in defaultDocRoot)
        inputDocRoot = readInput(message, default=defaultDocRoot)

        if inputDocRoot:
            # The user may supply several comma-separated roots
            if isinstance(inputDocRoot, basestring):
                docRoot = inputDocRoot.split(',')
            else:
                docRoot = inputDocRoot
        else:
            docRoot = defaultDocRoot

    return docRoot
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2011-01-23 23:47:06 +03:00
|
|
|
def getDirs():
    """
    Collect candidate web server directories for file upload attempts:
    directories of known absolute file paths, the directory taken from
    the target URL, and any extra paths supplied interactively.

    @return: set of candidate directories (always contains '/').
    """

    retVal = set("/")

    if not kb.absFilePaths:
        warnMsg = "unable to retrieve any web server path"
        logger.warn(warnMsg)
    else:
        infoMsg = "retrieved web server full paths: "
        infoMsg += "'%s'" % ", ".join(ntToPosixSlashes(path) for path in kb.absFilePaths)
        logger.info(infoMsg)

        for path in kb.absFilePaths:
            if path:
                retVal.add(ntToPosixSlashes(directoryPath(path)))

    # Directory component of the target URL itself is also a candidate
    webDir = extractRegexResult(r"//[^/]+?/(?P<result>.*)/.", conf.url)

    if webDir:
        retVal.add(webDir)

    message = "please provide any additional web server full path to try "
    message += "to upload the agent [Enter for None]: "
    inputDirs = readInput(message)

    if inputDirs:
        for inputDir in inputDirs.replace(", ", ",").split(","):
            if inputDir:
                retVal.add(inputDir)

    return retVal
|
2010-03-27 02:23:25 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def filePathToString(filePath):
    """
    Returns a flat string representation of the given file path with
    path separators, spaces and drive colons replaced by underscores
    (suitable for use as a local file name)
    """

    retVal = filePath

    for character in ("/", "\\", " ", ":"):
        retVal = retVal.replace(character, "_")

    return retVal
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2011-06-08 18:35:23 +04:00
|
|
|
def singleTimeWarnMessage(message):
    """
    Logs given warning message only once per run (convenience wrapper
    around singleTimeLogMessage)
    """
    singleTimeLogMessage(message, logging.WARN)
|
|
|
|
|
|
|
|
def singleTimeLogMessage(message, level=logging.INFO, flag=None):
    """
    Logs given message only once per run; deduplication key is the
    provided flag or, when absent, the hash of the message itself
    """

    key = hash(message) if flag is None else flag

    if key in kb.singleLogFlags:
        return

    kb.singleLogFlags.add(key)
    logger.log(level, message)
|
|
|
|
|
2010-09-30 16:35:45 +04:00
|
|
|
def dataToStdout(data, forceOutput=False):
    """
    Writes text to the stdout (console) stream
    """

    # skip console output once a worker thread has raised an exception
    if not kb.get("threadException"):
        if forceOutput or not getCurrentThreadData().disableStdOut:
            try:
                # serialize writes between threads by reusing the logging
                # module's global lock
                if kb.get("multiThreadMode"):
                    logging._acquireLock()

                # Reference: http://bugs.python.org/issue1602
                if IS_WIN:
                    output = data.encode('ascii', "replace")

                    # warn (only once) when data contained characters the
                    # Windows console cannot display
                    if output != data:
                        warnMsg = "cannot properly display Unicode characters "
                        warnMsg += "inside Windows OS command prompt "
                        warnMsg += "(http://bugs.python.org/issue1602). All "
                        warnMsg += "unhandled occurances will result in "
                        warnMsg += "replacement with '?' character. Please, find "
                        warnMsg += "proper character representation inside "
                        warnMsg += "corresponding output files. "
                        singleTimeWarnMessage(warnMsg)

                    sys.stdout.write(output)
                else:
                    sys.stdout.write(data.encode(sys.stdout.encoding))
            except:
                # fall back to the project-wide encoding on any encode error
                sys.stdout.write(data.encode(UNICODE_ENCODING))
            finally:
                sys.stdout.flush()

            if kb.get("multiThreadMode"):
                logging._releaseLock()

            # tell the log formatter whether it must prepend a newline
            # before its next record (cursor was left mid-line)
            setFormatterPrependFlag(len(data) == 1 and data not in ('\n', '\r') or len(data) > 2 and data[0] == '\r' and data[-1] != '\n')
|
2010-03-27 02:23:25 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def dataToSessionFile(data):
    """
    Appends given data to the session file; no-op when session storage
    is disabled or temporarily suppressed
    """

    if conf.sessionFile and not kb.suppressSession:
        conf.sessionFP.write(data)
        conf.sessionFP.flush()
|
2010-03-27 02:23:25 +03:00
|
|
|
|
2010-11-08 14:22:47 +03:00
|
|
|
def dataToTrafficFile(data):
    """
    Appends given data to the HTTP traffic log file; no-op when traffic
    logging is not enabled
    """

    if conf.trafficFile:
        conf.trafficFP.write(data)
        conf.trafficFP.flush()
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def dataToDumpFile(dumpFile, data):
    """
    Writes data to the given (already opened) dump file, flushing
    immediately so partial results survive an interrupted run
    """
    dumpFile.write(data)
    dumpFile.flush()
|
2010-03-27 02:23:25 +03:00
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
def dataToOutFile(data):
    """
    Writes given (retrieved file) content to a file inside the per-target
    files directory and returns the resulting path
    """

    if not data:
        return "No data retrieved"

    rFile = filePathToString(conf.rFile)
    rFilePath = "%s%s%s" % (conf.filePath, os.sep, rFile)
    rFileFP = codecs.open(rFilePath, "wb")

    try:
        # close is guaranteed even if write() raises (e.g. disk full),
        # avoiding a leaked file handle
        rFileFP.write(data)
        rFileFP.flush()
    finally:
        rFileFP.close()

    return rFilePath
|
|
|
|
|
2012-03-28 17:31:07 +04:00
|
|
|
def strToHex(value):
    """
    Converts string value to its (uppercase) hexadecimal representation
    """

    raw = value.encode(UNICODE_ENCODING) if isinstance(value, unicode) else value
    return raw.encode("hex").upper()
|
2010-09-14 01:01:46 +04:00
|
|
|
|
2011-06-27 18:14:49 +04:00
|
|
|
def readInput(message, default=None, checkBatch=True):
    """
    Reads input from terminal
    """

    if "\n" in message:
        # multi-line prompts put the cursor on a fresh "> " line
        message += "%s> " % ("\n" if message.count("\n") > 1 else "")
    elif message[-1] == ']':
        message += " "

    if checkBatch and conf.batch:
        # batch mode: log the prompt (with the implied answer) and return
        # the default without blocking on user input
        if isinstance(default, (list, tuple, set)):
            options = ",".join(getUnicode(opt, UNICODE_ENCODING) for opt in default)
        elif default:
            options = getUnicode(default, UNICODE_ENCODING)
        else:
            options = unicode()

        infoMsg = "%s%s" % (getUnicode(message), options)
        logger.info(infoMsg)

        debugMsg = "used the default behaviour, running in batch mode"
        logger.debug(debugMsg)

        data = default
    else:
        # hold the logging lock so log records don't interleave with the
        # interactive prompt
        logging._acquireLock()
        dataToStdout("\r%s" % message, True)
        data = raw_input()
        #data = raw_input(message.encode(sys.stdout.encoding or UNICODE_ENCODING))
        logging._releaseLock()

        # empty answer falls back to the default
        if not data:
            data = default

    return data
|
2010-09-14 01:01:46 +04:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def randomRange(start=0, stop=1000):
    """
    Returns random integer value from the inclusive range [start, stop]
    """

    value = random.randint(start, stop)
    return int(value)
|
|
|
|
|
|
|
|
def randomInt(length=4):
    """
    Returns random integer value with provided number of digits
    (the first digit is never '0', so the length is preserved)
    """

    pool = string.digits
    chars = [random.choice(pool.replace('0', '') if index == 0 else pool) for index in range(length)]
    return int("".join(chars))
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2010-10-11 16:26:35 +04:00
|
|
|
def randomStr(length=4, lowercase=False, alphabet=None):
    """
    Returns random string value with provided number of characters,
    drawn from the given alphabet, lowercase letters, or all letters
    """

    if alphabet:
        charset = alphabet
    elif lowercase:
        charset = string.lowercase
    else:
        charset = string.letters

    return "".join(random.choice(charset) for _ in range(length))
|
2010-09-13 17:31:01 +04:00
|
|
|
|
2011-12-21 23:40:42 +04:00
|
|
|
def sanitizeStr(value):
    """
    Sanitizes string value in respect to newline and line-feed characters
    """

    retVal = getUnicode(value)
    retVal = retVal.replace("\n", " ")
    return retVal.replace("\r", "")
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
def checkFile(filename):
|
|
|
|
"""
|
2012-03-28 17:31:07 +04:00
|
|
|
Checks for file existence
|
2008-10-15 19:38:22 +04:00
|
|
|
"""
|
|
|
|
|
|
|
|
if not os.path.exists(filename):
|
|
|
|
raise sqlmapFilePathException, "unable to read file '%s'" % filename
|
2010-03-22 20:38:19 +03:00
|
|
|
|
2011-12-21 23:40:42 +04:00
|
|
|
def replaceNewlineTabs(value, stdout=False):
    """
    Replaces newline, carriage-return and tab characters either with
    plain spaces (console output) or with dump-file marker placeholders;
    the dump delimiter character is always replaced by its marker
    """

    if value is None:
        return None

    if stdout:
        replacements = (("\n", " "), ("\r", " "), ("\t", " "))
    else:
        replacements = (("\n", DUMP_NEWLINE_MARKER), ("\r", DUMP_CR_MARKER), ("\t", DUMP_TAB_MARKER))

    retVal = value
    for old, new in replacements:
        retVal = retVal.replace(old, new)

    return retVal.replace(kb.chars.delimiter, DUMP_DEL_MARKER)
|
2010-10-21 13:51:07 +04:00
|
|
|
|
2011-12-21 23:40:42 +04:00
|
|
|
def restoreDumpMarkedChars(value, onlyNewlineTab=False):
    """
    Restores original whitespace characters from their dump-file marker
    placeholders inside the given value (non-string values pass through)
    """

    retVal = value

    if isinstance(retVal, basestring):
        for marker, char in ((DUMP_NEWLINE_MARKER, "\n"), (DUMP_CR_MARKER, "\r"), (DUMP_TAB_MARKER, "\t")):
            retVal = retVal.replace(marker, char)

        if not onlyNewlineTab:
            retVal = retVal.replace(DUMP_DEL_MARKER, ", ")

    return retVal
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
def banner():
    """
    Prints the sqlmap banner together with its version information
    """

    bannerText = """\n %s - %s\n %s\n\n""" % (VERSION_STRING, DESCRIPTION, SITE)
    dataToStdout(bannerText, forceOutput=True)
|
2010-09-13 17:31:01 +04:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def parsePasswordHash(password):
    """
    Pretty-prints a Microsoft SQL Server password hash by splitting it
    into its components (header, salt, case variants); any other value
    passes through unchanged (empty values become NULL)
    """

    # indentation used when pretty-printing the hash components
    blank = " " * 8

    if not password or password == " ":
        password = NULL

    if Backend.isDbms(DBMS.MSSQL) and password != NULL and isHexEncodedString(password):
        hexPassword = password
        password = "%s\n" % hexPassword
        password += "%sheader: %s\n" % (blank, hexPassword[:6])
        password += "%ssalt: %s\n" % (blank, hexPassword[6:14])
        password += "%smixedcase: %s\n" % (blank, hexPassword[14:54])

        # the uppercase variant is only appended for versions outside
        # 2005/2008 (their hashes have no such component)
        if not Backend.isVersionWithin(("2005", "2008")):
            password += "%suppercase: %s" % (blank, hexPassword[54:])

    return password
|
2010-01-05 19:15:31 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def cleanQuery(query):
    """
    Returns a copy of the query with all recognized SQL statement
    prefixes uppercased (used for consistent further processing)
    """

    upperQuery = query

    for sqlStatements in SQL_STATEMENTS.values():
        for sqlStatement in sqlStatements:
            # re.escape() neutralizes every regex metacharacter inside the
            # statement prefix (previously only '(' was escaped, so any
            # other metacharacter would corrupt the pattern); escaped
            # spaces and word characters still match themselves
            sqlStatementEsc = re.escape(sqlStatement)
            queryMatch = re.search("(%s)" % sqlStatementEsc, query, re.I)

            if queryMatch and "sys_exec" not in query:
                upperQuery = upperQuery.replace(queryMatch.group(1), sqlStatement.upper())

    return upperQuery
|
2010-09-13 17:31:01 +04:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def setPaths():
    """
    Sets absolute paths for project directories and files
    """

    # sqlmap paths
    paths.SQLMAP_EXTRAS_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "extra")
    paths.SQLMAP_PROCS_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "procs")
    paths.SQLMAP_SHELL_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "shell")
    paths.SQLMAP_TAMPER_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "tamper")
    paths.SQLMAP_TXT_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "txt")
    paths.SQLMAP_UDF_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "udf")
    paths.SQLMAP_XML_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "xml")
    paths.SQLMAP_XML_BANNER_PATH = os.path.join(paths.SQLMAP_XML_PATH, "banner")
    paths.SQLMAP_OUTPUT_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "output")
    # "%s" placeholders below are presumably filled in later with the
    # per-target output subdirectory name - TODO confirm against callers
    paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
    paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files")
    paths.SQLMAP_SEXEC_PATH = os.path.join(paths.SQLMAP_EXTRAS_PATH, "shellcodeexec")

    # sqlmap files
    paths.SQLMAP_HISTORY = os.path.join(paths.SQLMAP_ROOT_PATH, ".sqlmap_history")
    # randomized name avoids clobbering a user's saved configuration
    paths.SQLMAP_CONFIG = os.path.join(paths.SQLMAP_ROOT_PATH, "sqlmap-%s.conf" % randomStr())
    paths.COMMON_COLUMNS = os.path.join(paths.SQLMAP_TXT_PATH, "common-columns.txt")
    paths.COMMON_TABLES = os.path.join(paths.SQLMAP_TXT_PATH, "common-tables.txt")
    paths.COMMON_OUTPUTS = os.path.join(paths.SQLMAP_TXT_PATH, 'common-outputs.txt')
    paths.SQL_KEYWORDS = os.path.join(paths.SQLMAP_TXT_PATH, "keywords.txt")
    paths.SMALL_DICT = os.path.join(paths.SQLMAP_TXT_PATH, "smalldict.txt")
    paths.USER_AGENTS = os.path.join(paths.SQLMAP_TXT_PATH, "user-agents.txt")
    paths.WORDLIST = os.path.join(paths.SQLMAP_TXT_PATH, "wordlist.txt")
    paths.PHPIDS_RULES_XML = os.path.join(paths.SQLMAP_XML_PATH, "phpids_rules.xml")
    paths.ERRORS_XML = os.path.join(paths.SQLMAP_XML_PATH, "errors.xml")
    paths.PAYLOADS_XML = os.path.join(paths.SQLMAP_XML_PATH, "payloads.xml")
    paths.INJECTIONS_XML = os.path.join(paths.SQLMAP_XML_PATH, "injections.xml")
    paths.LIVE_TESTS_XML = os.path.join(paths.SQLMAP_XML_PATH, "livetests.xml")
    paths.QUERIES_XML = os.path.join(paths.SQLMAP_XML_PATH, "queries.xml")
    paths.GENERIC_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "generic.xml")
    paths.MSSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "mssql.xml")
    paths.MYSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "mysql.xml")
    paths.ORACLE_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "oracle.xml")
    paths.PGSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "postgresql.xml")
|
2010-09-13 17:31:01 +04:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def weAreFrozen():
    """
    Returns whether we are frozen via py2exe.
    This will affect how we find out where we are located.
    Reference: http://www.py2exe.org/index.cgi/WhereAmI
    """

    frozen = hasattr(sys, "frozen")
    return frozen
|
|
|
|
|
2010-03-27 02:23:25 +03:00
|
|
|
def parseTargetDirect():
    """
    Parse target dbms and set some attributes into the configuration singleton.
    """

    if not conf.direct:
        return

    details = None
    remote = False

    # conf.direct has the form dbms://[user:pass@][host:port/]db -
    # try each supported DBMS name as the scheme
    for dbms in SUPPORTED_DBMS:
        details = re.search("^(?P<dbms>%s)://(?P<credentials>(?P<user>.+?)\:(?P<pass>.*)\@)?(?P<remote>(?P<hostname>.+?)\:(?P<port>[\d]+)\/)?(?P<db>[\w\d\ \:\.\_\-\/\\\\]+?)$" % dbms, conf.direct, re.I)

        if details:
            conf.dbms = details.group('dbms')

            if details.group('credentials'):
                conf.dbmsUser = details.group('user')
                conf.dbmsPass = details.group('pass')
            else:
                conf.dbmsUser = unicode()
                conf.dbmsPass = unicode()

            # empty password is normalized to None
            if not conf.dbmsPass:
                conf.dbmsPass = None

            if details.group('remote'):
                remote = True
                conf.hostname = details.group('hostname')
                conf.port = int(details.group('port'))
            else:
                conf.hostname = "localhost"
                conf.port = 0

            conf.dbmsDb = details.group('db')

            conf.parameters[None] = "direct connection"

            break

    if not details:
        errMsg = "invalid target details, valid syntax is for instance "
        errMsg += "'mysql://USER:PASSWORD@DBMS_IP:DBMS_PORT/DATABASE_NAME' "
        errMsg += "or 'access://DATABASE_FILEPATH'"
        raise sqlmapSyntaxException, errMsg

    # verify that the third-party connector for the chosen DBMS is
    # importable before any connection attempt
    for dbmsName, data in DBMS_DICT.items():
        if conf.dbms in data[0]:
            try:
                # file-based DBMSes are always accessed locally; for the
                # others remote connection details are mandatory
                if dbmsName in (DBMS.ACCESS, DBMS.SQLITE, DBMS.FIREBIRD):
                    if remote:
                        warnMsg = "direct connection over the network for "
                        warnMsg += "%s DBMS is not supported" % dbmsName
                        logger.warn(warnMsg)

                        conf.hostname = "localhost"
                        conf.port = 0
                elif not remote:
                    errMsg = "missing remote connection details"
                    raise sqlmapSyntaxException, errMsg

                if dbmsName in (DBMS.MSSQL, DBMS.SYBASE):
                    import _mssql
                    import pymssql

                    # older pymssql versions are known not to work properly
                    if not hasattr(pymssql, "__version__") or pymssql.__version__ < "1.0.2":
                        errMsg = "'%s' third-party library must be " % data[1]
                        errMsg += "version >= 1.0.2 to work properly. "
                        errMsg += "Download from '%s'" % data[2]
                        raise sqlmapMissingDependence, errMsg

                elif dbmsName == DBMS.MYSQL:
                    import pymysql
                elif dbmsName == DBMS.PGSQL:
                    import psycopg2
                elif dbmsName == DBMS.ORACLE:
                    import cx_Oracle
                elif dbmsName == DBMS.SQLITE:
                    import sqlite3
                elif dbmsName == DBMS.ACCESS:
                    import pyodbc
                elif dbmsName == DBMS.FIREBIRD:
                    import kinterbasdb
            except ImportError:
                errMsg = "sqlmap requires '%s' third-party library " % data[1]
                errMsg += "in order to directly connect to the database "
                errMsg += "%s. Download from '%s'" % (dbmsName, data[2])
                raise sqlmapMissingDependence, errMsg
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def parseTargetUrl():
    """
    Parse target url and set some attributes into the configuration singleton.
    """

    if not conf.url:
        return

    # bracketed host part means an IPv6 literal address
    if re.search("\[.+\]", conf.url) and not socket.has_ipv6:
        errMsg = "IPv6 addressing is not supported "
        errMsg += "on this platform"
        raise sqlmapGenericException, errMsg

    # prepend a scheme when missing (":443/" hints at https)
    if not re.search("^http[s]*://", conf.url, re.I):
        if ":443/" in conf.url:
            conf.url = "https://" + conf.url
        else:
            conf.url = "http://" + conf.url

    # temporarily mask '?' so custom injection marks inside the URI
    # survive urlsplit(); restored further below
    if CUSTOM_INJECTION_MARK_CHAR in conf.url:
        conf.url = conf.url.replace('?', URI_QUESTION_MARKER)

    __urlSplit = urlparse.urlsplit(conf.url)
    # plain "host:port" split, or bracket-aware extraction for IPv6
    __hostnamePort = __urlSplit[1].split(":") if not re.search("\[.+\]", __urlSplit[1]) else filter(None, (re.search("\[.+\]", __urlSplit[1]).group(0), re.search("\](:(?P<port>\d+))?", __urlSplit[1]).group("port")))

    conf.scheme = __urlSplit[0].strip().lower() if not conf.forceSSL else "https"
    conf.path = __urlSplit[2].strip()
    conf.hostname = __hostnamePort[0].strip().strip("[]")

    # a hostname that can't be IDNA-encoded is not a valid target
    try:
        _ = conf.hostname.encode("idna")
    except UnicodeError:
        _ = None

    if any((_ is None, re.search(r'\s', conf.hostname), '..' in conf.hostname, conf.hostname.startswith('.'))):
        errMsg = "invalid target url"
        raise sqlmapSyntaxException, errMsg

    if len(__hostnamePort) == 2:
        try:
            conf.port = int(__hostnamePort[1])
        except:
            errMsg = "invalid target url"
            raise sqlmapSyntaxException, errMsg
    elif conf.scheme == "https":
        conf.port = 443
    else:
        conf.port = 80

    # query string becomes the GET parameters (decoded only when it does
    # not already contain an encoded parameter delimiter)
    if __urlSplit[3]:
        conf.parameters[PLACE.GET] = urldecode(__urlSplit[3]) if __urlSplit[3] and urlencode(DEFAULT_GET_POST_DELIMITER, None) not in __urlSplit[3] else __urlSplit[3]

    # rebuild the canonical url (IPv6 hosts get their brackets back)
    conf.url = "%s://%s:%d%s" % (conf.scheme, ("[%s]" % conf.hostname) if __hostnamePort[0].strip("[]") != __hostnamePort[0] else conf.hostname, conf.port, conf.path)
    conf.url = conf.url.replace(URI_QUESTION_MARKER, '?')

    # when the Referer header itself is being tested, seed it with the url
    if not conf.referer and intersect(REFERER_ALIASES, conf.testParameter, True):
        debugMsg = "setting the HTTP Referer header to the target url"
        logger.debug(debugMsg)
        conf.httpHeaders = filter(lambda (key, value): key != HTTPHEADER.REFERER, conf.httpHeaders)
        conf.httpHeaders.append((HTTPHEADER.REFERER, conf.url))

    # likewise for the Host header
    if not conf.host and intersect(HOST_ALIASES, conf.testParameter, True):
        debugMsg = "setting the HTTP Host header to the target url"
        logger.debug(debugMsg)
        conf.httpHeaders = filter(lambda (key, value): key != HTTPHEADER.HOST, conf.httpHeaders)
        conf.httpHeaders.append((HTTPHEADER.HOST, getHostHeader(conf.url)))
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def expandAsteriskForColumns(expression):
    """
    If the user provided an asterisk rather than the column(s)
    name, sqlmap will retrieve the columns itself and reprocess
    the SQL query string (expression)
    """

    asterisk = re.search("^SELECT\s+\*\s+FROM\s+([\w\.\_]+)\s*", expression, re.I)

    if asterisk:
        infoMsg = "you did not provide the fields in your query. "
        infoMsg += "sqlmap will retrieve the column names itself"
        logger.info(infoMsg)

        dbTbl = asterisk.group(1)

        # MSSQL-style "db..table" shorthand implies the dbo schema
        if dbTbl and ".." in dbTbl:
            dbTbl = dbTbl.replace('..', '.dbo.')

        # split "db.table" into its parts (at the first dot only)
        if dbTbl and "." in dbTbl:
            conf.db, conf.tbl = dbTbl.split(".", 1)
        else:
            conf.tbl = dbTbl

        columnsDict = conf.dbmsHandler.getColumns(onlyColNames=True)

        if columnsDict and conf.db in columnsDict and conf.tbl in columnsDict[conf.db]:
            columns = columnsDict[conf.db][conf.tbl].keys()
            columns.sort()
            columnsStr = ", ".join(column for column in columns)
            # replace only the first '*' (the column list placeholder)
            expression = expression.replace("*", columnsStr, 1)

            infoMsg = "the query with column names is: "
            infoMsg += "%s" % expression
            logger.info(infoMsg)

    return expression
|
2010-01-09 02:50:06 +03:00
|
|
|
|
2012-02-16 18:42:28 +04:00
|
|
|
def getLimitRange(count, dump=False, plusOne=False):
    """
    Returns range of values used in limit/offset constructs; for dumps
    the user-provided --start/--stop bounds (conf.limitStart/limitStop)
    narrow the range, and plusOne shifts it to be 1-based inclusive
    """

    total = int(count)
    start, stop = 1, total

    if dump:
        if isinstance(conf.limitStop, int) and 0 < conf.limitStop < stop:
            stop = conf.limitStop

        if isinstance(conf.limitStart, int) and 0 < conf.limitStart <= stop:
            start = conf.limitStart

    if plusOne:
        return xrange(start, stop + 1)

    return xrange(start - 1, stop)
|
2010-09-14 01:01:46 +04:00
|
|
|
|
2012-05-09 22:47:40 +04:00
|
|
|
def parseUnionPage(page, unique=True):
    """
    Returns resulting items from inband query inside provided page content
    """

    if page is None:
        return None

    # results are wrapped in the per-run start/stop marker characters
    if page.startswith(kb.chars.start) and page.endswith(kb.chars.stop):
        if len(page) > LARGE_OUTPUT_THRESHOLD:
            warnMsg = "large output detected. This might take a while"
            logger.warn(warnMsg)

        data = BigArray()
        # '_' acts as a bounded FIFO of lowercased entries already seen
        _ = []

        for match in re.finditer("%s(.*?)%s" % (kb.chars.start, kb.chars.stop), page, re.DOTALL | re.IGNORECASE):
            entry = match.group(1)

            # nested start marker means garbage got in front - keep the
            # last (innermost) portion only
            if kb.chars.start in entry:
                entry = entry.split(kb.chars.start)[-1]

            if unique:
                key = entry.lower()
                if key not in _:
                    _.append(key)
                    # cap dedup memory by evicting the oldest key
                    if len(_) > UNION_UNIQUE_FIFO_LENGTH:
                        _.pop(0)
                else:
                    continue

            # split a multi-column row on the per-run delimiter character
            entry = entry.split(kb.chars.delimiter)

            if conf.hexConvert:
                entry = applyFunctionRecursively(entry, decodeHexValue)

            if kb.safeCharEncode:
                entry = applyFunctionRecursively(entry, safecharencode)

            # single-column rows are stored as scalars, not 1-lists
            data.append(entry[0] if len(entry) == 1 else entry)
    else:
        data = page

    # a single string result is returned directly
    if len(data) == 1 and isinstance(data[0], basestring):
        data = data[0]

    return data
|
2010-01-05 14:43:16 +03:00
|
|
|
|
2011-11-22 12:39:13 +04:00
|
|
|
def parseFilePaths(page):
    """
    Detects (possible) absolute system paths inside the provided page content
    """

    if not page:
        return

    # PHP error banner paths, Windows drive-letter paths, POSIX paths
    regexes = (
        r" in <b>(?P<result>.*?)</b> on line",
        r"(?:>|\s)(?P<result>[A-Za-z]:[\\/][\w.\\/]*)",
        r"(?:>|\s)(?P<result>/\w[/\w.]+)",
    )

    for regex in regexes:
        for match in re.finditer(regex, page):
            absFilePath = match.group("result").strip()
            page = page.replace(absFilePath, "")

            if isWindowsDriveLetterPath(absFilePath):
                absFilePath = posixToNtSlashes(absFilePath)

            if absFilePath not in kb.absFilePaths:
                kb.absFilePaths.add(absFilePath)
|
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
def getLocalIP():
    """
    Returns the local IP address used to reach the target host (best
    effort - None when the probe connection fails)
    """

    retVal = None
    s = None

    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((conf.hostname, conf.port))
        retVal, _ = s.getsockname()
    except:
        debugMsg = "there was an error in opening socket "
        debugMsg += "connection toward '%s'" % conf.hostname
        logger.debug(debugMsg)
    finally:
        # close in finally so the descriptor is not leaked when connect()
        # raises (the original only closed on the success path)
        if s is not None:
            s.close()

    return retVal
|
2009-04-22 15:48:07 +04:00
|
|
|
|
|
|
|
def getRemoteIP():
    """
    Resolves the target hostname (conf.hostname) to its IP address.
    NOTE(review): socket.gethostbyname() raises socket.gaierror when
    resolution fails - callers are expected to handle that.
    """
    return socket.gethostbyname(conf.hostname)
|
|
|
|
|
|
|
|
def getFileType(filePath):
    """
    Returns "text", "binary" or "unknown" for given local file, based on
    the libmagic description of its content
    """

    try:
        description = magic.from_file(filePath)
    except:
        return "unknown"

    if "ASCII" in description or "text" in description:
        return "text"

    return "binary"
|
2010-09-14 01:01:46 +04:00
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
def getCharset(charsetType=None):
    """
    Returns the table of character codes used during inference,
    optionally narrowed down by the expected type of the value
    (unrecognized types yield an empty table)
    """

    # full 7-bit ASCII when no narrowing is requested
    if charsetType is None:
        return list(xrange(0, 128))

    asciiTbl = []

    # 0 or 1
    if charsetType == CHARSET_TYPE.BINARY:
        asciiTbl = [0, 1] + list(xrange(47, 50))

    # Digits
    elif charsetType == CHARSET_TYPE.DIGITS:
        asciiTbl = [0, 1] + list(xrange(47, 58))

    # Hexadecimal
    elif charsetType == CHARSET_TYPE.HEXADECIMAL:
        asciiTbl = [0, 1] + list(xrange(47, 58)) + list(xrange(64, 71)) + list(xrange(96, 103))

    # Characters
    elif charsetType == CHARSET_TYPE.ALPHA:
        asciiTbl = [0, 1] + list(xrange(64, 91)) + list(xrange(96, 123))

    # Characters and digits
    elif charsetType == CHARSET_TYPE.ALPHANUM:
        asciiTbl = [0, 1] + list(xrange(47, 58)) + list(xrange(64, 91)) + list(xrange(96, 123))

    return asciiTbl
|
2010-09-14 01:01:46 +04:00
|
|
|
|
2012-02-16 18:42:28 +04:00
|
|
|
def searchEnvPath(filename):
    """
    Searches for a given filename inside directories listed in the PATH
    environment variable.

    Returns the full (normalized) path of the first match, or None when
    the file is not found in any PATH directory.

    Note: previously this returned the bare boolean result of
    os.path.exists() (True on hit, False/None otherwise); returning the
    actual path is strictly more useful and remains truthy for callers
    that only test the result.
    """

    retVal = None
    path = os.environ.get("PATH", "")
    paths = path.split(";") if IS_WIN else path.split(":")

    for directory in paths:
        directory = directory.replace(";", "")
        candidate = os.path.normpath(os.path.join(directory, filename))

        if os.path.exists(candidate):
            retVal = candidate
            break

    return retVal
|
2010-01-02 05:02:12 +03:00
|
|
|
|
2010-01-15 14:45:48 +03:00
|
|
|
def urlEncodeCookieValues(cookieStr):
    """
    URL-encodes value parts of a given HTTP Cookie header string
    (returns None for an empty/missing input)
    """

    if not cookieStr:
        return None

    encoded = ""

    for piece in cookieStr.split(';'):
        delimiter = piece.find('=') + 1

        if delimiter > 0:
            # name=value pair: encode only the value part
            encoded += "; %s=%s" % (piece[:delimiter - 1].strip(), urlencode(piece[delimiter:], convall=True))
        elif piece.strip().lower() != "secure":
            # bare attribute: keep it, prefixed by an encoded separator
            encoded += "%3B" + urlencode(piece, convall=True)
        else:
            encoded += "; secure"

    # drop the leading separator artifact
    if encoded.startswith('; '):
        encoded = encoded[2:]
    elif encoded.startswith('%3B'):
        encoded = encoded[3:]

    return encoded
|
2010-01-05 14:30:33 +03:00
|
|
|
|
2012-02-16 18:42:28 +04:00
|
|
|
def directoryPath(filepath):
    """
    Returns directory path for a given filepath
    """

    # pick the path flavor matching the filepath itself (not the host OS)
    module = ntpath if isWindowsDriveLetterPath(filepath) else posixpath

    return module.dirname(filepath)
|
2010-01-15 20:42:46 +03:00
|
|
|
|
2012-02-16 18:42:28 +04:00
|
|
|
def normalizePath(filepath):
    """
    Returns normalized string representation of a given filepath
    """

    # pick the path flavor matching the filepath itself (not the host OS)
    module = ntpath if isWindowsDriveLetterPath(filepath) else posixpath

    return module.normpath(filepath)
|
2010-01-15 19:06:59 +03:00
|
|
|
|
2012-02-16 18:42:28 +04:00
|
|
|
def safeStringFormat(format_, params):
    """
    Avoids problems with inappropriate string format strings

    %d markers are treated as %s; a plain string parameter replaces
    every %s marker, while a sequence is consumed marker by marker
    """

    retVal = format_.replace("%d", "%s")

    if isinstance(params, basestring):
        retVal = retVal.replace("%s", params)
    else:
        used = 0

        while True:
            marker = retVal.find("%s")

            if marker == -1:
                break

            if used >= len(params):
                raise sqlmapNoneDataException("wrong number of parameters during string formatting")

            retVal = retVal[:marker] + getUnicode(params[used]) + retVal[marker + 2:]
            used += 1

    return retVal
|
2010-01-24 02:29:34 +03:00
|
|
|
|
2011-01-01 22:07:40 +03:00
|
|
|
def getFilteredPageContent(page, onlyText=True):
    """
    Returns filtered page content without script, style and/or comments
    or all HTML tags
    """

    retVal = page

    # only if the page's charset has been successfully identified
    if isinstance(page, unicode):
        extra = r"|<[^>]+>|\t|\n|\r" if onlyText else ""
        retVal = re.sub(r"(?si)<script.+?</script>|<!--.+?-->|<style.+?</style>%s" % extra, " ", page)
        # collapse runs of spaces into a single one
        retVal = re.sub(r" {2,}", " ", retVal)
        retVal = htmlunescape(retVal)

    return retVal
|
|
|
|
|
2011-06-18 19:47:19 +04:00
|
|
|
def getPageWordSet(page):
    """
    Returns word set used in page content
    """

    # only if the page's charset has been successfully identified
    if not isinstance(page, unicode):
        return set()

    return set(re.findall(r"\w+", getFilteredPageContent(page)))
|
|
|
|
|
|
|
|
def showStaticWords(firstPage, secondPage):
    """
    Logs words appearing in the longest matching part of the two
    (filtered) dynamic page contents
    """

    logger.info("finding static words in longest matching part of dynamic page content")

    firstPage = getFilteredPageContent(firstPage)
    secondPage = getFilteredPageContent(secondPage)

    infoMsg = "static words: "

    commonWords = None

    if firstPage and secondPage:
        match = SequenceMatcher(None, firstPage, secondPage).find_longest_match(0, len(firstPage), 0, len(secondPage))
        commonWords = getPageWordSet(firstPage[match[0]:match[0] + match[2]])

    if commonWords:
        # case-insensitive alphabetical ordering
        for word in sorted(commonWords, key=lambda _: _.lower()):
            if len(word) > 2:
                infoMsg += "'%s', " % word

        infoMsg = infoMsg.rstrip(", ")
    else:
        infoMsg += "None"

    logger.info(infoMsg)
|
|
|
|
|
2010-01-28 19:50:34 +03:00
|
|
|
def decloakToNamedTemporaryFile(filepath, name=None):
    """
    Decloaks content of the given (cloaked) file into a NamedTemporaryFile,
    optionally presenting it under a different name attribute.
    """

    retVal = NamedTemporaryFile()

    # Replacement finalizer: restore the real on-disk name (if it was
    # overridden below) before closing, so the underlying temp file can
    # actually be removed on close
    def __del__():
        try:
            if hasattr(retVal, 'old_name'):
                retVal.name = retVal.old_name
            retVal.close()
        except OSError:
            pass

    # NOTE(review): this sets an attribute on the instance, shadowing the
    # wrapper's own __del__ - presumably relied upon by callers/cleanup;
    # confirm it is actually invoked on garbage collection
    retVal.__del__ = __del__

    retVal.write(decloak(filepath))
    retVal.seek(0)

    # Present the temp file under the caller-provided name while keeping
    # the real one for cleanup
    if name:
        retVal.old_name = retVal.name
        retVal.name = name

    return retVal
|
2010-01-29 13:12:09 +03:00
|
|
|
|
|
|
|
def decloakToMkstemp(filepath, **kwargs):
    """
    Decloaks content of the given (cloaked) file into a mkstemp()-created
    temporary file and returns the open (w+b) file object rewound to start
    """

    handle, name = mkstemp(**kwargs)

    os.fdopen(handle).close()  # close low level handle (causing problems latter)

    tempFile = open(name, 'w+b')
    tempFile.write(decloak(filepath))
    tempFile.seek(0)

    return tempFile
|
2010-02-04 12:49:31 +03:00
|
|
|
|
|
|
|
def isWindowsPath(filepath):
    """
    Returns True if given filepath is in Windows format
    (drive letter, colon and backslash)
    """

    return re.match(r"\w:\\", filepath) is not None
|
2010-02-04 17:37:00 +03:00
|
|
|
|
2010-04-22 14:31:33 +04:00
|
|
|
def isWindowsDriveLetterPath(filepath):
    """
    Returns True if given filepath starts with a Windows drive letter
    """

    return re.match(r"\w:", filepath) is not None
|
|
|
|
|
2010-02-04 17:37:00 +03:00
|
|
|
def posixToNtSlashes(filepath):
    """
    Replaces all occurances of Posix slashes (/) in provided
    filepath with NT ones (\\)

    >>> posixToNtSlashes('C:/Windows')
    'C:\\\\Windows'
    """

    return filepath.replace('/', '\\')
|
|
|
|
|
|
|
|
def ntToPosixSlashes(filepath):
    """
    Replaces all occurances of NT slashes (\\) in provided
    filepath with Posix ones (/)

    >>> ntToPosixSlashes('C:\\Windows')
    'C:/Windows'
    """

    return filepath.replace("\\", "/")
|
2010-03-26 20:18:02 +03:00
|
|
|
|
|
|
|
def isBase64EncodedString(subject):
    """
    Checks if the provided string is Base64 encoded

    >>> isBase64EncodedString('dGVzdA==')
    True
    >>> isBase64EncodedString('123456')
    False
    """

    return bool(re.match(r"\A(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?\Z", subject))
|
2010-09-14 01:01:46 +04:00
|
|
|
|
2010-03-26 20:18:02 +03:00
|
|
|
def isHexEncodedString(subject):
    """
    Checks if the provided string is hex encoded
    ('x' is accepted to cover the '0x' prefix)

    >>> isHexEncodedString('DEADBEEF')
    True
    >>> isHexEncodedString('test')
    False
    """

    return bool(re.match(r"\A[0-9a-fA-Fx]+\Z", subject))
|
2010-04-16 17:40:02 +04:00
|
|
|
|
|
|
|
def getConsoleWidth(default=80):
    """
    Returns console width (falling back, in order, to the COLUMNS
    environment variable, 'stty size' output, curses, and finally the
    provided default)
    """

    width = None

    if os.environ.get('COLUMNS', '').isdigit():
        width = int(os.environ['COLUMNS'])
    else:
        sttyOutput = execute('stty size', shell=True, stdout=PIPE, stderr=PIPE).stdout.read()
        parts = sttyOutput.split()

        # expected form: "<rows> <columns>"
        if len(parts) == 2 and parts[1].isdigit():
            width = int(parts[1])

    if width is None:
        try:
            import curses

            stdscr = curses.initscr()
            _, width = stdscr.getmaxyx()
            curses.endwin()
        except:
            pass

    return width or default
|
|
|
|
|
2010-11-24 00:00:42 +03:00
|
|
|
def clearConsoleLine(forceOutput=False):
    """
    Clears current console line
    """

    blanks = " " * (getConsoleWidth() - 1)
    dataToStdout("\r%s\r" % blanks, forceOutput)

    setFormatterPrependFlag(False)
|
2010-11-23 23:54:40 +03:00
|
|
|
|
2010-04-16 23:57:00 +04:00
|
|
|
def parseXmlFile(xmlFile, handler):
    """
    Parses XML file by a given SAX handler.

    The in-memory stream is now closed in a finally clause so it is
    released even when the handler (or parser) raises.
    """

    stream = StringIO(readCachedFileContent(xmlFile))

    try:
        parse(stream, handler)
    finally:
        stream.close()
|
2010-05-13 15:05:35 +04:00
|
|
|
|
2012-04-03 13:18:30 +04:00
|
|
|
def getSPQLSnippet(dbms, name, **variables):
    """
    Returns content of SP(Q)L snippet located inside "procs" directory,
    with comments stripped, statements normalized and %VARIABLE%
    placeholders substituted.

    Raises sqlmapGenericException when a placeholder remains unresolved.
    """

    filename = os.path.join(paths.SQLMAP_PROCS_PATH, DBMS_DIRECTORY_DICT[dbms], "%s.txt" % name)
    checkFile(filename)

    retVal = readCachedFileContent(filename)

    # strip '#' comments and collapse whitespace after statement separators
    retVal = re.sub(r"#.+", "", retVal)
    retVal = re.sub(r"(?s);\s+", "; ", retVal).strip()

    for _ in variables.keys():
        retVal = re.sub(r"%%%s%%" % _, variables[_], retVal)

    for _ in re.findall(r"%RANDSTR\d+%", retVal, re.I):
        retVal = retVal.replace(_, randomStr())

    for _ in re.findall(r"%RANDINT\d+%", retVal, re.I):
        # str(): randomInt() returns an int, while str.replace() requires
        # a string replacement (passing the raw int raises TypeError)
        retVal = retVal.replace(_, str(randomInt()))

    _ = re.search(r"%(\w+)%", retVal, re.I)

    if _:
        errMsg = "unresolved variable '%s' in SPL snippet '%s'" % (_.group(1), name)
        raise sqlmapGenericException(errMsg)

    return retVal
|
|
|
|
|
2010-11-02 10:32:08 +03:00
|
|
|
def readCachedFileContent(filename, mode='rb'):
    """
    Cached reading of file content (avoiding multiple same file reading).

    Uses double-checked locking around the shared kb.cache.content dict;
    the lock is now released in a finally clause so a failing checkFile()
    or read can no longer leave it permanently held.
    """

    if filename not in kb.cache.content:
        kb.locks.cache.acquire()

        try:
            # re-check under the lock (another thread may have filled it)
            if filename not in kb.cache.content:
                checkFile(filename)

                with codecs.open(filename, mode, UNICODE_ENCODING) as f:
                    kb.cache.content[filename] = f.read()
        finally:
            kb.locks.cache.release()

    return kb.cache.content[filename]
|
|
|
|
|
2010-10-07 02:43:04 +04:00
|
|
|
def readXmlFile(xmlFile):
    """
    Reads XML file content and returns its DOM representation
    """

    checkFile(xmlFile)

    with codecs.open(xmlFile, 'r', UNICODE_ENCODING) as f:
        return minidom.parse(f).documentElement
|
|
|
|
|
2010-12-07 19:39:31 +03:00
|
|
|
def stdev(values):
    """
    Computes standard deviation of a list of numbers.
    Reference: http://www.goldb.org/corestats.html

    Returns None for fewer than two samples; results are memoized in
    kb.cache.stdev keyed by (first, last, length).
    """

    if not values or len(values) < 2:
        return None

    key = (values[0], values[-1], len(values))

    if key not in kb.cache.stdev:
        avg = average(values)
        # sum of squared deviations (None samples counted as 0)
        summa = reduce(lambda x, y: x + pow((y or 0) - avg, 2), values, 0.0)
        kb.cache.stdev[key] = sqrt(summa / (len(values) - 1))

    return kb.cache.stdev[key]
|
2010-12-07 19:39:31 +03:00
|
|
|
|
2010-12-07 19:04:53 +03:00
|
|
|
def average(values):
    """
    Computes the arithmetic mean of a list of numbers.
    (None for an empty/missing sequence)
    """

    return sum(values) / len(values) if values else None
|
2010-12-07 19:04:53 +03:00
|
|
|
|
|
|
|
def calculateDeltaSeconds(start):
    """
    Returns elapsed time from start till now
    """

    now = time.time()
    return now - start
|
2010-05-21 13:35:36 +04:00
|
|
|
|
2010-05-21 16:19:20 +04:00
|
|
|
def initCommonOutputs():
    """
    Initializes dictionary containing common output values used by "good samaritan" feature
    """

    kb.commonOutputs = {}
    key = None

    with codecs.open(paths.COMMON_OUTPUTS, 'r', UNICODE_ENCODING) as f:
        for line in f.readlines():  # xreadlines doesn't return unicode strings when codec.open() is used
            # strip trailing comment (if any)
            if '#' in line:
                line = line[:line.index('#')]

            line = line.strip()

            if len(line) > 1:
                if line.startswith('[') and line.endswith(']'):
                    # "[Section]" header becomes the current key
                    key = line[1:-1]
                elif key:
                    kb.commonOutputs.setdefault(key, set()).add(line)
|
2010-05-21 16:19:20 +04:00
|
|
|
|
2010-12-26 14:15:02 +03:00
|
|
|
def getFileItems(filename, commentPrefix='#', unicode_=True, lowercase=False, unique=False):
    """
    Returns newline delimited items contained inside file

    filename: file to read (checkFile() is called on it first)
    commentPrefix: marker after which the rest of a line is discarded
    unicode_: decode the file with UNICODE_ENCODING when True, else read raw
    lowercase: lower-case each item before storing
    unique: drop duplicates while preserving first-seen order
    """

    # OrderedDict doubles as an ordered set (only keys are meaningful)
    retVal = list() if not unique else OrderedDict()

    checkFile(filename)

    with codecs.open(filename, 'r', UNICODE_ENCODING) if unicode_ else open(filename, 'r') as f:
        for line in (f.readlines() if unicode_ else f.xreadlines()):  # xreadlines doesn't return unicode strings when codec.open() is used
            # strip trailing comment (if any)
            if commentPrefix:
                if line.find(commentPrefix) != -1:
                    line = line[:line.find(commentPrefix)]

            line = line.strip()

            if not unicode_:
                try:
                    # NOTE(review): str.encode(line) appears intended to skip
                    # lines not representable in plain str mode - confirm
                    # intended semantics
                    line = str.encode(line)
                except UnicodeDecodeError:
                    continue

            if line:
                if lowercase:
                    line = line.lower()

                if unique and line in retVal:
                    continue

                if unique:
                    retVal[line] = True
                else:
                    retVal.append(line)

    # for unique mode the ordered keys are the de-duplicated items
    return retVal if not unique else retVal.keys()
|
2010-09-30 16:35:45 +04:00
|
|
|
|
2010-06-21 18:40:12 +04:00
|
|
|
def goGoodSamaritan(prevValue, originalCharset):
    """
    Function for retrieving parameters needed for common prediction (good
    samaritan) feature.

    prevValue: retrieved query output so far (e.g. 'i').

    Returns commonValue if there is a complete single match (in kb.partRun
    of txt/common-outputs.txt under kb.partRun) regarding parameter
    prevValue. If there is no single value match, but multiple, commonCharset is
    returned containing more probable characters (retrieved from matched
    values in txt/common-outputs.txt) together with the rest of charset as
    otherCharset.
    """

    # Lazily load txt/common-outputs.txt on first use
    if kb.commonOutputs is None:
        initCommonOutputs()

    predictionSet = set()
    commonValue = None
    commonPattern = None
    countCommonValue = 0

    # If the header (e.g. Databases) we are looking for has common
    # outputs defined
    if kb.partRun in kb.commonOutputs:
        commonPartOutputs = kb.commonOutputs[kb.partRun]
        commonPattern = commonFinderOnly(prevValue, commonPartOutputs)

        # If the longest common prefix is the same as previous value then
        # do not consider it
        if commonPattern and commonPattern == prevValue:
            commonPattern = None

        # For each common output
        for item in commonPartOutputs:
            # Check if the common output (item) starts with prevValue
            # where prevValue is the enumerated character(s) so far
            if item.startswith(prevValue):
                commonValue = item
                countCommonValue += 1

                # Next character of this candidate is a likely prediction
                if len(item) > len(prevValue):
                    char = item[len(prevValue)]
                    predictionSet.add(char)

        # Reset single value if there is more than one possible common
        # output
        if countCommonValue > 1:
            commonValue = None

        commonCharset = []
        otherCharset = []

        # Split the original charset into common chars (commonCharset)
        # and other chars (otherCharset)
        for ordChar in originalCharset:
            if chr(ordChar) not in predictionSet:
                otherCharset.append(ordChar)
            else:
                commonCharset.append(ordChar)

        commonCharset.sort()

        return commonValue, commonPattern, commonCharset, originalCharset
    else:
        # No common outputs known for this part: only the original charset
        return None, None, None, originalCharset
|
2010-05-21 18:42:59 +04:00
|
|
|
|
2010-05-26 13:48:20 +04:00
|
|
|
def getPartRun():
    """
    Goes through call stack and finds constructs matching conf.dbmsHandler.*.
    Returns it or its alias used in txt/common-outputs.txt
    """

    retVal = None
    commonPartsDict = optDict["Enumeration"]

    try:
        # One source line of code per stack frame (item[4] is the
        # code-context list; non-list contexts map to '')
        stack = [item[4][0] if isinstance(item[4], list) else '' for item in inspect.stack()]

        # Goes backwards through the stack to find the conf.dbmsHandler method
        # calling this function
        for i in xrange(0, len(stack) - 1):
            for regex in (r"self\.(get[^(]+)\(\)", r"conf\.dbmsHandler\.([^(]+)\(\)"):
                match = re.search(regex, stack[i])

                if match:
                    # This is the calling conf.dbmsHandler or self method
                    # (e.g. 'getDbms')
                    retVal = match.groups()[0]
                    break

            if retVal is not None:
                break

    # Reference: http://coding.derkeiler.com/Archive/Python/comp.lang.python/2004-06/2267.html
    except TypeError:
        pass

    # Return the INI tag to consider for common outputs (e.g. 'Databases')
    return commonPartsDict[retVal][1] if isinstance(commonPartsDict.get(retVal), tuple) else retVal
|
2010-05-28 13:13:50 +04:00
|
|
|
|
2012-02-07 14:46:55 +04:00
|
|
|
def getUnicode(value, encoding=None, system=False, noneToNull=False):
    """
    Return the unicode representation of the supplied value:

    >>> getUnicode(u'test')
    u'test'
    >>> getUnicode('test')
    u'test'
    >>> getUnicode(1)
    u'1'

    encoding: preferred codec for decoding byte strings (falls back to
              kb.pageEncoding, then UNICODE_ENCODING with 'replace')
    system: when True, decode using the filesystem/stdin encoding instead
    noneToNull: when True, None is mapped to the NULL marker value
    """

    if noneToNull and value is None:
        return NULL

    # Lists/tuples are converted element-wise (result is always a list)
    if isinstance(value, (list, tuple)):
        value = list(getUnicode(_, encoding, system, noneToNull) for _ in value)
        return value

    if not system:
        if isinstance(value, unicode):
            return value
        elif isinstance(value, basestring):
            try:
                return unicode(value, encoding or kb.pageEncoding)
            except:
                # last resort: lossy decode rather than failing outright
                return unicode(value, UNICODE_ENCODING, "replace")
        else:
            return unicode(value)  # encoding ignored for non-basestring instances
    else:
        try:
            return getUnicode(value, sys.getfilesystemencoding() or sys.stdin.encoding)
        except:
            return getUnicode(value, UNICODE_ENCODING)
|
2010-06-02 16:31:36 +04:00
|
|
|
|
2010-06-30 15:22:25 +04:00
|
|
|
def longestCommonPrefix(*sequences):
    """
    Returns longest common prefix occuring in given sequences
    (None when called without arguments)
    """

    if len(sequences) == 1:
        return sequences[0]

    if not sequences:
        return None

    shortest = min(sequences, key=len)

    for index, char in enumerate(shortest):
        if any(candidate[index] != char for candidate in sequences):
            return shortest[:index]

    return shortest
|
|
|
|
|
2010-06-30 15:22:25 +04:00
|
|
|
def commonFinderOnly(initial, sequence):
    """
    Returns longest common prefix of those sequence items that start
    with the given initial value
    """

    matching = [_ for _ in sequence if _.startswith(initial)]
    return longestCommonPrefix(*matching)
|
2010-09-25 01:59:03 +04:00
|
|
|
|
2010-12-21 01:45:01 +03:00
|
|
|
def pushValue(value):
    """
    Push value to the stack (thread dependent)
    """

    # deep copy so later mutation of the original can't affect the stack
    snapshot = copy.deepcopy(value)
    getCurrentThreadData().valueStack.append(snapshot)
|
2010-09-30 16:35:45 +04:00
|
|
|
|
|
|
|
def popValue():
    """
    Pop value from the stack (thread dependent)
    """

    stack = getCurrentThreadData().valueStack
    return stack.pop()
|
2010-10-25 18:06:56 +04:00
|
|
|
|
2010-11-16 13:42:42 +03:00
|
|
|
def wasLastRequestDBMSError():
    """
    Returns True if the last web request resulted in a (recognized) DBMS error page
    """

    threadData = getCurrentThreadData()
    errorPage = threadData.lastErrorPage

    return errorPage and errorPage[0] == threadData.lastRequestUID
|
2010-12-08 17:26:40 +03:00
|
|
|
|
2010-12-26 16:20:52 +03:00
|
|
|
def wasLastRequestHTTPError():
    """
    Returns True if the last web request resulted in an errornous HTTP code (like 500)
    """

    threadData = getCurrentThreadData()
    httpError = threadData.lastHTTPError

    return httpError and httpError[0] == threadData.lastRequestUID
|
|
|
|
|
2010-12-08 17:26:40 +03:00
|
|
|
def wasLastRequestDelayed():
    """
    Returns True if the last web request resulted in a time-delay
    """

    # 99.9999999997440% of all non time-based sql injection affected
    # response times should be inside +-7*stdev([normal response times])
    # Math reference: http://www.answers.com/topic/standard-deviation

    deviation = stdev(kb.responseTimes)
    threadData = getCurrentThreadData()

    # Statistical model needs at least two samples (stdev() returns None
    # otherwise) and is skipped for direct database connections
    if deviation and not conf.direct:
        if len(kb.responseTimes) < MIN_TIME_RESPONSES:
            warnMsg = "time-based standard deviation method used on a model "
            warnMsg += "with less than %d response times" % MIN_TIME_RESPONSES
            logger.warn(warnMsg)

        # Threshold above which a response is considered deliberately delayed
        lowerStdLimit = average(kb.responseTimes) + TIME_STDEV_COEFF * deviation
        retVal = (threadData.lastQueryDuration >= lowerStdLimit)

        # Offer a time-delay adjustment only on positive detections outside
        # of the testing phase
        if not kb.testMode and retVal and kb.adjustTimeDelay:
            adjustTimeDelay(threadData.lastQueryDuration, lowerStdLimit)

        return retVal
    else:
        # Fallback: plain comparison against the configured delay
        return (threadData.lastQueryDuration - conf.timeSec) >= 0
|
2010-10-25 23:16:42 +04:00
|
|
|
|
2011-01-16 20:52:42 +03:00
|
|
|
def adjustTimeDelay(lastQueryDuration, lowerStdLimit):
    """
    Provides tip for adjusting time delay in time-based data retrieval
    """

    candidate = 1 + int(round((1 - (lastQueryDuration - lowerStdLimit) / lastQueryDuration) * conf.timeSec))

    if candidate:
        # keep a sliding window of the most recent candidate values
        kb.delayCandidates = [candidate] + kb.delayCandidates[:-1]

        # lower the delay only when the whole window agrees on the same
        # (smaller) candidate value
        if candidate < conf.timeSec and all(_ == candidate for _ in kb.delayCandidates):
            conf.timeSec = candidate

            infoMsg = "adjusting time delay to "
            infoMsg += "%d second%s due to good response times" % (conf.timeSec, 's' if conf.timeSec > 1 else '')
            logger.info(infoMsg)
|
2011-01-16 20:52:42 +03:00
|
|
|
|
2012-01-14 00:56:06 +04:00
|
|
|
def getLastRequestHTTPError():
    """
    Returns last HTTP error code
    """

    lastError = getCurrentThreadData().lastHTTPError

    if lastError:
        return lastError[1]
    else:
        return None
|
|
|
|
|
2010-11-16 13:42:42 +03:00
|
|
|
def extractErrorMessage(page):
    """
    Returns reported error message from page if it founds one
    """

    result = None

    if isinstance(page, basestring):
        # Try each known error-reporting pattern until one matches
        for pattern in ERROR_PARSING_REGEXES:
            found = re.search(pattern, page, re.DOTALL | re.IGNORECASE)

            if found is None:
                continue

            # Unescape HTML entities and normalize <br> tags to newlines
            result = htmlunescape(found.group("result")).replace("<br>", "\n").strip()
            break

    return result
|
|
|
|
|
2010-10-25 23:16:42 +04:00
|
|
|
def beep():
    """
    Does an audible beep sound

    Reference: http://de3.aminet.net/dev/src/clr.py.txt
    """

    def _failsafe():
        # Last resort: emit the terminal bell character directly
        dataToStdout('\a', True)

    if sys.platform == 'linux2':
        # First try writing a short waveform straight to a raw audio device
        for dev in ('/dev/audio', '/dev/oss', '/dev/dsp', '/dev/sound'):
            if os.path.exists(dev):
                try:
                    audio = file(dev, 'wb')

                    # Alternate high/low byte runs to produce an audible tone
                    for _ in xrange(250):
                        audio.write(chr(32) * 4)
                        audio.write(chr(0) * 4)

                    audio.close()
                    return
                except:
                    # Device may be busy or unwritable - try the next one
                    pass

        # No usable audio device - fall back to curses terminal beep/flash
        try:
            import curses
            curses.initscr()
            curses.beep()
            curses.flash()
            curses.endwin()
            return
        except:
            _failsafe()

    elif sys.platform == 'darwin':
        # Mac OS X system beep through the Carbon API
        try:
            import Carbon.Snd
            Carbon.Snd.SysBeep(1)
        except:
            _failsafe()

    else:
        _failsafe()
|
2010-10-28 00:39:50 +04:00
|
|
|
|
|
|
|
def runningAsAdmin():
    """
    Returns True if the current process is run under admin privileges
    """

    isAdmin = None

    if PLATFORM in ("posix", "mac"):
        # On POSIX-like systems the superuser has effective UID 0
        _ = os.geteuid()

        isAdmin = isinstance(_, (int, float, long)) and _ == 0
    elif IS_WIN:
        # Windows shell API returns a non-zero value for admin accounts
        _ = ctypes.windll.shell32.IsUserAnAdmin()

        isAdmin = isinstance(_, (int, float, long)) and _ == 1
    else:
        # Unknown platform: warn and optimistically assume admin, since
        # callers require admin rights for the takeover functionality
        errMsg = "sqlmap is not able to check if you are running it "
        errMsg += "as an administrator account on this platform. "
        errMsg += "sqlmap will assume that you are an administrator "
        errMsg += "which is mandatory for the requested takeover attack "
        errMsg += "to work properly"
        logger.error(errMsg)

        isAdmin = True

    return isAdmin
|
2010-11-08 14:22:47 +03:00
|
|
|
|
|
|
|
def logHTTPTraffic(requestLogMsg, responseLogMsg):
    """
    Logs HTTP traffic to the output file

    requestLogMsg: textual dump of the issued HTTP request
    responseLogMsg: textual dump of the received HTTP response
    """

    if not conf.trafficFile:
        return

    # Serialize writes coming from concurrent threads. Release happens in
    # "finally" so that a failed write cannot leave the lock held forever
    # and deadlock every other logging thread (original code released the
    # lock only on the success path)
    kb.locks.log.acquire()

    try:
        dataToTrafficFile("%s%s" % (requestLogMsg, os.linesep))
        dataToTrafficFile("%s%s" % (responseLogMsg, os.linesep))
        # Visual separator between request/response pairs
        dataToTrafficFile("%s%s%s%s" % (os.linesep, 76 * '#', os.linesep, os.linesep))
    finally:
        kb.locks.log.release()
|
2010-11-23 16:58:01 +03:00
|
|
|
|
2010-12-18 12:51:34 +03:00
|
|
|
def getPageTemplate(payload, place):
    """
    Cross-linked method
    """

    # Placeholder only - presumably replaced at runtime by another module
    # through cross-linking; verify against the module doing the linking
    return None
|
|
|
|
|
2010-12-15 14:21:47 +03:00
|
|
|
def getPublicTypeMembers(type_, onlyValues=False):
    """
    Useful for getting members from types (e.g. in enums)
    """

    for memberName, memberValue in inspect.getmembers(type_):
        # Skip dunder attributes like __doc__ and __module__
        if memberName.startswith('__'):
            continue

        yield memberValue if onlyValues else (memberName, memberValue)
|
2010-11-24 14:38:27 +03:00
|
|
|
|
2010-12-18 12:51:34 +03:00
|
|
|
def enumValueToNameLookup(type_, value_):
    """
    Returns name of a enum member with a given value
    """

    # Linear scan over public members; first match wins
    for memberName, memberValue in getPublicTypeMembers(type_):
        if memberValue == value_:
            return memberName

    return None
|
|
|
|
|
2010-11-24 17:20:43 +03:00
|
|
|
def extractRegexResult(regex, content, flags=0):
    """
    Returns 'result' group value from a possible match with regex on a given
    content
    """

    # Only attempt a search when both inputs are usable and the pattern
    # actually declares the named group this helper extracts
    if not (regex and content and '?P<result>' in regex):
        return None

    match = re.search(regex, content, flags)

    return match.group("result") if match else None
|
2010-11-29 18:14:49 +03:00
|
|
|
|
2012-04-11 01:48:34 +04:00
|
|
|
def extractTextTagContent(page):
    """
    Returns list containing content from "textual" tags
    """

    # Remove any token containing the reflection marker so reflected
    # payload remnants are not reported as page text
    cleaned = re.sub(r"(?si)[^\s>]*%s[^<]*" % REFLECTED_VALUE_MARKER, "", page or "")
    contents = (_.group('result').strip() for _ in re.finditer(TEXT_TAG_REGEX, cleaned))

    return filter(None, contents)
|
2012-04-11 01:48:34 +04:00
|
|
|
|
2010-11-29 18:14:49 +03:00
|
|
|
def trimAlphaNum(value):
    """
    Trims alpha numeric characters from start and ending of a given value
    """

    if not value:
        return value

    # Shrink the slice bounds instead of repeatedly copying the string
    end = len(value)
    while end > 0 and value[end - 1].isalnum():
        end -= 1

    start = 0
    while start < end and value[start].isalnum():
        start += 1

    return value[start:end]
|
2010-12-02 21:57:43 +03:00
|
|
|
|
|
|
|
def isNumPosStrValue(value):
    """
    Returns True if value is a string (or integer) with a positive integer representation
    """

    # Integer input: anything but zero counts as positive-representable
    if isinstance(value, int):
        return value != 0

    # String input: all digits and not the literal "0"
    return bool(value and isinstance(value, basestring) and value.isdigit() and value != "0")
|
2010-12-04 01:44:29 +03:00
|
|
|
|
2011-07-24 13:19:33 +04:00
|
|
|
@cachedmethod
def aliasToDbmsEnum(dbms):
    """
    Returns major DBMS name from a given alias
    """

    if dbms:
        lowered = dbms.lower()

        for dbmsName, info in DBMS_DICT.items():
            # info[0] holds the known aliases for the DBMS
            if lowered in info[0] or lowered == dbmsName.lower():
                return dbmsName

    return None
|
|
|
|
|
2010-12-29 22:39:32 +03:00
|
|
|
def findDynamicContent(firstPage, secondPage):
    """
    This function checks if the provided pages have dynamic content. If they
    are dynamic, proper markings will be made
    """

    infoMsg = "searching for dynamic content"
    logger.info(infoMsg)

    # Matching blocks are regions identical in both pages; everything
    # between consecutive blocks is (potentially) dynamic content
    blocks = SequenceMatcher(None, firstPage, secondPage).get_matching_blocks()
    kb.dynamicMarkings = []

    # Removing too small matching blocks
    for block in blocks[:]:
        (_, _, length) = block

        if length <= DYNAMICITY_MARK_LENGTH:
            blocks.remove(block)

    # Making of dynamic markings based on prefix/suffix principle
    if len(blocks) > 0:
        # Sentinels so the loop also covers dynamic content located
        # before the first and after the last matching block
        blocks.insert(0, None)
        blocks.append(None)

        for i in xrange(len(blocks) - 1):
            # Static text surrounding the dynamic region (None at page edges)
            prefix = firstPage[blocks[i][0]:blocks[i][0] + blocks[i][2]] if blocks[i] else None
            suffix = firstPage[blocks[i + 1][0]:blocks[i + 1][0] + blocks[i + 1][2]] if blocks[i + 1] else None

            # Nothing dynamic before a block starting at offset 0
            if prefix is None and blocks[i + 1][0] == 0:
                continue

            # Nothing dynamic after a block reaching the end of the page
            if suffix is None and (blocks[i][0] + blocks[i][2] >= len(firstPage)):
                continue

            # Strip alphanumeric edges so markers end on stable delimiters
            prefix = trimAlphaNum(prefix)
            suffix = trimAlphaNum(suffix)

            # Keep only a short escaped tail/head of the static context
            kb.dynamicMarkings.append((re.escape(prefix[-DYNAMICITY_MARK_LENGTH / 2:]) if prefix else None, re.escape(suffix[:DYNAMICITY_MARK_LENGTH / 2]) if suffix else None))

    if len(kb.dynamicMarkings) > 0:
        infoMsg = "dynamic content marked for removal (%d region%s)" % (len(kb.dynamicMarkings), 's' if len(kb.dynamicMarkings) > 1 else '')
        logger.info(infoMsg)
|
|
|
|
|
2010-12-04 13:13:18 +03:00
|
|
|
def removeDynamicContent(page):
    """
    Removing dynamic content from supplied page basing removal on
    precalculated dynamic markings
    """

    if not page:
        return page

    for prefix, suffix in kb.dynamicMarkings:
        if prefix is None and suffix is None:
            continue

        if prefix is None:
            # Dynamic region runs from the start of the page to the suffix
            page = re.sub(r'(?s)^.+%s' % suffix, suffix, page)
        elif suffix is None:
            # Dynamic region runs from the prefix to the end of the page
            page = re.sub(r'(?s)%s.+$' % prefix, prefix, page)
        else:
            # Dynamic region is bounded by static prefix and suffix
            page = re.sub(r'(?s)%s.+%s' % (prefix, suffix), '%s%s' % (prefix, suffix), page)

    return page
|
2010-12-10 13:54:17 +03:00
|
|
|
|
2011-12-16 16:34:26 +04:00
|
|
|
def filterStringValue(value, regex, replacement=""):
    """
    Returns string value consisting only of chars satisfying supplied
    regular expression (note: it has to be in form [...])
    """

    if not value:
        return value

    # Invert the character class ([...] <-> [^...]) so that everything
    # NOT matching the supplied class gets substituted away
    inverted = regex.replace("[", "[^") if "[^" not in regex else regex.replace("[^", "[")

    return re.sub(inverted, replacement, value)
|
2010-12-10 13:54:17 +03:00
|
|
|
|
2011-01-05 13:25:07 +03:00
|
|
|
def filterControlChars(value):
    """
    Returns string value with control chars being supstituted with ' '
    """

    # Delegate to the generic character-class filter with the
    # printable-characters class, replacing rejects with a space
    replacement = ' '
    return filterStringValue(value, PRINTABLE_CHAR_REGEX, replacement)
|
2011-01-05 13:25:07 +03:00
|
|
|
|
2010-12-10 13:54:17 +03:00
|
|
|
def isDBMSVersionAtLeast(version):
    """
    Checks if the recognized DBMS version is at least the version
    specified

    Returns None when no usable version has been recognized yet.
    """

    retVal = None

    if Backend.getVersion() and Backend.getVersion() != UNKNOWN_DBMS_VERSION:
        value = Backend.getVersion().replace(" ", "").rstrip('.')

        # Collapse all dots but the first (e.g. "5.1.41" -> "5.141") so the
        # version can be treated as one decimal number
        while True:
            index = value.find('.', value.find('.') + 1)

            if index > -1:
                value = value[0:index] + value[index + 1:]
            else:
                break

        # Keep only digits, dot and comparison characters
        value = filterStringValue(value, '[0-9.><=]')

        if isinstance(value, basestring):
            if value.startswith(">="):
                value = float(value.replace(">=", ""))
            elif value.startswith(">"):
                # Strictly greater: nudge above the stated version
                value = float(value.replace(">", "")) + 0.01
            elif value.startswith("<="):
                value = float(value.replace("<=", ""))
            # BUGFIX: this branch previously re-tested startswith(">")
            # (duplicate of the branch above), making the "<" handling
            # below unreachable even though its body strips "<"
            elif value.startswith("<"):
                # Strictly lesser: nudge below the stated version
                value = float(value.replace("<", "")) - 0.01

        retVal = getUnicode(value) >= getUnicode(version)

    return retVal
|
2010-12-12 01:00:16 +03:00
|
|
|
|
|
|
|
def parseSqliteTableSchema(value):
    """
    Parses table column names and types from specified SQLite table schema
    """

    if not value:
        return

    columns = {}

    # Each "<name> <type>" pair found in the CREATE TABLE statement
    for match in re.finditer(r"(\w+)\s+(TEXT|NUMERIC|INTEGER|REAL|NONE)", value):
        columns[match.group(1)] = match.group(2)

    # Cache results for the currently targeted database/table
    kb.data.cachedColumns[conf.db] = {conf.tbl: columns}
|
2010-12-15 14:21:47 +03:00
|
|
|
|
|
|
|
def getTechniqueData(technique=None):
    """
    Returns injection data for technique specified
    """

    if technique and technique in kb.injection.data:
        return kb.injection.data[technique]
    else:
        return None
|
2010-12-15 14:46:28 +03:00
|
|
|
|
2012-01-07 23:30:35 +04:00
|
|
|
def isTechniqueAvailable(technique):
    """
    Returns True if there is injection data which sqlmap could use for
    technique specified
    """

    # Honor an explicit user-supplied technique filter first
    excluded = conf.tech and isinstance(conf.tech, list) and technique not in conf.tech

    if excluded:
        return False

    return getTechniqueData(technique) is not None
|
2010-12-18 12:51:34 +03:00
|
|
|
|
2012-01-07 23:30:35 +04:00
|
|
|
def isInferenceAvailable():
    """
    Returns True whether techniques using inference technique are available
    """

    # Inference works through boolean, stacked or time-based channels
    inferenceTechniques = (PAYLOAD.TECHNIQUE.BOOLEAN, PAYLOAD.TECHNIQUE.STACKED, PAYLOAD.TECHNIQUE.TIME)

    return any(isTechniqueAvailable(technique) for technique in inferenceTechniques)
|
|
|
|
|
2011-07-25 15:05:49 +04:00
|
|
|
def setOptimize():
    """
    Applies the option values implied by the optimization switch (-o)
    """

    #conf.predictOutput = True
    conf.keepAlive = True
    conf.threads = max(3, conf.threads)
    # Null connection only makes sense when no page-content options are set
    conf.nullConnection = not any([conf.data, conf.textOnly, conf.titles, conf.string, conf.regexp, conf.tor])

    if not conf.nullConnection:
        logger.debug("turning off --null-connection switch used indirectly by switch -o")
|
|
|
|
|
2010-12-18 12:51:34 +03:00
|
|
|
def initTechnique(technique=None):
    """
    Prepares data for technique specified
    """

    try:
        data = getTechniqueData(technique)
        resetCounter(technique)

        if data:
            # Prime the knowledge base with the template/state needed by
            # the chosen technique's payloads
            kb.pageTemplate, kb.errorIsNone = getPageTemplate(data.templatePayload, kb.injection.place)
            kb.matchRatio = data.matchRatio
            # Boolean technique with a NEGATIVE "where" inverts comparisons
            kb.negativeLogic = (technique == PAYLOAD.TECHNIQUE.BOOLEAN) and (data.where == PAYLOAD.WHERE.NEGATIVE)

            # Restoring stored conf options
            for key, value in kb.injection.conf.items():
                # Only restore options the user did not set explicitly
                if value and (not hasattr(conf, key) or (hasattr(conf, key) and not getattr(conf, key))):
                    setattr(conf, key, value)
                    debugMsg = "resuming configuration option '%s' (%s)" % (key, value)
                    logger.debug(debugMsg)

                    # Resumed -o switch has to re-trigger its side effects
                    if value and key == "optimize":
                        setOptimize()
        else:
            warnMsg = "there is no injection data available for technique "
            warnMsg += "'%s'" % enumValueToNameLookup(PAYLOAD.TECHNIQUE, technique)
            logger.warn(warnMsg)

    except sqlmapDataException:
        # Stale/incompatible session data from a previous run
        errMsg = "missing data in old session file(s). "
        errMsg += "Please use '--flush-session' to deal "
        errMsg += "with this error"
        raise sqlmapNoneDataException, errMsg
|
2010-12-22 21:55:50 +03:00
|
|
|
|
|
|
|
def arrayizeValue(value):
    """
    Makes a list out of value if it is not already a list or tuple itself
    """

    # Sequences pass through untouched; scalars get wrapped
    if isinstance(value, (list, tuple)):
        return value

    return [value]
|
2010-12-24 13:55:41 +03:00
|
|
|
|
2011-03-07 12:50:43 +03:00
|
|
|
def unArrayizeValue(value):
    """
    Makes a value out of iterable if it is a list or tuple itself
    """

    if not isinstance(value, (list, tuple)):
        return value

    # Empty sequences collapse to None, otherwise take the first element
    return value[0] if value else None
|
|
|
|
|
2012-04-03 17:56:11 +04:00
|
|
|
def flattenValue(value):
    """
    Returns an iterator representing flat representation of a given value
    """

    for item in iter(value):
        if not isinstance(item, (list, tuple)):
            yield item
        else:
            # Recurse into nested sequences
            for nested in flattenValue(item):
                yield nested
|
|
|
|
|
2011-01-20 02:06:15 +03:00
|
|
|
def getSortedInjectionTests():
    """
    Returns prioritized test list by eventually detected DBMS from error
    messages
    """

    retVal = conf.tests

    def priorityFunction(test):
        # UNION tests go last; tests targeting an error-fingerprinted DBMS
        # come right after the default group, others after that
        if test.stype == PAYLOAD.TECHNIQUE.UNION:
            return SORT_ORDER.LAST
        elif 'details' in test and 'dbms' in test.details:
            if test.details.dbms in Backend.getErrorParsedDBMSes():
                return SORT_ORDER.SECOND
            else:
                return SORT_ORDER.THIRD
        else:
            return SORT_ORDER.FIRST

    # Re-prioritize only when error messages identified candidate DBMSes
    if Backend.getErrorParsedDBMSes():
        retVal = sorted(retVal, key=priorityFunction)

    return retVal
|
2010-12-27 02:50:16 +03:00
|
|
|
|
|
|
|
def filterListValue(value, regex):
    """
    Returns list with items that have parts satisfying given regular
    expression
    """

    # Pass through unchanged when there is nothing to filter with/on
    if not (isinstance(value, list) and regex):
        return value

    return filter(lambda item: re.search(regex, item, re.I), value)
|
2010-12-28 17:40:34 +03:00
|
|
|
|
2011-01-02 10:37:47 +03:00
|
|
|
def showHttpErrorCodes():
    """
    Shows all HTTP error codes raised till now
    """

    if not kb.httpErrorCodes:
        return

    # "<code> (<reason>) - <n> times" for every distinct error code seen
    details = ", ".join("%d (%s) - %d times" % (code, httplib.responses.get(code, '?'), count) for code, count in kb.httpErrorCodes.items())
    logger.warn("HTTP error codes detected during testing:\n" + details)
|
2011-01-03 11:32:06 +03:00
|
|
|
|
2011-01-03 11:46:20 +03:00
|
|
|
def getComparePageRatio(firstPage, secondPage, filtered=False):
    """
    Returns comparison ratio between two given pages
    """

    if filtered:
        # Compare only textual page content, not markup
        firstPage = getFilteredPageContent(firstPage)
        secondPage = getFilteredPageContent(secondPage)

    # Per-thread SequenceMatcher instance avoids cross-thread interference
    matcher = getCurrentThreadData().seqMatcher
    matcher.set_seq1(firstPage)
    matcher.set_seq2(secondPage)

    return matcher.quick_ratio()
|
2011-01-08 12:30:10 +03:00
|
|
|
|
|
|
|
def openFile(filename, mode='r'):
|
|
|
|
"""
|
|
|
|
Returns file handle of a given filename
|
|
|
|
"""
|
|
|
|
|
|
|
|
try:
|
2012-05-14 18:06:43 +04:00
|
|
|
return codecs.open(filename, mode, UNICODE_ENCODING, "replace")
|
2011-01-15 15:13:45 +03:00
|
|
|
except IOError:
|
2011-01-08 12:30:10 +03:00
|
|
|
errMsg = "there has been a file opening error for filename '%s'. " % filename
|
2011-01-19 02:05:32 +03:00
|
|
|
errMsg += "Please check %s permissions on a file " % ("write" if \
|
|
|
|
mode and ('w' in mode or 'a' in mode or '+' in mode) else "read")
|
2011-01-08 12:30:10 +03:00
|
|
|
errMsg += "and that it's not locked by another process."
|
|
|
|
raise sqlmapFilePathException, errMsg
|
2011-01-19 18:25:48 +03:00
|
|
|
|
|
|
|
def decodeIntToUnicode(value):
    """
    Decodes inferenced integer value with usage of current page encoding
    """

    try:
        if Backend.getIdentifiedDbms() in (DBMS.MYSQL,):
            # MySQL's ORD() yields raw byte values, hence the byte-level decode
            # http://dev.mysql.com/doc/refman/5.0/en/string-functions.html#function_ord
            packed = struct.pack('B' if value < 256 else '<H', value)
            return packed.decode(kb.pageEncoding or UNICODE_ENCODING)
        else:
            return unichr(value)
    except:
        # Any decoding problem maps to the "unknown character" placeholder
        return INFERENCE_UNKNOWN_CHAR
|
2011-01-28 19:15:45 +03:00
|
|
|
|
|
|
|
def unhandledExceptionMessage():
    """
    Returns detailed message about occured unhandled exception
    """

    technique = enumValueToNameLookup(PAYLOAD.TECHNIQUE, kb.technique) if kb and kb.technique else None
    dbms = "%s (fingerprinted)" % Backend.getDbms() if Backend.getDbms() is not None else "%s (identified)" % Backend.getIdentifiedDbms()

    # Assembled piecewise for readability; joined into one report below
    parts = (
        "unhandled exception in %s, retry your " % VERSION_STRING,
        "run with the latest development version from the Subversion ",
        "repository. If the exception persists, please send by e-mail ",
        "to %s the following text " % ML,
        "and any information required to reproduce the bug. The ",
        "developers will try to reproduce the bug, fix it accordingly ",
        "and get back to you.\n",
        "sqlmap version: %s%s\n" % (VERSION, " (r%d)" % REVISION if REVISION else ""),
        "Python version: %s\n" % PYVERSION,
        "Operating system: %s\n" % PLATFORM,
        "Command line: %s\n" % " ".join(sys.argv),
        "Technique: %s\n" % technique,
        "Back-end DBMS: %s" % dbms,
    )

    # Never leak credentials/hostnames in bug reports
    return maskSensitiveData("".join(parts))
|
|
|
|
|
|
|
|
def maskSensitiveData(msg):
    """
    Masks sensitive data in the supplied message
    """

    result = msg

    # Every conf option whose value must never appear verbatim in output
    for option in ("hostname", "googleDork", "aCred", "pCred", "tbl", "db", "col", "user", "cookie", "proxy"):
        item = conf.get(option)

        if not item:
            continue

        # Escape all non-word characters so the value matches literally
        regex = SENSITIVE_DATA_REGEX % re.sub("(\W)", r"\\\1", item)

        # Replace every occurrence with same-length asterisk runs
        while True:
            value = extractRegexResult(regex, result)

            if not value:
                break

            result = result.replace(value, '*' * len(value))

    return result
|
2011-01-31 15:21:17 +03:00
|
|
|
|
|
|
|
def listToStrValue(value):
    """
    Flattens list to a string value

    >>> listToStrValue([1,2,3])
    '1, 2, 3'
    """

    if isinstance(value, (set, tuple)):
        value = list(value)

    # Non-sequence values pass through unchanged
    if not isinstance(value, list):
        return value

    # str([1, 2, 3]) == '[1, 2, 3]' -> strip the surrounding brackets
    return value.__str__().lstrip('[').rstrip(']')
|
2011-02-01 14:06:56 +03:00
|
|
|
|
|
|
|
def getExceptionFrameLocals():
    """
    Returns dictionary with local variable content from frame
    where exception has been raised

    Returns {} when there is no active exception.
    """

    retVal = {}

    # BUGFIX: sys.exc_info() always returns a 3-tuple (hence always truthy);
    # the original truthiness test let a (None, None, None) result through
    # and then crashed on trace.tb_next. Test the traceback element itself.
    trace = sys.exc_info()[2]

    if trace is not None:
        # Walk to the innermost frame - the one that actually raised
        while trace.tb_next:
            trace = trace.tb_next

        retVal = trace.tb_frame.f_locals

    return retVal
|
2011-02-14 00:20:21 +03:00
|
|
|
|
2011-08-29 17:47:32 +04:00
|
|
|
def intersect(valueA, valueB, lowerCase=False):
    """
    Returns intersection of the array-ized values
    """

    if not (valueA and valueB):
        return None

    listA = arrayizeValue(valueA)
    listB = arrayizeValue(valueB)

    if lowerCase:
        # Case-insensitive comparison: lower-case every string member
        listA = [item.lower() if isinstance(item, basestring) else item for item in listA]
        listB = [item.lower() if isinstance(item, basestring) else item for item in listB]

    return [item for item in listA if item in listB]
|
2011-02-22 15:54:22 +03:00
|
|
|
|
|
|
|
def cpuThrottle(value):
    """
    Does a CPU throttling for lesser CPU consumption
    """

    # Quadratic growth: throttle value N sleeps N^2 * 10 microseconds
    time.sleep(0.00001 * value ** 2)
|
2011-02-25 12:22:44 +03:00
|
|
|
|
2011-03-30 00:45:21 +04:00
|
|
|
def removeReflectiveValues(content, payload, suppressWarning=False):
    """
    Neutralizes reflective values in a given content based on a payload
    (e.g. ..search.php?q=1 AND 1=2 --> "...searching for <b>1%20AND%201%3D2</b>...")
    """

    retVal = content

    if all([content, payload]) and isinstance(content, unicode) and kb.reflectiveMechanism:
        def _(value):
            # Collapse consecutive replacement regexes into a single one
            while 2 * REFLECTED_REPLACEMENT_REGEX in value:
                value = value.replace(2 * REFLECTED_REPLACEMENT_REGEX, REFLECTED_REPLACEMENT_REGEX)
            return value

        # Normalize the payload to the form it would appear in on the page
        payload = getUnicode(urldecode(payload.replace(PAYLOAD_DELIMITER, '')))
        # Turn every non-alphanumeric run into the wildcard replacement regex
        regex = _(filterStringValue(payload, r"[A-Za-z0-9]", REFLECTED_REPLACEMENT_REGEX.encode("string-escape")))

        if regex != payload:
            if all(part.lower() in content.lower() for part in filter(None, regex.split(REFLECTED_REPLACEMENT_REGEX))[1:]): # fast optimization check
                parts = regex.split(REFLECTED_REPLACEMENT_REGEX)
                retVal = content.replace(payload, REFLECTED_VALUE_MARKER) # dummy approach

                if len(parts) > REFLECTED_MAX_REGEX_PARTS: # preventing CPU hogs
                    # Keep only head and tail of the regex parts
                    regex = _("%s%s%s" % (REFLECTED_REPLACEMENT_REGEX.join(parts[:REFLECTED_MAX_REGEX_PARTS / 2]), REFLECTED_REPLACEMENT_REGEX, REFLECTED_REPLACEMENT_REGEX.join(parts[-REFLECTED_MAX_REGEX_PARTS / 2:])))

                parts = filter(None, regex.split(REFLECTED_REPLACEMENT_REGEX))

                # Anchor the pattern's start at a word/border boundary
                if regex.startswith(REFLECTED_REPLACEMENT_REGEX):
                    regex = r"%s%s" % (REFLECTED_BORDER_REGEX, regex[len(REFLECTED_REPLACEMENT_REGEX):])
                else:
                    regex = r"\b%s" % regex

                # Anchor the pattern's end the same way
                if regex.endswith(REFLECTED_REPLACEMENT_REGEX):
                    regex = r"%s%s" % (regex[:-len(REFLECTED_REPLACEMENT_REGEX)], REFLECTED_BORDER_REGEX)
                else:
                    regex = r"%s\b" % regex

                retVal = re.sub(r"(?i)%s" % regex, REFLECTED_VALUE_MARKER, retVal)

                if len(parts) > 2:
                    # Also try matching without the leading part
                    regex = REFLECTED_REPLACEMENT_REGEX.join(parts[1:])
                    retVal = re.sub(r"(?i)\b%s\b" % regex, REFLECTED_VALUE_MARKER, retVal)

            if retVal != content:
                kb.reflectiveCounters[REFLECTIVE_COUNTER.HIT] += 1
                if not suppressWarning:
                    warnMsg = "reflective value(s) found and filtering out"
                    singleTimeWarnMessage(warnMsg)

                # Reflection inside frame src values deserves a heads-up
                if re.search(r"FRAME[^>]+src=[^>]*%s" % REFLECTED_VALUE_MARKER, retVal, re.I):
                    warnMsg = "frames detected containing attacked parameter values. Please be sure to "
                    warnMsg += "test those separately in case that attack on this page fails"
                    singleTimeWarnMessage(warnMsg)

            elif not kb.testMode and not kb.reflectiveCounters[REFLECTIVE_COUNTER.HIT]:
                # Too many misses without a single hit: disable the
                # (costly) mechanism for the rest of the run
                kb.reflectiveCounters[REFLECTIVE_COUNTER.MISS] += 1
                if kb.reflectiveCounters[REFLECTIVE_COUNTER.MISS] > REFLECTIVE_MISS_THRESHOLD:
                    kb.reflectiveMechanism = False
                    if not suppressWarning:
                        debugMsg = "turning off reflection removal mechanism (for optimization purposes)"
                        logger.debug(debugMsg)

    return retVal
|
2011-03-24 23:04:20 +03:00
|
|
|
|
|
|
|
def normalizeUnicode(value):
    """
    Does an ASCII normalization of unicode strings

    Reference: http://www.peterbe.com/plog/unicode-to-ascii
    """

    # non-unicode values are passed through untouched
    if isinstance(value, unicode):
        return unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')

    return value
|
2011-03-30 01:54:15 +04:00
|
|
|
|
|
|
|
def safeSQLIdentificatorNaming(name, isTable=False):
    """
    Returns a safe representation of SQL identificator name (internal data format)
    """

    retVal = name

    if isinstance(name, basestring):
        name = getUnicode(name)

        # MSSQL/Sybase table names get the default schema prepended when missing
        if isTable and Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE) and '.' not in name:
            name = "%s.%s" % (DEFAULT_MSSQL_SCHEMA, name)

        parts = name.split('.')

        for index, part in enumerate(parts):
            # identifiers made purely of word characters need no quoting
            if re.match(r"\A[A-Za-z0-9_]+\Z", part):
                continue

            if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.ACCESS):
                parts[index] = "`%s`" % part.strip("`")
            elif Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.ORACLE, DBMS.PGSQL, DBMS.DB2):
                parts[index] = "\"%s\"" % part.strip("\"")

        retVal = ".".join(parts)

    return retVal
|
|
|
|
|
|
|
|
def unsafeSQLIdentificatorNaming(name):
    """
    Extracts identificator's name from its safe SQL representation
    """

    retVal = name

    if isinstance(name, basestring):
        # strip the DBMS specific quoting characters
        if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.ACCESS):
            retVal = name.replace("`", "")
        elif Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.ORACLE, DBMS.PGSQL, DBMS.DB2):
            retVal = name.replace("\"", "")

        # drop the default schema prefix added by safeSQLIdentificatorNaming()
        if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE):
            prefix = "%s." % DEFAULT_MSSQL_SCHEMA

            if retVal.startswith(prefix):
                retVal = retVal[len(prefix):]

    return retVal
|
2011-04-10 02:39:03 +04:00
|
|
|
|
|
|
|
def isBinaryData(value):
    """
    Tests given value for binary content

    Returns True if the value contains at least one character which is
    neither printable nor from an extended (ord > 255) character range
    """

    retVal = False

    if isinstance(value, basestring):
        # any() short-circuits on the first binary character, unlike the
        # former reduce() call which always traversed the whole value
        retVal = any(_ not in string.printable and ord(_) <= 255 for _ in value)

    return retVal
|
2011-05-22 13:48:46 +04:00
|
|
|
|
|
|
|
def isNoneValue(value):
    """
    Returns whether the value is unusable (None or '')
    """

    if isinstance(value, basestring):
        # "None" covers string representations of the None object
        return value in ("None", "")

    if isinstance(value, (list, tuple)):
        return all(isNoneValue(element) for element in value)

    if isinstance(value, dict):
        return not any(value)

    return value is None
|
2011-06-15 15:58:50 +04:00
|
|
|
|
2011-10-22 01:12:48 +04:00
|
|
|
def isNullValue(value):
    """
    Returns whether the value contains explicit 'NULL' value
    """

    if not isinstance(value, basestring):
        return False

    return value.upper() == NULL
|
2011-10-22 01:12:48 +04:00
|
|
|
|
2011-06-15 15:58:50 +04:00
|
|
|
def expandMnemonics(mnemonics, parser, args):
|
|
|
|
"""
|
2012-02-16 18:42:28 +04:00
|
|
|
Expands mnemonic options
|
2011-06-15 15:58:50 +04:00
|
|
|
"""
|
|
|
|
|
|
|
|
class MnemonicNode:
|
|
|
|
def __init__(self):
|
|
|
|
self.next = {}
|
|
|
|
self.current = []
|
|
|
|
|
|
|
|
head = MnemonicNode()
|
|
|
|
pointer = None
|
|
|
|
|
|
|
|
for group in parser.option_groups:
|
|
|
|
for option in group.option_list:
|
|
|
|
for opt in option._long_opts + option._short_opts:
|
|
|
|
pointer = head
|
2011-06-16 16:34:38 +04:00
|
|
|
|
2011-06-15 15:58:50 +04:00
|
|
|
for char in opt:
|
|
|
|
if char == "-":
|
|
|
|
continue
|
|
|
|
elif char not in pointer.next:
|
|
|
|
pointer.next[char] = MnemonicNode()
|
2011-06-16 16:34:38 +04:00
|
|
|
|
2011-06-15 15:58:50 +04:00
|
|
|
pointer = pointer.next[char]
|
|
|
|
pointer.current.append(option)
|
|
|
|
|
|
|
|
for mnemonic in mnemonics.split(','):
|
|
|
|
found = None
|
2011-06-15 16:04:30 +04:00
|
|
|
name = mnemonic.split('=')[0].replace("-", "").strip()
|
2011-06-15 15:58:50 +04:00
|
|
|
value = mnemonic.split('=')[1] if len(mnemonic.split('=')) > 1 else None
|
|
|
|
pointer = head
|
2011-06-16 16:34:38 +04:00
|
|
|
|
2011-06-15 15:58:50 +04:00
|
|
|
for char in name:
|
|
|
|
if char in pointer.next:
|
|
|
|
pointer = pointer.next[char]
|
|
|
|
else:
|
|
|
|
pointer = None
|
2011-06-16 16:12:30 +04:00
|
|
|
break
|
2011-06-15 15:58:50 +04:00
|
|
|
|
|
|
|
if pointer in (None, head):
|
|
|
|
errMsg = "mnemonic '%s' can't be resolved to any parameter name" % name
|
2011-06-16 16:26:50 +04:00
|
|
|
raise sqlmapSyntaxException, errMsg
|
2011-06-16 16:34:38 +04:00
|
|
|
|
2011-06-15 15:58:50 +04:00
|
|
|
elif len(pointer.current) > 1:
|
|
|
|
options = {}
|
2011-06-16 16:34:38 +04:00
|
|
|
|
2011-06-15 15:58:50 +04:00
|
|
|
for option in pointer.current:
|
|
|
|
for opt in option._long_opts + option._short_opts:
|
2011-06-16 16:34:38 +04:00
|
|
|
opt = opt.strip('-')
|
|
|
|
if opt.startswith(name):
|
|
|
|
options[opt] = option
|
|
|
|
|
2011-06-15 15:58:50 +04:00
|
|
|
if name in options:
|
|
|
|
found = name
|
|
|
|
debugMsg = "mnemonic '%s' resolved to %s). " % (name, found)
|
|
|
|
logger.debug(debugMsg)
|
|
|
|
else:
|
|
|
|
found = sorted(options.keys(), key=lambda x: len(x))[0]
|
2011-12-21 23:40:42 +04:00
|
|
|
warnMsg = "detected ambiguity (mnemonic '%s' can be resolved to: %s). " % (name, ", ".join("'%s'" % key for key in options.keys()))
|
|
|
|
warnMsg += "Resolved to shortest of those ('%s')" % found
|
2011-06-15 15:58:50 +04:00
|
|
|
logger.warn(warnMsg)
|
2011-06-16 16:34:38 +04:00
|
|
|
|
2011-06-15 15:58:50 +04:00
|
|
|
found = options[found]
|
|
|
|
else:
|
|
|
|
found = pointer.current[0]
|
|
|
|
debugMsg = "mnemonic '%s' resolved to %s). " % (name, found)
|
|
|
|
logger.debug(debugMsg)
|
|
|
|
|
|
|
|
if found:
|
2011-12-26 18:08:25 +04:00
|
|
|
try:
|
|
|
|
value = found.convert_value(found, value)
|
|
|
|
except OptionValueError:
|
|
|
|
value = None
|
2011-06-16 16:34:38 +04:00
|
|
|
|
2011-06-15 15:58:50 +04:00
|
|
|
if value is not None:
|
|
|
|
setattr(args, found.dest, value)
|
2012-02-23 19:32:36 +04:00
|
|
|
elif not found.type: # boolean
|
2011-06-15 15:58:50 +04:00
|
|
|
setattr(args, found.dest, True)
|
2011-06-16 16:26:50 +04:00
|
|
|
else:
|
|
|
|
errMsg = "mnemonic '%s' requires value of type '%s'" % (name, found.type)
|
|
|
|
raise sqlmapSyntaxException, errMsg
|
2011-07-03 02:48:56 +04:00
|
|
|
|
|
|
|
def safeCSValue(value):
    """
    Returns value safe for CSV dumping

    Reference: http://tools.ietf.org/html/rfc4180
    """

    retVal = value

    if retVal and isinstance(retVal, basestring):
        # values already enclosed in double quotes are left untouched
        alreadyQuoted = retVal[0] == retVal[-1] == '"'

        if not alreadyQuoted:
            # quoting is needed when the delimiter, a quote or a newline occurs
            if any(character in retVal for character in (conf.csvDel, '"', '\n')):
                retVal = '"%s"' % retVal.replace('"', '""')

    return retVal
|
2011-08-09 18:20:25 +04:00
|
|
|
|
|
|
|
def filterPairValues(values):
    """
    Returns only list-like values with length 2
    """

    retVal = []

    if not isNoneValue(values) and hasattr(values, '__iter__'):
        # list comprehension (rather than filter(lambda ...)) guarantees
        # a list result regardless of the Python version in use
        retVal = [value for value in values if isinstance(value, (tuple, list, set)) and len(value) == 2]

    return retVal
|
2011-08-29 16:50:52 +04:00
|
|
|
|
|
|
|
def randomizeParameterValue(value):
    """
    Randomize a parameter value based on occurances of alphanumeric characters
    """

    retVal = value

    # every run of characters is swapped for a random run of the same
    # length and character class (upper, lower, digit respectively)
    replacements = (
        ('[A-Z]+', lambda found: randomStr(len(found)).upper()),
        ('[a-z]+', lambda found: randomStr(len(found)).lower()),
        ('[0-9]+', lambda found: str(randomInt(len(found)))),
    )

    for pattern, replacement in replacements:
        for match in re.finditer(pattern, value):
            retVal = retVal.replace(match.group(), replacement(match.group()))

    return retVal
|
2011-10-23 21:02:48 +04:00
|
|
|
|
2011-10-29 12:32:24 +04:00
|
|
|
def asciifyUrl(url, forceQuote=False):
    """
    Attempts to make a unicode url usuable with ``urllib/urllib2``.

    More specifically, it attempts to convert the unicode object ``url``,
    which is meant to represent a IRI, to an unicode object that,
    containing only ASCII characters, is a valid URI. This involves:

        * IDNA/Puny-encoding the domain name.
        * UTF8-quoting the path and querystring parts.

    See also RFC 3987.

    Reference: http://blog.elsdoerfer.name/2008/12/12/opening-iris-in-python/
    """

    parts = urlparse.urlsplit(url)
    if not parts.scheme or not parts.netloc:
        # apparently not an url
        return url

    # already pure ASCII - nothing to convert
    if all(char in string.printable for char in url):
        return url

    # idna-encode domain
    # NOTE(review): assumes parts.hostname is not None here - a URL with a
    # netloc but no hostname (e.g. "http://:80") would raise; confirm callers
    hostname = parts.hostname.encode("idna")

    # UTF8-quote the other parts. We check each part individually if
    # if needs to be quoted - that should catch some additional user
    # errors, say for example an umlaut in the username even though
    # the path *is* already quoted.
    def quote(s, safe):
        s = s or ''
        # Triggers on non-ascii characters - another option would be:
        # urllib.quote(s.replace('%', '')) != s.replace('%', '')
        # which would trigger on all %-characters, e.g. "&".
        if s.encode("ascii", "replace") != s or forceQuote:
            return urllib.quote(s.encode(UNICODE_ENCODING), safe=safe)
        return s

    username = quote(parts.username, '')
    password = quote(parts.password, safe='')
    path = quote(parts.path, safe='/')
    query = quote(parts.query, safe="&=")

    # put everything back together
    netloc = hostname
    if username or password:
        netloc = '@' + netloc
        if password:
            netloc = ':' + password + netloc
        netloc = username + netloc
    if parts.port:
        netloc += ':' + str(parts.port)

    return urlparse.urlunsplit([parts.scheme, netloc, path, query, parts.fragment])
|
|
|
|
|
|
|
|
def findPageForms(content, url, raise_=False, addToTargets=False):
    """
    Parses given page content for possible forms

    Found forms are returned as a set of (url, method, data, cookie)
    tuples and, when addToTargets is set, also added to kb.targetUrls.
    When raise_ is set parsing problems raise instead of being logged.
    """

    # file-like wrapper around the page content carrying the original url,
    # as expected by the (mechanize/clientform style) ParseResponse call
    class _(StringIO):
        def __init__(self, content, url):
            StringIO.__init__(self, unicodeencode(content, kb.pageEncoding) if isinstance(content, unicode) else content)
            self._url = url

        def geturl(self):
            return self._url

    if not content:
        errMsg = "can't parse forms as the page content appears to be blank"
        if raise_:
            raise sqlmapGenericException, errMsg
        else:
            logger.debug(errMsg)

    forms = None
    retVal = set()
    response = _(content, url)

    try:
        forms = ParseResponse(response, backwards_compat=False)
    except ParseError:
        # on badly formed HTML retry with only the <form>...</form> spans
        warnMsg = "badly formed HTML at the given url ('%s'). Will try to filter it" % url
        logger.warning(warnMsg)
        response.seek(0)
        filtered = _("".join(re.findall(r"<form(?!.+<form).+?</form>", response.read(), re.I | re.S)), response.geturl())
        try:
            forms = ParseResponse(filtered, backwards_compat=False)
        except ParseError:
            errMsg = "no success"
            if raise_:
                raise sqlmapGenericException, errMsg
            else:
                logger.debug(errMsg)

    if forms:
        for form in forms:
            for control in form.controls:
                if hasattr(control, "items"):
                    # if control has selectable items select first non-disabled
                    for item in control.items:
                        if not item.disabled:
                            if not item.selected:
                                item.selected = True
                            break

            # simulate form submission to obtain the resulting request
            request = form.click()
            url = urldecode(request.get_full_url(), kb.pageEncoding)
            method = request.get_method()
            data = request.get_data() if request.has_data() else None
            data = urldecode(data, kb.pageEncoding) if data and urlencode(DEFAULT_GET_POST_DELIMITER, None) not in data else data

            # POST forms without any data are of no use as targets
            if not data and method and method.upper() == HTTPMETHOD.POST:
                debugMsg = "invalid POST form with blank data detected"
                logger.debug(debugMsg)
                continue

            target = (url, method, data, conf.cookie)
            retVal.add(target)
    else:
        errMsg = "there were no forms found at the given target url"
        if raise_:
            raise sqlmapGenericException, errMsg
        else:
            logger.debug(errMsg)

    if addToTargets and retVal:
        for target in retVal:
            kb.targetUrls.add(target)

    return retVal
|
|
|
|
|
|
|
|
def getHostHeader(url):
    """
    Returns proper Host header value for a given target URL
    """

    retVal = urlparse.urlparse(url).netloc

    if re.search("http(s)?://\[.+\]", url, re.I):
        # IPv6 addresses go into the Host header without enclosing brackets
        retVal = extractRegexResult("http(s)?://\[(?P<result>.+)\]", url)
    elif retVal.endswith((':80', ':443')):
        # default scheme ports are stripped from the Host header value
        retVal = retVal.split(':')[0]

    return retVal
|
|
|
|
|
2012-02-16 18:42:28 +04:00
|
|
|
def evaluateCode(code, variables=None):
|
|
|
|
"""
|
|
|
|
Executes given python code given in a string form
|
|
|
|
"""
|
|
|
|
|
2011-11-21 20:41:02 +04:00
|
|
|
try:
|
|
|
|
exec(code, variables)
|
|
|
|
except Exception, ex:
|
|
|
|
errMsg = "an error occured while evaluating provided code ('%s'). " % ex
|
|
|
|
raise sqlmapGenericException, errMsg
|
2011-11-22 12:39:13 +04:00
|
|
|
|
|
|
|
def serializeObject(object_):
    """
    Serializes given object
    """

    # serialization is delegated to base64-encoded pickling
    retVal = base64pickle(object_)

    return retVal
|
2011-11-22 12:39:13 +04:00
|
|
|
|
|
|
|
def unserializeObject(value):
    """
    Unserializes object from given serialized form
    """

    # empty/None input produces None rather than an unpickling error
    return base64unpickle(value) if value else None
|
2011-12-21 15:50:49 +04:00
|
|
|
|
2012-02-16 18:42:28 +04:00
|
|
|
def resetCounter(technique):
    """
    Resets query counter for a given technique
    """

    # creates the entry when missing as well as zeroing an existing one
    kb.counters[technique] = 0
|
|
|
|
|
|
|
|
def incrementCounter(technique):
    """
    Increments query counter for a given technique
    """

    # getCounter() defaults to 0 hence a missing entry starts at 1
    current = getCounter(technique)
    kb.counters[technique] = current + 1
|
2011-12-21 15:50:49 +04:00
|
|
|
|
2012-02-16 18:42:28 +04:00
|
|
|
def getCounter(technique):
    """
    Returns query counter for a given technique
    """

    # techniques never used so far count as 0
    return kb.counters.get(technique, 0)
|
2012-02-17 18:22:48 +04:00
|
|
|
|
2012-02-21 15:44:48 +04:00
|
|
|
def applyFunctionRecursively(value, function):
    """
    Applies function recursively through list-like structures
    """

    # containers are rebuilt as plain lists of recursively processed items
    if isinstance(value, (list, tuple, set, BigArray)):
        return [applyFunctionRecursively(item, function) for item in value]

    return function(value)
|
|
|
|
|
|
|
|
def decodeHexValue(value):
    """
    Returns value decoded from DBMS specific hexadecimal representation
    """

    retVal = value

    # decodes a single value; applied recursively over containers below
    def _(value):
        # only even-length strings can be valid hex encodings
        if value and isinstance(value, basestring) and len(value) % 2 == 0:
            if value.lower().startswith("0x"):
                value = value[2:]
            value = value.decode("hex")
            # heuristic detection of UTF-16 content by embedded NUL bytes
            # (second byte NUL suggests little-endian, leading NUL big-endian)
            if len(value) > 1 and value[1] == '\x00':
                try:
                    value = value.decode("utf-16-le")
                except UnicodeDecodeError:
                    pass
            elif value and value[0] == '\x00':
                try:
                    value = value.decode("utf-16-be")
                except UnicodeDecodeError:
                    pass
        return value

    try:
        retVal = applyFunctionRecursively(value, _)
    except Exception:
        # best-effort: a failed decode warns once and returns the input as-is
        singleTimeWarnMessage("there was a problem decoding value '%s' from expected hexadecimal form" % value)

    return retVal
|
2012-02-21 15:44:48 +04:00
|
|
|
|
|
|
|
def extractExpectedValue(value, expected):
    """
    Extracts and returns expected value by a given type
    """

    if expected:
        value = unArrayizeValue(value)

        if isNoneValue(value):
            value = None
        elif expected == EXPECTED.BOOL:
            if isinstance(value, int):
                value = bool(value)
            elif isinstance(value, basestring):
                # unrecognized textual values map to None
                value = {"true": True, "1": True, "-1": True, "false": False, "0": False}.get(value.strip().lower())
        elif expected == EXPECTED.INT:
            if isinstance(value, basestring):
                value = int(value) if value.isdigit() else None

    return value
|
2012-02-24 14:48:19 +04:00
|
|
|
|
|
|
|
def setFormatterPrependFlag(value=True):
    """
    Sets logging formatter flag used for signaling if newline is needed before
    the logging message itself (used in inference mode)
    """

    # flag is read by the shared FORMATTER instance when emitting records
    FORMATTER._prepend_flag = value
|
2012-02-24 17:07:20 +04:00
|
|
|
|
|
|
|
def hashDBWrite(key, value, serialize=False):
    """
    Helper function for writing session data to HashDB
    """

    # key is salted with target identification and the milestone value to
    # avoid clashes between targets and/or incompatible sqlmap versions
    hashKey = "%s%s%s" % (conf.url or "%s%s" % (conf.hostname, conf.port), key, HASHDB_MILESTONE_VALUE)
    conf.hashDB.write(hashKey, value, serialize)
|
2012-02-24 17:07:20 +04:00
|
|
|
|
2012-02-24 18:54:10 +04:00
|
|
|
def hashDBRetrieve(key, unserialize=False, checkConf=False):
    """
    Helper function for restoring session data from HashDB
    """

    # same salting scheme as used by hashDBWrite()
    hashKey = "%s%s%s" % (conf.url or "%s%s" % (conf.hostname, conf.port), key, HASHDB_MILESTONE_VALUE)

    if kb.resumeValues and not (checkConf and any([conf.flushSession, conf.freshQueries])):
        retVal = conf.hashDB.retrieve(hashKey, unserialize)
    else:
        retVal = None

    # partially stored values are only usable while inference is in progress
    if not kb.inferenceMode and retVal and PARTIAL_VALUE_MARKER in retVal:
        retVal = None

    return retVal
|
2012-03-08 14:19:34 +04:00
|
|
|
|
|
|
|
def resetCookieJar(cookieJar):
|
|
|
|
if not conf.loC:
|
|
|
|
cookieJar.clear()
|
|
|
|
else:
|
|
|
|
try:
|
|
|
|
cookieJar.load(conf.loC)
|
|
|
|
cookieJar.clear_expired_cookies()
|
|
|
|
except cookielib.LoadError, msg:
|
|
|
|
errMsg = "there was a problem loading "
|
|
|
|
errMsg += "cookies file ('%s')" % msg
|
|
|
|
raise sqlmapGenericException, errMsg
|
2012-04-12 01:26:00 +04:00
|
|
|
|
2012-05-04 02:34:18 +04:00
|
|
|
def prioritySortColumns(columns):
    """
    Sorts given column names by length, putting those containing "id"
    (candidate key columns) at the front

    >>> prioritySortColumns(["name", "userid", "surname"])
    ['userid', 'name', 'surname']
    """

    # column names holding "id" are considered candidate (primary) key columns
    def _(column):
        return column and "id" in column.lower()

    # key-based sorting replaces the former Python 2 only cmp-based call,
    # reproducing its ordering: "id" columns first, shorter names first
    return sorted(columns, key=lambda column: (not _(column), len(column)))
|