#!/usr/bin/env python

"""
$Id$

Copyright (c) 2006-2011 sqlmap developers (http://www.sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

import codecs
import copy
import ctypes
import httplib
import inspect
import logging
import ntpath
import os
import pickle
import posixpath
import random
import re
import socket
import string
import struct
import sys
import tempfile
import time
import types
import urlparse
import unicodedata

from ConfigParser import DEFAULTSECT
from ConfigParser import RawConfigParser
from StringIO import StringIO
from difflib import SequenceMatcher
from math import sqrt
from subprocess import PIPE
from subprocess import Popen as execute
from tempfile import NamedTemporaryFile
from tempfile import mkstemp
from xml.etree import ElementTree as ET
from xml.dom import minidom
from xml.sax import parse

from extra.cloak.cloak import decloak
from extra.magic import magic
from extra.odict.odict import OrderedDict
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.data import paths
from lib.core.data import queries
from lib.core.convert import htmlunescape
from lib.core.convert import safecharencode
from lib.core.convert import urldecode
from lib.core.convert import urlencode
from lib.core.enums import DBMS
from lib.core.enums import HTTPHEADER
from lib.core.enums import OS
from lib.core.enums import PLACE
from lib.core.enums import PAYLOAD
from lib.core.enums import REFLECTIVE_COUNTER
from lib.core.enums import SORTORDER
from lib.core.exception import sqlmapDataException
from lib.core.exception import sqlmapFilePathException
from lib.core.exception import sqlmapGenericException
from lib.core.exception import sqlmapNoneDataException
from lib.core.exception import sqlmapMissingDependence
from lib.core.exception import sqlmapSyntaxException
from lib.core.optiondict import optDict
from lib.core.settings import BIGARRAY_CHUNK_LENGTH
from lib.core.settings import INFERENCE_UNKNOWN_CHAR
from lib.core.settings import UNICODE_ENCODING
from lib.core.settings import DBMS_DICT
from lib.core.settings import DESCRIPTION
from lib.core.settings import DUMMY_SQL_INJECTION_CHARS
from lib.core.settings import IS_WIN
from lib.core.settings import PLATFORM
from lib.core.settings import PYVERSION
from lib.core.settings import VERSION
from lib.core.settings import REVISION
from lib.core.settings import VERSION_STRING
from lib.core.settings import SITE
from lib.core.settings import ERROR_PARSING_REGEXES
from lib.core.settings import PRINTABLE_CHAR_REGEX
from lib.core.settings import SQL_STATEMENTS
from lib.core.settings import SUPPORTED_DBMS
from lib.core.settings import UNKNOWN_DBMS_VERSION
from lib.core.settings import DEFAULT_MSSQL_SCHEMA
from lib.core.settings import DUMP_NEWLINE_MARKER
from lib.core.settings import DUMP_CR_MARKER
from lib.core.settings import DUMP_DEL_MARKER
from lib.core.settings import DUMP_TAB_MARKER
from lib.core.settings import DUMP_START_MARKER
from lib.core.settings import DUMP_STOP_MARKER
from lib.core.settings import ML
from lib.core.settings import MIN_TIME_RESPONSES
from lib.core.settings import PAYLOAD_DELIMITER
from lib.core.settings import REFLECTED_NON_ALPHA_NUM_REGEX
from lib.core.settings import REFLECTED_MAX_REGEX_PARTS
from lib.core.settings import REFLECTED_VALUE_MARKER
from lib.core.settings import TIME_STDEV_COEFF
from lib.core.settings import DYNAMICITY_MARK_LENGTH
from lib.core.settings import REFLECTIVE_MISS_THRESHOLD
from lib.core.settings import SENSITIVE_DATA_REGEX
from lib.core.settings import SUPPORTED_OS
from lib.core.settings import URI_INJECTION_MARK_CHAR
from lib.core.settings import URI_QUESTION_MARKER
from lib.core.threads import getCurrentThreadData

class UnicodeRawConfigParser(RawConfigParser):
    """
    RawConfigParser with unicode writing support
    """

    def write(self, fp):
        """
        Write an .ini-format representation of the configuration state.
        """

        if self._defaults:
            fp.write("[%s]\n" % DEFAULTSECT)

            for (key, value) in self._defaults.items():
                fp.write("%s = %s\n" % (key, getUnicode(value, UNICODE_ENCODING).replace('\n', '\n\t')))

            fp.write("\n")

        for section in self._sections:
            fp.write("[%s]\n" % section)

            for (key, value) in self._sections[section].items():
                if key != "__name__":
                    if value is None:
                        fp.write("%s\n" % (key))
                    else:
                        fp.write("%s = %s\n" % (key, getUnicode(value, UNICODE_ENCODING).replace('\n', '\n\t')))

            fp.write("\n")

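# Illustrative usage sketch (not part of the original module): persisting a
# configuration that holds unicode values. The section, option and file
# name below are hypothetical.
#
# >>> parser = UnicodeRawConfigParser()
# >>> parser.add_section("Target")
# >>> parser.set("Target", "url", u"http://www.example.com/vuln.php?id=1")
# >>> fp = codecs.open("sqlmap.conf", "wb", UNICODE_ENCODING)
# >>> parser.write(fp)
# >>> fp.close()
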
class Wordlist:
    """
    Iterator for looping over large dictionary files
    """

    def __init__(self, filenames):
        self.filenames = filenames
        self.fp = None
        self.index = 0
        self.iter = None
        self.cursize = 0
        self.custom = []
        self.adjust()

    def __iter__(self):
        return self

    def adjust(self):
        self.closeFP()
        if self.index > len(self.filenames):
            raise StopIteration
        elif self.index == len(self.filenames):
            self.iter = iter(self.custom)
        else:
            current = self.filenames[self.index]
            infoMsg = "loading dictionary from '%s'" % current
            singleTimeLogMessage(infoMsg)
            self.fp = open(current, "r")
            self.cursize = os.path.getsize(current)
            self.iter = self.fp.xreadlines()

        self.index += 1

    def append(self, value):
        self.custom.append(value)

    def closeFP(self):
        if self.fp:
            self.fp.close()
            self.fp = None

    def next(self):
        retVal = None
        try:
            retVal = self.iter.next().rstrip()
        except StopIteration:
            self.adjust()
            retVal = self.iter.next().rstrip()
        return retVal

    def percentage(self):
        retVal = 0
        if self.fp:
            retVal = round(100.0 * self.fp.tell() / self.cursize)
        return retVal

    def rewind(self):
        self.index = 0
        self.adjust()

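# Illustrative usage sketch (not part of the original module): iterating
# over one or more dictionary files; custom candidates appended at runtime
# are yielded after the files are exhausted. The file name and consumer
# function are hypothetical.
#
# >>> wordlist = Wordlist(["./txt/wordlist.txt"])
# >>> wordlist.append("secret123")
# >>> for candidate in wordlist:
# ...     testPassword(candidate)
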
class BigArray(list):
    """
    List-like object used for storing large amounts of data (disk cached)
    """

    def __init__(self):
        self.chunks = [[]]
        self.cache = None
        self.length = 0
        self.filenames = set()

    def append(self, value):
        self.chunks[-1].append(value)
        if len(self.chunks[-1]) >= BIGARRAY_CHUNK_LENGTH:
            filename = self._dump(self.chunks[-1])
            del(self.chunks[-1][:])
            self.chunks[-1] = filename
            self.chunks.append([])

    def pop(self):
        if len(self.chunks[-1]) < 1:
            self.chunks.pop()
            fp = open(self.chunks[-1], 'rb')
            self.chunks[-1] = pickle.load(fp)
            fp.close()
        return self.chunks[-1].pop()

    def index(self, value):
        for index in xrange(len(self)):
            if self[index] == value:
                return index
        raise ValueError, "%s is not in list" % value

    def _dump(self, value):
        handle, filename = tempfile.mkstemp()
        self.filenames.add(filename)
        os.close(handle)
        fp = open(filename, 'w+b')
        pickle.dump(value, fp)
        fp.close()
        return filename

    def _checkcache(self, index):
        if (self.cache and self.cache[0] != index and self.cache[2]):
            filename = self._dump(self.cache[1])
            self.chunks[self.cache[0]] = filename
        if not (self.cache and self.cache[0] == index):
            fp = open(self.chunks[index], 'rb')
            self.cache = [index, pickle.load(fp), False]
            fp.close()

    def __getitem__(self, y):
        index = y / BIGARRAY_CHUNK_LENGTH
        offset = y % BIGARRAY_CHUNK_LENGTH
        chunk = self.chunks[index]
        if isinstance(chunk, list):
            return chunk[offset]
        else:
            self._checkcache(index)
            return self.cache[1][offset]

    def __setitem__(self, y, value):
        index = y / BIGARRAY_CHUNK_LENGTH
        offset = y % BIGARRAY_CHUNK_LENGTH
        chunk = self.chunks[index]
        if isinstance(chunk, list):
            chunk[offset] = value
        else:
            self._checkcache(index)
            self.cache[1][offset] = value
            self.cache[2] = True  # dirty flag

    def __repr__(self):
        return "%s%s" % ("..." if len(self.chunks) > 1 else "", self.chunks[-1].__repr__())

    def __iter__(self):
        for i in xrange(len(self)):
            yield self[i]

    def __len__(self):
        return len(self.chunks[-1]) if len(self.chunks) == 1 else (len(self.chunks) - 1) * BIGARRAY_CHUNK_LENGTH + len(self.chunks[-1])

    def __del__(self):
        for filename in self.filenames:
            try:
                os.remove(filename)
            except OSError:
                pass

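# Illustrative usage sketch (not part of the original module): BigArray
# behaves like a list but pickles each full chunk of BIGARRAY_CHUNK_LENGTH
# items to a temporary file and reloads chunks on demand, so memory stays
# bounded while dumping large tables.
#
# >>> rows = BigArray()
# >>> for i in xrange(100000):
# ...     rows.append("row-%d" % i)
# >>> rows[12345]
# 'row-12345'
# >>> len(rows)
# 100000
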
class DynamicContentItem:
    """
    Represents a line in the page content with dynamic properties (a
    candidate for removal prior to the detection phase)
    """

    def __init__(self, lineNumber, pageTotal, lineContentBefore, lineContentAfter):
        self.lineNumber = lineNumber
        self.pageTotal = pageTotal
        self.lineContentBefore = lineContentBefore
        self.lineContentAfter = lineContentAfter

class Format:
    @staticmethod
    def humanize(values, chain=" or "):
        strJoin = "|".join([v for v in values])

        return strJoin.replace("|", chain)

    # Get methods
    @staticmethod
    def getDbms(versions=None):
        """
        Formats the back-end DBMS fingerprint value and returns it
        as a human readable string.

        @return: detected back-end DBMS based upon fingerprint techniques.
        @rtype: C{str}
        """

        if versions is None and Backend.getVersionList():
            versions = Backend.getVersionList()

        if versions is None:
            return Backend.getDbms()
        else:
            return "%s %s" % (Backend.getDbms(), " and ".join([v for v in versions]))

    @staticmethod
    def getErrorParsedDBMSes():
        """
        Parses the knowledge base htmlFp list and returns its values
        formatted as a human readable string.

        @return: list of possible back-end DBMS based upon error messages
        parsing.
        @rtype: C{str}
        """

        htmlParsed = ""

        if len(kb.htmlFp) == 0:
            return None
        elif len(kb.htmlFp) == 1:
            htmlParsed = kb.htmlFp[0]
        elif len(kb.htmlFp) > 1:
            htmlParsed = " or ".join([htmlFp for htmlFp in kb.htmlFp])

        return htmlParsed

    @staticmethod
    def getOs(target, info):
        """
        Formats the back-end operating system fingerprint value
        and returns it as a human readable string.

        Example of info (kb.headersFp) dictionary:

        {
          'distrib': set(['Ubuntu']),
          'type': set(['Linux']),
          'technology': set(['PHP 5.2.6', 'Apache 2.2.9']),
          'release': set(['8.10'])
        }

        Example of info (kb.bannerFp) dictionary:

        {
          'sp': set(['Service Pack 4']),
          'dbmsVersion': '8.00.194',
          'dbmsServicePack': '0',
          'distrib': set(['2000']),
          'dbmsRelease': '2000',
          'type': set(['Windows'])
        }

        @return: detected back-end operating system based upon fingerprint
        techniques.
        @rtype: C{str}
        """

        infoStr = ""

        if info and "type" in info:
            infoStr += "%s operating system: %s" % (target, Format.humanize(info["type"]))

            if "distrib" in info:
                infoStr += " %s" % Format.humanize(info["distrib"])

            if "release" in info:
                infoStr += " %s" % Format.humanize(info["release"])

            if "sp" in info:
                infoStr += " %s" % Format.humanize(info["sp"])

            if "codename" in info:
                infoStr += " (%s)" % Format.humanize(info["codename"])

        if "technology" in info:
            infoStr += "\nweb application technology: %s" % Format.humanize(info["technology"], ", ")

        return infoStr

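# Illustrative usage sketch (not part of the original module), reusing the
# example kb.headersFp dictionary from the getOs() docstring (set ordering
# may vary, so the technology list order is not guaranteed):
#
# >>> info = {'type': set(['Linux']), 'distrib': set(['Ubuntu']),
# ...         'release': set(['8.10']),
# ...         'technology': set(['PHP 5.2.6', 'Apache 2.2.9'])}
# >>> print Format.getOs("back-end DBMS", info)
# back-end DBMS operating system: Linux Ubuntu 8.10
# web application technology: PHP 5.2.6, Apache 2.2.9
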
class Backend:
    # Set methods
    @staticmethod
    def setDbms(dbms):
        dbms = aliasToDbmsEnum(dbms)

        if dbms is None:
            return None

        # Little precaution, in theory this condition should always be false
        elif kb.dbms is not None and kb.dbms != dbms:
            msg = "sqlmap previously fingerprinted back-end DBMS "
            msg += "%s. However now it has been fingerprinted " % kb.dbms
            msg += "to be %s. " % dbms
            msg += "Please, specify which DBMS is "
            msg += "correct [%s (default)/%s] " % (kb.dbms, dbms)

            while True:
                inp = readInput(msg, default=kb.dbms)

                if aliasToDbmsEnum(inp) == kb.dbms:
                    break
                elif aliasToDbmsEnum(inp) == dbms:
                    kb.dbms = aliasToDbmsEnum(inp)
                    break
                else:
                    warnMsg = "invalid value"
                    logger.warn(warnMsg)

        elif kb.dbms is None:
            kb.dbms = aliasToDbmsEnum(dbms)

        return kb.dbms

    @staticmethod
    def setVersion(version):
        if isinstance(version, basestring):
            kb.dbmsVersion = [ version ]

        return kb.dbmsVersion

    @staticmethod
    def setVersionList(versionsList):
        if isinstance(versionsList, list):
            kb.dbmsVersion = versionsList
        elif isinstance(versionsList, basestring):
            Backend.setVersion(versionsList)
        else:
            logger.error("invalid format of versionsList")

    @staticmethod
    def forceDbms(dbms, sticky=False):
        if not kb.stickyFlag:
            kb.forcedDbms = aliasToDbmsEnum(dbms)
            kb.stickyFlag = sticky

    @staticmethod
    def flushForcedDbms(force=False):
        if not kb.stickyFlag or force:
            kb.forcedDbms = None
            kb.stickyFlag = False

    @staticmethod
    def setOs(os):
        if os is None:
            return None

        # Little precaution, in theory this condition should always be false
        elif kb.os is not None and isinstance(os, basestring) and kb.os.lower() != os.lower():
            msg = "sqlmap previously fingerprinted back-end DBMS "
            msg += "operating system %s. However now it has " % kb.os
            msg += "been fingerprinted to be %s. " % os
            msg += "Please, specify which OS is "
            msg += "correct [%s (default)/%s] " % (kb.os, os)

            while True:
                inp = readInput(msg, default=kb.os)

                if inp == kb.os:
                    break
                elif inp == os:
                    kb.os = inp.capitalize()
                    break
                else:
                    warnMsg = "invalid value"
                    logger.warn(warnMsg)

        elif kb.os is None and isinstance(os, basestring):
            kb.os = os.capitalize()

        return kb.os

    @staticmethod
    def setArch():
        msg = "what is the back-end database management system architecture?"
        msg += "\n[1] 32-bit (default)"
        msg += "\n[2] 64-bit"

        while True:
            arch = readInput(msg, default='1')

            if isinstance(arch, basestring) and arch.isdigit() and int(arch) in ( 1, 2 ):
                if int(arch) == 1:
                    kb.arch = 32
                else:
                    kb.arch = 64

                break
            else:
                warnMsg = "invalid value, valid values are 1 and 2"
                logger.warn(warnMsg)

        return kb.arch

    # Get methods
    @staticmethod
    def getForcedDbms():
        return aliasToDbmsEnum(kb.forcedDbms)

    @staticmethod
    def getDbms():
        return aliasToDbmsEnum(kb.dbms) if kb.get('dbms') else None

    @staticmethod
    def getErrorParsedDBMSes():
        """
        Returns the list of DBMS names parsed so far

        This function is called to:

        1. Sort the tests, getSortedInjectionTests() - detection phase.
        2. Ask the user whether or not to skip specific DBMS tests in the
           detection phase, lib/controller/checks.py - detection phase.
        3. Sort the fingerprint of the DBMS, lib/controller/handler.py -
           fingerprint phase.
        """

        return kb.htmlFp

    @staticmethod
    def getIdentifiedDbms():
        dbms = None

        if not kb:
            pass
        elif Backend.getForcedDbms() is not None:
            dbms = Backend.getForcedDbms()
        elif Backend.getDbms() is not None:
            dbms = kb.dbms
        elif conf.get('dbms'):
            dbms = conf.dbms
        elif len(Backend.getErrorParsedDBMSes()) > 0:
            dbms = Backend.getErrorParsedDBMSes()[0]

        return aliasToDbmsEnum(dbms)

    @staticmethod
    def getVersion():
        if len(kb.dbmsVersion) > 0:
            return kb.dbmsVersion[0]
        else:
            return None

    @staticmethod
    def getVersionList():
        if len(kb.dbmsVersion) > 0:
            return kb.dbmsVersion
        else:
            return None

    @staticmethod
    def getOs():
        return kb.os

    @staticmethod
    def getArch():
        if kb.arch is None:
            Backend.setArch()

        return kb.arch

    # Comparison methods
    @staticmethod
    def isDbms(dbms):
        if Backend.getDbms() is not None:
            return Backend.getDbms() == aliasToDbmsEnum(dbms)
        else:
            return Backend.getIdentifiedDbms() == aliasToDbmsEnum(dbms)

    @staticmethod
    def isDbmsWithin(aliases):
        return Backend.getDbms() is not None and Backend.getDbms().lower() in aliases

    @staticmethod
    def isVersion(version):
        return Backend.getVersion() is not None and Backend.getVersion() == version

    @staticmethod
    def isVersionWithin(versionList):
        if Backend.getVersionList() is None:
            return False

        for dbmsVersion in Backend.getVersionList():
            if dbmsVersion == UNKNOWN_DBMS_VERSION:
                continue
            elif dbmsVersion in versionList:
                return True

        return False

    @staticmethod
    def isVersionGreaterOrEqualThan(version):
        return Backend.getVersion() is not None and str(Backend.getVersion()) >= str(version)

    @staticmethod
    def isOs(os):
        return Backend.getOs() is not None and Backend.getOs().lower() == os.lower()

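# Illustrative usage sketch (not part of the original module): typical flow
# once the fingerprint phase has identified the back-end DBMS (version
# comparison is string-based, as in isVersionGreaterOrEqualThan above).
#
# >>> Backend.setDbms(DBMS.MYSQL)
# 'MySQL'
# >>> Backend.setVersion("5.0")
# ['5.0']
# >>> Backend.isDbms(DBMS.MYSQL)
# True
# >>> Backend.isVersionGreaterOrEqualThan("4.1")
# True
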
# Reference: http://code.activestate.com/recipes/325205-cache-decorator-in-python-24/
def cachedmethod(f, cache={}):
    def g(*args, **kwargs):
        key = ( f, tuple(args), frozenset(kwargs.items()) )
        if key not in cache:
            cache[key] = f(*args, **kwargs)
        return cache[key]
    return g

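# Illustrative usage sketch (not part of the original module): memoizing a
# pure function so repeated calls with the same arguments hit the shared
# cache dictionary instead of recomputing. The function below is
# hypothetical; note that the recursive name lookup resolves to the
# wrapped (cached) version.
#
# >>> @cachedmethod
# ... def fibonacci(n):
# ...     return n if n < 2 else fibonacci(n - 1) + fibonacci(n - 2)
# >>> fibonacci(30)
# 832040
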
def paramToDict(place, parameters=None):
    """
    Split the parameters into names and values, check if these parameters
    are within the testable parameters and return in a dictionary.

    @param place: where sqlmap has to work, can be GET, POST or Cookie.
    @type place: C{str}

    @param parameters: parameters string in the format for instance
    'p1=v1&p2=v2' (GET and POST) or 'p1=v1;p2=v2' (Cookie).
    @type parameters: C{str}

    @return: the parameters in a dictionary.
    @rtype: C{dict}
    """

    testableParameters = OrderedDict()

    if conf.parameters.has_key(place) and not parameters:
        parameters = conf.parameters[place]

    if place != PLACE.SOAP:
        parameters = parameters.replace(", ", ",")

        splitParams = parameters.split(conf.pDel or (";" if place == PLACE.COOKIE else "&"))

        for element in splitParams:
            elem = element.split("=")

            if len(elem) >= 2:
                parameter = elem[0].replace(" ", "")

                condition = not conf.testParameter
                condition |= parameter in conf.testParameter

                if condition:
                    testableParameters[parameter] = "=".join(elem[1:])
                    if testableParameters[parameter].strip(DUMMY_SQL_INJECTION_CHARS) != testableParameters[parameter] \
                      or re.search(r'(\A-[1-9])|(\A9{3,})', testableParameters[parameter]):
                        errMsg = "you have provided tainted parameter values "
                        errMsg += "(%s), most probably with leftover " % element
                        errMsg += "characters from manual SQL injection "
                        errMsg += "tests (%s) or an invalid numerical value. " % DUMMY_SQL_INJECTION_CHARS
                        errMsg += "Please use only valid parameter values "
                        errMsg += "so that sqlmap is able to run properly."
                        raise sqlmapSyntaxException, errMsg
    else:
        root = ET.XML(parameters)
        iterator = root.getiterator()

        for child in iterator:
            parameter = child.tag

            if "}" in parameter:
                testParam = parameter.split("}")[1]
            else:
                testParam = parameter

            condition = not conf.testParameter
            condition |= testParam in conf.testParameter

            if condition:
                testableParameters[parameter] = child.text

    if conf.testParameter and not testableParameters:
        paramStr = ", ".join(test for test in conf.testParameter)

        if len(conf.testParameter) > 1:
            warnMsg = "the testable parameters '%s' " % paramStr
            warnMsg += "you provided are not inside the %s" % place
        else:
            parameter = conf.testParameter[0]

            warnMsg = "the testable parameter '%s' " % paramStr
            warnMsg += "you provided is not inside the %s" % place

        logger.warn(warnMsg)

    elif len(conf.testParameter) != len(testableParameters.keys()):
        for parameter in conf.testParameter:
            if not testableParameters.has_key(parameter):
                warnMsg = "the testable parameter '%s' " % parameter
                warnMsg += "you provided is not inside the %s" % place
                logger.warn(warnMsg)

    return testableParameters

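# Illustrative usage sketch (not part of the original module): splitting a
# GET query string into an ordered dictionary of testable parameters
# (assuming conf.testParameter is empty, so every parameter qualifies):
#
# >>> paramToDict(PLACE.GET, "id=1&cat=2")
# {'id': '1', 'cat': '2'}    # an OrderedDict, shown here as a plain dict
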
def getDocRoot():
    docRoot = None
    pagePath = directoryPath(conf.path)

    if Backend.isOs(OS.WINDOWS):
        defaultDocRoot = ["C:/xampp/htdocs/", "C:/Inetpub/wwwroot/"]
    else:
        defaultDocRoot = ["/var/www/"]

    if kb.absFilePaths:
        for absFilePath in kb.absFilePaths:
            if directoryPath(absFilePath) == '/':
                continue

            absFilePath = normalizePath(absFilePath)
            absFilePathWin = None

            if isWindowsPath(absFilePath):
                absFilePathWin = posixToNtSlashes(absFilePath)
                absFilePath = ntToPosixSlashes(absFilePath[2:])
            elif isWindowsDriveLetterPath(absFilePath):
                absFilePath = absFilePath[2:]

            if pagePath in absFilePath:
                index = absFilePath.index(pagePath)
                docRoot = absFilePath[:index]

                if len(docRoot) == 0:
                    docRoot = None
                    continue

                if absFilePathWin:
                    docRoot = "C:/%s" % ntToPosixSlashes(docRoot)

                docRoot = normalizePath(docRoot)
                break

    if docRoot:
        infoMsg = "retrieved the web server document root: '%s'" % docRoot
        logger.info(infoMsg)
    else:
        warnMsg = "unable to retrieve the web server document root"
        logger.warn(warnMsg)

        message = "please provide the web server document root "
        message += "[%s]: " % ",".join(root for root in defaultDocRoot)
        inputDocRoot = readInput(message, default=defaultDocRoot)

        if inputDocRoot:
            if isinstance(inputDocRoot, basestring):
                docRoot = inputDocRoot.split(',')
            else:
                docRoot = inputDocRoot
        else:
            docRoot = defaultDocRoot

    return docRoot

def getDirs():
    directories = set("/")

    if kb.absFilePaths:
        infoMsg = "retrieved web server full paths: "
        infoMsg += "'%s'" % ", ".join(ntToPosixSlashes(path) for path in kb.absFilePaths)
        logger.info(infoMsg)

        for absFilePath in kb.absFilePaths:
            if absFilePath:
                directory = directoryPath(absFilePath)
                directory = ntToPosixSlashes(directory)
                directories.add(directory)
    else:
        warnMsg = "unable to retrieve any web server path"
        logger.warn(warnMsg)

    webDir = extractRegexResult(r"//[^/]+?/(?P<result>.*)/.", conf.url)
    if webDir:
        directories.add(webDir)

    message = "please provide any additional web server full path to try "
    message += "to upload the agent [Enter for None]: "
    inputDirs = readInput(message)

    if inputDirs:
        inputDirs = inputDirs.replace(", ", ",")
        inputDirs = inputDirs.split(",")

        for inputDir in inputDirs:
            if inputDir:
                directories.add(inputDir)

    return directories

def filePathToString(filePath):
    strRepl = filePath.replace("/", "_").replace("\\", "_")
    strRepl = strRepl.replace(" ", "_").replace(":", "_")

    return strRepl

def singleTimeWarnMessage(message):
    singleTimeLogMessage(message, logging.WARN)

def singleTimeLogMessage(message, level=logging.INFO, flag=None):
    if flag is None:
        flag = hash(message)

    if flag not in kb.singleLogFlags:
        kb.singleLogFlags.add(flag)
        logger.log(level, message)

def dataToStdout(data, forceOutput=False):
    if not ('threadException' in kb and kb.threadException):
        if forceOutput or not getCurrentThreadData().disableStdOut:
            try:
                logging._acquireLock()
                # Reference: http://bugs.python.org/issue1602
                if IS_WIN:
                    output = data.encode('ascii', "replace")

                    if output != data:
                        warnMsg = "cannot properly display Unicode characters "
                        warnMsg += "inside Windows OS command prompt "
                        warnMsg += "(http://bugs.python.org/issue1602). All "
                        warnMsg += "unhandled occurrences will result in "
                        warnMsg += "replacement with '?' character. Please, find "
                        warnMsg += "proper character representation inside "
                        warnMsg += "corresponding output files. "
                        singleTimeWarnMessage(warnMsg)

                    sys.stdout.write(output)
                else:
                    sys.stdout.write(data.encode(sys.stdout.encoding))
            except:
                sys.stdout.write(data.encode(UNICODE_ENCODING))
            finally:
                sys.stdout.flush()
                logging._releaseLock()

def dataToSessionFile(data):
    if not conf.sessionFile or kb.suppressSession:
        return

    conf.sessionFP.write(data)
    conf.sessionFP.flush()

def dataToTrafficFile(data):
    if not conf.trafficFile:
        return

    conf.trafficFP.write(data)
    conf.trafficFP.flush()

def dataToDumpFile(dumpFile, data):
    dumpFile.write(data)
    dumpFile.flush()

def dataToOutFile(data):
    if not data:
        return "No data retrieved"

    rFile = filePathToString(conf.rFile)
    rFilePath = "%s%s%s" % (conf.filePath, os.sep, rFile)
    rFileFP = codecs.open(rFilePath, "wb")

    rFileFP.write(data)
    rFileFP.flush()
    rFileFP.close()

    return rFilePath

def strToHex(inpStr):
    """
    @param inpStr: inpStr to be converted into its hexadecimal value.
    @type inpStr: C{str}

    @return: the hexadecimal representation of inpStr.
    @rtype: C{str}
    """

    hexStr = ""

    for character in inpStr:
        if character == "\n":
            character = " "

        hexChar = "%2x" % ord(character)
        hexChar = hexChar.replace(" ", "0")
        hexChar = hexChar.upper()

        hexStr += hexChar

    return hexStr

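# Illustrative usage sketch (not part of the original module): each
# character maps to its two-digit uppercase hexadecimal ordinal, with
# newlines first turned into spaces.
#
# >>> strToHex("id=1")
# '69643D31'
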
def readInput(message, default=None, checkBatch=True):
    """
    @param message: message to display on terminal.
    @type message: C{str}

    @return: a string read from keyboard as input.
    @rtype: C{str}
    """

    if "\n" in message:
        message += "%s> " % ("\n" if message.count("\n") > 1 else "")
    elif message[-1] == ']':
        message += " "

    message = "\r%s" % message

    if checkBatch and conf.batch:
        if isinstance(default, (list, tuple, set)):
            options = ",".join([getUnicode(opt, UNICODE_ENCODING) for opt in default])
        elif default:
            options = getUnicode(default, UNICODE_ENCODING)
        else:
            options = unicode()

        infoMsg = "%s%s" % (getUnicode(message), options)
        logger.info(infoMsg)

        debugMsg = "used the default behaviour, running in batch mode"
        logger.debug(debugMsg)

        data = default
    else:
        logging._acquireLock()
        dataToStdout(message, True)
        data = raw_input()
        #data = raw_input(message.encode(sys.stdout.encoding or UNICODE_ENCODING))
        logging._releaseLock()

    if not data:
        data = default

    return data

def randomRange(start=0, stop=1000):
    """
    @param start: starting number.
    @type start: C{int}

    @param stop: last number.
    @type stop: C{int}

    @return: a random number within the range.
    @rtype: C{int}
    """

    return int(random.randint(start, stop))

def randomInt(length=4):
    """
    @param length: number of digits of the random number.
    @type length: C{int}

    @return: a random number of the given number of digits.
    @rtype: C{int}
    """

    return int("".join([random.choice(string.digits if i != 0 else string.digits.replace('0', '')) for i in xrange(0, length)]))

def randomStr(length=4, lowercase=False, alphabet=None):
    """
    @param length: length of the random string.
    @type length: C{int}

    @return: a random string of characters.
    @rtype: C{str}
    """

    if alphabet:
        rndStr = "".join([random.choice(alphabet) for _ in xrange(0, length)])
    elif lowercase:
        rndStr = "".join([random.choice(string.lowercase) for _ in xrange(0, length)])
    else:
        rndStr = "".join([random.choice(string.letters) for _ in xrange(0, length)])

    return rndStr

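# Illustrative usage sketch (not part of the original module): both helpers
# are used throughout sqlmap to build unique markers and payload values.
# Outputs are random; the values shown are arbitrary examples.
#
# >>> randomInt(6)
# 482913
# >>> randomStr(8, lowercase=True)
# 'qwnvzhdk'
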
def sanitizeStr(inpStr):
    """
    @param inpStr: inpStr to sanitize: cast to unicode, replace newlines
    with one space and strip carriage returns.
    @type inpStr: C{str}

    @return: sanitized inpStr
    @rtype: C{str}
    """

    cleanString = getUnicode(inpStr)
    cleanString = cleanString.replace("\n", " ").replace("\r", "")

    return cleanString

def checkFile(filename):
    """
    @param filename: filename to check if it exists.
    @type filename: C{str}
    """

    if not os.path.exists(filename):
        raise sqlmapFilePathException, "unable to read file '%s'" % filename

def replaceNewlineTabs(inpStr, stdout=False):
    if inpStr is None:
        return

    if stdout:
        replacedString = inpStr.replace("\n", " ").replace("\r", " ").replace("\t", " ")
    else:
        replacedString = inpStr.replace("\n", DUMP_NEWLINE_MARKER).replace("\r", DUMP_CR_MARKER).replace("\t", DUMP_TAB_MARKER)

    replacedString = replacedString.replace(kb.chars.delimiter, DUMP_DEL_MARKER)

    return replacedString

def restoreDumpMarkedChars(inpStr, onlyNewlineTab=False):
    replacedString = inpStr

    if isinstance(replacedString, basestring):
        replacedString = replacedString.replace(DUMP_NEWLINE_MARKER, "\n").replace(DUMP_CR_MARKER, "\r").replace(DUMP_TAB_MARKER, "\t")

        if not onlyNewlineTab:
            replacedString = replacedString.replace(DUMP_START_MARKER, "").replace(DUMP_STOP_MARKER, "")
            replacedString = replacedString.replace(DUMP_DEL_MARKER, ", ")

    return replacedString

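# Illustrative usage sketch (not part of the original module): the two
# helpers above are inverses (modulo the kb.chars.delimiter replacement),
# so dumped values survive a round trip through the session file markers,
# provided the input does not contain the delimiter itself.
#
# >>> marked = replaceNewlineTabs(u"a\tb\nc")
# >>> restoreDumpMarkedChars(marked, onlyNewlineTab=True)
# u'a\tb\nc'
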
def banner():
    """
    This function prints the sqlmap banner with its version
    """

    ban = """
    %s - %s
    %s\n
""" % (VERSION_STRING, DESCRIPTION, SITE)

    # Reference: http://www.frexx.de/xterm-256-notes/
    #if not any([IS_WIN, os.getenv('ANSI_COLORS_DISABLED')]):
    #    ban = "\033[1;34m%s\033[0m" % ban

    dataToStdout(ban, forceOutput=True)

def parsePasswordHash(password):
    blank = " " * 8

    if not password or password == " ":
        password = "NULL"

    if Backend.isDbms(DBMS.MSSQL) and password != "NULL" and isHexEncodedString(password):
        hexPassword = password
        password = "%s\n" % hexPassword
        password += "%sheader: %s\n" % (blank, hexPassword[:6])
        password += "%ssalt: %s\n" % (blank, hexPassword[6:14])
        password += "%smixedcase: %s\n" % (blank, hexPassword[14:54])

        if not Backend.isVersionWithin(("2005", "2008")):
            password += "%suppercase: %s" % (blank, hexPassword[54:])

    return password

def cleanQuery(query):
    upperQuery = query

    for sqlStatements in SQL_STATEMENTS.values():
        for sqlStatement in sqlStatements:
            sqlStatementEsc = sqlStatement.replace("(", "\\(")
            queryMatch = re.search("(%s)" % sqlStatementEsc, query, re.I)

            if queryMatch:
                upperQuery = upperQuery.replace(queryMatch.group(1), sqlStatement.upper())

    return upperQuery

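# Illustrative usage sketch (not part of the original module): keywords
# registered in SQL_STATEMENTS are uppercased while the rest of the query
# is left untouched (the exact result depends on SQL_STATEMENTS content).
#
# >>> cleanQuery("select id from users")
# 'SELECT id from users'    # assuming only "select " matches here
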
def setPaths():
    # sqlmap paths
    paths.SQLMAP_EXTRAS_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "extra")
    paths.SQLMAP_SHELL_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "shell")
    paths.SQLMAP_TAMPER_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "tamper")
    paths.SQLMAP_TXT_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "txt")
    paths.SQLMAP_UDF_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "udf")
    paths.SQLMAP_XML_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "xml")
    paths.SQLMAP_XML_BANNER_PATH = os.path.join(paths.SQLMAP_XML_PATH, "banner")
    paths.SQLMAP_OUTPUT_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "output")
    paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
    paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files")
    paths.SQLMAP_SEXEC_PATH = os.path.join(paths.SQLMAP_EXTRAS_PATH, "shellcodeexec")

    # sqlmap files
    paths.SQLMAP_HISTORY = os.path.join(paths.SQLMAP_ROOT_PATH, ".sqlmap_history")
    paths.SQLMAP_CONFIG = os.path.join(paths.SQLMAP_ROOT_PATH, "sqlmap-%s.conf" % randomStr())
    paths.COMMON_COLUMNS = os.path.join(paths.SQLMAP_TXT_PATH, "common-columns.txt")
    paths.COMMON_TABLES = os.path.join(paths.SQLMAP_TXT_PATH, "common-tables.txt")
    paths.COMMON_OUTPUTS = os.path.join(paths.SQLMAP_TXT_PATH, 'common-outputs.txt')
    paths.SQL_KEYWORDS = os.path.join(paths.SQLMAP_TXT_PATH, "keywords.txt")
    paths.ORACLE_DEFAULT_PASSWD = os.path.join(paths.SQLMAP_TXT_PATH, "oracle-default-passwords.txt")
    paths.USER_AGENTS = os.path.join(paths.SQLMAP_TXT_PATH, "user-agents.txt")
    paths.WORDLIST = os.path.join(paths.SQLMAP_TXT_PATH, "wordlist.txt")
    paths.PHPIDS_RULES_XML = os.path.join(paths.SQLMAP_XML_PATH, "phpids_rules.xml")
    paths.ERRORS_XML = os.path.join(paths.SQLMAP_XML_PATH, "errors.xml")
    paths.PAYLOADS_XML = os.path.join(paths.SQLMAP_XML_PATH, "payloads.xml")
    paths.INJECTIONS_XML = os.path.join(paths.SQLMAP_XML_PATH, "injections.xml")
    paths.LIVE_TESTS_XML = os.path.join(paths.SQLMAP_XML_PATH, "livetests.xml")
    paths.QUERIES_XML = os.path.join(paths.SQLMAP_XML_PATH, "queries.xml")
    paths.GENERIC_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "generic.xml")
    paths.MSSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "mssql.xml")
    paths.MYSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "mysql.xml")
    paths.ORACLE_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "oracle.xml")
    paths.PGSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "postgresql.xml")

def weAreFrozen():
    """
    Returns whether we are frozen via py2exe.
    This will affect how we find out where we are located.
    Reference: http://www.py2exe.org/index.cgi/WhereAmI
    """

    return hasattr(sys, "frozen")

def parseTargetDirect():
    """
    Parse the target DBMS and set some attributes into the configuration singleton.
    """

    if not conf.direct:
        return

    details = None
    remote = False

    for dbms in SUPPORTED_DBMS:
        details = re.search("^(?P<dbms>%s)://(?P<credentials>(?P<user>.+?)\:(?P<pass>.*?)\@)?(?P<remote>(?P<hostname>.+?)\:(?P<port>[\d]+)\/)?(?P<db>[\w\d\ \:\.\_\-\/\\\\]+?)$" % dbms, conf.direct, re.I)

        if details:
            conf.dbms = details.group('dbms')

            if details.group('credentials'):
                conf.dbmsUser = details.group('user')
                conf.dbmsPass = details.group('pass')
            else:
                conf.dbmsUser = unicode()
                conf.dbmsPass = unicode()

            if not conf.dbmsPass:
                conf.dbmsPass = None

            if details.group('remote'):
                remote = True
                conf.hostname = details.group('hostname')
                conf.port = int(details.group('port'))
            else:
                conf.hostname = "localhost"
                conf.port = 0

            conf.dbmsDb = details.group('db')

            conf.parameters[None] = "direct connection"

            break

    if not details:
        errMsg = "invalid target details, valid syntax is for instance "
        errMsg += "'mysql://USER:PASSWORD@DBMS_IP:DBMS_PORT/DATABASE_NAME' "
        errMsg += "or 'access://DATABASE_FILEPATH'"
        raise sqlmapSyntaxException, errMsg

    for dbmsName, data in DBMS_DICT.items():
        if conf.dbms in data[0]:
            try:
                if dbmsName in (DBMS.ACCESS, DBMS.SQLITE, DBMS.FIREBIRD):
                    if remote:
                        warnMsg = "direct connection over the network for "
                        warnMsg += "%s DBMS is not supported" % dbmsName
                        logger.warn(warnMsg)

                        conf.hostname = "localhost"
                        conf.port = 0
                elif not remote:
                    errMsg = "missing remote connection details"
                    raise sqlmapSyntaxException, errMsg

                if dbmsName in (DBMS.MSSQL, DBMS.SYBASE):
                    import _mssql
                    import pymssql

                    if not hasattr(pymssql, "__version__") or pymssql.__version__ < "1.0.2":
                        errMsg = "'%s' third-party library must be " % data[1]
                        errMsg += "version >= 1.0.2 to work properly. "
                        errMsg += "Download from %s" % data[2]
                        raise sqlmapMissingDependence, errMsg

                elif dbmsName == DBMS.MYSQL:
                    import pymysql
                elif dbmsName == DBMS.PGSQL:
                    import psycopg2
                elif dbmsName == DBMS.ORACLE:
                    import cx_Oracle
                elif dbmsName == DBMS.SQLITE:
                    import sqlite3
                elif dbmsName == DBMS.ACCESS:
                    import pyodbc
                elif dbmsName == DBMS.FIREBIRD:
                    import kinterbasdb
            except ImportError, _:
                errMsg = "sqlmap requires '%s' third-party library " % data[1]
                errMsg += "in order to directly connect to the database "
                errMsg += "%s. Download from %s" % (dbmsName, data[2])
                raise sqlmapMissingDependence, errMsg

def parseTargetUrl():
    """
    Parse the target url and set some attributes into the configuration singleton.
    """

    if not conf.url:
        return

    if not re.search("^http[s]*://", conf.url):
        if ":443/" in conf.url:
            conf.url = "https://" + conf.url
        else:
            conf.url = "http://" + conf.url

    if URI_INJECTION_MARK_CHAR in conf.url:
        conf.url = conf.url.replace('?', URI_QUESTION_MARKER)

    __urlSplit = urlparse.urlsplit(conf.url)
    __hostnamePort = __urlSplit[1].split(":")

    conf.scheme = __urlSplit[0].strip()
    conf.path = __urlSplit[2].strip()
    conf.hostname = __hostnamePort[0].strip()

    if re.search(r'\s', conf.hostname):
        errMsg = "invalid target url"
        raise sqlmapSyntaxException, errMsg

    if len(__hostnamePort) == 2:
        try:
            conf.port = int(__hostnamePort[1])
        except:
            errMsg = "invalid target url"
            raise sqlmapSyntaxException, errMsg
    elif conf.scheme == "https":
        conf.port = 443
    else:
        conf.port = 80

    if __urlSplit[3]:
        conf.parameters[PLACE.GET] = urldecode(__urlSplit[3])

    conf.url = "%s://%s:%d%s" % (conf.scheme, conf.hostname, conf.port, conf.path)
    conf.url = conf.url.replace(URI_QUESTION_MARKER, '?')

    if not conf.referer and conf.level >= 3:
        debugMsg = "setting the HTTP Referer header to the target url"
        logger.debug(debugMsg)
        conf.httpHeaders = filter(lambda (key, value): key != HTTPHEADER.REFERER, conf.httpHeaders)
        conf.httpHeaders.append((HTTPHEADER.REFERER, conf.url))

def expandAsteriskForColumns(expression):
    # If the user provided an asterisk rather than the column(s)
    # name, sqlmap will retrieve the columns itself and reprocess
    # the SQL query string (expression)
    asterisk = re.search("^SELECT\s+\*\s+FROM\s+([\w\.\_]+)\s*", expression, re.I)

    if asterisk:
        infoMsg = "you did not provide the fields in your query. "
        infoMsg += "sqlmap will retrieve the column names itself"
        logger.info(infoMsg)

        dbTbl = asterisk.group(1)

        if dbTbl and ".." in dbTbl:
            dbTbl = dbTbl.replace('..', '.dbo.')

        if dbTbl and "." in dbTbl:
            conf.db, conf.tbl = dbTbl.split(".", 1)
        else:
            conf.tbl = dbTbl

        columnsDict = conf.dbmsHandler.getColumns(onlyColNames=True)

        if columnsDict and conf.db in columnsDict and conf.tbl in columnsDict[conf.db]:
            columns = columnsDict[conf.db][conf.tbl].keys()
            columns.sort()
            columnsStr = ", ".join([column for column in columns])
            expression = expression.replace("*", columnsStr, 1)

            infoMsg = "the query with column names is: "
            infoMsg += "%s" % expression
            logger.info(infoMsg)

    return expression

2008-11-25 14:33:44 +03:00
|
|
|
def getRange(count, dump=False, plusOne=False):
|
2011-01-19 02:05:32 +03:00
|
|
|
count = int(count)
|
2008-10-15 19:38:22 +04:00
|
|
|
indexRange = None
|
|
|
|
limitStart = 1
|
2011-01-19 02:05:32 +03:00
|
|
|
limitStop = count
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
if dump:
|
2008-11-02 17:39:38 +03:00
|
|
|
if isinstance(conf.limitStop, int) and conf.limitStop > 0 and conf.limitStop < limitStop:
|
2008-10-15 19:38:22 +04:00
|
|
|
limitStop = conf.limitStop
|
|
|
|
|
2008-11-02 17:39:38 +03:00
|
|
|
if isinstance(conf.limitStart, int) and conf.limitStart > 0 and conf.limitStart <= limitStop:
|
2008-10-15 19:38:22 +04:00
|
|
|
limitStart = conf.limitStart
|
|
|
|
|
2010-01-09 02:50:06 +03:00
|
|
|
if plusOne:
|
2011-09-05 13:28:40 +04:00
|
|
|
indexRange = xrange(limitStart, limitStop + 1)
|
2011-07-25 15:15:18 +04:00
|
|
|
else:
|
2011-09-05 13:28:40 +04:00
|
|
|
indexRange = xrange(limitStart - 1, limitStop)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
return indexRange
|
2010-09-14 01:01:46 +04:00
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
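# Example of getRange() semantics (illustrative comment, not part of sqlmap):
# getRange(3) yields the 0-based xrange(0, 3) -> 0, 1, 2, while
# getRange(3, plusOne=True) yields the 1-based xrange(1, 4) -> 1, 2, 3,
# used when the resulting index feeds 1-based LIMIT-style queries.
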
def parseUnionPage(output, expression, partial=False, condition=None, sort=True):
    if output is None:
        return None

    data = BigArray()

    outCond1 = (output.startswith(kb.chars.start) and output.endswith(kb.chars.stop))
    outCond2 = (output.startswith(DUMP_START_MARKER) and output.endswith(DUMP_STOP_MARKER))

    if outCond1 or outCond2:
        if outCond1:
            regExpr = '%s(.*?)%s' % (kb.chars.start, kb.chars.stop)
        elif outCond2:
            regExpr = '%s(.*?)%s' % (DUMP_START_MARKER, DUMP_STOP_MARKER)

        output = re.findall(regExpr, output, re.DOTALL | re.IGNORECASE)

        if condition is None:
            condition = (
                kb.resumedQueries and conf.url in kb.resumedQueries.keys()
                and expression in kb.resumedQueries[conf.url].keys()
            )

        if partial or not condition:
            logOutput = "".join(["%s%s%s" % (DUMP_START_MARKER, replaceNewlineTabs(value), DUMP_STOP_MARKER) for value in output])
            dataToSessionFile("[%s][%s][%s][%s][%s]\n" % (conf.url, kb.injection.place, conf.parameters[kb.injection.place], expression, logOutput))

        if sort:
            dict_ = {}

            for entry in output:
                dict_[entry.lower()] = entry

            output = dict_.values()

        for entry in output:
            entry = safecharencode(entry) if kb.safeCharEncode else entry

            if DUMP_DEL_MARKER in entry:
                entry = entry.split(DUMP_DEL_MARKER)
            else:
                entry = entry.split(kb.chars.delimiter)

            if len(entry) == 1:
                data.append(entry[0])
            else:
                data.append(list(entry))
    else:
        data = output

    if len(data) == 1 and isinstance(data[0], basestring):
        data = data[0]

    return data

def getDelayQuery(andCond=False):
    query = None

    if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL):
        if not kb.data.banner:
            conf.dbmsHandler.getVersionFromBanner()

        banVer = kb.bannerFp["dbmsVersion"] if 'dbmsVersion' in kb.bannerFp else None

        if banVer is None or (Backend.isDbms(DBMS.MYSQL) and banVer >= "5.0.12") or (Backend.isDbms(DBMS.PGSQL) and banVer >= "8.2"):
            query = queries[Backend.getIdentifiedDbms()].timedelay.query % conf.timeSec
        else:
            query = queries[Backend.getIdentifiedDbms()].timedelay.query2 % conf.timeSec
    elif Backend.isDbms(DBMS.FIREBIRD):
        query = queries[Backend.getIdentifiedDbms()].timedelay.query
    else:
        query = queries[Backend.getIdentifiedDbms()].timedelay.query % conf.timeSec

    if andCond:
        if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.SQLITE):
            query = query.replace("SELECT ", "")
        elif Backend.isDbms(DBMS.FIREBIRD):
            query = "(%s)>0" % query

    return query

def getLocalIP():
    retVal = None

    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((conf.hostname, conf.port))
        retVal, _ = s.getsockname()
        s.close()
    except:
        debugMsg = "there was an error in opening socket "
        debugMsg += "connection toward '%s'" % conf.hostname
        logger.debug(debugMsg)

    return retVal

def getRemoteIP():
    return socket.gethostbyname(conf.hostname)

def getFileType(filePath):
    try:
        magicFileType = magic.from_file(filePath)
    except:
        return "unknown"

    if "ASCII" in magicFileType or "text" in magicFileType:
        return "text"
    else:
        return "binary"

def getCharset(charsetType=None):
    asciiTbl = []

    if charsetType is None:
        asciiTbl = range(0, 128)

    # 0 or 1
    elif charsetType == 1:
        asciiTbl.extend([ 0, 1 ])
        asciiTbl.extend(range(47, 50))

    # Digits
    elif charsetType == 2:
        asciiTbl.extend([ 0, 1 ])
        asciiTbl.extend(range(47, 58))

    # Hexadecimal
    elif charsetType == 3:
        asciiTbl.extend([ 0, 1 ])
        asciiTbl.extend(range(47, 58))
        asciiTbl.extend(range(64, 71))
        asciiTbl.extend(range(96, 103))

    # Characters
    elif charsetType == 4:
        asciiTbl.extend([ 0, 1 ])
        asciiTbl.extend(range(64, 91))
        asciiTbl.extend(range(96, 123))

    # Characters and digits
    elif charsetType == 5:
        asciiTbl.extend([ 0, 1 ])
        asciiTbl.extend(range(47, 58))
        asciiTbl.extend(range(64, 91))
        asciiTbl.extend(range(96, 123))

    return asciiTbl

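# Illustrative comment (not part of sqlmap): for the digits charset,
# getCharset(2) evaluates to [0, 1] + range(47, 58), i.e.
# [0, 1, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57] - the markers 0/1,
# the boundary character '/' (47) and the ASCII codes of '0'-'9'.
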
def searchEnvPath(fileName):
    envPaths = os.environ["PATH"]
    result = None

    if IS_WIN:
        envPaths = envPaths.split(";")
    else:
        envPaths = envPaths.split(":")

    for envPath in envPaths:
        envPath = envPath.replace(";", "")
        result = os.path.exists(os.path.normpath(os.path.join(envPath, fileName)))

        if result:
            break

    return result

def urlEncodeCookieValues(cookieStr):
    if cookieStr:
        result = ""

        for part in cookieStr.split(';'):
            index = part.find('=') + 1

            if index > 0:
                name = part[:index - 1].strip()
                value = urlencode(part[index:], convall=True)
                result += "; %s=%s" % (name, value)
            elif part.strip().lower() != "secure":
                result += "%s%s" % ("%3B", urlencode(part, convall=True))
            else:
                result += "; secure"

        if result.startswith('; '):
            result = result[2:]
        elif result.startswith('%3B'):
            result = result[3:]

        return result
    else:
        return None

def directoryPath(path):
    retVal = None

    if isWindowsDriveLetterPath(path):
        retVal = ntpath.dirname(path)
    else:
        retVal = posixpath.dirname(path)

    return retVal

def normalizePath(path):
    retVal = None

    if isWindowsDriveLetterPath(path):
        retVal = ntpath.normpath(path)
    else:
        retVal = posixpath.normpath(path)

    return retVal

def safeStringFormat(formatStr, params):
    retVal = formatStr.replace("%d", "%s")

    if isinstance(params, basestring):
        retVal = retVal.replace("%s", params)
    else:
        count = 0
        index = 0

        while index != -1:
            index = retVal.find("%s")

            if index != -1:
                if count < len(params):
                    retVal = retVal[:index] + getUnicode(params[count]) + retVal[index + 2:]
                else:
                    raise sqlmapNoneDataException, "wrong number of parameters during string formatting"

                count += 1

    return retVal

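# Illustrative comment (not part of sqlmap): safeStringFormat() first turns
# every "%d" into "%s" and then substitutes parameters one by one, so
# safeStringFormat("%d AND name=%s", (1, "'foo'")) returns u"1 AND name='foo'",
# raising sqlmapNoneDataException when fewer parameters than placeholders
# are supplied.
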
def sanitizeAsciiString(subject):
    if subject:
        index = None

        for i in xrange(len(subject)):
            if ord(subject[i]) >= 128:
                index = i
                break

        if index is None:
            return subject
        else:
            return subject[:index] + "".join(subject[i] if ord(subject[i]) < 128 else '?' for i in xrange(index, len(subject)))
    else:
        return None

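# Illustrative comment (not part of sqlmap): characters with ordinals >= 128
# are replaced by '?' from the first such occurrence onwards, e.g.
# sanitizeAsciiString(u"caf\xe9 bar") -> u"caf? bar".
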
def getFilteredPageContent(page, onlyText=True):
    retVal = page

    # only if the page's charset has been successfully identified
    if isinstance(page, unicode):
        retVal = re.sub(r"(?s)<script.+?</script>|<!--.+?-->|<style.+?</style>%s" % (r"|<[^>]+>|\t|\n|\r" if onlyText else ""), " ", page)

        while retVal.find("  ") != -1:
            retVal = retVal.replace("  ", " ")

        retVal = htmlunescape(retVal)

    return retVal

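# Illustrative comment (not part of sqlmap): with onlyText=True all markup is
# collapsed into single spaces, e.g.
# getFilteredPageContent(u"<html><title>foo</title><body>1 2</body></html>")
# -> u" foo 1 2 ".
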
def getPageWordSet(page):
    retVal = set()

    # only if the page's charset has been successfully identified
    if isinstance(page, unicode):
        page = getFilteredPageContent(page)
        retVal = set(re.findall(r"\w+", page))

    return retVal

def showStaticWords(firstPage, secondPage):
    infoMsg = "finding static words in longest matching part of dynamic page content"
    logger.info(infoMsg)

    firstPage = getFilteredPageContent(firstPage)
    secondPage = getFilteredPageContent(secondPage)
    match = SequenceMatcher(None, firstPage, secondPage).find_longest_match(0, len(firstPage), 0, len(secondPage))
    commonText = firstPage[match[0]:match[0]+match[2]]
    commonWords = getPageWordSet(commonText)

    infoMsg = "static words: "

    if commonWords:
        commonWords = list(commonWords)
        commonWords.sort(lambda a, b: cmp(a.lower(), b.lower()))

        for word in commonWords:
            if len(word) > 2:
                infoMsg += "'%s', " % word

    infoMsg = infoMsg.rstrip(", ")
    logger.info(infoMsg)

def decloakToNamedTemporaryFile(filepath, name=None):
    retVal = NamedTemporaryFile()

    def __del__():
        try:
            if hasattr(retVal, 'old_name'):
                retVal.name = retVal.old_name
            retVal.close()
        except OSError:
            pass

    retVal.__del__ = __del__
    retVal.write(decloak(filepath))
    retVal.seek(0)

    if name:
        retVal.old_name = retVal.name
        retVal.name = name

    return retVal

def decloakToMkstemp(filepath, **kwargs):
    handle, name = mkstemp(**kwargs)

    fptr = os.fdopen(handle)
    fptr.close() # close low level handle (causing problems later)

    retVal = open(name, 'w+b')

    retVal.write(decloak(filepath))
    retVal.seek(0)

    return retVal

def isWindowsPath(filepath):
    """
    Returns True if given filepath is in Windows format
    """

    return re.search("\A[\w]\:\\\\", filepath) is not None

def isWindowsDriveLetterPath(filepath):
    """
    Returns True if given filepath starts with a Windows drive letter
    """

    return re.search("\A[\w]\:", filepath) is not None

def posixToNtSlashes(filepath):
    """
    Replaces all occurrences of Posix slashes (/) in provided
    filepath with NT ones (\\)

    >>> posixToNtSlashes('C:/Windows')
    'C:\\\\Windows'
    """

    return filepath.replace('/', '\\')

def ntToPosixSlashes(filepath):
    """
    Replaces all occurrences of NT slashes (\\) in provided
    filepath with Posix ones (/)

    >>> ntToPosixSlashes('C:\\Windows')
    'C:/Windows'
    """

    return filepath.replace('\\', '/')

def isBase64EncodedString(subject):
    """
    Checks if the provided string is Base64 encoded

    >>> isBase64EncodedString('dGVzdA==')
    True
    >>> isBase64EncodedString('123456')
    False
    """

    return re.match(r"\A(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?\Z", subject) is not None

def isHexEncodedString(subject):
    """
    Checks if the provided string is hex encoded

    >>> isHexEncodedString('DEADBEEF')
    True
    >>> isHexEncodedString('test')
    False
    """

    return re.match(r"\A[0-9a-fA-Fx]+\Z", subject) is not None

def getConsoleWidth(default=80):
    """
    Returns console width
    """

    width = None

    if 'COLUMNS' in os.environ and os.environ['COLUMNS'].isdigit():
        width = int(os.environ['COLUMNS'])
    else:
        output = execute('stty size', shell=True, stdout=PIPE, stderr=PIPE).stdout.read()
        items = output.split()

        if len(items) == 2 and items[1].isdigit():
            width = int(items[1])

    if width is None:
        try:
            import curses

            stdscr = curses.initscr()
            _, width = stdscr.getmaxyx()
            curses.endwin()
        except:
            pass

    return width if width else default

def clearConsoleLine(forceOutput=False):
    """
    Clears current console line
    """

    dataToStdout("\r%s\r" % (" " * (getConsoleWidth() - 1)), forceOutput)

def parseXmlFile(xmlFile, handler):
    """
    Parses XML file by a given handler
    """

    stream = StringIO(readCachedFileContent(xmlFile))
    parse(stream, handler)
    stream.close()

def readCachedFileContent(filename, mode='rb'):
    """
    Cached reading of file content (avoiding multiple same file reading)
    """

    if filename not in kb.cache.content:
        kb.locks.cacheLock.acquire()

        if filename not in kb.cache.content:
            checkFile(filename)
            xfile = codecs.open(filename, mode, UNICODE_ENCODING)
            content = xfile.read()
            kb.cache.content[filename] = content
            xfile.close()

        kb.locks.cacheLock.release()

    return kb.cache.content[filename]

def readXmlFile(xmlFile):
    """
    Reads XML file content and returns its DOM representation
    """

    checkFile(xmlFile)

    xfile = codecs.open(xmlFile, 'r', UNICODE_ENCODING)
    retVal = minidom.parse(xfile).documentElement

    xfile.close()

    return retVal

def stdev(values):
    """
    Computes standard deviation of a list of numbers.
    Reference: http://www.goldb.org/corestats.html
    """

    if not values or len(values) < 2:
        return None

    key = (values[0], values[-1], len(values))

    if key in kb.cache.stdev:
        return kb.cache.stdev[key]
    else:
        summa = 0.0
        avg = average(values)

        for value in values:
            value = value or 0
            summa += pow(value - avg, 2)

        retVal = sqrt(summa / (len(values) - 1))
        kb.cache.stdev[key] = retVal
        return retVal

def average(values):
    """
    Computes the arithmetic mean of a list of numbers.
    """

    retVal = None

    if values:
        retVal = sum(values) / len(values)

    return retVal

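# Illustrative comment (not part of sqlmap): stdev() implements the sample
# standard deviation, e.g. stdev([1.0, 2.0, 3.0]) gives sqrt(2/2) = 1.0, with
# average([1.0, 2.0, 3.0]) = 2.0. Note that average() uses Python 2 division,
# so purely integer inputs are averaged with integer (floor) division.
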
def calculateDeltaSeconds(start):
    """
    Returns elapsed time from start till now
    """

    return time.time() - start

def initCommonOutputs():
    """
    Initializes dictionary containing common output values used by "good samaritan" feature
    """

    kb.commonOutputs = {}
    key = None

    cfile = codecs.open(paths.COMMON_OUTPUTS, 'r', UNICODE_ENCODING)

    for line in cfile.readlines(): # xreadlines doesn't return unicode strings when codecs.open() is used
        if line.find('#') != -1:
            line = line[:line.find('#')]

        line = line.strip()

        if len(line) > 1:
            if line.startswith('[') and line.endswith(']'):
                key = line[1:-1]
            elif key:
                if key not in kb.commonOutputs:
                    kb.commonOutputs[key] = set()

                if line not in kb.commonOutputs[key]:
                    kb.commonOutputs[key].add(line)

    cfile.close()

def getFileItems(filename, commentPrefix='#', unicode_=True, lowercase=False, unique=False):
    """
    Returns newline delimited items contained inside file
    """

    retVal = []

    checkFile(filename)

    if unicode_:
        ifile = codecs.open(filename, 'r', UNICODE_ENCODING)
    else:
        ifile = open(filename, 'r')

    for line in ifile.readlines(): # xreadlines doesn't return unicode strings when codecs.open() is used
        if commentPrefix:
            if line.find(commentPrefix) != -1:
                line = line[:line.find(commentPrefix)]

        line = line.strip()

        if not unicode_:
            try:
                line = str.encode(line)
            except UnicodeDecodeError:
                continue

        if line:
            if lowercase:
                line = line.lower()

            if unique and line in retVal:
                continue

            retVal.append(line)

    return retVal

def goGoodSamaritan(prevValue, originalCharset):
    """
    Function for retrieving parameters needed for common prediction (good
    samaritan) feature.

    prevValue: retrieved query output so far (e.g. 'i').

    Returns commonValue if there is a complete single match (in the
    kb.partRun section of txt/common-outputs.txt) regarding parameter
    prevValue. If there is no single value match, but multiple, commonCharset is
    returned containing more probable characters (retrieved from matched
    values in txt/common-outputs.txt) together with the rest of charset as
    otherCharset.
    """

    if kb.commonOutputs is None:
        initCommonOutputs()

    predictionSet = set()
    commonValue = None
    commonPattern = None
    countCommonValue = 0

    # If the header (e.g. Databases) we are looking for has common
    # outputs defined
    if kb.partRun in kb.commonOutputs:
        commonPartOutputs = kb.commonOutputs[kb.partRun]
        commonPattern = commonFinderOnly(prevValue, commonPartOutputs)

        # If the longest common prefix is the same as previous value then
        # do not consider it
        if commonPattern and commonPattern == prevValue:
            commonPattern = None

        # For each common output
        for item in commonPartOutputs:
            # Check if the common output (item) starts with prevValue
            # where prevValue is the enumerated character(s) so far
            if item.startswith(prevValue):
                commonValue = item
                countCommonValue += 1

                if len(item) > len(prevValue):
                    char = item[len(prevValue)]
                    predictionSet.add(char)

        # Reset single value if there is more than one possible common
        # output
        if countCommonValue > 1:
            commonValue = None

        commonCharset = []
        otherCharset = []

        # Split the original charset into common chars (commonCharset)
        # and other chars (otherCharset)
        for ordChar in originalCharset:
            if chr(ordChar) not in predictionSet:
                otherCharset.append(ordChar)
            else:
                commonCharset.append(ordChar)

        commonCharset.sort()

        return commonValue, commonPattern, commonCharset, originalCharset
    else:
        return None, None, None, originalCharset

def getCompiledRegex(regex, flags=0):
    """
    Returns compiled regular expression and stores it in cache for further
    usage

    >>> getCompiledRegex('test') # doctest: +ELLIPSIS
    <_sre.SRE_Pattern object at...
    """

    if (regex, flags) in kb.cache.regex:
        return kb.cache.regex[(regex, flags)]
    else:
        retVal = re.compile(regex, flags)
        kb.cache.regex[(regex, flags)] = retVal
        return retVal

def getPartRun():
    """
    Goes through call stack and finds constructs matching conf.dbmsHandler.*.
    Returns it or its alias used in txt/common-outputs.txt
    """

    retVal = None
    commonPartsDict = optDict["Enumeration"]

    try:
        stack = [item[4][0] if isinstance(item[4], list) else '' for item in inspect.stack()]

        # Goes backwards through the stack to find the conf.dbmsHandler method
        # calling this function
        for i in xrange(0, len(stack)-1):
            for regex in (getCompiledRegex('self\.(get[^(]+)\(\)'), getCompiledRegex('conf\.dbmsHandler\.([^(]+)\(\)')):
                match = regex.search(stack[i])

                if match:
                    # This is the calling conf.dbmsHandler or self method
                    # (e.g. 'getDbms')
                    retVal = match.groups()[0]
                    break

            if retVal is not None:
                break

    # Reference: http://coding.derkeiler.com/Archive/Python/comp.lang.python/2004-06/2267.html
    except TypeError:
        pass

    # Return the INI tag to consider for common outputs (e.g. 'Databases')
    return commonPartsDict[retVal][1] if isinstance(commonPartsDict.get(retVal), tuple) else retVal

def getUnicode(value, encoding=None, system=False):
    """
    Return the unicode representation of the supplied value:

    >>> getUnicode(u'test')
    u'test'
    >>> getUnicode('test')
    u'test'
    >>> getUnicode(1)
    u'1'
    """

    if not system:
        if isinstance(value, unicode):
            return value
        elif isinstance(value, basestring):
            return unicode(value, encoding or UNICODE_ENCODING, errors="replace")
        else:
            return unicode(value) # encoding ignored for non-basestring instances
    else:
        try:
            return getUnicode(value, sys.getfilesystemencoding() or sys.stdin.encoding)
        except:
            return getUnicode(value, UNICODE_ENCODING)

def longestCommonPrefix(*sequences):
    """
    Returns longest common prefix occurring in given sequences
    """
    # Reference: http://boredzo.org/blog/archives/2007-01-06/longest-common-prefix-in-python-2

    if len(sequences) == 1:
        return sequences[0]

    sequences = [pair[1] for pair in sorted((len(fi), fi) for fi in sequences)]

    if not sequences:
        return None

    for i, comparison_ch in enumerate(sequences[0]):
        for fi in sequences[1:]:
            ch = fi[i]

            if ch != comparison_ch:
                return fi[:i]

    return sequences[0]

def commonFinderOnly(initial, sequence):
    return longestCommonPrefix(*filter(lambda x: x.startswith(initial), sequence))

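# Illustrative comment (not part of sqlmap):
# longestCommonPrefix('united', 'unite', 'uni') -> 'uni', while
# commonFinderOnly('un', ['united', 'unite', 'apple']) first filters the
# candidates starting with 'un' and then returns their common prefix 'unite'.
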
def pushValue(value):
    """
    Push value to the stack (thread dependent)
    """

    getCurrentThreadData().valueStack.append(copy.deepcopy(value))

def popValue():
    """
    Pop value from the stack (thread dependent)
    """

    return getCurrentThreadData().valueStack.pop()

def wasLastRequestDBMSError():
    """
    Returns True if the last web request resulted in a (recognized) DBMS error page
    """

    threadData = getCurrentThreadData()
    return threadData.lastErrorPage and threadData.lastErrorPage[0] == threadData.lastRequestUID

def wasLastRequestHTTPError():
    """
    Returns True if the last web request resulted in an erroneous HTTP code (like 500)
    """

    threadData = getCurrentThreadData()
    return threadData.lastHTTPError and threadData.lastHTTPError[0] == threadData.lastRequestUID

def wasLastRequestDelayed():
    """
    Returns True if the last web request resulted in a time-delay
    """

    # 99.9999999997440% of all non time-based sql injection affected
    # response times should be inside +-7*stdev([normal response times])
    # Math reference: http://www.answers.com/topic/standard-deviation

    deviation = stdev(kb.responseTimes)
    threadData = getCurrentThreadData()

    if deviation:
        if len(kb.responseTimes) < MIN_TIME_RESPONSES:
            warnMsg = "time-based standard deviation method used on a model "
            warnMsg += "with less than %d response times" % MIN_TIME_RESPONSES
            logger.warn(warnMsg)

        lowerStdLimit = average(kb.responseTimes) + TIME_STDEV_COEFF * deviation
        retVal = (threadData.lastQueryDuration >= lowerStdLimit)

        if not kb.testMode and retVal and kb.adjustTimeDelay:
            adjustTimeDelay(threadData.lastQueryDuration, lowerStdLimit)

        return retVal
    else:
        return (threadData.lastQueryDuration - conf.timeSec) >= 0

def adjustTimeDelay(lastQueryDuration, lowerStdLimit):
    """
    Adjusts time delay in time-based data retrieval
    """

    candidate = 1 + int(round((1 - (lastQueryDuration - lowerStdLimit) / lastQueryDuration) * conf.timeSec))

    if candidate:
        kb.delayCandidates = [candidate] + kb.delayCandidates[:-1]

        if all([x == candidate for x in kb.delayCandidates]) and candidate < conf.timeSec:
            print

            msg = "do you want to adjust the time delay to %d second%s " % (candidate, 's' if candidate > 1 else '')
            msg += "(due to good response times)? [Y/n] "
            inp = readInput(msg, default="Y")

            if inp and inp[0].lower() == "y":
                conf.timeSec = candidate

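# Worked example (illustrative comment, not part of sqlmap): with
# conf.timeSec = 5, a measured lastQueryDuration of 6.0s and a computed
# lowerStdLimit of 1.0s, the candidate delay evaluates to
# 1 + int(round((1 - 5.0/6.0) * 5)) = 2 seconds; only after the same
# candidate fills kb.delayCandidates completely is the user prompted.
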
def extractErrorMessage(page):
    """
    Returns reported error message from page if it finds one
    """

    retVal = None

    if isinstance(page, basestring):
        for regex in ERROR_PARSING_REGEXES:
            match = re.search(regex, page, re.DOTALL | re.IGNORECASE)

            if match:
                retVal = htmlunescape(match.group("result")).replace("<br>", "\n").strip()
                break

    return retVal

def beep():
    """
    Does an audible beep sound
    Reference: http://de3.aminet.net/dev/src/clr.py.txt
    """

    def _failsafe():
        dataToStdout('\a', True)

    if sys.platform == 'linux2':
        for dev in ('/dev/audio', '/dev/oss', '/dev/dsp', '/dev/sound'):
            if os.path.exists(dev):
                try:
                    audio = file(dev, 'wb')

                    for _ in xrange(250):
                        audio.write(chr(32) * 4)
                        audio.write(chr(0) * 4)

                    audio.close()
                    return
                except:
                    pass

        try:
            import curses
            curses.initscr()
            curses.beep()
            curses.flash()
            curses.endwin()
            return
        except:
            _failsafe()

    elif sys.platform == 'darwin':
        try:
            import Carbon.Snd
            Carbon.Snd.SysBeep(1)
        except:
            _failsafe()

    else:
        _failsafe()

def runningAsAdmin():
    """
    Returns True if the current process is run under admin privileges
    """

    isAdmin = False

    if PLATFORM in ("posix", "mac"):
        isAdmin = os.geteuid()

        if isinstance(isAdmin, (int, float, long)) and isAdmin == 0:
            isAdmin = True
    elif IS_WIN:
        isAdmin = ctypes.windll.shell32.IsUserAnAdmin()

        if isinstance(isAdmin, (int, float, long)) and isAdmin == 1:
            isAdmin = True
    else:
        errMsg = "sqlmap is not able to check if you are running it "
        errMsg += "as an administrator account on this platform. "
        errMsg += "sqlmap will assume that you are an administrator "
        errMsg += "which is mandatory for the requested takeover attack "
        errMsg += "to work properly"
        logger.error(errMsg)

        isAdmin = True

    return isAdmin

def logHTTPTraffic(requestLogMsg, responseLogMsg):
    """
    Logs HTTP traffic to the output file
    """

    kb.locks.logLock.acquire()

    dataToTrafficFile("%s%s" % (requestLogMsg, os.linesep))
    dataToTrafficFile("%s%s" % (responseLogMsg, os.linesep))
    dataToTrafficFile("%s%s%s%s" % (os.linesep, 76 * '#', os.linesep, os.linesep))

    kb.locks.logLock.release()

def getPageTemplate(payload, place):
    """
    Cross-linked method
    """

    pass

def getPublicTypeMembers(type_, onlyValues=False):
    """
    Useful for getting members from types (e.g. in enums)
    """

    for name, value in inspect.getmembers(type_):
        if not name.startswith('__'):
            if not onlyValues:
                yield (name, value)
            else:
                yield value

def enumValueToNameLookup(type_, value_):
    """
    Returns the name of an enum member with a given value
    """

    retVal = None

    for name, value in getPublicTypeMembers(type_):
        if value == value_:
            retVal = name
            break

    return retVal

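# Illustrative comment (not part of sqlmap; _Example is a hypothetical enum):
# given
#     class _Example:
#         FOO = 1
#         BAR = 2
# list(getPublicTypeMembers(_Example)) -> [('BAR', 2), ('FOO', 1)] and
# enumValueToNameLookup(_Example, 2) -> 'BAR'.
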
def extractRegexResult(regex, content, flags=0):
    """
    Returns 'result' group value from a possible match with regex on a given
    content
    """

    retVal = None

    if regex and content and '?P<result>' in regex:
        match = getCompiledRegex(regex, flags).search(content)

        if match:
            retVal = match.group("result")

    return retVal

def trimAlphaNum(value):
    """
    Trims alphanumeric characters from the start and end of a given value
    """

    while value and value[-1].isalnum():
        value = value[:-1]

    while value and value[0].isalnum():
        value = value[1:]

    return value

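# Illustrative comment (not part of sqlmap): trimAlphaNum(u"123abc*def456")
# strips the alphanumeric runs from both ends and returns u"*".
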
def isNumPosStrValue(value):
    """
    Returns True if value is a string with a positive integer representation
    """

    return value and isinstance(value, basestring) and value.isdigit() and value != "0"

@cachedmethod
def aliasToDbmsEnum(dbms):
    """
    Returns major DBMS name from a given alias
    """

    retVal = None

    if dbms is None:
        return None

    for key, item in DBMS_DICT.items():
        if dbms.lower() in item[0]:
            retVal = key
            break

    return retVal

def findDynamicContent(firstPage, secondPage):
    """
    This function checks if the provided pages have dynamic content. If they
    are dynamic, proper markings will be made
    """

    infoMsg = "searching for dynamic content"
    logger.info(infoMsg)

    blocks = SequenceMatcher(None, firstPage, secondPage).get_matching_blocks()
    kb.dynamicMarkings = []

    # Removing too small matching blocks
    i = 0
    while i < len(blocks):
        block = blocks[i]
        (_, _, length) = block

        if length <= DYNAMICITY_MARK_LENGTH:
            blocks.remove(block)
        else:
            i += 1

    # Making of dynamic markings based on prefix/suffix principle
    if len(blocks) > 0:
        blocks.insert(0, None)
        blocks.append(None)

        for i in xrange(len(blocks) - 1):
            prefix = firstPage[blocks[i][0]:blocks[i][0] + blocks[i][2]] if blocks[i] else None
            suffix = firstPage[blocks[i + 1][0]:blocks[i + 1][0] + blocks[i + 1][2]] if blocks[i + 1] else None

            if prefix is None and blocks[i + 1][0] == 0:
                continue

            if suffix is None and (blocks[i][0] + blocks[i][2] >= len(firstPage)):
                continue

            prefix = trimAlphaNum(prefix)
            suffix = trimAlphaNum(suffix)

            kb.dynamicMarkings.append((re.escape(prefix[-DYNAMICITY_MARK_LENGTH/2:]) if prefix else None, re.escape(suffix[:DYNAMICITY_MARK_LENGTH/2]) if suffix else None))

    if len(kb.dynamicMarkings) > 0:
        infoMsg = "dynamic content marked for removal (%d region%s)" % (len(kb.dynamicMarkings), 's' if len(kb.dynamicMarkings) > 1 else '')
        logger.info(infoMsg)

def removeDynamicContent(page):
    """
    Removes dynamic content from the supplied page based on
    precalculated dynamic markings
    """

    if page:
        for item in kb.dynamicMarkings:
            prefix, suffix = item

            if prefix is None and suffix is None:
                continue
            elif prefix is None:
                page = getCompiledRegex('(?s)^.+%s' % suffix).sub(suffix, page)
            elif suffix is None:
                page = getCompiledRegex('(?s)%s.+$' % prefix).sub(prefix, page)
            else:
                page = getCompiledRegex('(?s)%s.+%s' % (prefix, suffix)).sub('%s%s' % (prefix, suffix), page)

    return page

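# Illustrative comment (not part of sqlmap): with kb.dynamicMarkings set to
# [(re.escape("Hello "), re.escape(" world"))], removeDynamicContent(
# "Hello 12345 world") collapses everything between the two markers and
# returns "Hello  world", making otherwise dynamic pages comparable.
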
def filterStringValue(value, regex, replace=None):
    """
    Returns string value consisting only of chars satisfying supplied
    regular expression
    """

    retVal = ""

    if value:
        for char in value:
            if re.search(regex, char):
                retVal += char
            elif replace:
                retVal += replace

    return retVal

def filterControlChars(value):
    """
    Returns string value with control chars being substituted with ' '
    """

    return filterStringValue(value, PRINTABLE_CHAR_REGEX, ' ')

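# Illustrative comment (not part of sqlmap): filterStringValue("p@ss 123",
# "[0-9a-z]") keeps only the matching characters and returns "pss123", while
# filterControlChars() uses the replace argument to map characters outside
# PRINTABLE_CHAR_REGEX to single spaces.
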
def isDBMSVersionAtLeast(version):
    """
    Checks if the recognized DBMS version is at least the version
    specified
    """

    retVal = None

    if Backend.getVersion() and Backend.getVersion() != UNKNOWN_DBMS_VERSION:
        value = Backend.getVersion().replace(" ", "").rstrip('.')

        while True:
            index = value.find('.', value.find('.') + 1)

            if index > -1:
                value = value[0:index] + value[index + 1:]
            else:
                break

        value = filterStringValue(value, '[0-9.><=]')

        if isinstance(value, basestring):
            if value.startswith(">="):
                value = float(value.replace(">=", ""))
            elif value.startswith(">"):
                value = float(value.replace(">", "")) + 0.01
            elif value.startswith("<="):
                value = float(value.replace("<=", ""))
            elif value.startswith("<"):
                value = float(value.replace("<", "")) - 0.01

        retVal = getUnicode(value) >= getUnicode(version)

    return retVal

def parseSqliteTableSchema(value):
    """
    Parses table column names and types from specified SQLite table schema
    """

    if value:
        table = {}
        columns = {}

        for match in re.finditer(getCompiledRegex(r"(\w+) ([A-Z]+)[,\r\n]"), value):
            columns[match.group(1)] = match.group(2)

        table[conf.tbl] = columns
        kb.data.cachedColumns[conf.db] = table

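# Illustrative comment (not part of sqlmap): for a schema value such as
# u"CREATE TABLE users (id INTEGER,\nname TEXT,\nsurname TEXT)" the regex
# captures the pairs ('id', 'INTEGER') and ('name', 'TEXT'), i.e. those
# followed by a comma or newline, and caches them under conf.db/conf.tbl.
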
def getTechniqueData(technique=None):
    """
    Returns injection data for technique specified
    """

    retVal = None

    if technique and technique in kb.injection.data:
        retVal = kb.injection.data[technique]

    return retVal

def isTechniqueAvailable(technique=None):
    """
    Returns True if there is injection data which sqlmap could use for
    technique specified
    """

    if conf.tech and isinstance(conf.tech, list) and technique not in conf.tech:
        return False
    else:
        return getTechniqueData(technique) is not None

def setOptimize():
    #conf.predictOutput = True
    conf.keepAlive = True
    conf.threads = 3 if conf.threads < 3 else conf.threads
    conf.nullConnection = not any([conf.data, conf.textOnly, conf.titles, conf.string, conf.regexp])

    if not conf.nullConnection:
        debugMsg = "turning off --null-connection switch used indirectly by switch -o"
        logger.debug(debugMsg)

def initTechnique(technique=None):
    """
    Prepares proper page template and match ratio for technique specified
    """

    try:
        data = getTechniqueData(technique)

        if data:
            kb.pageTemplate, kb.errorIsNone = getPageTemplate(data.templatePayload, kb.injection.place)
            kb.matchRatio = data.matchRatio
            kb.chars = kb.injection.chars

            # Restoring stored conf options
            for key, value in kb.injection.conf.items():
                if value and (not hasattr(conf, key) or (hasattr(conf, key) and not getattr(conf, key))):
                    setattr(conf, key, value)
                    debugMsg = "resuming configuration option '%s' (%s)" % (key, value)
                    logger.debug(debugMsg)

                    if value and key == "optimize":
                        setOptimize()
        else:
            warnMsg = "there is no injection data available for technique "
            warnMsg += "'%s'" % enumValueToNameLookup(PAYLOAD.TECHNIQUE, technique)
            logger.warn(warnMsg)

    except sqlmapDataException, _:
        errMsg = "missing data in old session file(s). "
        errMsg += "please use '--flush-session' to deal "
        errMsg += "with this error"
        raise sqlmapNoneDataException, errMsg

def arrayizeValue(value):
|
2010-12-24 13:55:41 +03:00
|
|
|
"""
|
2011-01-07 18:41:09 +03:00
|
|
|
Makes a list out of value if it is not already a list, tuple or set
|
|
|
|
itself
|
2010-12-24 13:55:41 +03:00
|
|
|
"""
|
2011-01-07 18:41:09 +03:00
|
|
|
|
|
|
|
if not isinstance(value, (list, tuple, set)):
|
|
|
|
value = [ value ]
|
|
|
|
|
2010-12-22 21:55:50 +03:00
|
|
|
return value
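
# Illustrative usage of arrayizeValue() (a sketch; outputs are what the
# implementation above yields):
#
#   >>> arrayizeValue(u'users')
#   [u'users']
#   >>> arrayizeValue([u'users', u'admins'])
#   [u'users', u'admins']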

def unArrayizeValue(value):
    """
    Makes a value out of iterable if it is a list, tuple or set
    itself
    """

    if isinstance(value, (list, tuple, set)):
        value = value[0] if len(value) > 0 else None

    return value
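
# Counterpart examples for unArrayizeValue() (illustrative only; an empty
# iterable yields None):
#
#   >>> unArrayizeValue([u'users'])
#   u'users'
#   >>> unArrayizeValue(u'users')
#   u'users'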

def getSortedInjectionTests():
    """
    Returns the test list prioritized by any DBMS already detected from
    error messages
    """

    retVal = conf.tests

    def priorityFunction(test):
        retVal = SORTORDER.FIRST

        if test.stype == PAYLOAD.TECHNIQUE.UNION:
            retVal = SORTORDER.LAST
        elif 'details' in test and 'dbms' in test.details:
            if test.details.dbms in Backend.getErrorParsedDBMSes():
                retVal = SORTORDER.SECOND
            else:
                retVal = SORTORDER.THIRD

        return retVal

    if Backend.getErrorParsedDBMSes():
        retVal = sorted(retVal, key=priorityFunction)

    return retVal

def filterListValue(value, regex):
    """
    Returns list with items that have parts satisfying given regular
    expression
    """

    if isinstance(value, list) and regex:
        retVal = filter(lambda word: getCompiledRegex(regex, re.I).search(word), value)
    else:
        retVal = value

    return retVal
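
# For instance (illustrative; getCompiledRegex() is assumed to be this
# module's caching wrapper around re.compile):
#
#   >>> filterListValue([u'users', u'admins', u'logs'], r'(users|admins)')
#   [u'users', u'admins']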

def showHttpErrorCodes():
    """
    Shows all HTTP error codes raised till now
    """

    if kb.httpErrorCodes:
        warnMsg = "HTTP error codes detected during testing:\n"
        warnMsg += ", ".join("%d (%s) - %d times" % (code, httplib.responses[code]
                             if code in httplib.responses else '?', count)
                             for code, count in kb.httpErrorCodes.items())
        logger.warn(warnMsg)

def getComparePageRatio(firstPage, secondPage, filtered=False):
    """
    Returns comparison ratio between two given pages
    """

    if filtered:
        (firstPage, secondPage) = map(getFilteredPageContent, (firstPage, secondPage))

    seqMatcher = getCurrentThreadData().seqMatcher
    seqMatcher.set_seq1(firstPage)
    seqMatcher.set_seq2(secondPage)

    return seqMatcher.quick_ratio()
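
# The ratio comes straight from difflib. A standalone sketch of the same
# computation (the per-thread seqMatcher above is a cached SequenceMatcher):
#
#   >>> from difflib import SequenceMatcher
#   >>> seqMatcher = SequenceMatcher(None)
#   >>> seqMatcher.set_seq1("<html>foo</html>")
#   >>> seqMatcher.set_seq2("<html>bar</html>")
#   >>> seqMatcher.quick_ratio() > 0.5
#   True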

def openFile(filename, mode='r'):
    """
    Returns file handle of a given filename
    """

    try:
        return codecs.open(filename, mode, UNICODE_ENCODING, errors="replace")
    except IOError:
        errMsg = "there has been a file opening error for filename '%s'. " % filename
        errMsg += "please check %s permissions on a file " % ("write" if mode and ('w' in mode or 'a' in mode or '+' in mode) else "read")
        errMsg += "and that it's not locked by another process."
        raise sqlmapFilePathException, errMsg

def decodeIntToUnicode(value):
    """
    Decodes inferred integer value using the current page encoding
    """

    try:
        return struct.pack('B' if value < 256 else '>H', value).decode(kb.pageEncoding or UNICODE_ENCODING)
    except:
        return INFERENCE_UNKNOWN_CHAR
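
# For example (illustrative; with no page encoding detected the module-level
# default UNICODE_ENCODING is used):
#
#   >>> decodeIntToUnicode(65)
#   u'A'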

def unhandledExceptionMessage():
    """
    Returns a detailed message about the unhandled exception that occurred
    """

    errMsg = "unhandled exception in %s, retry your " % VERSION_STRING
    errMsg += "run with the latest development version from the Subversion "
    errMsg += "repository. If the exception persists, please send by e-mail "
    errMsg += "to %s the following text " % ML
    errMsg += "and any information required to reproduce the bug. The "
    errMsg += "developers will try to reproduce the bug, fix it accordingly "
    errMsg += "and get back to you.\n"
    errMsg += "sqlmap version: %s%s\n" % (VERSION, " (r%d)" % REVISION if REVISION else "")
    errMsg += "Python version: %s\n" % PYVERSION
    errMsg += "Operating system: %s\n" % PLATFORM
    errMsg += "Command line: %s\n" % " ".join(sys.argv)
    errMsg += "Technique: %s\n" % (enumValueToNameLookup(PAYLOAD.TECHNIQUE, kb.technique) if kb and kb.technique else None)
    errMsg += "Back-end DBMS: %s" % ("%s (fingerprinted)" % Backend.getDbms() if Backend.getDbms() is not None else "%s (identified)" % Backend.getIdentifiedDbms())

    return maskSensitiveData(errMsg)

def maskSensitiveData(msg):
    """
    Masks sensitive data in the supplied message
    """

    retVal = msg

    for item in filter(None, map(lambda x: conf.get(x), ['hostname', 'googleDork', 'aCred', 'pCred', 'tbl', 'db', 'col', 'user', 'cookie'])):
        regex = SENSITIVE_DATA_REGEX % item
        while extractRegexResult(regex, retVal):
            value = extractRegexResult(regex, retVal)
            retVal = retVal.replace(value, '*' * len(value))

    return retVal
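
# A toy illustration of the masking loop above (standalone sketch; the real
# SENSITIVE_DATA_REGEX lives in the settings module and is more involved,
# and extractRegexResult() is assumed to return the matched fragment):
#
#   >>> import re
#   >>> msg = "Command line: -u http://target.tld/ --banner"
#   >>> value = re.search("target.tld", msg).group(0)
#   >>> msg.replace(value, '*' * len(value))
#   'Command line: -u http://**********/ --banner'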

def listToStrValue(value):
    """
    Flattens list to a string value

    >>> listToStrValue([1,2,3])
    '1, 2, 3'
    """

    if isinstance(value, (set, tuple)):
        value = list(value)

    if isinstance(value, list):
        retVal = str(value).lstrip('[').rstrip(']')
    else:
        retVal = value

    return retVal

def getExceptionFrameLocals():
    """
    Returns dictionary with local variable content from frame
    where exception was raised
    """

    retVal = {}
    trace = sys.exc_info()[2]

    # Note: sys.exc_info() itself is always a (truthy) 3-tuple, hence the
    # traceback object has to be checked explicitly before walking it
    if trace is not None:
        while trace.tb_next:
            trace = trace.tb_next

        retVal = trace.tb_frame.f_locals

    return retVal

def intersect(valueA, valueB, lowerCase=False):
    """
    Returns intersection of the array-ized values
    """

    retVal = None

    if valueA and valueB:
        valueA = arrayizeValue(valueA)
        valueB = arrayizeValue(valueB)

        if lowerCase:
            valueA = [val.lower() if isinstance(val, basestring) else val for val in valueA]
            valueB = [val.lower() if isinstance(val, basestring) else val for val in valueB]

        retVal = [val for val in valueA if val in valueB]

    return retVal
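
# Examples (illustrative):
#
#   >>> intersect([1, 2, 3], [2, 3, 4])
#   [2, 3]
#   >>> intersect(u'users', [u'USERS', u'admins'], lowerCase=True)
#   [u'users']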

def cpuThrottle(value):
    """
    Does CPU throttling for lesser CPU consumption
    """

    delay = 0.00001 * (value ** 2)
    time.sleep(delay)
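
# The delay grows quadratically with the given throttling value, e.g.:
#
#   value=10  -> 0.001s sleep per call
#   value=50  -> 0.025s sleep per call
#   value=100 -> 0.1s sleep per call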

def removeReflectiveValues(content, payload, suppressWarning=False):
    """
    Neutralizes (static/marked) reflective values in a given content based on a payload
    (e.g. ?search=sql injection ---> ...value="sql%20injection")
    """

    retVal = content

    if all([content, payload]) and isinstance(content, unicode) and kb.reflectiveMechanism:
        payload = getUnicode(urldecode(payload.replace(PAYLOAD_DELIMITER, '')))

        regex = filterStringValue(payload, r'[A-Za-z0-9]', REFLECTED_NON_ALPHA_NUM_REGEX)

        while 2 * REFLECTED_NON_ALPHA_NUM_REGEX in regex:
            regex = regex.replace(2 * REFLECTED_NON_ALPHA_NUM_REGEX, REFLECTED_NON_ALPHA_NUM_REGEX)

        if all(part.lower() in content.lower() for part in regex.split(REFLECTED_NON_ALPHA_NUM_REGEX)):  # fast optimization check
            parts = regex.split(REFLECTED_NON_ALPHA_NUM_REGEX)

            if len(parts) > REFLECTED_MAX_REGEX_PARTS:  # preventing CPU hogs
                regex = "%s.+?%s" % (REFLECTED_NON_ALPHA_NUM_REGEX.join(parts[:REFLECTED_MAX_REGEX_PARTS/2]), REFLECTED_NON_ALPHA_NUM_REGEX.join(parts[-REFLECTED_MAX_REGEX_PARTS/2:]))

            # Note: re.sub()'s fourth positional argument is 'count', not
            # 'flags', hence case-insensitivity is done with inline '(?i)'
            retVal = re.sub("(?i)%s" % regex, REFLECTED_VALUE_MARKER, content)

            if retVal != content:
                kb.reflectiveCounters[REFLECTIVE_COUNTER.HIT] += 1

                if not suppressWarning:
                    debugMsg = "reflective value found and filtered out"
                    logger.debug(debugMsg)

            elif not kb.testMode and not kb.reflectiveCounters[REFLECTIVE_COUNTER.HIT]:
                kb.reflectiveCounters[REFLECTIVE_COUNTER.MISS] += 1

                if kb.reflectiveCounters[REFLECTIVE_COUNTER.MISS] > REFLECTIVE_MISS_THRESHOLD:
                    kb.reflectiveMechanism = False

                    if not suppressWarning:
                        debugMsg = "turning off reflection removal mechanism (for optimization purposes)"
                        logger.debug(debugMsg)

    return retVal
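
# A sketch of how the boundary regex gets built (assuming the settings
# constant REFLECTED_NON_ALPHA_NUM_REGEX is a pattern matching runs of
# non-alphanumeric characters, e.g. something along the lines of
# r'[^A-Za-z0-9]+'):
#
#   payload:  u"AND 1573=1573"
#   regex:    u"AND[^A-Za-z0-9]+1573[^A-Za-z0-9]+1573"
#
# so the reflected payload is matched in the response even when the target
# application URL-encodes or otherwise mangles its non-alphanumeric parts.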

def normalizeUnicode(value):
    """
    Does an ASCII normalization of unicode strings
    Reference: http://www.peterbe.com/plog/unicode-to-ascii
    """

    retVal = value

    if isinstance(value, unicode):
        retVal = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')

    return retVal
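
# For instance (illustrative):
#
#   >>> normalizeUnicode(u'\u0161u\u0107uraj')
#   'sucuraj'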

def safeSQLIdentificatorNaming(name, isTable=False):
    """
    Returns a safe representation of a SQL identifier name (internal data format)
    """

    retVal = name

    if isinstance(name, basestring):
        if isTable and Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE) and '.' not in name:
            name = "%s.%s" % (DEFAULT_MSSQL_SCHEMA, name)

        parts = name.split('.')

        for i in xrange(len(parts)):
            if not re.match(r"\A[A-Za-z0-9_]+\Z", parts[i]):
                if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.ACCESS):
                    parts[i] = "`%s`" % parts[i].strip("`")
                elif Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.ORACLE, DBMS.PGSQL, DBMS.DB2):
                    parts[i] = "\"%s\"" % parts[i].strip("\"")

        retVal = ".".join(parts)

    return retVal
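
# For instance (illustrative; quoting depends on the identified back-end
# DBMS state, hence no doctest):
#
#   with a MySQL back-end:      u'db name.users' -> u'`db name`.users'
#   with a PostgreSQL back-end: u'db name.users' -> u'"db name".users'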

def unsafeSQLIdentificatorNaming(name):
    """
    Extracts an identifier's name from its safe SQL representation
    """

    retVal = name

    if isinstance(name, basestring):
        if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.ACCESS):
            retVal = name.replace("`", "")
        elif Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.ORACLE, DBMS.PGSQL, DBMS.DB2):
            retVal = name.replace("\"", "")

        if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE):
            prefix = "%s." % DEFAULT_MSSQL_SCHEMA
            if retVal.startswith(prefix):
                retVal = retVal[len(prefix):]

    return retVal

def isBinaryData(value):
    """
    Tests given value for binary content
    """

    retVal = False

    if isinstance(value, basestring):
        retVal = any(not (c in string.printable or ord(c) > 255) for c in value)

    return retVal
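
# For instance (illustrative):
#
#   >>> isBinaryData('\x00\x01\x02')
#   True
#   >>> isBinaryData(u'foobar')
#   False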

def isNoneValue(value):
    """
    Returns whether the value contains implicit 'None' value
    """

    if isinstance(value, basestring):
        return value == "None"
    elif isinstance(value, (list, tuple)):
        if len(value) == 1:
            return isNoneValue(value[0])
        else:
            for item in value:
                if item and item != "None":
                    return False
            return True
    elif isinstance(value, dict):
        return not any(value)
    else:
        return value is None
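
# Examples (illustrative):
#
#   >>> isNoneValue(None)
#   True
#   >>> isNoneValue("None")
#   True
#   >>> isNoneValue(["None", "None"])
#   True
#   >>> isNoneValue([None, "foobar"])
#   False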

def isNullValue(value):
    """
    Returns whether the value contains explicit 'NULL' value
    """

    return isinstance(value, basestring) and value.upper() == "NULL"

def expandMnemonics(mnemonics, parser, args):
    """
    Expands mnemonic options
    """

    class MnemonicNode:
        def __init__(self):
            self.next = {}
            self.current = []

    head = MnemonicNode()
    pointer = None

    # Build a character trie out of all option names, accumulating in each
    # node the options reachable through that prefix
    for group in parser.option_groups:
        for option in group.option_list:
            for opt in option._long_opts + option._short_opts:
                pointer = head

                for char in opt:
                    if char == "-":
                        continue
                    elif char not in pointer.next:
                        pointer.next[char] = MnemonicNode()

                    pointer = pointer.next[char]
                    pointer.current.append(option)

    for mnemonic in mnemonics.split(','):
        found = None
        name = mnemonic.split('=')[0].replace("-", "").strip()
        value = mnemonic.split('=')[1] if len(mnemonic.split('=')) > 1 else None
        pointer = head

        for char in name:
            if char in pointer.next:
                pointer = pointer.next[char]
            else:
                pointer = None
                break

        if pointer in (None, head):
            errMsg = "mnemonic '%s' can't be resolved to any parameter name" % name
            raise sqlmapSyntaxException, errMsg

        elif len(pointer.current) > 1:
            options = {}

            for option in pointer.current:
                for opt in option._long_opts + option._short_opts:
                    opt = opt.strip('-')
                    if opt.startswith(name):
                        options[opt] = option

            if name in options:
                found = name
                debugMsg = "mnemonic '%s' resolved to '%s'" % (name, found)
                logger.debug(debugMsg)
            else:
                found = sorted(options.keys(), key=lambda x: len(x))[0]
                warnMsg = "detected ambiguity (mnemonic '%s' can be resolved to %s). " % (name, ", ".join("'%s'" % key for key in options.keys()))
                warnMsg += "resolved to shortest of those available ('%s')" % found
                logger.warn(warnMsg)

            found = options[found]
        else:
            found = pointer.current[0]
            debugMsg = "mnemonic '%s' resolved to '%s'" % (name, found)
            logger.debug(debugMsg)

        if found:
            value = found.convert_value(found, value)

            if value is not None:
                setattr(args, found.dest, value)
            elif not found.type:  # boolean
                setattr(args, found.dest, True)
            else:
                errMsg = "mnemonic '%s' requires value of type '%s'" % (name, found.type)
                raise sqlmapSyntaxException, errMsg
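
# A minimal usage sketch (assumed standalone, not part of sqlmap's runtime),
# resolving the unambiguous mnemonic prefix "lev" to the --level option:
#
#   >>> from optparse import OptionParser, OptionGroup, Values
#   >>> parser = OptionParser()
#   >>> group = OptionGroup(parser, "Request")
#   >>> _ = group.add_option("--level", dest="level", type="int")
#   >>> parser.add_option_group(group)
#   >>> args = Values()
#   >>> expandMnemonics("lev=3", parser, args)
#   >>> args.level
#   3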

def safeCSValue(value):
    """
    Returns value safe for CSV dumping.
    Reference: http://stackoverflow.com/questions/769621/dealing-with-commas-in-a-csv-file
    """

    retVal = value

    if retVal and isinstance(retVal, basestring):
        if not (retVal[0] == retVal[-1] == '"'):
            if any(c in retVal for c in ('"', ',', '\n')):
                retVal = '"%s"' % retVal.replace('"', '""')

    return retVal
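
# For instance (illustrative):
#
#   >>> safeCSValue(u'foo,bar')
#   u'"foo,bar"'
#   >>> safeCSValue(u'foobar')
#   u'foobar'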

def filterPairValues(values):
    """
    Returns only the pair-like (length two) items from the given iterable
    """

    retVal = []

    if not isNoneValue(values) and hasattr(values, '__iter__'):
        retVal = filter(lambda x: isinstance(x, (tuple, list, set)) and len(x) == 2, values)

    return retVal

def randomizeParameterValue(value):
    """
    Randomizes a parameter value based on occurrences of alphanumeric characters
    """

    retVal = value

    for match in re.finditer('[A-Z]+', value):
        retVal = retVal.replace(match.group(), randomStr(len(match.group())).upper())

    for match in re.finditer('[a-z]+', value):
        retVal = retVal.replace(match.group(), randomStr(len(match.group())).lower())

    for match in re.finditer('[0-9]+', value):
        retVal = retVal.replace(match.group(), str(randomInt(len(match.group()))))

    return retVal
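
# For instance (illustrative; output is random by design, hence no doctest):
#
#   randomizeParameterValue(u"ab123")  # -> e.g. u"xk329" (letter/digit
#                                      #    layout kept, fresh random content)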

def asciifyUrl(url, force_quote=False):
    """
    Attempts to make a unicode URL usable with ``urllib/urllib2``.

    More specifically, it attempts to convert the unicode object ``url``,
    which is meant to represent an IRI, to a unicode object that,
    containing only ASCII characters, is a valid URI. This involves:

    * IDNA/Puny-encoding the domain name.
    * UTF8-quoting the path and querystring parts.

    See also RFC 3987.

    Reference: http://blog.elsdoerfer.name/2008/12/12/opening-iris-in-python/
    """

    import urllib  # local import (harmless if already imported at module level)

    parts = urlparse.urlsplit(url)
    if not parts.scheme or not parts.netloc:
        # apparently not a URL
        return url

    # IDNA-encode the domain
    hostname = parts.hostname.encode('idna')

    # UTF8-quote the other parts. We check each part individually if
    # it needs to be quoted - that should catch some additional user
    # errors, say for example an umlaut in the username even though
    # the path *is* already quoted.
    def quote(s, safe):
        s = s or ''
        # Triggers on non-ascii characters - another option would be:
        # urllib.quote(s.replace('%', '')) != s.replace('%', '')
        # which would trigger on all %-characters, e.g. "&".
        if s.encode('ascii', 'replace') != s or force_quote:
            return urllib.quote(s.encode('utf8'), safe=safe)
        return s

    username = quote(parts.username, '')
    password = quote(parts.password, safe='')
    path = quote(parts.path, safe='/')
    query = quote(parts.query, safe='&=')

    # Put everything back together
    netloc = hostname
    if username or password:
        netloc = '@' + netloc
        if password:
            netloc = ':' + password + netloc
        netloc = username + netloc
    if parts.port:
        netloc += ':' + str(parts.port)

    return urlparse.urlunsplit([parts.scheme, netloc, path, query, parts.fragment])
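
# For instance (illustrative; the exact percent-encoding shown assumes a
# UTF-8 rendering of the non-ASCII path characters):
#
#   >>> asciifyUrl(u'http://www.m\xfcller.de/p\xe4th?q=1')
#   u'http://www.xn--mller-kva.de/p%C3%A4th?q=1'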