mirror of https://github.com/sqlmapproject/sqlmap.git
synced 2024-11-25 19:13:48 +03:00

commit ec1bc0219c (parent 82e1e61554)

    hello big tables, this is sqlmap, sqlmap this is big tables
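Taken together, the hunks below appear to do four things: introduce a disk-backed, list-like BigArray container (defined in lib/core/common.py, judging by the `from lib.core.common import BigArray` lines further down) whose full chunks are pickled out to temporary files so that only the live tail chunk and one cached chunk stay in memory; replace plain-list result buffers with BigArray in the union, blind-inference, error-based and enumeration code paths; trim console output of very large table dumps to the last TRIM_STDOUT_DUMP_SIZE rows while still writing every row to the dump file; and hoist per-row DBMS checks out of the dumped-table hash-recognition loop.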
@@ -23,7 +23,9 @@ import socket
 import string
 import struct
 import sys
+import tempfile
 import time
+import types
 import urlparse
 import unicodedata
 
@@ -205,16 +207,48 @@ class BigArray(list):
         self.chunks = [[]]
         self.cache = None
         self.length = 0
+        self.filenames = set()
 
     def append(self, value):
         self.chunks[-1].append(value)
         if len(self.chunks[-1]) >= BIGARRAY_CHUNK_LENGTH:
-            fp = tempfile.TemporaryFile()
-            pickle.dump(self.chunks[-1], fp)
+            filename = self._dump(self.chunks[-1])
             del(self.chunks[-1][:])
-            self.chunks[-1] = fp
+            self.chunks[-1] = filename
             self.chunks.append([])
 
+    def pop(self):
+        if len(self.chunks[-1]) < 1:
+            self.chunks.pop()
+            fp = open(self.chunks[-1], 'rb')
+            self.chunks[-1] = pickle.load(fp)
+            fp.close()
+        return self.chunks[-1].pop()
+
+    def index(self, value):
+        for index in xrange(len(self)):
+            if self[index] == value:
+                return index
+        return ValueError, "%s is not in list" % value
+
+    def _dump(self, value):
+        handle, filename = tempfile.mkstemp()
+        self.filenames.add(filename)
+        os.close(handle)
+        fp = open(filename, 'w+b')
+        pickle.dump(value, fp)
+        fp.close()
+        return filename
+
+    def _checkcache(self, index):
+        if (self.cache and self.cache[0] != index and self.cache[2]):
+            filename = self._dump(self.cache[1])
+            self.chunks[self.cache[0]] = filename
+        if not (self.cache and self.cache[0] == index):
+            fp = open(self.chunks[index], 'rb')
+            self.cache = [index, pickle.load(fp), False]
+            fp.close()
+
     def __getitem__(self, y):
         index = y / BIGARRAY_CHUNK_LENGTH
         offset = y % BIGARRAY_CHUNK_LENGTH
@@ -222,14 +256,37 @@ class BigArray(list):
         if isinstance(chunk, list):
             return chunk[offset]
         else:
-            if not (self.cache and self.cache[0] == index):
-                chunk.seek(0)
-                self.cache = (index, pickle.load(chunk))
+            self._checkcache(index)
             return self.cache[1][offset]
 
+    def __setitem__(self, y, value):
+        index = y / BIGARRAY_CHUNK_LENGTH
+        offset = y % BIGARRAY_CHUNK_LENGTH
+        chunk = self.chunks[index]
+        if isinstance(chunk, list):
+            chunk[offset] = value
+        else:
+            self._checkcache(index)
+            self.cache[1][offset] = value
+            self.cache[2] = True # dirty flag
+
+    def __repr__(self):
+        return "%s%s" % ("..." if len(self.chunks) > 1 else "", self.chunks[-1].__repr__())
+
+    def __iter__(self):
+        for i in xrange(len(self)):
+            yield self[i]
+
     def __len__(self):
         return len(self.chunks[-1]) if len(self.chunks) == 1 else (len(self.chunks) - 1) * BIGARRAY_CHUNK_LENGTH + len(self.chunks[-1])
 
+    def __del__(self):
+        for filename in self.filenames:
+            try:
+                os.remove(filename)
+            except OSError:
+                pass
+
 class DynamicContentItem:
     """
     Represents line in content page with dynamic properties (candidate
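With the class now complete, here is a minimal usage sketch (hypothetical, not part of the commit; it assumes the post-commit BIGARRAY_CHUNK_LENGTH of 5000) showing the spill-to-disk, caching and write-back behavior defined above. One caveat visible in the hunk itself: `return ValueError, "%s is not in list" % value` in index() returns a tuple rather than raising, so `raise ValueError(...)` was presumably intended.

    # Hypothetical sketch, not from the commit itself.
    big = BigArray()

    for i in xrange(12500):
        big.append("row-%d" % i)    # chunks 0 and 1 spill to disk via _dump()

    print len(big)                  # 12500 == (3 - 1) * 5000 + 2500
    print big[0]                    # "row-0": chunk 0 is unpickled into self.cache
    big[0] = "changed"              # served from the cache; sets the dirty flag, so
                                    # chunk 0 is re-dumped when the cache moves on
    print big.pop()                 # "row-12499", straight from the in-memory tail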
@@ -561,6 +618,15 @@ class Backend:
     def isOs(os):
         return Backend.getOs() is not None and Backend.getOs().lower() == os.lower()
 
+# Reference: http://code.activestate.com/recipes/325205-cache-decorator-in-python-24/
+def cachedmethod(f, cache={}):
+    def g(*args, **kwargs):
+        key = ( f, tuple(args), frozenset(kwargs.items()) )
+        if key not in cache:
+            cache[key] = f(*args, **kwargs)
+        return cache[key]
+    return g
+
 def paramToDict(place, parameters=None):
     """
     Split the parameters into names and values, check if these parameters
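The decorator above is the linked ActiveState memoization recipe essentially verbatim; below is a small hypothetical sketch of its behavior (names are illustrative, not from the commit). Note the recipe's usual caveats: the default-argument cache is shared, unbounded and keyed by `(f, tuple(args), frozenset(kwargs.items()))`, so all arguments must be hashable.

    # Hypothetical sketch, not from the commit itself.
    calls = [0]

    @cachedmethod
    def canonical(alias):
        calls[0] += 1
        return alias.strip().upper()

    print canonical("mysql ")   # computed -> "MYSQL"
    print canonical("mysql ")   # same key, served from the cache
    print calls[0]              # 1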
@@ -1266,7 +1332,7 @@ def parseUnionPage(output, expression, partial=False, condition=None, sort=True)
     if output is None:
         return None
 
-    data = []
+    data = BigArray()
 
     outCond1 = ( output.startswith(kb.misc.start) and output.endswith(kb.misc.stop) )
     outCond2 = ( output.startswith(DUMP_START_MARKER) and output.endswith(DUMP_STOP_MARKER) )
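parseUnionPage() above is the first adoption site: the union-technique result accumulator becomes a BigArray, so a full-table dump no longer has to fit in a plain in-memory list.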
@@ -2204,6 +2270,7 @@ def isNumPosStrValue(value):
 
     return value and isinstance(value, basestring) and value.isdigit() and value != "0"
 
+@cachedmethod
 def aliasToDbmsEnum(dbms):
     """
     Returns major DBMS name from a given alias
@@ -2730,8 +2797,8 @@ def isNoneValue(value):
         if len(value) == 1:
             return isNoneValue(value[0])
         else:
-            for i in xrange(len(value)):
-                if value[i] and value[i] != "None":
+            for item in value:
+                if item and item != "None":
                     return False
             return True
     elif isinstance(value, dict):
@@ -24,6 +24,7 @@ from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.enums import DBMS
 from lib.core.replication import Replication
+from lib.core.settings import TRIM_STDOUT_DUMP_SIZE
 from lib.core.settings import UNICODE_ENCODING
 
 class Dump:
@@ -37,8 +38,9 @@ class Dump:
         self.__outputFile = None
         self.__outputFP = None
 
-    def __write(self, data, n=True):
+    def __write(self, data, n=True, console=True):
         text = "%s%s" % (data, "\n" if n else " ")
-        dataToStdout(text)
+        if console:
+            dataToStdout(text)
 
         self.__outputFP.write(text)
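The new `console` keyword defaults to True, so existing __write() callers keep printing as before; only the row-dumping loop in the hunks below passes an explicit value.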
@@ -407,7 +409,13 @@ class Dump:
         if conf.replicate:
             rtable.beginTransaction()
 
+        if count > TRIM_STDOUT_DUMP_SIZE:
+            warnMsg = "console output will be trimmed "
+            warnMsg += "due to the large table size"
+            logger.warning(warnMsg)
+
         for i in range(count):
+            console = (i >= count - TRIM_STDOUT_DUMP_SIZE)
             field = 1
             values = []
 
@@ -429,7 +437,7 @@ class Dump:
                     values.append(value)
                     maxlength = int(info["length"])
                     blank = " " * (maxlength - len(value))
-                    self.__write("| %s%s" % (value, blank), n=False)
+                    self.__write("| %s%s" % (value, blank), n=False, console=console)
 
                 if not conf.replicate:
                     if not conf.multipleTargets and field == fields:
@@ -442,7 +450,7 @@ class Dump:
                 if conf.replicate:
                     rtable.insert(values)
 
-                self.__write("|")
+                self.__write("|", console=console)
 
                 if not conf.multipleTargets and not conf.replicate:
                     dataToDumpFile(dumpFP, "\n")
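The trimming predicate from the dump hunks above is easy to check in isolation — a hypothetical sketch with illustrative values: only the last TRIM_STDOUT_DUMP_SIZE row indexes satisfy `i >= count - TRIM_STDOUT_DUMP_SIZE`, so they alone reach the console, while every row still goes to the dump file or replication table.

    # Hypothetical sketch, not from the commit itself.
    count = 1000
    TRIM_STDOUT_DUMP_SIZE = 256

    printed = [i for i in range(count) if i >= count - TRIM_STDOUT_DUMP_SIZE]
    assert printed == range(count - TRIM_STDOUT_DUMP_SIZE, count)   # rows 744..999 only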
@@ -31,7 +31,7 @@ def profile(profileOutputFile=None, dotOutputFile=None, imageOutputFile=None):
         errMsg = "profiling requires third-party libraries (%s). " % getUnicode(e, UNICODE_ENCODING)
         errMsg += "Quick steps:%s" % os.linesep
         errMsg += "1) Install http://code.google.com/p/pydot/%s" % os.linesep
-        errMsg += "2) sudo apt-get install python-profiler graphviz"
+        errMsg += "2) sudo apt-get install python-pyparsing python-profiler graphviz"
         logger.error(errMsg)
 
         return
@@ -383,5 +383,8 @@ IDS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,2,3,table_name FROM informat
 # Used for status representation in dictionary attack phase
 ROTATING_CHARS = ('\\', '|', '|', '/', '-')
 
-# Chunk length used in BigArray object (only last one is held in memory)
-BIGARRAY_CHUNK_LENGTH = 10000
+# Chunk length (in items) used by BigArray objects (only last chunk and cached one are held in memory)
+BIGARRAY_CHUNK_LENGTH = 5000
+
+# Only console display last n table rows
+TRIM_STDOUT_DUMP_SIZE = 256
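Rough illustrative arithmetic (not from the commit) on what the halved chunk length buys: for an N-item BigArray only the tail chunk plus at most one cached chunk are ever resident, so the in-memory bound drops from roughly 20000 items to roughly 10000, regardless of total size.

    # Illustrative upper bound only, not from the commit itself.
    def resident_items_upper_bound(n, chunk=5000):
        # at most one (possibly partial) tail chunk plus one cached chunk
        return min(n, chunk) + (chunk if n > chunk else 0)

    print resident_items_upper_bound(1000000)   # 10000, vs 20000 with 10000-item chunks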
@@ -12,6 +12,7 @@ import time
 
 from lib.core.agent import agent
 from lib.core.common import Backend
+from lib.core.common import BigArray
 from lib.core.common import calculateDeltaSeconds
 from lib.core.common import cleanQuery
 from lib.core.common import dataToSessionFile
@@ -123,7 +124,7 @@ def __goInferenceProxy(expression, fromUser=False, expected=None, batch=False, r
     count = None
     startLimit = 0
     stopLimit = None
-    outputs = []
+    outputs = BigArray()
     test = None
     untilLimitChar = None
     untilOrderChar = None
@@ -13,6 +13,7 @@ import time
 
 from lib.core.agent import agent
 from lib.core.common import Backend
+from lib.core.common import BigArray
 from lib.core.common import calculateDeltaSeconds
 from lib.core.common import dataToSessionFile
 from lib.core.common import dataToStdout
@@ -321,7 +322,7 @@ def errorUse(expression, expected=None, resumeValue=True, dump=False):
     threadData = getCurrentThreadData()
     threadData.shared.limits = range(startLimit, stopLimit)
     numThreads = min(conf.threads, len(threadData.shared.limits))
-    threadData.shared.outputs = []
+    threadData.shared.outputs = BigArray()
 
     if stopLimit > TURN_OFF_RESUME_INFO_LIMIT:
         kb.suppressResumeInfo = True
@@ -33,13 +33,12 @@ from zipfile import ZipFile
 
 from extra.pydes.pyDes import des
 from extra.pydes.pyDes import CBC
+from lib.core.common import Backend
 from lib.core.common import checkFile
 from lib.core.common import clearConsoleLine
 from lib.core.common import dataToStdout
 from lib.core.common import getCompiledRegex
 from lib.core.common import getFileItems
-from lib.core.common import Backend
-from lib.core.common import getCompiledRegex
 from lib.core.common import getPublicTypeMembers
 from lib.core.common import normalizeUnicode
 from lib.core.common import paths
@@ -252,6 +251,8 @@ def attackCachedUsersPasswords():
                     kb.data.cachedUsersPasswords[user][i] += "%s clear-text password: %s" % ('\n' if kb.data.cachedUsersPasswords[user][i][-1] != '\n' else '', password)
 
 def attackDumpedTable():
+    isOracle, isMySQL = Backend.isDbms(DBMS.ORACLE), Backend.isDbms(DBMS.MYSQL)
+
     if kb.data.dumpedTable:
         table = kb.data.dumpedTable
         columns = table.keys()
@@ -275,7 +276,7 @@ def attackDumpedTable():
 
                 value = table[column]['values'][i]
 
-                if hashRecognition(value):
+                if hashRecognition(value, isOracle, isMySQL):
                     if colUser:
                         if table[colUser]['values'][i] not in attack_dict:
                             attack_dict[table[colUser]['values'][i]] = []
@@ -310,15 +311,15 @@ def attackDumpedTable():
                 table[column]['values'][i] += " (%s)" % password
                 table[column]['length'] = max(table[column]['length'], len(table[column]['values'][i]))
 
-def hashRecognition(value):
+def hashRecognition(value, isOracle=False, isMySQL=False):
     retVal = None
 
     if isinstance(value, basestring):
         for name, regex in getPublicTypeMembers(HASH):
             # Hashes for Oracle and old MySQL look the same hence these checks
-            if Backend.isDbms(DBMS.ORACLE) and regex == HASH.MYSQL_OLD:
+            if isOracle and regex == HASH.MYSQL_OLD:
                 continue
-            elif Backend.isDbms(DBMS.MYSQL) and regex == HASH.ORACLE_OLD:
+            elif isMySQL and regex == HASH.ORACLE_OLD:
                 continue
             elif regex == HASH.CRYPT_GENERIC:
                 if any([getCompiledRegex(GENERAL_IP_ADDRESS_REGEX).match(value), value.lower() == value, value.upper() == value, value.isdigit()]):
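The hashRecognition() change hoists the loop-invariant Backend.isDbms() calls out of the per-cell hot path: attackDumpedTable() resolves isOracle/isMySQL once and passes them down instead of re-querying the backend for every value. The skip logic exists because, as the comment says, the two legacy formats look alike — both old-style MySQL PASSWORD() and Oracle DES hashes are bare 16-hex-digit strings, so only the known backend disambiguates them. A hypothetical illustration (the pattern below is a stand-in, not sqlmap's actual HASH regexes):

    # Hypothetical sketch, not from the commit itself.
    import re

    # stand-in for both HASH.MYSQL_OLD and HASH.ORACLE_OLD: 16 hex digits
    SIXTEEN_HEX = re.compile(r"\A[0-9a-f]{16}\Z", re.I)

    print bool(SIXTEEN_HEX.match("0123456789abcdef"))   # matches either scheme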
@@ -13,6 +13,7 @@ import time
 from lib.core.agent import agent
 from lib.core.common import arrayizeValue
 from lib.core.common import Backend
+from lib.core.common import BigArray
 from lib.core.common import clearConsoleLine
 from lib.core.common import dataToStdout
 from lib.core.common import getRange
@@ -1385,7 +1386,7 @@ class Enumeration:
 
             for column in colList:
                 lengths[column] = 0
-                entries[column] = []
+                entries[column] = BigArray()
 
             colList = sorted(colList, key=lambda x: len(x) if x else MAX_INT)
 
@@ -1706,7 +1707,7 @@ class Enumeration:
                 lengths[column] = 0
 
                 if column not in entries:
-                    entries[column] = []
+                    entries[column] = BigArray()
 
                 if Backend.getIdentifiedDbms() in ( DBMS.MYSQL, DBMS.PGSQL ):
                     query = rootQuery.blind.query % (column, conf.db, conf.tbl, index)