#!/usr/bin/env python2
"""
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
try:
    import cPickle as pickle
except ImportError:
    import pickle
import bz2
import itertools
import os
import sys
import tempfile
from lib.core.compat import xrange
from lib.core.enums import MKSTEMP_PREFIX
from lib.core.exception import SqlmapSystemException
from lib.core.settings import BIGARRAY_CHUNK_SIZE
from lib.core.settings import BIGARRAY_COMPRESS_LEVEL
DEFAULT_SIZE_OF = sys.getsizeof(object())
def _size_of(object_):
    """
    Returns total size of a given object_ (in bytes)
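
    A minimal sanity-check doctest:

    >>> _size_of([1, 2, 3]) > _size_of([1, 2])
    True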
    """

    retval = sys.getsizeof(object_, DEFAULT_SIZE_OF)

    if isinstance(object_, dict):
        retval += sum(_size_of(_) for _ in itertools.chain.from_iterable(object_.items()))
    elif hasattr(object_, "__iter__"):
        retval += sum(_size_of(_) for _ in object_)

    return retval
class Cache(object):
    """
    Auxiliary class used for storing cached chunks
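
    A minimal doctest:

    >>> _ = Cache(0, [1, 2, 3], False)
    >>> _.index, _.data, _.dirty
    (0, [1, 2, 3], False)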
    """
    def __init__(self, index, data, dirty):
        self.index = index
        self.data = data
        self.dirty = dirty
class BigArray(list):
    """
    List-like class used for storing large amounts of data (disk cached)
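
    A minimal usage sketch (doctest); with the default BIGARRAY_CHUNK_SIZE
    the example below is large enough to exercise the disk-cached path:

    >>> _ = BigArray(xrange(100000))
    >>> _[20] = 0
    >>> _[20]
    0
    >>> _[99999]
    99999
    >>> len(_)
    100000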
    """
    def __init__(self, items=None):
        self.chunks = [[]]
        self.chunk_length = sys.maxsize
        self.cache = None
        self.filenames = set()
        self._os_remove = os.remove
        self._size_counter = 0
        for item in (items or []):
            self.append(item)
    def append(self, value):
        self.chunks[-1].append(value)

        # Until the first chunk fills up, track the approximate in-memory
        # size of appended values; once it crosses BIGARRAY_CHUNK_SIZE,
        # freeze the chunk length used for all subsequent chunks
        if self.chunk_length == sys.maxsize:
            self._size_counter += _size_of(value)
            if self._size_counter >= BIGARRAY_CHUNK_SIZE:
                self.chunk_length = len(self.chunks[-1])
                self._size_counter = None

        # When the current chunk is full, compress it to a temporary file
        # and start a fresh in-memory chunk
        if len(self.chunks[-1]) >= self.chunk_length:
            filename = self._dump(self.chunks[-1])
            self.chunks[-1] = filename
            self.chunks.append([])
    def extend(self, value):
        for _ in value:
            self.append(_)
    def pop(self):
        # If the current in-memory chunk is exhausted, drop it and reload
        # the most recently dumped chunk from its temporary file
        if len(self.chunks[-1]) < 1:
            self.chunks.pop()
            try:
                with open(self.chunks[-1], "rb") as f:
                    self.chunks[-1] = pickle.loads(bz2.decompress(f.read()))
            except IOError as ex:
                errMsg = "exception occurred while retrieving data "
                errMsg += "from a temporary file ('%s')" % ex
                raise SqlmapSystemException(errMsg)

        return self.chunks[-1].pop()
    def index(self, value):
        for index in xrange(len(self)):
            if self[index] == value:
                return index

        raise ValueError("%s is not in list" % value)
    def _dump(self, chunk):
        try:
            # Reserve a unique temporary filename (the handle itself is not needed)
            handle, filename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.BIG_ARRAY)
            self.filenames.add(filename)
            os.close(handle)
            with open(filename, "w+b") as f:
                f.write(bz2.compress(pickle.dumps(chunk, pickle.HIGHEST_PROTOCOL), BIGARRAY_COMPRESS_LEVEL))
            return filename
        except (OSError, IOError) as ex:
            errMsg = "exception occurred while storing data "
            errMsg += "to a temporary file ('%s'). Please " % ex
            errMsg += "make sure that there is enough disk space left. If problem persists, "
            errMsg += "try to set environment variable 'TEMP' to a location "
            errMsg += "writeable by the current user"
            raise SqlmapSystemException(errMsg)
    def _checkcache(self, index):
        # Write back the cached chunk if a different chunk is needed
        # and the cached one has unsaved modifications
        if self.cache and self.cache.index != index and self.cache.dirty:
            filename = self._dump(self.cache.data)
            self.chunks[self.cache.index] = filename

        # Load the requested chunk into the cache if not already there
        if not (self.cache and self.cache.index == index):
            try:
                with open(self.chunks[index], "rb") as f:
                    self.cache = Cache(index, pickle.loads(bz2.decompress(f.read())), False)
            except Exception as ex:
                errMsg = "exception occurred while retrieving data "
                errMsg += "from a temporary file ('%s')" % ex
                raise SqlmapSystemException(errMsg)
    def __getstate__(self):
        return self.chunks, self.filenames
    def __setstate__(self, state):
        self.__init__()
        self.chunks, self.filenames = state
    def __getslice__(self, i, j):
        # Python 2 slice hook (Python 3 routes slices through __getitem__);
        # clamp negative/out-of-range bounds the way built-in lists do
        i = max(0, len(self) + i if i < 0 else i)
        j = min(len(self), len(self) + j if j < 0 else j)

        return BigArray(self[_] for _ in xrange(i, j))
    def __getitem__(self, y):
        if y < 0:
            y += len(self)

        index = y // self.chunk_length
        offset = y % self.chunk_length
        chunk = self.chunks[index]

        # In-memory chunks are lists; dumped chunks are temporary filenames
        if isinstance(chunk, list):
            return chunk[offset]
        else:
            self._checkcache(index)
            return self.cache.data[offset]
    def __setitem__(self, y, value):
        index = y // self.chunk_length
        offset = y % self.chunk_length
        chunk = self.chunks[index]

        if isinstance(chunk, list):
            chunk[offset] = value
        else:
            self._checkcache(index)
            self.cache.data[offset] = value
            # Mark the cached chunk dirty so _checkcache() writes it back
            self.cache.dirty = True
    def __repr__(self):
        return "%s%s" % ("..." if len(self.chunks) > 1 else "", repr(self.chunks[-1]))
    def __iter__(self):
        for i in xrange(len(self)):
            yield self[i]
    def __len__(self):
        return len(self.chunks[-1]) if len(self.chunks) == 1 else (len(self.chunks) - 1) * self.chunk_length + len(self.chunks[-1])
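
# A minimal self-test hook (a sketch; not required by the module itself):
# from the sqlmap root directory, "python -m lib.core.bigarray" runs the
# doctests embedded in the docstrings above
if __name__ == "__main__":
    import doctest
    doctest.testmod()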