mirror of https://github.com/sqlmapproject/sqlmap.git
synced 2025-05-07 09:13:43 +03:00

Some more trivial refactoring

This commit is contained in:
parent b1a898662d
commit 22907d5085
lib/core/agent.py

@@ -39,6 +39,7 @@ from lib.core.settings import BOUNDED_INJECTION_MARKER
 from lib.core.settings import DEFAULT_COOKIE_DELIMITER
 from lib.core.settings import DEFAULT_GET_POST_DELIMITER
 from lib.core.settings import GENERIC_SQL_COMMENT
+from lib.core.settings import INFERENCE_MARKER
 from lib.core.settings import NULL
 from lib.core.settings import PAYLOAD_DELIMITER
 from lib.core.settings import REPLACEMENT_MARKER
@@ -319,7 +320,7 @@ class Agent(object):
             origValue = getUnicode(origValue)
             payload = getUnicode(payload).replace("[ORIGVALUE]", origValue if origValue.isdigit() else unescaper.escape("'%s'" % origValue))
 
-        if "[INFERENCE]" in payload:
+        if INFERENCE_MARKER in payload:
             if Backend.getIdentifiedDbms() is not None:
                 inference = queries[Backend.getIdentifiedDbms()].inference
 
@@ -331,7 +332,7 @@ class Agent(object):
                 else:
                     inferenceQuery = inference.query
 
-                payload = payload.replace("[INFERENCE]", inferenceQuery)
+                payload = payload.replace(INFERENCE_MARKER, inferenceQuery)
             elif not kb.testMode:
                 errMsg = "invalid usage of inference payload without "
                 errMsg += "knowledge of underlying DBMS"
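The change above is purely cosmetic: the bare "[INFERENCE]" literal is routed through the named settings constant, so the payload expansion itself stays the same. A minimal sketch of that expansion, using a hypothetical helper name and assuming INFERENCE_MARKER is defined in lib/core/settings.py as the very literal it replaces:

# Hypothetical sketch, not sqlmap's code: only the marker name changes, not the logic.
INFERENCE_MARKER = "[INFERENCE]"  # assumed to equal the literal it replaces

def expand_inference(payload_template, inference_query):
    # e.g. turn "AND 8742=IF(([INFERENCE]),8742,1234)" into a concrete condition
    if INFERENCE_MARKER in payload_template:
        return payload_template.replace(INFERENCE_MARKER, inference_query)
    return payload_template

print(expand_inference("AND 8742=IF(([INFERENCE]),8742,1234)", "ORD(MID(user(),1,1))>64"))
# AND 8742=IF((ORD(MID(user(),1,1))>64),8742,1234)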
lib/core/bigarray.py

@@ -27,10 +27,12 @@ def _size_of(object_):
     """
 
     retval = sys.getsizeof(object_, DEFAULT_SIZE_OF)
+
     if isinstance(object_, dict):
         retval += sum(_size_of(_) for _ in itertools.chain.from_iterable(object_.items()))
     elif hasattr(object_, "__iter__"):
         retval += sum(_size_of(_) for _ in object_)
+
     return retval
 
 class Cache(object):
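For orientation, _size_of() estimates the memory footprint of a value by adding sys.getsizeof() of the container itself to the recursively computed sizes of whatever it holds. A standalone Python 3 rendition (hypothetical, with a string guard the original Python 2 code does not need):

import itertools
import sys

DEFAULT_SIZE_OF = 1024  # assumed fallback when an object cannot be sized

def size_of(obj):
    total = sys.getsizeof(obj, DEFAULT_SIZE_OF)

    if isinstance(obj, dict):
        # count both keys and values
        total += sum(size_of(_) for _ in itertools.chain.from_iterable(obj.items()))
    elif hasattr(obj, "__iter__") and not isinstance(obj, (str, bytes)):
        total += sum(size_of(_) for _ in obj)

    return total

data = {1: [10, 20, 30], 2: [40, 50]}
print(size_of(data), ">", sys.getsizeof(data))  # recursive estimate vs. flat size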
@@ -58,11 +60,13 @@ class BigArray(list):
 
     def append(self, value):
         self.chunks[-1].append(value)
+
         if self.chunk_length == sys.maxint:
             self._size_counter += _size_of(value)
             if self._size_counter >= BIGARRAY_CHUNK_SIZE:
                 self.chunk_length = len(self.chunks[-1])
                 self._size_counter = None
+
         if len(self.chunks[-1]) >= self.chunk_length:
             filename = self._dump(self.chunks[-1])
             self.chunks[-1] = filename
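append() is the heart of BigArray's memory cap: it keeps a running size estimate of the first chunk until the estimate crosses BIGARRAY_CHUNK_SIZE, freezes that element count as chunk_length, and from then on spills every full chunk to a temporary file, keeping only the file name in memory. A stripped-down sketch of that behavior (hypothetical names and thresholds, not sqlmap's implementation):

import os
import pickle
import tempfile

CHUNK_SIZE_LIMIT = 1024  # deliberately tiny threshold for the sketch

class TinyBigArray(object):
    """Append-only list that spills full chunks to disk (illustration only)."""

    def __init__(self):
        self.chunks = [[]]          # last element is the writable in-memory chunk
        self.chunk_length = None    # fixed once the first chunk fills up
        self._size_counter = 0

    def append(self, value):
        self.chunks[-1].append(value)

        if self.chunk_length is None:
            self._size_counter += len(repr(value))          # crude size estimate
            if self._size_counter >= CHUNK_SIZE_LIMIT:
                self.chunk_length = len(self.chunks[-1])    # freeze chunk size

        if self.chunk_length and len(self.chunks[-1]) >= self.chunk_length:
            handle, filename = tempfile.mkstemp()
            with os.fdopen(handle, "wb") as fp:
                pickle.dump(self.chunks[-1], fp)            # spill chunk to disk
            self.chunks[-1] = filename                      # keep only the path
            self.chunks.append([])                          # start a fresh chunk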
@@ -82,12 +86,14 @@ class BigArray(list):
                 errMsg = "exception occurred while retrieving data "
                 errMsg += "from a temporary file ('%s')" % ex.message
                 raise SqlmapSystemException, errMsg
+
         return self.chunks[-1].pop()
 
     def index(self, value):
         for index in xrange(len(self)):
             if self[index] == value:
                 return index
+
         return ValueError, "%s is not in list" % value
 
     def _dump(self, chunk):
@@ -110,6 +116,7 @@ class BigArray(list):
         if (self.cache and self.cache.index != index and self.cache.dirty):
             filename = self._dump(self.cache.data)
             self.chunks[self.cache.index] = filename
+
         if not (self.cache and self.cache.index == index):
             try:
                 with open(self.chunks[index], "rb") as fp:
@@ -128,18 +135,23 @@ class BigArray(list):
 
     def __getslice__(self, i, j):
         retval = BigArray()
+
         i = max(0, len(self) + i if i < 0 else i)
         j = min(len(self), len(self) + j if j < 0 else j)
+
         for _ in xrange(i, j):
             retval.append(self[_])
+
         return retval
 
     def __getitem__(self, y):
         if y < 0:
             y += len(self)
+
         index = y / self.chunk_length
         offset = y % self.chunk_length
         chunk = self.chunks[index]
+
         if isinstance(chunk, list):
             return chunk[offset]
         else:
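__getitem__() (and __setitem__() just below) maps a flat index onto a (chunk, offset) pair with plain integer arithmetic. A worked example of that addressing, assuming an illustrative chunk_length of 4096:

# chunk addressing used by __getitem__/__setitem__ (the py2 code uses "/" for floor division)
chunk_length = 4096
y = 10000

index = y // chunk_length   # 2    -> third chunk holds this element
offset = y % chunk_length   # 1808 -> position inside that chunk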
@@ -150,6 +162,7 @@ class BigArray(list):
         index = y / self.chunk_length
         offset = y % self.chunk_length
         chunk = self.chunks[index]
+
         if isinstance(chunk, list):
             chunk[offset] = value
         else:
lib/core/common.py

@@ -166,7 +166,7 @@ def htmlunescape(value):
 
     retVal = value
     if value and isinstance(value, basestring):
-        codes = (('&lt;', '<'), ('&gt;', '>'), ('&quot;', '"'), ('&nbsp;', ' '), ('&amp;', '&'))
+        codes = (("&lt;", '<'), ("&gt;", '>'), ("&quot;", '"'), ("&nbsp;", ' '), ("&amp;", '&'), ("&#39;", "'"))
         retVal = reduce(lambda x, y: x.replace(y[0], y[1]), codes, retVal)
         try:
             retVal = re.sub(r"&#x([^ ;]+);", lambda match: unichr(int(match.group(1), 16)), retVal)
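The functional change here is a single new entry in the basic decode table so that HTML-encoded apostrophes are unescaped as well. A self-contained sketch of the same table-driven decoding (standalone names, and assuming the numeric form &#39; for the apostrophe entity):

from functools import reduce  # built-in in the original Python 2 code

CODES = (("&lt;", "<"), ("&gt;", ">"), ("&quot;", '"'), ("&nbsp;", " "), ("&amp;", "&"), ("&#39;", "'"))

def html_unescape_basic(value):
    # apply each (entity, character) replacement in turn
    return reduce(lambda acc, pair: acc.replace(pair[0], pair[1]), CODES, value)

print(html_unescape_basic("it&#39;s &lt;b&gt;bold&lt;/b&gt;"))  # it's <b>bold</b>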
lib/request/inject.py

@@ -43,6 +43,7 @@ from lib.core.exception import SqlmapDataException
 from lib.core.exception import SqlmapNotVulnerableException
 from lib.core.exception import SqlmapUserQuitException
 from lib.core.settings import GET_VALUE_UPPERCASE_KEYWORDS
+from lib.core.settings import INFERENCE_MARKER
 from lib.core.settings import MAX_TECHNIQUES_PER_VALUE
 from lib.core.settings import SQL_SCALAR_REGEX
 from lib.core.threads import getCurrentThreadData
@@ -304,7 +305,7 @@ def _goBooleanProxy(expression):
         return output
 
     vector = kb.injection.data[kb.technique].vector
-    vector = vector.replace("[INFERENCE]", expression)
+    vector = vector.replace(INFERENCE_MARKER, expression)
     query = agent.prefixQuery(vector)
     query = agent.suffixQuery(query)
     payload = agent.payload(newValue=query)
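In _goBooleanProxy() the stored boolean-blind vector is itself just a template containing the inference marker, so the lines above expand it with the concrete true/false expression and then wrap it with the detected prefix/suffix before it is embedded in the request. A rough rendition of those steps with hypothetical helpers and example values (not sqlmap's actual functions):

INFERENCE_MARKER = "[INFERENCE]"  # assumed value of the settings constant

def build_boolean_payload(vector, expression, prefix="", suffix="-- -"):
    # 1. expand the inference marker into the concrete boolean expression
    query = vector.replace(INFERENCE_MARKER, expression)
    # 2. wrap with the boundary prefix/suffix found during detection
    return ("%s %s %s" % (prefix, query, suffix)).strip()

print(build_boolean_payload("AND [INFERENCE]", "ORD(MID(current_user(),1,1))>77", prefix="'"))
# ' AND ORD(MID(current_user(),1,1))>77 -- -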