mirror of https://github.com/sqlmapproject/sqlmap.git (synced 2024-11-22 01:26:42 +03:00)
Stabilizing first drei compatible prototype

commit 7d9cd0c079 (parent 6dbf24531c)
@@ -220,7 +220,7 @@ def _saveToHashDB():
             _[key] = injection
         else:
             _[key].data.update(injection.data)
-    hashDBWrite(HASHDB_KEYS.KB_INJECTIONS, _.values(), True)
+    hashDBWrite(HASHDB_KEYS.KB_INJECTIONS, list(_.values()), True)
 
     _ = hashDBRetrieve(HASHDB_KEYS.KB_ABS_FILE_PATHS, True)
     hashDBWrite(HASHDB_KEYS.KB_ABS_FILE_PATHS, kb.absFilePaths | (_ if isinstance(_, set) else set()), True)
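The list() wrapper is presumably needed because hashDBWrite() with the trailing True flag pickles the value before storing it, and on Python 3 dict.values() returns a view object that pickle refuses to serialize. A minimal standalone sketch of that failure mode, with a plain dict standing in for the injections mapping:

import pickle

injections = {("GET", "id"): "stub injection entry"}

try:
    pickle.dumps(injections.values())            # Python 3: TypeError ("cannot pickle 'dict_values' object")
except TypeError as ex:
    print("view does not pickle: %s" % ex)

print(len(pickle.dumps(list(injections.values()))))  # a plain list serializes fine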
@@ -3290,7 +3290,9 @@ def arrayizeValue(value):
     ['1']
     """
 
-    if not isListLike(value):
+    if isinstance(value, collections.KeysView):
+        value = [_ for _ in value]
+    elif not isListLike(value):
         value = [value]
 
     return value
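The extra branch presumably exists because on Python 3 dict.keys() returns a KeysView, which sqlmap's isListLike() does not treat as a sequence; without the branch such a view would get wrapped as a one-element list instead of being expanded. A standalone sketch, with isListLike() replaced by a simplified stand-in:

import collections.abc

def isListLike(value):                                     # simplified stand-in for sqlmap's helper
    return isinstance(value, (list, tuple, set))

def arrayizeValue(value):
    if isinstance(value, collections.abc.KeysView):        # what dict.keys() yields on Python 3
        value = [_ for _ in value]
    elif not isListLike(value):
        value = [value]
    return value

print(arrayizeValue({"users": 0, "passwords": 1}.keys()))  # ['users', 'passwords']
print(arrayizeValue('1'))                                  # ['1']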
@@ -3393,7 +3395,7 @@ def showHttpErrorCodes():
         warnMsg = "HTTP error codes detected during run:\n"
         warnMsg += ", ".join("%d (%s) - %d times" % (code, _http_client.responses[code] if code in _http_client.responses else '?', count) for code, count in kb.httpErrorCodes.items())
         logger.warn(warnMsg)
-        if any((str(_).startswith('4') or str(_).startswith('5')) and _ != _http_client.INTERNAL_SERVER_ERROR and _ != kb.originalCode for _ in kb.httpErrorCodes.keys()):
+        if any((str(_).startswith('4') or str(_).startswith('5')) and _ != _http_client.INTERNAL_SERVER_ERROR and _ != kb.originalCode for _ in kb.httpErrorCodes):
             msg = "too many 4xx and/or 5xx HTTP error codes "
             msg += "could mean that some kind of protection is involved (e.g. WAF)"
             logger.debug(msg)
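Dropping .keys() from the generator expression is a cosmetic cleanup rather than a behavioural fix: iterating a dict directly yields its keys on both Python 2 and Python 3, so the any(...) test is unchanged. For instance:

httpErrorCodes = {404: 3, 500: 1}                            # code -> occurrence count, as in kb.httpErrorCodes
assert list(httpErrorCodes) == list(httpErrorCodes.keys())
print(any(str(_).startswith('5') for _ in httpErrorCodes))   # True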
@@ -16,6 +16,7 @@ import re
 import sys
 
 from lib.core.settings import IS_WIN
+from lib.core.settings import PICKLE_PROTOCOL
 from lib.core.settings import UNICODE_ENCODING
 from thirdparty import six
 
@@ -50,7 +51,7 @@ def base64pickle(value):
     retVal = None
 
     try:
-        retVal = base64encode(pickle.dumps(value, pickle.HIGHEST_PROTOCOL))
+        retVal = base64encode(pickle.dumps(value, PICKLE_PROTOCOL))
     except:
         warnMsg = "problem occurred while serializing "
         warnMsg += "instance of a type '%s'" % type(value)
@@ -59,7 +60,7 @@ def base64pickle(value):
         try:
             retVal = base64encode(pickle.dumps(value))
         except:
-            retVal = base64encode(pickle.dumps(str(value), pickle.HIGHEST_PROTOCOL))
+            retVal = base64encode(pickle.dumps(str(value), PICKLE_PROTOCOL))
 
     return retVal
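Replacing pickle.HIGHEST_PROTOCOL with the new PICKLE_PROTOCOL constant is presumably about cross-interpreter session data: HIGHEST_PROTOCOL is 2 on Python 2 but 4 or 5 on modern Python 3, so a HashDB written by one interpreter could not be unpickled by the other. Protocol 2 is the newest format both lines understand. A small sketch:

import pickle

PICKLE_PROTOCOL = 2                                        # mirrors the constant added to lib/core/settings.py

blob = pickle.dumps({"key": "value"}, PICKLE_PROTOCOL)     # readable by Python 2 and 3 alike
print(pickle.loads(blob))
print("HIGHEST_PROTOCOL here:", pickle.HIGHEST_PROTOCOL)   # interpreter-dependent (2, 4 or 5)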
@@ -194,7 +194,7 @@ class Dump(object):
         self._areAdmins = userSettings[1]
         userSettings = userSettings[0]
 
-        users = userSettings.keys()
+        users = list(userSettings.keys())
         users.sort(key=lambda _: _.lower() if hasattr(_, "lower") else _)
 
         if conf.api:
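This and the three Dump hunks that follow apply the same normalization: dict.keys() is materialized with list(). On Python 3 a keys view has no .sort() method, so the in-place sorts that follow would otherwise raise AttributeError. Illustratively:

userSettings = {"sa": [], "root": []}

# users = userSettings.keys(); users.sort(...)  ->  AttributeError on Python 3
users = list(userSettings.keys())
users.sort(key=lambda _: _.lower() if hasattr(_, "lower") else _)
print(users)                                               # ['root', 'sa']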
@@ -288,7 +288,7 @@ class Dump(object):
 
         colType = None
 
-        colList = columns.keys()
+        colList = list(columns.keys())
         colList.sort(key=lambda _: _.lower() if hasattr(_, "lower") else _)
 
         for column in colList:
@@ -372,7 +372,7 @@ class Dump(object):
             self._write("| Table%s | Entries%s |" % (blank1, blank2))
             self._write("+%s+%s+" % (lines1, lines2))
 
-            sortedCounts = counts.keys()
+            sortedCounts = list(counts.keys())
             sortedCounts.sort(reverse=True)
 
             for count in sortedCounts:
@@ -484,7 +484,7 @@ class Dump(object):
             field = 1
             fields = len(tableValues) - 1
 
-            columns = prioritySortColumns(tableValues.keys())
+            columns = prioritySortColumns(list(tableValues.keys()))
 
             if conf.col:
                 cols = conf.col.split(',')
@@ -17,7 +17,7 @@ from lib.core.enums import DBMS_DIRECTORY_NAME
 from lib.core.enums import OS
 
 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.3.5.1"
+VERSION = "1.3.5.2"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
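For the record, the version bump keeps the derived constants on the "dev" path; evaluating the unchanged expressions with the new value:

VERSION = "1.3.5.2"
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)

print(TYPE)            # dev
print(VERSION_STRING)  # sqlmap/1.3.5.2#dev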
@@ -618,7 +618,10 @@ HASHDB_RETRIEVE_RETRIES = 3
 HASHDB_END_TRANSACTION_RETRIES = 3
 
 # Unique milestone value used for forced deprecation of old HashDB values (e.g. when changing hash/pickle mechanism)
-HASHDB_MILESTONE_VALUE = "BZzRotigLX"  # python -c 'import random, string; print "".join(random.sample(string.ascii_letters, 10))'
+HASHDB_MILESTONE_VALUE = "OdqjeUpBLc"  # python -c 'import random, string; print "".join(random.sample(string.ascii_letters, 10))'
+
+# Pickle protocol used for storage of serialized data inside HashDB (https://docs.python.org/3/library/pickle.html#data-stream-format)
+PICKLE_PROTOCOL = 2
 
 # Warn user of possible delay due to large page dump in full UNION query injections
 LARGE_OUTPUT_THRESHOLD = 1024 ** 2
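Rotating HASHDB_MILESTONE_VALUE follows from its own comment: changing the pickle mechanism (the new PICKLE_PROTOCOL) must force deprecation of HashDB entries stored by older versions, and bumping the milestone does exactly that. The regeneration one-liner in the comment is still Python 2 syntax; a Python 3 friendly equivalent would be:

import random
import string

print("".join(random.sample(string.ascii_letters, 10)))   # e.g. a fresh 10-letter milestone value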
@@ -143,7 +143,7 @@ def smokeTest():
     return retVal
 
 def adjustValueType(tagName, value):
-    for family in optDict.keys():
+    for family in optDict:
         for name, type_ in optDict[family].items():
             if type(type_) == tuple:
                 type_ = type_[0]
@@ -375,8 +375,12 @@ def main():
                         os.remove(filepath)
                     except OSError:
                         pass
 
+            if not filterNone(filepath for filepath in glob.glob(os.path.join(kb.tempDir, '*')) if not any(filepath.endswith(_) for _ in ('.lock', '.exe', '_'))):
+                try:
+                    shutil.rmtree(kb.tempDir, ignore_errors=True)
+                except OSError:
+                    pass
+
         if conf.get("hashDB"):
             conf.hashDB.flush(True)
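The new block in main()'s cleanup removes the temporary directory only when nothing but junk files (*.lock, *.exe, *_) remains in it. A self-contained sketch of that guard, with filterNone() assumed here to simply drop falsy items (sqlmap's real helper lives in lib/core/common.py):

import glob
import os
import shutil

def filterNone(values):                                    # simplified stand-in for sqlmap's helper
    return [_ for _ in values if _]

def cleanupTempDir(tempDir):
    # keep the directory if any non-junk file is still present
    leftovers = filterNone(
        filepath for filepath in glob.glob(os.path.join(tempDir, '*'))
        if not any(filepath.endswith(_) for _ in ('.lock', '.exe', '_'))
    )
    if not leftovers:
        try:
            shutil.rmtree(tempDir, ignore_errors=True)
        except OSError:
            pass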