Minor patches and updates
commit 422b1a6f95 (parent eec048daf8)
@@ -63,6 +63,7 @@ from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.data import paths
+from lib.core.datatype import OrderedSet
 from lib.core.decorators import cachedmethod
 from lib.core.defaults import defaults
 from lib.core.dicts import DBMS_DICT
@@ -843,7 +844,15 @@ def getManualDirectories():
     return directories

 def getAutoDirectories():
-    retVal = set()
+    """
+    >>> pushValue(kb.absFilePaths)
+    >>> kb.absFilePaths = ["C:\\inetpub\\wwwroot\\index.asp", "/var/www/html"]
+    >>> getAutoDirectories()
+    ['C:/inetpub/wwwroot', '/var/www/html']
+    >>> kb.absFilePaths = popValue()
+    """
+
+    retVal = OrderedSet()

     if kb.absFilePaths:
         infoMsg = "retrieved web server absolute paths: "
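
The switch from set() to OrderedSet keeps the collected web-root candidates in insertion order, which is what makes the new doctest output reproducible. A minimal sketch of the idea, assuming a dict-backed container (illustration only, not sqlmap's lib/core/datatype.py OrderedSet):

class MiniOrderedSet(object):
    """Toy insertion-ordered set, standing in for the real OrderedSet."""

    def __init__(self, iterable=()):
        self._items = {}  # dicts preserve insertion order (Python 3.7+)
        for item in iterable:
            self.add(item)

    def add(self, item):
        self._items.setdefault(item, None)

    def __iter__(self):
        return iter(self._items)

candidates = MiniOrderedSet(["C:/inetpub/wwwroot", "/var/www/html", "C:/inetpub/wwwroot"])
print(list(candidates))  # ['C:/inetpub/wwwroot', '/var/www/html'] - stable order, de-duplicated
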
@@ -1370,7 +1379,16 @@ def weAreFrozen():

 def parseTargetDirect():
     """
-    Parse target dbms and set some attributes into the configuration singleton.
+    Parse target dbms and set some attributes into the configuration singleton
+
+    >>> pushValue(conf.direct)
+    >>> conf.direct = "mysql://root:testpass@127.0.0.1:3306/testdb"
+    >>> parseTargetDirect()
+    >>> conf.dbmsDb
+    'testdb'
+    >>> conf.dbmsPass
+    'testpass'
+    >>> conf.direct = popValue()
     """

     if not conf.direct:
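
Each of the new doctests wraps its mutation of global state in pushValue()/popValue() so the examples leave conf and kb exactly as they found them. A rough sketch of that save-and-restore idiom (pushValue/popValue below are simplified stand-ins; the real helpers in lib/core/common.py keep a per-thread value stack):

_stack = []  # stand-in for the thread-local stack used by the real helpers

def pushValue(value):
    _stack.append(value)

def popValue():
    return _stack.pop()

direct = "mysql://original"        # pretend global setting (e.g. conf.direct)
pushValue(direct)                  # save it before the doctest fiddles with it
direct = "mysql://root:testpass@127.0.0.1:3306/testdb"
assert direct.endswith("/testdb")  # ... doctest assertions run here ...
direct = popValue()                # restore the original value afterwards
assert direct == "mysql://original"
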
@@ -1411,6 +1429,9 @@ def parseTargetDirect():

             break

+    if kb.smokeMode:
+        return
+
     if not details:
         errMsg = "invalid target details, valid syntax is for instance "
         errMsg += "'mysql://USER:PASSWORD@DBMS_IP:DBMS_PORT/DATABASE_NAME' "
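
The new kb.smokeMode flag (initialized in _setKnowledgeBaseAttributes() and toggled by smokeTest(), both further down in this diff) lets parseTargetDirect() bail out right after the parsing loop when it is being exercised by a doctest, so the example never reaches validation and connection steps that need a real DBMS. A hedged standalone sketch of the shape of that guard:

SMOKE_MODE = False  # stand-in for kb.smokeMode

def parse_direct(value):
    # simplified parsing step: split "dbms://user:pass@host:port/db"
    dbms, sep, rest = value.partition("://")
    details = rest if sep else None

    if SMOKE_MODE:
        return  # doctest run: stop before the strict validation below

    if not details:
        raise ValueError("invalid target details, valid syntax is for instance "
                         "'mysql://USER:PASSWORD@DBMS_IP:DBMS_PORT/DATABASE_NAME'")
    # real code would continue with driver checks and connection setup here

SMOKE_MODE = True
parse_direct("mysql://root:testpass@127.0.0.1:3306/testdb")  # returns quietly
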
@@ -1475,7 +1496,16 @@ def parseTargetDirect():

 def parseTargetUrl():
     """
-    Parse target URL and set some attributes into the configuration singleton.
+    Parse target URL and set some attributes into the configuration singleton
+
+    >>> pushValue(conf.url)
+    >>> conf.url = "https://www.test.com/?id=1"
+    >>> parseTargetUrl()
+    >>> conf.hostname
+    'www.test.com'
+    >>> conf.scheme
+    'https'
+    >>> conf.url = popValue()
     """

     if not conf.url:
@@ -1826,11 +1856,13 @@ def directoryPath(filepath):

     >>> directoryPath('/var/log/apache.log')
     '/var/log'
+    >>> directoryPath('/var/log')
+    '/var/log'
     """

     retVal = filepath

-    if filepath:
+    if filepath and os.path.splitext(filepath)[-1]:
         retVal = ntpath.dirname(filepath) if isWindowsDriveLetterPath(filepath) else posixpath.dirname(filepath)

     return retVal
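
The tightened condition means directoryPath() only strips the last path component when it actually looks like a file name, i.e. when os.path.splitext() reports an extension; a bare directory such as '/var/log' now passes through untouched, which is exactly what the added doctest pins down. A simplified, POSIX-only sketch of the behaviour (the real function also routes Windows drive-letter paths through ntpath):

import os
import posixpath

def directory_path(filepath):
    retval = filepath

    # os.path.splitext("/var/log") -> ("/var/log", ""), so the branch is skipped
    if filepath and os.path.splitext(filepath)[-1]:
        retval = posixpath.dirname(filepath)

    return retval

print(directory_path("/var/log/apache.log"))  # /var/log
print(directory_path("/var/log"))             # /var/log (previously became /var)
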
@@ -3029,8 +3061,7 @@ def filterNone(values):

 def isDBMSVersionAtLeast(version):
     """
-    Checks if the recognized DBMS version is at least the version
-    specified
+    Checks if the recognized DBMS version is at least the version specified
     """

     retVal = None
@@ -3065,6 +3096,12 @@ def isDBMSVersionAtLeast(version):
 def parseSqliteTableSchema(value):
     """
     Parses table column names and types from specified SQLite table schema
+
+    >>> kb.data.cachedColumns = {}
+    >>> parseSqliteTableSchema("CREATE TABLE users\\n\\t\\tid INTEGER\\n\\t\\tname TEXT\\n);")
+    True
+    >>> repr(kb.data.cachedColumns).count(',') == 1
+    True
     """

     retVal = False
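
The second doctest assertion is an indirect column count: with exactly two columns parsed (id and name), the repr of the cached structure contains exactly one comma. A tiny illustration with a simplified stand-in for kb.data.cachedColumns (the real structure is nested more deeply, keyed by database and table, but the comma-counting trick is the same):

cached_columns = {"users": {"id": "INTEGER", "name": "TEXT"}}

print(repr(cached_columns))             # {'users': {'id': 'INTEGER', 'name': 'TEXT'}}
print(repr(cached_columns).count(','))  # 1 -> exactly two columns were parsed
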
@@ -3091,8 +3128,13 @@ def getTechniqueData(technique=None):

 def isTechniqueAvailable(technique):
     """
-    Returns True if there is injection data which sqlmap could use for
-    technique specified
+    Returns True if there is injection data which sqlmap could use for technique specified
+
+    >>> pushValue(kb.injection.data)
+    >>> kb.injection.data[PAYLOAD.TECHNIQUE.ERROR] = [test for test in getSortedInjectionTests() if "error" in test["title"].lower()][0]
+    >>> isTechniqueAvailable(PAYLOAD.TECHNIQUE.ERROR)
+    True
+    >>> kb.injection.data = popValue()
     """

     if conf.tech and isinstance(conf.tech, list) and technique not in conf.tech:
@@ -3103,6 +3145,12 @@ def isTechniqueAvailable(technique):
 def isStackingAvailable():
     """
     Returns True whether techniques using stacking are available
+
+    >>> pushValue(kb.injection.data)
+    >>> kb.injection.data[PAYLOAD.TECHNIQUE.STACKED] = [test for test in getSortedInjectionTests() if "stacked" in test["title"].lower()][0]
+    >>> isStackingAvailable()
+    True
+    >>> kb.injection.data = popValue()
     """

     retVal = False
@@ -3121,6 +3169,12 @@ def isStackingAvailable():
 def isInferenceAvailable():
     """
     Returns True whether techniques using inference technique are available
+
+    >>> pushValue(kb.injection.data)
+    >>> kb.injection.data[PAYLOAD.TECHNIQUE.BOOLEAN] = getSortedInjectionTests()[0]
+    >>> isInferenceAvailable()
+    True
+    >>> kb.injection.data = popValue()
     """

     return any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.BOOLEAN, PAYLOAD.TECHNIQUE.STACKED, PAYLOAD.TECHNIQUE.TIME))
@@ -3290,8 +3344,13 @@ def isListLike(value):

 def getSortedInjectionTests():
     """
-    Returns prioritized test list by eventually detected DBMS from error
-    messages
+    Returns prioritized test list by eventually detected DBMS from error messages
+
+    >>> pushValue(kb.forcedDbms)
+    >>> kb.forcedDbms = DBMS.SQLITE
+    >>> [test for test in getSortedInjectionTests() if hasattr(test, "details") and hasattr(test.details, "dbms")][0].details.dbms == kb.forcedDbms
+    True
+    >>> kb.forcedDbms = popValue()
     """

     retVal = copy.deepcopy(conf.tests)
@@ -3317,8 +3376,7 @@ def getSortedInjectionTests():

 def filterListValue(value, regex):
     """
-    Returns list with items that have parts satisfying given regular
-    expression
+    Returns list with items that have parts satisfying given regular expression

     >>> filterListValue(['users', 'admins', 'logs'], r'(users|admins)')
     ['users', 'admins']
@@ -3348,6 +3406,9 @@ def showHttpErrorCodes():
 def openFile(filename, mode='r', encoding=UNICODE_ENCODING, errors="reversible", buffering=1): # "buffering=1" means line buffered (Reference: http://stackoverflow.com/a/3168436)
     """
     Returns file handle of a given filename
+
+    >>> "openFile" in openFile(__file__).read()
+    True
     """

     if filename == STDIN_PIPE_DASH:
@@ -3399,22 +3460,6 @@ def decodeIntToUnicode(value):

     return retVal

-def md5File(filename):
-    """
-    Calculates MD5 digest of a file
-
-    # Reference: http://stackoverflow.com/a/3431838
-    """
-
-    checkFile(filename)
-
-    digest = hashlib.md5()
-    with open(filename, "rb") as f:
-        for chunk in iter(lambda: f.read(4096), ""):
-            digest.update(chunk)
-
-    return digest.hexdigest()
-
 def checkIntegrity():
     """
     Checks integrity of code files during the unhandled exceptions
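
Worth noting about the helper removed here, whatever the reason for dropping it from this spot: iter(lambda: f.read(4096), "") never terminates on a Python 3 binary handle, because read() returns b"" at end of file and b"" never equals the "" sentinel. A portable version of the same chunked-MD5 recipe would compare against b"":

import hashlib

def md5_file(filename):
    """Chunked MD5 of a file; note the b"" sentinel for binary reads."""
    digest = hashlib.md5()

    with open(filename, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            digest.update(chunk)

    return digest.hexdigest()

print(md5_file(__file__))
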
@@ -3441,6 +3486,9 @@ def checkIntegrity():
 def getDaysFromLastUpdate():
     """
     Get total number of days from last update
+
+    >>> getDaysFromLastUpdate() >= 0
+    True
     """

     if not paths:
@@ -3451,6 +3499,9 @@ def getDaysFromLastUpdate():
 def unhandledExceptionMessage():
     """
     Returns detailed message about occurred unhandled exception
+
+    >>> all(_ in unhandledExceptionMessage() for _ in ("unhandled exception occurred", "Operating system", "Command line"))
+    True
     """

     errMsg = "unhandled exception occurred in %s. It is recommended to retry your " % VERSION_STRING
@@ -1987,6 +1987,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
     kb.serverHeader = None
     kb.singleLogFlags = set()
     kb.skipSeqMatcher = False
+    kb.smokeMode = False
     kb.reduceTests = None
     kb.tlsSNI = {}
     kb.stickyDBMS = False
@@ -18,7 +18,7 @@ from lib.core.enums import OS
 from thirdparty import six

 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.3.5.26"
+VERSION = "1.3.5.27"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
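
For reference, plugging the bumped value into the unchanged expressions below it yields a development build tag:

VERSION = "1.3.5.27"
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)

print(TYPE)            # dev   (three dots and a non-zero monthly commit)
print(VERSION_STRING)  # sqlmap/1.3.5.27#dev
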
@@ -7,6 +7,7 @@ See the file 'LICENSE' for copying permission

 import codecs
 import doctest
+import logging
 import os
 import random
 import re
@@ -29,6 +30,7 @@ from lib.core.compat import round
 from lib.core.compat import xrange
 from lib.core.convert import getUnicode
 from lib.core.data import conf
+from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.data import paths
 from lib.core.enums import MKSTEMP_PREFIX
@@ -161,9 +163,14 @@ def smokeTest():
                     errMsg = "smoke test failed at importing module '%s' (%s):\n%s" % (path, os.path.join(root, filename), ex)
                     logger.error(errMsg)
                 else:
-                    # Run doc tests
-                    # Reference: http://docs.python.org/library/doctest.html
+                    logger.setLevel(logging.CRITICAL)
+                    kb.smokeMode = True
+
                     (failure_count, test_count) = doctest.testmod(module)
+
+                    kb.smokeMode = False
+                    logger.setLevel(logging.INFO)
+
                     if failure_count > 0:
                         retVal = False
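
This is the consumer side of kb.smokeMode: while smokeTest() walks the modules and runs their doctests via doctest.testmod(), logging is silenced and the flag tells heavyweight functions (such as parseTargetDirect() above) to cut their examples short. A self-contained sketch of the doctest.testmod() flow, using a throwaway module object rather than sqlmap's real modules:

import doctest
import logging
import types

# Throwaway module carrying one doctest in its docstring (illustration only).
module = types.ModuleType("demo")
module.__doc__ = """
>>> 2 + 2
4
"""

logger = logging.getLogger("demo")

logger.setLevel(logging.CRITICAL)                # hush expected log noise
(failure_count, test_count) = doctest.testmod(module)
logger.setLevel(logging.INFO)

print(failure_count, test_count)                 # 0 1
if failure_count > 0:
    print("smoke test would be marked as failed here")
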
@@ -352,7 +352,7 @@ def unionUse(expression, unpack=True, dump=False):
                 key = re.sub(r"[^A-Za-z0-9]", "", item).lower()
                 if key not in filtered or re.search(r"[^A-Za-z0-9]", item):
                     filtered[key] = item
-            items = filtered.values()
+            items = list(filtered.values())
             items = [items]
         index = None
         for index in xrange(1 + len(threadData.shared.buffered)):
@@ -5,6 +5,7 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

+from lib.core.common import isListLike
 from lib.core.common import readInput
 from lib.core.common import safeSQLIdentificatorNaming
 from lib.core.common import unsafeSQLIdentificatorNaming
@@ -48,7 +49,7 @@ class Enumeration(GenericEnumeration):
             retVal = pivotDumpTable("(%s) AS %s" % (query, kb.aliasName), ['%s.schemaname' % kb.aliasName], blind=True)

             if retVal:
-                kb.data.cachedDbs = retVal[0].values()[0]
+                kb.data.cachedDbs = list(retVal[0].values())[0]

         if kb.data.cachedDbs:
             kb.data.cachedDbs.sort()
@@ -83,7 +84,7 @@ class Enumeration(GenericEnumeration):
             retVal = pivotDumpTable("(%s) AS %s" % (query, kb.aliasName), ['%s.tablename' % kb.aliasName], blind=True)

             if retVal:
-                for table in retVal[0].values()[0]:
+                for table in list(retVal[0].values())[0]:
                     if db not in kb.data.cachedTables:
                         kb.data.cachedTables[db] = [table]
                     else:
@@ -131,9 +132,9 @@ class Enumeration(GenericEnumeration):
             self.getTables()

             if len(kb.data.cachedTables) > 0:
-                tblList = kb.data.cachedTables.values()
+                tblList = list(kb.data.cachedTables.values())

-                if isinstance(tblList[0], (set, tuple, list)):
+                if isListLike(tblList[0]):
                     tblList = tblList[0]
             else:
                 errMsg = "unable to retrieve the tables "
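
The list(...) wrappers here and in the following hunks are Python 3 compatibility fixes: dict.values() now returns a view object, so indexing it the Python 2 way blows up. A quick illustration (the dict shape loosely mimics a pivotDumpTable() result row):

row = {"schemaname": ["master", "model", "tempdb"]}

try:
    row.values()[0]                  # worked on Python 2, fails on Python 3
except TypeError as ex:
    print(ex)                        # 'dict_values' object is not subscriptable

print(list(row.values())[0])         # ['master', 'model', 'tempdb']
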
@@ -6,6 +6,7 @@ See the file 'LICENSE' for copying permission
 """

 from lib.core.common import filterPairValues
+from lib.core.common import isListLike
 from lib.core.common import isTechniqueAvailable
 from lib.core.common import readInput
 from lib.core.common import safeSQLIdentificatorNaming
@@ -47,7 +48,7 @@ class Enumeration(GenericEnumeration):
                 retVal = pivotDumpTable("(%s) AS %s" % (query, kb.aliasName), ['%s.name' % kb.aliasName], blind=blind, alias=kb.aliasName)

                 if retVal:
-                    kb.data.cachedUsers = retVal[0].values()[0]
+                    kb.data.cachedUsers = list(retVal[0].values())[0]
                     break

         return kb.data.cachedUsers
@@ -102,7 +103,7 @@ class Enumeration(GenericEnumeration):
                 retVal = pivotDumpTable("(%s) AS %s" % (query, kb.aliasName), ['%s.name' % kb.aliasName], blind=blind, alias=kb.aliasName)

                 if retVal:
-                    kb.data.cachedDbs = retVal[0].values()[0]
+                    kb.data.cachedDbs = list(retVal[0].values())[0]
                     break

         if kb.data.cachedDbs:
@@ -146,7 +147,7 @@ class Enumeration(GenericEnumeration):
                 retVal = pivotDumpTable("(%s) AS %s" % (query, kb.aliasName), ['%s.name' % kb.aliasName], blind=blind, alias=kb.aliasName)

                 if retVal:
-                    for table in retVal[0].values()[0]:
+                    for table in list(retVal[0].values())[0]:
                         if db not in kb.data.cachedTables:
                             kb.data.cachedTables[db] = [table]
                         else:
@@ -195,9 +196,9 @@ class Enumeration(GenericEnumeration):
             self.getTables()

             if len(kb.data.cachedTables) > 0:
-                tblList = kb.data.cachedTables.values()
+                tblList = list(kb.data.cachedTables.values())

-                if isinstance(tblList[0], (set, tuple, list)):
+                if isListLike(tblList[0]):
                     tblList = tblList[0]
             else:
                 errMsg = "unable to retrieve the tables "
@@ -478,9 +478,9 @@ class Databases:
         if conf.db in kb.data.cachedTables:
             tblList = kb.data.cachedTables[conf.db]
         else:
-            tblList = kb.data.cachedTables.values()
+            tblList = list(kb.data.cachedTables.values())

-            if isinstance(tblList[0], (set, tuple, list)):
+            if isListLike(tblList[0]):
                 tblList = tblList[0]

         tblList = list(tblList)
@@ -93,9 +93,9 @@ class Entries:
             self.getTables()

             if len(kb.data.cachedTables) > 0:
-                tblList = kb.data.cachedTables.values()
+                tblList = list(kb.data.cachedTables.values())

-                if isinstance(tblList[0], (set, tuple, list)):
+                if isListLike(tblList[0]):
                     tblList = tblList[0]
                 elif not conf.search:
                     errMsg = "unable to retrieve the tables "
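
Across these hunks the isinstance(..., (set, tuple, list)) checks give way to the existing isListLike() helper from lib/core/common.py (one hunk header above even points at its definition), which centralizes the notion of "list-like". A rough sketch of the resulting pattern:

def isListLike(value):
    # Approximation of the helper; the real one also accepts sqlmap's own
    # container types (e.g. its BigArray), not just the builtins.
    return isinstance(value, (list, tuple, set))

cachedTables = {"testdb": ["users", "logs"]}

tblList = list(cachedTables.values())
if isListLike(tblList[0]):
    tblList = tblList[0]

print(tblList)  # ['users', 'logs']
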