mirror of https://github.com/sqlmapproject/sqlmap.git
synced 2024-11-25 11:03:47 +03:00

commit f1ab887c55
parent d07881b6c3

    major enhancement, code refactoring for issue #297

_sqlmap.py: 14 changed lines
--- a/_sqlmap.py
+++ b/_sqlmap.py
@@ -59,11 +59,11 @@ def main():
         cmdLineOptions.update(cmdLineParser().__dict__)
         init(cmdLineOptions)
 
-        if hasattr(conf, "ipc_database"):
+        if hasattr(conf, "api"):
             # Overwrite system standard output and standard error to write
-            # to a temporary I/O database
-            sys.stdout = StdDbOut(type_="stdout")
-            sys.stderr = StdDbOut(type_="stderr")
+            # to an IPC database
+            sys.stdout = StdDbOut(conf.taskid, messagetype="stdout")
+            sys.stderr = StdDbOut(conf.taskid, messagetype="stderr")
 
         banner()
 
@@ -122,10 +122,10 @@ def main():
     except KeyboardInterrupt:
         pass
 
-    if hasattr(conf, "ipc_database"):
+    if hasattr(conf, "api"):
         try:
-            conf.ipc_database_cursor.close()
-            conf.ipc_database_connection.close()
+            conf.database_cursor.close()
+            conf.database_connection.close()
         except KeyboardInterrupt:
             pass
 
--- a/lib/core/common.py
+++ b/lib/core/common.py
@@ -742,7 +742,7 @@ def setColor(message, bold=False):
 
     return retVal
 
-def dataToStdout(data, forceOutput=False, bold=False):
+def dataToStdout(data, forceOutput=False, bold=False, content_type=None, status=None):
     """
     Writes text to the stdout (console) stream
     """
@@ -754,8 +754,15 @@ def dataToStdout(data, forceOutput=False, bold=False):
     if kb.get("multiThreadMode"):
         logging._acquireLock()
 
-    message = stdoutencode(data)
-    sys.stdout.write(setColor(message, bold))
+    if isinstance(data, basestring):
+        message = stdoutencode(data)
+    else:
+        message = data
+
+    if content_type is not None and status is not None:
+        sys.stdout.write(message, status=status, content_type=content_type)
+    else:
+        sys.stdout.write(setColor(message, bold))
 
     try:
         sys.stdout.flush()
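The subtle point in the hunk above is that sys.stdout.write(message, status=..., content_type=...) only works because, under the API, sys.stdout has been replaced by a StdDbOut instance (see lib/utils/api.py below) whose write() accepts those keywords. A minimal standalone sketch of the trick; the names here are illustrative, not sqlmap's:

import sys

_real_stdout = sys.stdout

class ApiStdout(object):
    # stand-in for StdDbOut: write() takes the extra keywords that
    # dataToStdout() passes when content_type/status are given
    def write(self, value, status=None, content_type=None):
        # a real StdDbOut would INSERT a row into the IPC database here
        _real_stdout.write("row: status=%r content_type=%r value=%r\n"
                           % (status, content_type, value))

    def flush(self):
        pass

def data_to_stdout(data, content_type=None, status=None):
    # same dispatch as the patched dataToStdout() above
    if content_type is not None and status is not None:
        sys.stdout.write(data, status=status, content_type=content_type)
    else:
        sys.stdout.write(str(data))

sys.stdout = ApiStdout()                            # what StdDbOut.__init__() does
data_to_stdout("5.5.29", content_type=1, status=1)  # -> one database "row"
sys.stdout = _real_stdout                           # restore the console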
--- a/lib/core/convert.py
+++ b/lib/core/convert.py
@@ -104,3 +104,6 @@ def stdoutencode(data):
 
 def jsonize(data):
     return json.dumps(data, sort_keys=False, indent=4)
+
+def dejsonize(data):
+    return json.loads(data)
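dejsonize() is the read-side complement of jsonize(): values written to the IPC database's data table are serialized with one and recovered with the other. A quick round trip:

import json

def jsonize(data):
    return json.dumps(data, sort_keys=False, indent=4)

def dejsonize(data):
    return json.loads(data)

value = {"users": ["root", "sqlmap"]}
assert dejsonize(jsonize(value)) == value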
--- a/lib/core/dump.py
+++ b/lib/core/dump.py
@@ -26,6 +26,8 @@ from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.dicts import DUMP_REPLACEMENTS
+from lib.core.enums import API_CONTENT_STATUS
+from lib.core.enums import API_CONTENT_TYPE
 from lib.core.enums import DBMS
 from lib.core.enums import DUMP_FORMAT
 from lib.core.exception import SqlmapGenericException
@@ -52,8 +54,13 @@ class Dump(object):
         self._outputFP = None
         self._lock = threading.Lock()
 
-    def _write(self, data, newline=True, console=True):
+    def _write(self, data, newline=True, console=True, content_type=None):
+        if hasattr(conf, "api"):
+            dataToStdout(data, content_type=content_type, status=API_CONTENT_STATUS.COMPLETE)
+            return
+
         text = "%s%s" % (data, "\n" if newline else " ")
 
         if console:
             dataToStdout(text)
 
@@ -81,7 +88,7 @@ class Dump(object):
     def singleString(self, data):
         self._write(data)
 
-    def string(self, header, data, sort=True):
+    def string(self, header, data, content_type=None, sort=True):
         kb.stickyLevel = None
 
         if isListLike(data):
@@ -92,18 +99,19 @@ class Dump(object):
             if _ and _[-1] == '\n':
                 _ = _[:-1]
 
-            if "\n" in _:
+            if hasattr(conf, "api"):
+                self._write(data, content_type=content_type)
+            elif "\n" in _:
                 self._write("%s:\n---\n%s\n---" % (header, _))
             else:
                 self._write("%s: %s" % (header, ("'%s'" % _) if isinstance(data, basestring) else _))
+        elif hasattr(conf, "api"):
+            self._write(data, content_type=content_type)
         else:
             self._write("%s:\tNone" % header)
 
-    def lister(self, header, elements, sort=True):
-        if elements:
-            self._write("%s [%d]:" % (header, len(elements)))
-
-        if sort:
+    def lister(self, header, elements, content_type=None, sort=True):
+        if elements and sort:
             try:
                 elements = set(elements)
                 elements = list(elements)
@@ -111,6 +119,13 @@ class Dump(object):
             except:
                 pass
 
+        if hasattr(conf, "api"):
+            self._write(elements, content_type=content_type)
+            return
+
+        if elements:
+            self._write("%s [%d]:" % (header, len(elements)))
+
         for element in elements:
             if isinstance(element, basestring):
                 self._write("[*] %s" % element)
@@ -121,29 +136,29 @@ class Dump(object):
             self._write("")
 
     def banner(self, data):
-        self.string("banner", data)
+        self.string("banner", data, content_type=API_CONTENT_TYPE.BANNER)
 
     def currentUser(self, data):
-        self.string("current user", data)
+        self.string("current user", data, content_type=API_CONTENT_TYPE.CURRENT_USER)
 
     def currentDb(self, data):
         if Backend.isDbms(DBMS.MAXDB):
-            self.string("current database (no practical usage on %s)" % Backend.getIdentifiedDbms(), data)
+            self.string("current database (no practical usage on %s)" % Backend.getIdentifiedDbms(), data, content_type=API_CONTENT_TYPE.CURRENT_DB)
         elif Backend.isDbms(DBMS.ORACLE):
-            self.string("current schema (equivalent to database on %s)" % Backend.getIdentifiedDbms(), data)
+            self.string("current schema (equivalent to database on %s)" % Backend.getIdentifiedDbms(), data, content_type=API_CONTENT_TYPE.CURRENT_DB)
         else:
-            self.string("current database", data)
+            self.string("current database", data, content_type=API_CONTENT_TYPE.CURRENT_DB)
 
     def hostname(self, data):
-        self.string("hostname", data)
+        self.string("hostname", data, content_type=API_CONTENT_TYPE.HOSTNAME)
 
     def dba(self, data):
-        self.string("current user is DBA", data)
+        self.string("current user is DBA", data, content_type=API_CONTENT_TYPE.IS_DBA)
 
     def users(self, users):
-        self.lister("database management system users", users)
+        self.lister("database management system users", users, content_type=API_CONTENT_TYPE.USERS)
 
-    def userSettings(self, header, userSettings, subHeader):
+    def userSettings(self, header, userSettings, subHeader, content_type=None):
         self._areAdmins = set()
 
         if userSettings:
@@ -179,9 +194,9 @@ class Dump(object):
             self.singleString("")
 
     def dbs(self, dbs):
-        self.lister("available databases", dbs)
+        self.lister("available databases", dbs, content_type=API_CONTENT_TYPE.DBS)
 
-    def dbTables(self, dbTables):
+    def dbTables(self, dbTables, content_type=API_CONTENT_TYPE.TABLES):
         if isinstance(dbTables, dict) and len(dbTables) > 0:
             maxlength = 0
 
@@ -219,7 +234,7 @@ class Dump(object):
         else:
             self.string("tables", dbTables)
 
-    def dbTableColumns(self, tableColumns):
+    def dbTableColumns(self, tableColumns, content_type=API_CONTENT_TYPE.COLUMNS):
         if isinstance(tableColumns, dict) and len(tableColumns) > 0:
             for db, tables in tableColumns.items():
                 if not db:
@@ -286,7 +301,7 @@ class Dump(object):
         else:
             self._write("+%s+\n" % lines1)
 
-    def dbTablesCount(self, dbTables):
+    def dbTablesCount(self, dbTables, content_type=API_CONTENT_TYPE.COUNT):
         if isinstance(dbTables, dict) and len(dbTables) > 0:
             maxlength1 = len("Table")
             maxlength2 = len("Entries")
@@ -328,7 +343,7 @@ class Dump(object):
         else:
             logger.error("unable to retrieve the number of entries for any table")
 
-    def dbTableValues(self, tableValues):
+    def dbTableValues(self, tableValues, content_type=API_CONTENT_TYPE.DUMP_TABLE):
         replication = None
         rtable = None
         dumpFP = None
@@ -534,7 +549,7 @@ class Dump(object):
             dumpFP.close()
             logger.info("table '%s.%s' dumped to %s file '%s'" % (db, table, conf.dumpFormat, dumpFileName))
 
-    def dbColumns(self, dbColumnsDict, colConsider, dbs):
+    def dbColumns(self, dbColumnsDict, colConsider, dbs, content_type=API_CONTENT_TYPE.COLUMNS):
         for column in dbColumnsDict.keys():
             if colConsider == "1":
                 colConsiderStr = "s like '" + column + "' were"
@@ -565,13 +580,13 @@ class Dump(object):
             self.dbTableColumns(_)
 
     def query(self, query, queryRes):
-        self.string(query, queryRes)
+        self.string(query, queryRes, content_type=API_CONTENT_TYPE.SQL_QUERY)
 
     def rFile(self, fileData):
-        self.lister("files saved to", fileData, sort=False)
+        self.lister("files saved to", fileData, sort=False, content_type=API_CONTENT_TYPE.FILE_READ)
 
-    def registerValue(self, registerData):
-        self.string("Registry key value data", registerData, sort=False)
+    def registerValue(self):
+        self.string("Registry key value data", registerData, registerData, content_type=API_CONTENT_TYPE.REG_READ, sort=False)
 
 # object to manage how to print the retrieved queries output to
 # standard output and sessions file
--- a/lib/core/enums.py
+++ b/lib/core/enums.py
@@ -243,3 +243,33 @@ class WEB_API:
     ASP = "asp"
     ASPX = "aspx"
     JSP = "jsp"
+
+class API_CONTENT_TYPE:
+    TECHNIQUES = 0
+    BANNER = 1
+    CURRENT_USER = 2
+    CURRENT_DB = 3
+    HOSTNAME = 4
+    IS_DBA = 5
+    USERS = 6
+    PASSWORDS = 7
+    PRIVILEGES = 8
+    ROLES = 9
+    DBS = 10
+    TABLES = 11
+    COLUMNS = 12
+    SCHEMA = 13
+    COUNT = 14
+    DUMP_TABLE = 15
+    SEARCH = 16
+    SQL_QUERY = 17
+    COMMON_TABLES = 18
+    COMMON_COLUMNS = 19
+    FILE_READ = 20
+    FILE_WRITE = 21
+    OS_CMD = 22
+    REG_READ = 23
+
+class API_CONTENT_STATUS:
+    IN_PROGRESS = 0
+    COMPLETE = 1
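These constants are the wire format between engine and server: every stdout message becomes a (taskid, status, content_type, value) row in the new data table, so a client can decode results without parsing console text. A hypothetical row for a users enumeration (task ID invented for illustration):

# status=1 (API_CONTENT_STATUS.COMPLETE), content_type=6 (API_CONTENT_TYPE.USERS);
# value is the jsonize()d payload handed to StdDbOut.write()
row = ("abcd1234abcd1234", 1, 6, '["root@localhost", "sqlmap@localhost"]')
#       taskid             status content_type  value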
lib/utils/api.py: 320 changed lines

--- a/lib/utils/api.py
+++ b/lib/utils/api.py
@@ -19,6 +19,7 @@ from lib.core.common import unArrayizeValue
 from lib.core.convert import base64pickle
 from lib.core.convert import base64unpickle
 from lib.core.convert import hexencode
+from lib.core.convert import dejsonize
 from lib.core.convert import jsonize
 from lib.core.data import conf
 from lib.core.data import paths
@@ -27,7 +28,7 @@ from lib.core.datatype import AttribDict
 from lib.core.defaults import _defaults
 from lib.core.log import LOGGER_HANDLER
 from lib.core.optiondict import optDict
-from lib.core.subprocessng import Popen as execute
+from lib.core.subprocessng import Popen
 from lib.core.subprocessng import send_all
 from lib.core.subprocessng import recv_some
 from thirdparty.bottle.bottle import abort
@@ -45,28 +46,125 @@ RESTAPI_SERVER_PORT = 8775
 
 # Local global variables
 adminid = ""
-procs = dict()
-tasks = AttribDict()
+db = None
+tasks = dict()
 
-# Wrapper functions
+# API objects
+class Database(object):
+    LOGS_TABLE = "CREATE TABLE logs(id INTEGER PRIMARY KEY AUTOINCREMENT, taskid INTEGER, time TEXT, level TEXT, message TEXT)"
+    DATA_TABLE = "CREATE TABLE data(id INTEGER PRIMARY KEY AUTOINCREMENT, taskid INTEGER, status INTEGER, content_type INTEGER, value TEXT)"
+    ERRORS_TABLE = "CREATE TABLE errors(id INTEGER PRIMARY KEY AUTOINCREMENT, taskid INTEGER, error TEXT)"
+
+    def __init__(self):
+        pass
+
+    def create(self):
+        _, self.database = tempfile.mkstemp(prefix="sqlmapipc-", text=False)
+        logger.info("IPC database is %s" % self.database)
+
+    def connect(self):
+        self.connection = sqlite3.connect(self.database, timeout=1, isolation_level=None)
+        self.cursor = self.connection.cursor()
+
+    def disconnect(self):
+        self.cursor.close()
+        self.connection.close()
+
+    def execute(self, statement, arguments=None):
+        if arguments:
+            self.cursor.execute(statement, arguments)
+        else:
+            self.cursor.execute(statement)
+
+        if statement.lstrip().upper().startswith("SELECT"):
+            return self.cursor.fetchall()
+
+    def initialize(self):
+        self.create()
+        self.connect()
+        self.execute(self.LOGS_TABLE)
+        self.execute(self.DATA_TABLE)
+        self.execute(self.ERRORS_TABLE)
+
+    def get_filepath(self):
+        return self.database
+
+class Task(object):
+    global db
+
+    def __init__(self, taskid):
+        self.process = None
+        self.output_directory = None
+        self.initialize_options(taskid)
+
+    def initialize_options(self, taskid):
+        dataype = {"boolean": False, "string": None, "integer": None, "float": None}
+        self.options = AttribDict()
+
+        for _ in optDict:
+            for name, type_ in optDict[_].items():
+                type_ = unArrayizeValue(type_)
+                self.options[name] = _defaults.get(name, dataype[type_])
+
+        # Let sqlmap engine knows it is getting called by the API, the task ID and the file path of the IPC database
+        self.options.api = True
+        self.options.taskid = taskid
+        self.options.database = db.get_filepath()
+
+        # Enforce batch mode and disable coloring
+        self.options.batch = True
+        self.options.disableColoring = True
+
+    def set_option(self, option, value):
+        self.options[option] = value
+
+    def get_option(self, option):
+        return self.options[option]
+
+    def get_options(self):
+        return self.options
+
+    def set_output_directory(self):
+        self.output_directory = tempfile.mkdtemp(prefix="sqlmapoutput-")
+        self.set_option("oDir", self.output_directory)
+
+    def clean_filesystem(self):
+        shutil.rmtree(self.output_directory)
+
+    def engine_start(self):
+        self.process = Popen("python sqlmap.py --pickled-options %s" % base64pickle(self.options), shell=True, stdin=PIPE)
+
+    def engine_stop(self):
+        if self.process:
+            self.process.terminate()
+
+    def engine_kill(self):
+        if self.process:
+            self.process.kill()
+
+    def engine_get_pid(self):
+        return self.processid.pid
+
+# Wrapper functions for sqlmap engine
 class StdDbOut(object):
     encoding = "UTF-8"
 
-    def __init__(self, type_="stdout"):
+    def __init__(self, taskid, messagetype="stdout"):
         # Overwrite system standard output and standard error to write
-        # to a temporary I/O database
-        self.type = type_
+        # to an IPC database
+        self.messagetype = messagetype
+        self.taskid = taskid
 
-        if self.type == "stdout":
+        if self.messagetype == "stdout":
             sys.stdout = self
         else:
             sys.stderr = self
 
-    def write(self, string):
-        if self.type == "stdout":
-            conf.ipc_database_cursor.execute("INSERT INTO stdout VALUES(NULL, ?, ?)", (time.strftime("%X"), string))
+    def write(self, value, status=None, content_type=None):
+        if self.messagetype == "stdout":
+            conf.database_cursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)", (self.taskid, status, content_type, jsonize(value)))
         else:
-            conf.ipc_database_cursor.execute("INSERT INTO stderr VALUES(NULL, ?, ?)", (time.strftime("%X"), string))
+            conf.database_cursor.execute("INSERT INTO errors VALUES(NULL, ?, ?)", (self.taskid, value))
 
     def flush(self):
         pass
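The Database/StdDbOut pair above reduces engine-to-server IPC to rows in a single SQLite file. A self-contained sketch of that producer/consumer flow, reusing the commit's own schema and queries (task ID and values invented for illustration):

import json
import sqlite3
import tempfile

DATA_TABLE = ("CREATE TABLE data(id INTEGER PRIMARY KEY AUTOINCREMENT, "
              "taskid INTEGER, status INTEGER, content_type INTEGER, value TEXT)")

_, path = tempfile.mkstemp(prefix="sqlmapipc-")
connection = sqlite3.connect(path, timeout=1, isolation_level=None)
cursor = connection.cursor()
cursor.execute(DATA_TABLE)

# Engine side: what StdDbOut.write() does for stdout messages
taskid = "abcd1234"
cursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)",
               (taskid, 1, 6, json.dumps(["root", "sqlmap"])))

# Server side: what scan_data() does when the client polls
for status, content_type, value in cursor.execute(
        "SELECT status, content_type, value FROM data WHERE taskid = ? ORDER BY id ASC",
        (taskid,)):
    print(status, content_type, json.loads(value))

connection.close()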
@@ -80,20 +178,19 @@ class StdDbOut(object):
 class LogRecorder(logging.StreamHandler):
     def emit(self, record):
         """
-        Record emitted events to temporary database for asynchronous I/O
+        Record emitted events to IPC database for asynchronous I/O
         communication with the parent process
         """
-        conf.ipc_database_cursor.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?)",
-                                         (time.strftime("%X"), record.levelname,
-                                          record.msg % record.args if record.args else record.msg))
+        conf.database_cursor.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?, ?)",
+                                     (conf.taskid, time.strftime("%X"), record.levelname,
+                                      record.msg % record.args if record.args else record.msg))
 
 def setRestAPILog():
-    if hasattr(conf, "ipc_database"):
-        conf.ipc_database_connection = sqlite3.connect(conf.ipc_database, timeout=1, isolation_level=None)
-        conf.ipc_database_cursor = conf.ipc_database_connection.cursor()
+    if hasattr(conf, "api"):
+        conf.database_connection = sqlite3.connect(conf.database, timeout=1, isolation_level=None)
+        conf.database_cursor = conf.database_connection.cursor()
 
-        # Set a logging handler that writes log messages to a temporary
-        # I/O database
+        # Set a logging handler that writes log messages to a IPC database
         logger.removeHandler(LOGGER_HANDLER)
         LOGGER_RECORDER = LogRecorder()
         logger.addHandler(LOGGER_RECORDER)
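LogRecorder follows the standard logging-handler pattern: subclass, override emit(), point it at storage instead of a stream. The same idea in isolation, without sqlmap's conf globals (all names here are illustrative):

import logging
import sqlite3
import time

class SQLiteHandler(logging.Handler):
    def __init__(self, cursor, taskid):
        logging.Handler.__init__(self)
        self.cursor = cursor
        self.taskid = taskid

    def emit(self, record):
        # mirrors LogRecorder.emit(): one row per log record
        self.cursor.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?, ?)",
                            (self.taskid, time.strftime("%X"),
                             record.levelname, record.getMessage()))

connection = sqlite3.connect(":memory:", isolation_level=None)
cursor = connection.cursor()
cursor.execute("CREATE TABLE logs(id INTEGER PRIMARY KEY AUTOINCREMENT, "
               "taskid INTEGER, time TEXT, level TEXT, message TEXT)")

log = logging.getLogger("demo")
log.addHandler(SQLiteHandler(cursor, "abcd1234"))
log.warning("test message")
print(cursor.execute("SELECT level, message FROM logs").fetchall())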
@@ -106,21 +203,6 @@ def is_admin(taskid):
     else:
         return True
 
-def init_options():
-    dataype = {"boolean": False, "string": None, "integer": None, "float": None}
-    options = AttribDict()
-
-    for _ in optDict:
-        for name, type_ in optDict[_].items():
-            type_ = unArrayizeValue(type_)
-            options[name] = _defaults.get(name, dataype[type_])
-
-    # Enforce batch mode and disable coloring
-    options.batch = True
-    options.disableColoring = True
-
-    return options
-
 @hook("after_request")
 def security_headers(json_header=True):
     """
@@ -170,25 +252,10 @@ def task_new():
     """
     Create new task ID
     """
-    global procs
     global tasks
 
-    taskid = hexencode(os.urandom(16))
-    tasks[taskid] = init_options()
-    procs[taskid] = AttribDict()
-
-    _, ipc_database_filepath = tempfile.mkstemp(prefix="sqlmapipc-", text=False)
-
-    # Initiate the temporary database for asynchronous I/O with the
-    # sqlmap engine
-    procs[taskid].ipc_database_connection = sqlite3.connect(ipc_database_filepath, timeout=1, isolation_level=None)
-    procs[taskid].ipc_database_cursor = procs[taskid].ipc_database_connection.cursor()
-    procs[taskid].ipc_database_cursor.execute("CREATE TABLE logs(id INTEGER PRIMARY KEY AUTOINCREMENT, time TEXT, level TEXT, message TEXT)")
-    procs[taskid].ipc_database_cursor.execute("CREATE TABLE stdout(id INTEGER PRIMARY KEY AUTOINCREMENT, time TEXT, message TEXT)")
-    procs[taskid].ipc_database_cursor.execute("CREATE TABLE stderr(id INTEGER PRIMARY KEY AUTOINCREMENT, time TEXT, message TEXT)")
-
-    # Set the temporary database to use for asynchronous I/O communication
-    tasks[taskid].ipc_database = ipc_database_filepath
+    taskid = hexencode(os.urandom(8))
+    tasks[taskid] = Task(taskid)
 
     return jsonize({"taskid": taskid})
 
@@ -197,7 +264,8 @@ def task_destroy(taskid):
     """
     Destroy own task ID
     """
-    if taskid in tasks and not is_admin(taskid):
+    if taskid in tasks:
+        tasks[taskid].clean_filesystem()
         tasks.pop(taskid)
         return jsonize({"success": True})
     else:
@@ -217,16 +285,15 @@ def task_list(taskid):
 @get("/task/<taskid>/flush")
 def task_flush(taskid):
     """
-    Flush task spool (destroy all tasks except admin)
+    Flush task spool (destroy all tasks)
     """
-    global adminid
     global tasks
 
     if is_admin(taskid):
-        admin_task = tasks[adminid]
-        tasks = AttribDict()
-        tasks[adminid] = admin_task
+        for task in tasks:
+            tasks[task].clean_filesystem()
 
+        tasks = dict()
         return jsonize({"success": True})
     else:
         abort(401)
@@ -248,27 +315,6 @@ def status(taskid):
     else:
         abort(401)
 
-@get("/cleanup/<taskid>")
-def cleanup(taskid):
-    """
-    Destroy all sessions except admin ID and all output directories
-    """
-    global tasks
-
-    if is_admin(taskid):
-        for task, options in tasks.items():
-            shutil.rmtree(options.oDir)
-            shutil.rmtree(options.ipc_database)
-
-        admin_task = tasks[adminid]
-        tasks = AttribDict()
-        tasks[adminid] = admin_task
-
-
-        return jsonize({"success": True})
-    else:
-        abort(401)
-
 # Functions to handle options
 @get("/option/<taskid>/list")
 def option_list(taskid):
@@ -278,7 +324,7 @@ def option_list(taskid):
     if taskid not in tasks:
         abort(500, "Invalid task ID")
 
-    return jsonize(tasks[taskid])
+    return jsonize(tasks[taskid].get_options())
 
 @post("/option/<taskid>/get")
 def option_get(taskid):
@@ -291,7 +337,7 @@ def option_get(taskid):
     option = request.json.get("option", "")
 
     if option in tasks[taskid]:
-        return jsonize({option: tasks[taskid][option]})
+        return jsonize({option: tasks[taskid].get_option(option)})
     else:
         return jsonize({option: None})
 
@@ -305,8 +351,8 @@ def option_set(taskid):
     if taskid not in tasks:
         abort(500, "Invalid task ID")
 
-    for key, value in request.json.items():
-        tasks[taskid][key] = value
+    for option, value in request.json.items():
+        tasks[taskid].set_option(option, value)
 
     return jsonize({"success": True})
 
@@ -317,80 +363,96 @@ def scan_start(taskid):
     Launch a scan
     """
     global tasks
-    global procs
 
     if taskid not in tasks:
         abort(500, "Invalid task ID")
 
     # Initialize sqlmap engine's options with user's provided options, if any
-    for key, value in request.json.items():
-        tasks[taskid][key] = value
+    for option, value in request.json.items():
+        tasks[taskid].set_option(option, value)
 
     # Overwrite output directory value to a temporary directory
-    tasks[taskid].oDir = tempfile.mkdtemp(prefix="sqlmapoutput-")
+    tasks[taskid].set_output_directory()
 
     # Launch sqlmap engine in a separate thread
     logger.debug("starting a scan for task ID %s" % taskid)
 
     # Launch sqlmap engine
-    procs[taskid].child = execute("python sqlmap.py --pickled-options %s" % base64pickle(tasks[taskid]), shell=True, stdin=PIPE)
+    tasks[taskid].engine_start()
 
     return jsonize({"success": True})
 
-@get("/scan/<taskid>/output")
-def scan_output(taskid):
+@get("/scan/<taskid>/stop")
+def scan_stop(taskid):
     """
-    Read the standard output of sqlmap core execution
+    Stop a scan
     """
-    global procs
     global tasks
 
-    json_stdout_message = []
-    json_stderr_message = []
-
     if taskid not in tasks:
         abort(500, "Invalid task ID")
 
-    # Read all stdout messages from the temporary I/O database
-    procs[taskid].ipc_database_cursor.execute("SELECT message FROM stdout")
-    db_stdout_messages = procs[taskid].ipc_database_cursor.fetchall()
-
-    for message in db_stdout_messages:
-        json_stdout_message.append(message)
-
-    # Read all stderr messages from the temporary I/O database
-    procs[taskid].ipc_database_cursor.execute("SELECT message FROM stderr")
-    db_stderr_messages = procs[taskid].ipc_database_cursor.fetchall()
-
-    for message in db_stderr_messages:
-        json_stderr_message.append(message)
-
-    return jsonize({"stdout": json_stdout_message, "stderr": json_stderr_message})
+    return jsonize({"success": tasks[taskid].engine_stop()})
+
+@get("/scan/<taskid>/kill")
+def scan_kill(taskid):
+    """
+    Kill a scan
+    """
+    global tasks
+
+    if taskid not in tasks:
+        abort(500, "Invalid task ID")
+
+    return jsonize({"success": tasks[taskid].engine_kill()})
 
 @get("/scan/<taskid>/delete")
 def scan_delete(taskid):
     """
-    Delete a scan and corresponding temporary output directory
+    Delete a scan and corresponding temporary output directory and IPC database
     """
     global tasks
 
     if taskid not in tasks:
         abort(500, "Invalid task ID")
 
-    shutil.rmtree(tasks[taskid].oDir)
-    shutil.rmtree(tasks[taskid].ipc_database)
+    scan_stop(taskid)
+    tasks[taskid].clean_filesystem()
 
     return jsonize({"success": True})
 
+@get("/scan/<taskid>/data")
+def scan_data(taskid):
+    """
+    Retrieve the data of a scan
+    """
+    global db
+    global tasks
+    json_data_message = list()
+    json_errors_message = list()
+
+    if taskid not in tasks:
+        abort(500, "Invalid task ID")
+
+    # Read all data from the IPC database for the taskid
+    for status, content_type, value in db.execute("SELECT status, content_type, value FROM data WHERE taskid = ? ORDER BY id ASC", (taskid,)):
+        json_data_message.append([status, content_type, dejsonize(value)])
+
+    # Read all error messages from the IPC database
+    for error in db.execute("SELECT error FROM errors WHERE taskid = ? ORDER BY id ASC", (taskid,)):
+        json_errors_message.append(error)
+
+    return jsonize({"data": json_data_message, "error": json_errors_message})
+
 # Functions to handle scans' logs
 @get("/scan/<taskid>/log/<start>/<end>")
 def scan_log_limited(taskid, start, end):
     """
     Retrieve a subset of log messages
     """
-    global procs
-    json_log_messages = {}
+    global db
+    global tasks
+    json_log_messages = list()
 
     if taskid not in tasks:
         abort(500, "Invalid task ID")
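Taken together, the endpoints above cover the whole task lifecycle. A hypothetical client session against a local server (default port 8775 per RESTAPI_SERVER_PORT): the exact /task/new, /option/<id>/set and /scan/<id>/start routes are not visible in these hunks, so their paths and HTTP methods are assumptions, and the third-party requests package is used purely for brevity.

import requests

server = "http://127.0.0.1:8775"

# assumed route for task_new() above
taskid = requests.get("%s/task/new" % server).json()["taskid"]

# option_set() iterates request.json.items(), so options travel as a JSON body
requests.post("%s/option/%s/set" % (server, taskid),
              json={"url": "http://target.tld/page.php?id=1"})

# scan_start() also reads request.json (assumed POST route)
requests.post("%s/scan/%s/start" % (server, taskid), json={})

# ...poll until the scan finishes, then fetch decoded results and errors
response = requests.get("%s/scan/%s/data" % (server, taskid)).json()
print(response["data"], response["error"])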
@@ -401,12 +463,9 @@ def scan_log_limited(taskid, start, end):
     start = max(1, int(start))
     end = max(1, int(end))
 
-    # Read a subset of log messages from the temporary I/O database
-    procs[taskid].ipc_database_cursor.execute("SELECT id, time, level, message FROM logs WHERE id >= ? AND id <= ?", (start, end))
-    db_log_messages = procs[taskid].ipc_database_cursor.fetchall()
-
-    for (id_, time_, level, message) in db_log_messages:
-        json_log_messages[id_] = {"time": time_, "level": level, "message": message}
+    # Read a subset of log messages from the IPC database
+    for time_, level, message in db.execute("SELECT time, level, message FROM logs WHERE taskid = ? AND id >= ? AND id <= ? ORDER BY id ASC", (taskid, start, end)):
+        json_log_messages.append({"time": time_, "level": level, "message": message})
 
     return jsonize({"log": json_log_messages})
 
@@ -415,19 +474,16 @@ def scan_log(taskid):
     """
     Retrieve the log messages
     """
-    global procs
-    json_log_messages = {}
+    global db
+    global tasks
+    json_log_messages = list()
 
     if taskid not in tasks:
         abort(500, "Invalid task ID")
 
-    # Read all log messages from the temporary I/O database
-    procs[taskid].ipc_database_cursor.execute("SELECT id, time, level, message FROM logs")
-    db_log_messages = procs[taskid].ipc_database_cursor.fetchall()
-
-    for (id_, time_, level, message) in db_log_messages:
-        json_log_messages[id_] = {"time": time_, "level": level, "message": message}
+    # Read all log messages from the IPC database
+    for time_, level, message in db.execute("SELECT time, level, message FROM logs WHERE taskid = ? ORDER BY id ASC", (taskid,)):
+        json_log_messages.append({"time": time_, "level": level, "message": message})
 
     return jsonize({"log": json_log_messages})
 
|
@ -445,6 +501,7 @@ def download(taskid, target, filename):
|
||||||
abort(500)
|
abort(500)
|
||||||
|
|
||||||
path = os.path.join(paths.SQLMAP_OUTPUT_PATH, target)
|
path = os.path.join(paths.SQLMAP_OUTPUT_PATH, target)
|
||||||
|
|
||||||
if os.path.exists(path):
|
if os.path.exists(path):
|
||||||
return static_file(filename, root=path)
|
return static_file(filename, root=path)
|
||||||
else:
|
else:
|
||||||
|
@@ -455,10 +512,11 @@ def server(host="0.0.0.0", port=RESTAPI_SERVER_PORT):
     REST-JSON API server
     """
     global adminid
-    global tasks
+    global db
 
     adminid = hexencode(os.urandom(16))
-    tasks[adminid] = init_options()
+    db = Database()
+    db.initialize()
 
     logger.info("running REST-JSON API server at '%s:%d'.." % (host, port))
     logger.info("the admin task ID is: %s" % adminid)