Merge pull request #583 from mattoufoutu/api

RESTful API improvements
Miroslav Stampar 2013-12-17 14:10:19 -08:00
commit fd6dcd8bf5
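
This merge reworks sqlmap's REST-JSON API module: the module-level globals (adminid, db, db_filepath, tasks) move into a DataStore holder class, the IPC database path becomes Database.filepath, overly long SQL and subprocess lines are re-wrapped, and the user-facing endpoints now report the outcome in a JSON "success" flag (plus a "message" on failure) instead of calling Bottle's abort(). Roughly, after the change (both task IDs below are made up):

    $ curl http://127.0.0.1:8775/task/new
    {"success": true, "taskid": "a1b2c3d4e5f60708"}

    $ curl http://127.0.0.1:8775/scan/foobar/status
    {"success": false, "message": "Invalid task ID"}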


@@ -1,4 +1,5 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
@@ -31,40 +32,47 @@ from lib.core.enums import PART_RUN_CONTENT_TYPES
from lib.core.log import LOGGER_HANDLER
from lib.core.optiondict import optDict
from lib.core.subprocessng import Popen
from lib.core.subprocessng import send_all
from lib.core.subprocessng import recv_some
from thirdparty.bottle.bottle import abort
from thirdparty.bottle.bottle import error
from thirdparty.bottle.bottle import error as return_error
from thirdparty.bottle.bottle import get
from thirdparty.bottle.bottle import hook
from thirdparty.bottle.bottle import post
from thirdparty.bottle.bottle import request
from thirdparty.bottle.bottle import response
from thirdparty.bottle.bottle import run
from thirdparty.bottle.bottle import static_file
RESTAPI_SERVER_HOST = "127.0.0.1"
RESTAPI_SERVER_PORT = 8775
# Local global variables
adminid = ""
db = None
db_filepath = None
tasks = dict()
# global settings
class DataStore(object):
admin_id = ""
current_db = None
tasks = dict()
# API objects
class Database(object):
global db_filepath
LOGS_TABLE = "CREATE TABLE logs(id INTEGER PRIMARY KEY AUTOINCREMENT, taskid INTEGER, time TEXT, level TEXT, message TEXT)"
DATA_TABLE = "CREATE TABLE data(id INTEGER PRIMARY KEY AUTOINCREMENT, taskid INTEGER, status INTEGER, content_type INTEGER, value TEXT)"
ERRORS_TABLE = "CREATE TABLE errors(id INTEGER PRIMARY KEY AUTOINCREMENT, taskid INTEGER, error TEXT)"
filepath = None
LOGS_TABLE = ("CREATE TABLE logs("
"id INTEGER PRIMARY KEY AUTOINCREMENT, "
"taskid INTEGER, time TEXT, "
"level TEXT, message TEXT"
")")
DATA_TABLE = ("CREATE TABLE data("
"id INTEGER PRIMARY KEY AUTOINCREMENT, "
"taskid INTEGER, status INTEGER, "
"content_type INTEGER, value TEXT"
")")
ERRORS_TABLE = ("CREATE TABLE errors("
"id INTEGER PRIMARY KEY AUTOINCREMENT, "
"taskid INTEGER, error TEXT"
")")
def __init__(self, database=None):
if database:
self.database = database
else:
self.database = db_filepath
self.database = self.filepath if database is None else database
self.connection = None
self.cursor = None
def connect(self, who="server"):
self.connection = sqlite3.connect(self.database, timeout=3, isolation_level=None)
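
The Database class above wraps all access to the IPC SQLite file. A minimal sketch of driving it directly on a throwaway file (illustration only, not part of this commit; it assumes, as the scan_data handler further down does, that execute() returns the fetched rows for SELECT statements):

    import tempfile

    ipc_path = tempfile.mkstemp(prefix="sqlmapipc-", text=False)[1]
    db = Database(ipc_path)      # with no argument it falls back to Database.filepath
    db.connect()
    db.init()                    # creates the logs/data/errors tables defined above
    db.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?, ?)",
               ("deadbeefcafe0000", "14:10:19", "INFO", "engine started"))
    print(db.execute("SELECT level, message FROM logs"))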
@@ -76,7 +84,7 @@ class Database(object):
self.connection.close()
def commit(self):
self.cursor.commit()
self.connection.commit()
def execute(self, statement, arguments=None):
if arguments:
@@ -92,8 +100,8 @@ class Database(object):
self.execute(self.DATA_TABLE)
self.execute(self.ERRORS_TABLE)
class Task(object):
global db_filepath
def __init__(self, taskid):
self.process = None
@@ -111,10 +119,11 @@ class Task(object):
type_ = unArrayizeValue(type_)
self.options[name] = _defaults.get(name, datatype[type_])
# Let sqlmap engine knows it is getting called by the API, the task ID and the file path of the IPC database
# Let the sqlmap engine know it is being called by the API,
# and pass it the task ID and the file path of the IPC database
self.options.api = True
self.options.taskid = taskid
self.options.database = db_filepath
self.options.database = Database.filepath
# Enforce batch mode and disable coloring and ETA
self.options.batch = True
@@ -145,7 +154,8 @@ class Task(object):
shutil.rmtree(self.output_directory)
def engine_start(self):
self.process = Popen("python sqlmap.py --pickled-options %s" % base64pickle(self.options), shell=True, stdin=PIPE, close_fds=False)
self.process = Popen("python sqlmap.py --pickled-options %s" % base64pickle(self.options),
shell=True, stdin=PIPE, close_fds=False)
def engine_stop(self):
if self.process:
@@ -172,6 +182,7 @@ class Task(object):
def engine_has_terminated(self):
return isinstance(self.engine_get_returncode(), int)
# Wrapper functions for sqlmap engine
class StdDbOut(object):
def __init__(self, taskid, messagetype="stdout"):
@@ -194,25 +205,27 @@ class StdDbOut(object):
# Ignore all non-relevant messages
return
output = conf.database_cursor.execute("SELECT id, status, value FROM data WHERE taskid = ? AND content_type = ?",
(self.taskid, content_type))
#print >>sys.__stdout__, "output: %s\nvalue: %s\nstatus: %d\ncontent_type: %d\nkb.partRun: %s\n--------------" % (output, value, status, content_type, kb.partRun)
output = conf.database_cursor.execute(
"SELECT id, status, value FROM data WHERE taskid = ? AND content_type = ?",
(self.taskid, content_type))
# Delete partial output from IPC database if we have got a complete output
if status == CONTENT_STATUS.COMPLETE:
if len(output) > 0:
for index in xrange(0, len(output)):
conf.database_cursor.execute("DELETE FROM data WHERE id = ?", (output[index][0],))
for index in xrange(len(output)):
conf.database_cursor.execute("DELETE FROM data WHERE id = ?",
(output[index][0],))
conf.database_cursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)", (self.taskid, status, content_type, jsonize(value)))
conf.database_cursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)",
(self.taskid, status, content_type, jsonize(value)))
if kb.partRun:
kb.partRun = None
elif status == CONTENT_STATUS.IN_PROGRESS:
if len(output) == 0:
conf.database_cursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)",
(self.taskid, status, content_type, jsonize(value)))
(self.taskid, status, content_type,
jsonize(value)))
else:
new_value = "%s%s" % (dejsonize(output[0][2]), value)
conf.database_cursor.execute("UPDATE data SET value = ? WHERE id = ?",
@@ -230,6 +243,7 @@ class StdDbOut(object):
def seek(self):
pass
class LogRecorder(logging.StreamHandler):
def emit(self, record):
"""
@@ -238,7 +252,8 @@ class LogRecorder(logging.StreamHandler):
"""
conf.database_cursor.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?, ?)",
(conf.taskid, time.strftime("%X"), record.levelname,
record.msg % record.args if record.args else record.msg))
record.msg % record.args if record.args else record.msg))
def setRestAPILog():
if hasattr(conf, "api"):
@@ -250,13 +265,11 @@ def setRestAPILog():
LOGGER_RECORDER = LogRecorder()
logger.addHandler(LOGGER_RECORDER)
# Generic functions
def is_admin(taskid):
global adminid
if adminid != taskid:
return False
else:
return True
return DataStore.admin_id == taskid
@hook("after_request")
def security_headers(json_header=True):
@@ -277,22 +290,26 @@ def security_headers(json_header=True):
# HTTP Status Code functions #
##############################
@error(401) # Access Denied
@return_error(401) # Access Denied
def error401(error=None):
security_headers(False)
return "Access denied"
@error(404) # Not Found
@return_error(404) # Not Found
def error404(error=None):
security_headers(False)
return "Nothing here"
@error(405) # Method Not Allowed (e.g. when requesting a POST method via GET)
@return_error(405) # Method Not Allowed (e.g. when requesting a POST method via GET)
def error405(error=None):
security_headers(False)
return "Method not allowed"
@error(500) # Internal Server Error
@return_error(500) # Internal Server Error
def error500(error=None):
security_headers(False)
return "Internal server error"
@@ -301,208 +318,225 @@ def error500(error=None):
# Task management functions #
#############################
# Users' methods
@get("/task/new")
def task_new():
"""
Create new task ID
"""
global tasks
taskid = hexencode(os.urandom(8))
tasks[taskid] = Task(taskid)
DataStore.tasks[taskid] = Task(taskid)
logger.debug(" [%s] Created new task" % taskid)
return jsonize({"success": True, "taskid": taskid})
logger.debug("Created new task ID: %s" % taskid)
return jsonize({"taskid": taskid})
@get("/task/<taskid>/delete")
def task_delete(taskid):
"""
Delete own task ID
"""
if taskid in tasks:
tasks[taskid].clean_filesystem()
tasks.pop(taskid)
if taskid in DataStore.tasks:
DataStore.tasks[taskid].clean_filesystem()
DataStore.tasks.pop(taskid)
logger.debug("Deleted task ID: %s" % taskid)
logger.debug("[%s] Deleted task" % taskid)
return jsonize({"success": True})
else:
abort(500, "Invalid task ID")
logger.warning("[%s] Invalid task ID provided to task_delete()" % taskid)
return jsonize({"success": False, "message": "Invalid task ID"})
###################
# Admin functions #
###################
@get("/admin/<taskid>/list")
def task_list(taskid):
"""
List task pool
"""
if is_admin(taskid):
logger.debug("Listed task pull")
task_list = list(tasks)
return jsonize({"tasks": task_list, "tasks_num": len(tasks)})
logger.debug("[%s] Listed task pool" % taskid)
tasks = list(DataStore.tasks)
return jsonize({"success": True, "tasks": tasks, "tasks_num": len(tasks)})
else:
abort(401)
logger.warning("[%s] Unauthorized call to task_list()" % taskid)
return jsonize({"success": False, "message": "Unauthorized"})
@get("/admin/<taskid>/flush")
def task_flush(taskid):
"""
Flush task spool (delete all tasks)
"""
global tasks
if is_admin(taskid):
for task in tasks:
tasks[task].clean_filesystem()
for task in DataStore.tasks:
DataStore.tasks[task].clean_filesystem()
tasks = dict()
logger.debug("Flushed task pull")
DataStore.tasks = dict()
logger.debug("[%s] Flushed task pool" % taskid)
return jsonize({"success": True})
else:
abort(401)
logger.warning("[%s] Unauthorized call to task_flush()" % taskid)
return jsonize({"success": False, "message": "Unauthorized"})
##################################
# sqlmap core interact functions #
##################################
# Handle task's options
@get("/option/<taskid>/list")
def option_list(taskid):
"""
List options for a certain task ID
"""
if taskid not in tasks:
abort(500, "Invalid task ID")
if taskid not in DataStore.tasks:
logger.warning("[%s] Invalid task ID provided to option_list()" % taskid)
return jsonize({"success": False, "message": "Invalid task ID"})
logger.debug("[%s] Listed task options" % taskid)
return jsonize({"success": True, "options": DataStore.tasks[taskid].get_options()})
return jsonize({"options": tasks[taskid].get_options()})
@post("/option/<taskid>/get")
def option_get(taskid):
"""
Get the value of an option (command line switch) for a certain task ID
"""
global tasks
if taskid not in tasks:
abort(500, "Invalid task ID")
if taskid not in DataStore.tasks:
logger.warning("[%s] Invalid task ID provided to option_get()" % taskid)
return jsonize({"success": False, "message": "Invalid task ID"})
option = request.json.get("option", "")
if option in tasks[taskid].options:
return jsonize({option: tasks[taskid].get_option(option)})
if option in DataStore.tasks[taskid].options:
logger.debug("[%s] Retrieved value for option %s" % (taskid, option))
return jsonize({"success": True, option: DataStore.tasks[taskid].get_option(option)})
else:
return jsonize({option: "not set"})
logger.debug("[%s] Requested value for unknown option %s" % (taskid, option))
return jsonize({"success": False, "message": "Unknown option", option: "not set"})
@post("/option/<taskid>/set")
def option_set(taskid):
"""
Set an option (command line switch) for a certain task ID
"""
global tasks
if taskid not in tasks:
abort(500, "Invalid task ID")
if taskid not in DataStore.tasks:
logger.warning("[%s] Invalid task ID provided to option_set()" % taskid)
return jsonize({"success": False, "message": "Invalid task ID"})
for option, value in request.json.items():
tasks[taskid].set_option(option, value)
DataStore.tasks[taskid].set_option(option, value)
logger.debug("[%s] Requested to set options" % taskid)
return jsonize({"success": True})
# Handle scans
@post("/scan/<taskid>/start")
def scan_start(taskid):
"""
Launch a scan
"""
global tasks
if taskid not in tasks:
abort(500, "Invalid task ID")
tasks[taskid].reset_options()
if taskid not in DataStore.tasks:
logger.warning("[%s] Invalid task ID provided to scan_start()" % taskid)
return jsonize({"success": False, "message": "Invalid task ID"})
# Initialize sqlmap engine's options with user's provided options, if any
for option, value in request.json.items():
tasks[taskid].set_option(option, value)
DataStore.tasks[taskid].set_option(option, value)
# Overwrite output directory value to a temporary directory
tasks[taskid].set_output_directory()
DataStore.tasks[taskid].set_output_directory()
# Launch sqlmap engine in a separate process
tasks[taskid].engine_start()
DataStore.tasks[taskid].engine_start()
logger.debug("[%s] Started scan" % taskid)
return jsonize({"success": True, "engineid": DataStore.tasks[taskid].engine_get_id()})
logger.debug("Started scan for task ID %s" % taskid)
return jsonize({"success": True, "engineid": tasks[taskid].engine_get_id()})
@get("/scan/<taskid>/stop")
def scan_stop(taskid):
"""
Stop a scan
"""
global tasks
if taskid not in DataStore.tasks:
logger.warning("[%s] Invalid task ID provided to scan_stop()" % taskid)
return jsonize({"success": False, "message": "Invalid task ID"})
if taskid not in tasks:
abort(500, "Invalid task ID")
DataStore.tasks[taskid].engine_stop()
tasks[taskid].engine_stop()
logger.debug("Stopped scan for task ID %s" % taskid)
logger.debug("[%s] Stopped scan" % taskid)
return jsonize({"success": True})
@get("/scan/<taskid>/kill")
def scan_kill(taskid):
"""
Kill a scan
"""
global tasks
if taskid not in DataStore.tasks:
logger.warning("[%s] Invalid task ID provided to scan_kill()" % taskid)
return jsonize({"success": False, "message": "Invalid task ID"})
if taskid not in tasks:
abort(500, "Invalid task ID")
DataStore.tasks[taskid].engine_kill()
tasks[taskid].engine_kill()
logger.debug("Killed scan for task ID %s" % taskid)
logger.debug("[%s] Killed scan" % taskid)
return jsonize({"success": True})
@get("/scan/<taskid>/status")
def scan_status(taskid):
"""
Returns status of a scan
"""
global tasks
if taskid not in DataStore.tasks:
logger.warning("[%s] Invalid task ID provided to scan_status()" % taskid)
return jsonize({"success": False, "message": "Invalid task ID"})
if taskid not in tasks:
abort(500, "Invalid task ID")
status = "terminated" if DataStore.tasks[taskid].engine_has_terminated() is True else "running"
status = "terminated" if tasks[taskid].engine_has_terminated() is True else "running"
logger.debug("[%s] Retrieved scan status" % taskid)
return jsonize({
"success": True,
"status": status,
"returncode": DataStore.tasks[taskid].engine_get_returncode()
})
logger.debug("Requested status of scan for task ID %s" % taskid)
return jsonize({"status": status, "returncode": tasks[taskid].engine_get_returncode()})
@get("/scan/<taskid>/data")
def scan_data(taskid):
"""
Retrieve the data of a scan
"""
global db
global tasks
json_data_message = list()
json_errors_message = list()
if taskid not in tasks:
abort(500, "Invalid task ID")
if taskid not in DataStore.tasks:
logger.warning("[%s] Invalid task ID provided to scan_data()" % taskid)
return jsonize({"success": False, "message": "Invalid task ID"})
# Read all data from the IPC database for the taskid
for status, content_type, value in db.execute("SELECT status, content_type, value FROM data WHERE taskid = ? ORDER BY id ASC", (taskid,)):
json_data_message.append({"status": status, "type": content_type, "value": dejsonize(value)})
for status, content_type, value in DataStore.current_db.execute(
"SELECT status, content_type, value FROM data WHERE taskid = ? ORDER BY id ASC",
(taskid,)):
json_data_message.append(
{"status": status, "type": content_type, "value": dejsonize(value)})
# Read all error messages from the IPC database
for error in db.execute("SELECT error FROM errors WHERE taskid = ? ORDER BY id ASC", (taskid,)):
for error in DataStore.current_db.execute(
"SELECT error FROM errors WHERE taskid = ? ORDER BY id ASC",
(taskid,)):
json_errors_message.append(error)
logger.debug("Retrieved data and error messages for scan for task ID %s" % taskid)
return jsonize({"data": json_data_message, "error": json_errors_message})
logger.debug("[%s] Retrieved scan data and error messages" % taskid)
return jsonize({"success": True, "data": json_data_message, "error": json_errors_message})
# Functions to handle scans' logs
@get("/scan/<taskid>/log/<start>/<end>")
@@ -510,44 +544,49 @@ def scan_log_limited(taskid, start, end):
"""
Retrieve a subset of log messages
"""
global db
global tasks
json_log_messages = list()
if taskid not in tasks:
abort(500, "Invalid task ID")
if taskid not in DataStore.tasks:
logger.warning("[%s] Invalid task ID provided to scan_log_limited()")
return jsonize({"success": False, "message": "Invalid task ID"})
if not start.isdigit() or not end.isdigit() or end < start:
abort(500, "Invalid start or end value, must be digits")
logger.warning("[%s] Invalid start or end value provided to scan_log_limited()" % taskid)
return jsonize({"success": False, "message": "Invalid start or end value, must be digits"})
start = max(1, int(start))
end = max(1, int(end))
# Read a subset of log messages from the IPC database
for time_, level, message in db.execute("SELECT time, level, message FROM logs WHERE taskid = ? AND id >= ? AND id <= ? ORDER BY id ASC", (taskid, start, end)):
for time_, level, message in DataStore.current_db.execute(
("SELECT time, level, message FROM logs WHERE "
"taskid = ? AND id >= ? AND id <= ? ORDER BY id ASC"),
(taskid, start, end)):
json_log_messages.append({"time": time_, "level": level, "message": message})
logger.debug("Retrieved subset of log messages for scan for task ID %s" % taskid)
return jsonize({"log": json_log_messages})
logger.debug("[%s] Retrieved scan log messages subset" % taskid)
return jsonize({"success": True, "log": json_log_messages})
@get("/scan/<taskid>/log")
def scan_log(taskid):
"""
Retrieve the log messages
"""
global db
global tasks
json_log_messages = list()
if taskid not in tasks:
abort(500, "Invalid task ID")
if taskid not in DataStore.tasks:
logger.warning("[%s] Invalid task ID provided to scan_log()")
return jsonize({"success": False, "message": "Invalid task ID"})
# Read all log messages from the IPC database
for time_, level, message in db.execute("SELECT time, level, message FROM logs WHERE taskid = ? ORDER BY id ASC", (taskid,)):
for time_, level, message in DataStore.current_db.execute(
"SELECT time, level, message FROM logs WHERE taskid = ? ORDER BY id ASC", (taskid,)):
json_log_messages.append({"time": time_, "level": level, "message": message})
logger.debug("Retrieved log messages for scan for task ID %s" % taskid)
return jsonize({"log": json_log_messages})
logger.debug("[%s] Retrieved scan log messages" % taskid)
return jsonize({"success": True, "log": json_log_messages})
# Function to handle files inside the output directory
@get("/download/<taskid>/<target>/<filename:path>")
@@ -555,43 +594,47 @@ def download(taskid, target, filename):
"""
Download a certain file from the file system
"""
if taskid not in tasks:
abort(500, "Invalid task ID")
if taskid not in DataStore.tasks:
logger.warning("[%s] Invalid task ID provided to download()" % taskid)
return jsonize({"success": False, "message": "Invalid task ID"})
# Prevent file path traversal - the lame way
if target.startswith("."):
abort(500)
if ".." in target:
logger.warning("[%s] Forbidden path (%s)" % (taskid, target))
return jsonize({"success": False, "message": "Forbidden path"})
path = os.path.join(paths.SQLMAP_OUTPUT_PATH, target)
if os.path.exists(path):
return static_file(filename, root=path)
logger.debug("[%s] Retrieved content of file %s" % (taskid, target))
with open(path, 'rb') as inf:
file_content = inf.read()
return jsonize({"success": True, "file": file_content.encode("base64")})
else:
abort(500, "File does not exist")
logger.warning("[%s] File does not exist %s" % (taskid, target))
return jsonize({"success": False, "message": "File does not exist"})
def server(host="0.0.0.0", port=RESTAPI_SERVER_PORT):
"""
REST-JSON API server
"""
global adminid
global db
global db_filepath
adminid = hexencode(os.urandom(16))
db_filepath = tempfile.mkstemp(prefix="sqlmapipc-", text=False)[1]
DataStore.admin_id = hexencode(os.urandom(16))
Database.filepath = tempfile.mkstemp(prefix="sqlmapipc-", text=False)[1]
logger.info("Running REST-JSON API server at '%s:%d'.." % (host, port))
logger.info("Admin ID: %s" % adminid)
logger.debug("IPC database: %s" % db_filepath)
logger.info("Admin ID: %s" % DataStore.admin_id)
logger.debug("IPC database: %s" % Database.filepath)
# Initialize IPC database
db = Database()
db.connect()
db.init()
DataStore.current_db = Database()
DataStore.current_db.connect()
DataStore.current_db.init()
# Run RESTful API
run(host=host, port=port, quiet=True, debug=False)
def client(host=RESTAPI_SERVER_HOST, port=RESTAPI_SERVER_PORT):
"""
REST-JSON API client
@@ -602,6 +645,8 @@ def client(host=RESTAPI_SERVER_HOST, port=RESTAPI_SERVER_PORT):
# TODO: write a simple client with requests, for now use curl from command line
logger.error("Not yet implemented, use curl from command line instead for now, for example:")
print "\n\t$ curl http://%s:%d/task/new" % (host, port)
print "\t$ curl -H \"Content-Type: application/json\" -X POST -d '{\"url\": \"http://testphp.vulnweb.com/artists.php?artist=1\"}' http://%s:%d/scan/:taskid/start" % (host, port)
print ("\t$ curl -H \"Content-Type: application/json\" "
"-X POST -d '{\"url\": \"http://testphp.vulnweb.com/artists.php?artist=1\"}' "
"http://%s:%d/scan/:taskid/start") % (host, port)
print "\t$ curl http://%s:%d/scan/:taskid/data" % (host, port)
print "\t$ curl http://%s:%d/scan/:taskid/log\n" % (host, port)