From 3effaee2a1f234d95318cb75364539210a5dcebe Mon Sep 17 00:00:00 2001
From: Mathieu Deous
Date: Sun, 15 Dec 2013 00:19:58 +0100
Subject: [PATCH] avoid using global variables, use a "store" class

---
 lib/utils/api.py | 143 ++++++++++++++++++-----------------------------
 1 file changed, 55 insertions(+), 88 deletions(-)

diff --git a/lib/utils/api.py b/lib/utils/api.py
index 5943b33a7..30427ebdc 100644
--- a/lib/utils/api.py
+++ b/lib/utils/api.py
@@ -43,17 +43,17 @@ from thirdparty.bottle.bottle import run
 RESTAPI_SERVER_HOST = "127.0.0.1"
 RESTAPI_SERVER_PORT = 8775
 
-# Local global variables
-adminid = ""
-db = None
-db_filepath = None
-tasks = dict()
+
+# global settings
+class DataStore(object):
+    admin_id = ""
+    current_db = None
+    tasks = dict()
 
 # API objects
 class Database(object):
-    global db_filepath
-
+    filepath = None
     LOGS_TABLE = ("CREATE TABLE logs("
                   "id INTEGER PRIMARY KEY AUTOINCREMENT, "
                   "taskid INTEGER, time TEXT, "
                   "level TEXT, message TEXT"
@@ -70,10 +70,7 @@ class Database(object):
                     ")")
 
     def __init__(self, database=None):
-        if database:
-            self.database = database
-        else:
-            self.database = db_filepath
+        self.database = self.filepath if database is None else database
 
     def connect(self, who="server"):
         self.connection = sqlite3.connect(self.database, timeout=3, isolation_level=None)
@@ -103,7 +100,6 @@ class Database(object):
 
 
 class Task(object):
-    global db_filepath
 
     def __init__(self, taskid):
         self.process = None
@@ -125,7 +121,7 @@ class Task(object):
         # the task ID and the file path of the IPC database
         self.options.api = True
         self.options.taskid = taskid
-        self.options.database = db_filepath
+        self.options.database = Database.filepath
 
         # Enforce batch mode and disable coloring and ETA
         self.options.batch = True
@@ -269,11 +265,7 @@ def setRestAPILog():
 
 # Generic functions
 def is_admin(taskid):
-    global adminid
-    if adminid != taskid:
-        return False
-    else:
-        return True
+    return DataStore.admin_id == taskid
 
 
 @hook("after_request")
@@ -328,10 +320,8 @@ def task_new():
     """
     Create new task ID
     """
-    global tasks
-
     taskid = hexencode(os.urandom(8))
-    tasks[taskid] = Task(taskid)
+    DataStore.tasks[taskid] = Task(taskid)
 
     logger.debug("Created new task ID: %s" % taskid)
     return jsonize({"success": True, "taskid": taskid})
@@ -342,9 +332,9 @@ def task_delete(taskid):
     """
     Delete own task ID
     """
-    if taskid in tasks:
-        tasks[taskid].clean_filesystem()
-        tasks.pop(taskid)
+    if taskid in DataStore.tasks:
+        DataStore.tasks[taskid].clean_filesystem()
+        DataStore.tasks.pop(taskid)
 
         logger.debug("Deleted task ID: %s" % taskid)
         return jsonize({"success": True})
@@ -362,8 +352,8 @@ def task_list(taskid):
     """
     if is_admin(taskid):
         logger.debug("Listed task pull")
-        task_list = list(tasks)
-        return jsonize({"success": True, "tasks": task_list, "tasks_num": len(tasks)})
+        task_list = list(DataStore.tasks)
+        return jsonize({"success": True, "tasks": task_list, "tasks_num": len(task_list)})
     else:
         return jsonize({"success": False, "message": "Unauthorized"})
 
@@ -373,13 +363,11 @@ def task_flush(taskid):
     """
     Flush task spool (delete all tasks)
     """
-    global tasks
-
     if is_admin(taskid):
-        for task in tasks:
-            tasks[task].clean_filesystem()
+        for task in DataStore.tasks:
+            DataStore.tasks[task].clean_filesystem()
 
-        tasks = dict()
+        DataStore.tasks = dict()
         logger.debug("Flushed task pull")
         return jsonize({"success": True})
     else:
@@ -395,10 +383,10 @@ def option_list(taskid):
     """
     List options for a certain task ID
    """
-    if taskid not in tasks:
+    if taskid not in DataStore.tasks:
        return jsonize({"success": False, "message": "Invalid task ID"})
 
-    return jsonize({"success": True, "options": tasks[taskid].get_options()})
+    return jsonize({"success": True, "options": DataStore.tasks[taskid].get_options()})
 
 
 @post("/option/<taskid>/get")
@@ -406,15 +394,13 @@ def option_get(taskid):
     """
     Get the value of an option (command line switch) for a certain task ID
     """
-    global tasks
-
-    if taskid not in tasks:
+    if taskid not in DataStore.tasks:
         return jsonize({"success": False, "message": "Invalid task ID"})
 
     option = request.json.get("option", "")
 
-    if option in tasks[taskid].options:
-        return jsonize({"success": True, option: tasks[taskid].get_option(option)})
+    if option in DataStore.tasks[taskid].options:
+        return jsonize({"success": True, option: DataStore.tasks[taskid].get_option(option)})
     else:
         return jsonize({"success": False, "message": "Unknown option", option: "not set"})
 
@@ -424,13 +410,11 @@ def option_set(taskid):
     """
     Set an option (command line switch) for a certain task ID
     """
-    global tasks
-
-    if taskid not in tasks:
+    if taskid not in DataStore.tasks:
         return jsonize({"success": False, "message": "Invalid task ID"})
 
     for option, value in request.json.items():
-        tasks[taskid].set_option(option, value)
+        DataStore.tasks[taskid].set_option(option, value)
 
     return jsonize({"success": True})
 
@@ -440,23 +424,21 @@ def scan_start(taskid):
     """
     Launch a scan
     """
-    global tasks
-
-    if taskid not in tasks:
+    if taskid not in DataStore.tasks:
         return jsonize({"success": False, "message": "Invalid task ID"})
 
     # Initialize sqlmap engine's options with user's provided options, if any
     for option, value in request.json.items():
-        tasks[taskid].set_option(option, value)
+        DataStore.tasks[taskid].set_option(option, value)
 
     # Overwrite output directory value to a temporary directory
-    tasks[taskid].set_output_directory()
+    DataStore.tasks[taskid].set_output_directory()
 
     # Launch sqlmap engine in a separate process
-    tasks[taskid].engine_start()
+    DataStore.tasks[taskid].engine_start()
 
     logger.debug("Started scan for task ID %s" % taskid)
-    return jsonize({"success": True, "engineid": tasks[taskid].engine_get_id()})
+    return jsonize({"success": True, "engineid": DataStore.tasks[taskid].engine_get_id()})
 
 
 @get("/scan/<taskid>/stop")
@@ -464,12 +446,10 @@ def scan_stop(taskid):
     """
     Stop a scan
     """
-    global tasks
-
-    if taskid not in tasks:
+    if taskid not in DataStore.tasks:
         return jsonize({"success": False, "message": "Invalid task ID"})
 
-    tasks[taskid].engine_stop()
+    DataStore.tasks[taskid].engine_stop()
 
     logger.debug("Stopped scan for task ID %s" % taskid)
     return jsonize({"success": True})
@@ -480,12 +460,10 @@ def scan_kill(taskid):
     """
     Kill a scan
     """
-    global tasks
-
-    if taskid not in tasks:
+    if taskid not in DataStore.tasks:
         return jsonize({"success": False, "message": "Invalid task ID"})
 
-    tasks[taskid].engine_kill()
+    DataStore.tasks[taskid].engine_kill()
 
     logger.debug("Killed scan for task ID %s" % taskid)
     return jsonize({"success": True})
@@ -496,18 +474,16 @@ def scan_status(taskid):
     """
     Returns status of a scan
     """
-    global tasks
-
-    if taskid not in tasks:
+    if taskid not in DataStore.tasks:
         return jsonize({"success": False, "message": "Invalid task ID"})
 
-    status = "terminated" if tasks[taskid].engine_has_terminated() is True else "running"
+    status = "terminated" if DataStore.tasks[taskid].engine_has_terminated() is True else "running"
 
     logger.debug("Requested status of scan for task ID %s" % taskid)
     return jsonize({
         "success": True,
         "status": status,
-        "returncode": tasks[taskid].engine_get_returncode()
+        "returncode": DataStore.tasks[taskid].engine_get_returncode()
     })
 
 
@@ -516,24 +492,23 @@ def scan_data(taskid):
     """
     Retrieve the data of a scan
     """
-    global db
-    global tasks
     json_data_message = list()
     json_errors_message = list()
 
-    if taskid not in tasks:
+    if taskid not in DataStore.tasks:
         return jsonize({"success": False, "message": "Invalid task ID"})
 
     # Read all data from the IPC database for the taskid
-    for status, content_type, value in db.execute(
+    for status, content_type, value in DataStore.current_db.execute(
             "SELECT status, content_type, value FROM data WHERE taskid = ? ORDER BY id ASC",
             (taskid,)):
         json_data_message.append(
             {"status": status, "type": content_type, "value": dejsonize(value)})
 
     # Read all error messages from the IPC database
-    for error in db.execute("SELECT error FROM errors WHERE taskid = ? ORDER BY id ASC",
-                            (taskid,)):
+    for error in DataStore.current_db.execute(
+            "SELECT error FROM errors WHERE taskid = ? ORDER BY id ASC",
+            (taskid,)):
         json_errors_message.append(error)
 
     logger.debug("Retrieved data and error messages for scan for task ID %s" % taskid)
@@ -545,11 +520,9 @@ def scan_log_limited(taskid, start, end):
     """
     Retrieve a subset of log messages
     """
-    global db
-    global tasks
     json_log_messages = list()
 
-    if taskid not in tasks:
+    if taskid not in DataStore.tasks:
         return jsonize({"success": False, "message": "Invalid task ID"})
 
     if not start.isdigit() or not end.isdigit() or end < start:
@@ -559,7 +532,7 @@ def scan_log_limited(taskid, start, end):
     end = max(1, int(end))
 
     # Read a subset of log messages from the IPC database
-    for time_, level, message in db.execute(
+    for time_, level, message in DataStore.current_db.execute(
             ("SELECT time, level, message FROM logs WHERE "
              "taskid = ? AND id >= ? AND id <= ? ORDER BY id ASC"),
             (taskid, start, end)):
@@ -574,15 +547,13 @@ def scan_log(taskid):
     """
     Retrieve the log messages
     """
-    global db
-    global tasks
     json_log_messages = list()
 
-    if taskid not in tasks:
+    if taskid not in DataStore.tasks:
         return jsonize({"success": False, "message": "Invalid task ID"})
 
     # Read all log messages from the IPC database
-    for time_, level, message in db.execute(
+    for time_, level, message in DataStore.current_db.execute(
             "SELECT time, level, message FROM logs WHERE taskid = ? ORDER BY id ASC",
             (taskid,)):
         json_log_messages.append({"time": time_, "level": level, "message": message})
@@ -595,7 +566,7 @@ def download(taskid, target, filename):
     """
     Download a certain file from the file system
     """
-    if taskid not in tasks:
+    if taskid not in DataStore.tasks:
         return jsonize({"success": False, "message": "Invalid task ID"})
 
     # Prevent file path traversal - the lame way
@@ -616,21 +587,17 @@ def server(host="0.0.0.0", port=RESTAPI_SERVER_PORT):
     """
     REST-JSON API server
     """
-    global adminid
-    global db
-    global db_filepath
-
-    adminid = hexencode(os.urandom(16))
-    db_filepath = tempfile.mkstemp(prefix="sqlmapipc-", text=False)[1]
+    DataStore.admin_id = hexencode(os.urandom(16))
+    Database.filepath = tempfile.mkstemp(prefix="sqlmapipc-", text=False)[1]
 
     logger.info("Running REST-JSON API server at '%s:%d'.." % (host, port))
-    logger.info("Admin ID: %s" % adminid)
-    logger.debug("IPC database: %s" % db_filepath)
+    logger.info("Admin ID: %s" % DataStore.admin_id)
+    logger.debug("IPC database: %s" % Database.filepath)
 
     # Initialize IPC database
-    db = Database()
-    db.connect()
-    db.init()
+    DataStore.current_db = Database()
+    DataStore.current_db.connect()
+    DataStore.current_db.init()
 
     # Run RESTful API
     run(host=host, port=port, quiet=True, debug=False)
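
The change above swaps module-level globals (and the "global" statements needed to rebind them) for class attributes on a small namespace class, so every read and write of shared state goes through one explicit name. Below is a minimal, standalone sketch of that pattern for illustration only: the DataStore, admin_id and tasks names mirror the diff, but the helper functions and the __main__ driver are hypothetical and not part of sqlmap.

# Sketch of the "store class" pattern adopted by the patch: shared state lives
# in class attributes, so no function needs a "global" declaration to update it.
# Only DataStore, admin_id and tasks mirror the diff; the rest is illustrative.
class DataStore(object):
    admin_id = ""
    tasks = dict()


def is_admin(taskid):
    # Attribute read through the class; the shared state is named explicitly.
    return DataStore.admin_id == taskid


def flush_tasks():
    # Rebinding a module-level "tasks" would require "global tasks";
    # rebinding the class attribute does not.
    DataStore.tasks = dict()


if __name__ == "__main__":
    DataStore.admin_id = "0123456789abcdef"
    DataStore.tasks["deadbeef"] = object()
    print(is_admin("0123456789abcdef"), len(DataStore.tasks))  # True 1
    flush_tasks()
    print(len(DataStore.tasks))  # 0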