From 5b3c8d8991262b3ca4ad2ad51fb3815d67c5acd6 Mon Sep 17 00:00:00 2001
From: Bernardo Damele
Date: Thu, 24 Jan 2013 12:57:24 +0000
Subject: [PATCH] first implementation of asynchronous inter-protocol
 communication between the sqlmap RESTful API and the sqlmap engine with
 SQLite

---
 lib/core/option.py | 27 ++++++++------------
 lib/utils/api.py   | 63 +++++++++++++++++++++++++++-------------------
 2 files changed, 47 insertions(+), 43 deletions(-)

diff --git a/lib/core/option.py b/lib/core/option.py
index 1be07f19d..b40027877 100644
--- a/lib/core/option.py
+++ b/lib/core/option.py
@@ -13,6 +13,7 @@ import re
 import socket
 import string
 import sys
+import sqlite3
 import threading
 import time
 import urllib2
@@ -1793,29 +1794,21 @@ def _mergeOptions(inputOptions, overrideOptions):
         if hasattr(conf, key) and conf[key] is None:
             conf[key] = value
 
-# Logger recorder object, which keeps the log structure
 class LogRecorder(logging.StreamHandler):
-    """
-    Logging handler class which only records CUSTOM_LOGGING.PAYLOAD entries
-    to a global list.
-    """
-    loghist = []
-
     def emit(self, record):
         """
-        Simply record the emitted events.
+        Record emitted events to temporary database for asynchronous I/O
+        communication with the parent process
         """
-        self.loghist.append({'levelname': record.levelname,
-            'text': record.msg % record.args if record.args else record.msg,
-            'id': len(self.loghist) + 1})
-
-        if conf.fdLog:
-            # TODO: this is very heavy operation and slows down a lot the
-            # whole execution of the sqlmap engine, find an alternative
-            os.write(conf.fdLog, base64pickle(self.loghist))
+        connection = sqlite3.connect(conf.ipc, isolation_level=None)
+        cursor = connection.cursor()
+        cursor.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?)",
+                       (time.strftime("%X"), record.levelname, record.msg % record.args if record.args else record.msg))
+        cursor.close()
+        connection.close()
 
 def _setRestAPILog():
-    if hasattr(conf, "fdLog") and conf.fdLog:
+    if hasattr(conf, "ipc"):
         logger.removeHandler(LOGGER_HANDLER)
         LOGGER_RECORDER = LogRecorder()
         logger.addHandler(LOGGER_RECORDER)
diff --git a/lib/utils/api.py b/lib/utils/api.py
index 527b18748..446c89c0e 100644
--- a/lib/utils/api.py
+++ b/lib/utils/api.py
@@ -7,6 +7,7 @@ See the file 'doc/COPYING' for copying permission
 
 import os
 import shutil
+import sqlite3
 import tempfile
 
 from subprocess import PIPE
@@ -39,7 +40,6 @@ RESTAPI_SERVER_PORT = 8775
 
 # Local global variables
 adminid = ""
-pipes = dict()
 procs = dict()
 tasks = AttribDict()
 
@@ -115,6 +115,19 @@ def task_new():
     taskid = hexencode(os.urandom(16))
     tasks[taskid] = init_options()
 
+    # Initiate the temporary database for asynchronous I/O with the
+    # sqlmap engine (children processes)
+    _, ipc_filepath = tempfile.mkstemp(prefix="sqlmapipc-", suffix=".db", text=False)
+    connection = sqlite3.connect(ipc_filepath, isolation_level=None)
+    cursor = connection.cursor()
+    cursor.execute("DROP TABLE IF EXISTS logs")
+    cursor.execute("CREATE TABLE logs(id INTEGER PRIMARY KEY AUTOINCREMENT, time TEXT, level TEXT, message TEXT)")
+    cursor.close()
+    connection.close()
+
+    # Set the temporary database to use for asynchronous I/O communication
+    tasks[taskid].ipc = ipc_filepath
+
     return jsonize({"taskid": taskid})
 
 @get("/task/<taskid>/destroy")
@@ -242,7 +255,6 @@ def scan_start(taskid):
     """
     global tasks
     global procs
-    global pipes
 
     if taskid not in tasks:
         abort(500, "Invalid task ID")
@@ -252,16 +264,11 @@ def scan_start(taskid):
         tasks[taskid][key] = value
 
     # Overwrite output directory (oDir) value to a temporary directory
-    tasks[taskid].oDir = tempfile.mkdtemp(prefix="sqlmap-")
+    tasks[taskid].oDir = tempfile.mkdtemp(prefix="sqlmaptask-")
 
     # Launch sqlmap engine in a separate thread
     logger.debug("starting a scan for task ID %s" % taskid)
 
-    pipes[taskid] = os.pipe()
-
-    # Provide sqlmap engine with the writable pipe for logging
-    tasks[taskid]["fdLog"] = pipes[taskid][1]
-
     # Launch sqlmap engine
     procs[taskid] = execute("python sqlmap.py --pickled-options %s" % base64pickle(tasks[taskid]), shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=False)
 
@@ -273,7 +280,6 @@ def scan_output(taskid):
     """
     Read the standard output of sqlmap core execution
     """
-    global pipes
     global tasks
 
     if taskid not in tasks:
@@ -303,46 +309,51 @@ def scan_delete(taskid):
 
 @get("/scan/<taskid>/log/<start>/<end>")
 def scan_log_limited(taskid, start, end):
     """
-    Retrieve the log messages
+    Retrieve a subset of log messages
     """
-    log = None
+    json_log_messages = {}
 
     if taskid not in tasks:
         abort(500, "Invalid task ID")
 
+    # Temporary "protection" against SQL injection FTW ;)
     if not start.isdigit() or not end.isdigit() or end <= start:
         abort(500, "Invalid start or end value, must be digits")
 
-    start = max(0, int(start) - 1)
+    start = max(1, int(start))
     end = max(1, int(end))
-    pickledLog = os.read(pipes[taskid][0], 100000)
 
-    try:
-        log = base64unpickle(pickledLog)
-        log = log[slice(start, end)]
-    except (KeyError, IndexError, TypeError), e:
-        logger.error("handled exception when trying to unpickle logger dictionary in scan_log_limited(): %s" % str(e))
+    # Read a subset of log messages from the temporary I/O database
+    connection = sqlite3.connect(tasks[taskid].ipc, isolation_level=None)
+    cursor = connection.cursor()
+    cursor.execute("SELECT id, time, level, message FROM logs WHERE id >= %d AND id <= %d" % (start, end))
+    db_log_messages = cursor.fetchall()
 
-    return jsonize({"log": log})
+    for (id_, time_, level, message) in db_log_messages:
+        json_log_messages[id_] = {"time": time_, "level": level, "message": message}
+
+    return jsonize({"log": json_log_messages})
 
 @get("/scan/<taskid>/log")
 def scan_log(taskid):
     """
     Retrieve the log messages
     """
-    log = None
+    json_log_messages = {}
 
     if taskid not in tasks:
         abort(500, "Invalid task ID")
 
-    pickledLog = os.read(pipes[taskid][0], 100000)
+    # Read all log messages from the temporary I/O database
+    connection = sqlite3.connect(tasks[taskid].ipc, isolation_level=None)
+    cursor = connection.cursor()
+    cursor.execute("SELECT id, time, level, message FROM logs")
+    db_log_messages = cursor.fetchall()
 
-    try:
-        log = base64unpickle(pickledLog)
-    except (KeyError, IndexError, TypeError), e:
-        logger.error("handled exception when trying to unpickle logger dictionary in scan_log(): %s" % str(e))
+    for (id_, time_, level, message) in db_log_messages:
+        json_log_messages[id_] = {"time": time_, "level": level, "message": message}
 
-    return jsonize({"log": log})
+    return jsonize({"log": json_log_messages})
 
 # Function to handle files inside the output directory
 @get("/download/<taskid>/<target>/<filename:path>")
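
A note on the mechanism: passing isolation_level=None to sqlite3.connect() puts the connection in autocommit mode, so every INSERT issued by LogRecorder.emit() in the engine process is committed, and therefore visible to the REST API process reading the same sqlmapipc-*.db file, as soon as execute() returns. This is what replaces the removed pipe-based transport. As a rough usage sketch, not part of the patch (it assumes a local API on the RESTAPI_SERVER_PORT default of 8775 shown above and a /task/new route bound to task_new(); starting the scan is elided), a client could consume the new log endpoints like this:

    # Client-side sketch (assumptions: local API on the default port 8775,
    # a /task/new route bound to task_new() above; scan start elided)
    import json
    import urllib2

    BASE_URL = "http://127.0.0.1:8775"

    # Create a task; server-side this also creates the sqlmapipc-*.db file
    taskid = json.loads(urllib2.urlopen(BASE_URL + "/task/new").read())["taskid"]

    # ... start the scan for this task ID here ...

    # Fetch everything the engine process has logged so far
    data = json.loads(urllib2.urlopen("%s/scan/%s/log" % (BASE_URL, taskid)).read())
    for id_ in sorted(data["log"], key=int):
        entry = data["log"][id_]
        print "[%s] [%s] %s" % (entry["time"], entry["level"], entry["message"])

Because each emit() commits immediately, repeated polling of /scan/<taskid>/log picks up new rows without any coordination between the two processes.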
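
Likewise, since the channel is now just a SQLite file on disk, it can be inspected out-of-band while a scan runs, independently of the REST endpoints. A minimal debugging sketch (the path is hypothetical, since mkstemp() randomizes the real file name):

    # Debugging sketch: read the engine's log table directly, bypassing
    # the REST API (hypothetical path; mkstemp() randomizes the real one)
    import sqlite3

    connection = sqlite3.connect("/tmp/sqlmapipc-XXXXXX.db", isolation_level=None)
    for row in connection.execute("SELECT id, time, level, message FROM logs ORDER BY id"):
        print "%d [%s] [%s] %s" % row
    connection.close()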