Mirror of https://github.com/sqlmapproject/sqlmap.git (synced 2024-11-22 17:46:37 +03:00)
first implementation of asynchronous inter-process communication between the sqlmap RESTful API and the sqlmap engine with SQLite
This commit is contained in:
parent 7d01eb79b4
commit 5b3c8d8991
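
The idea of the change: instead of handing the engine a writable pipe file descriptor and pushing the pickled log history through it, the REST API now creates a small per-task SQLite database. The engine (a child process) appends one row per log record to it, and the API reads the rows back whenever a client asks for them. Below is a minimal, self-contained sketch of that pattern, assuming nothing beyond the Python standard library; the table layout copies the one created in the diff, while the file name and the sample record are illustrative only.

import os
import sqlite3
import tempfile
import time

# The API side creates a per-task temporary database with the same schema
# used in task_new() below.
fd, ipc_filepath = tempfile.mkstemp(prefix="sqlmapipc-", suffix=".db")
os.close(fd)

setup = sqlite3.connect(ipc_filepath, isolation_level=None)
setup.execute("CREATE TABLE logs(id INTEGER PRIMARY KEY AUTOINCREMENT, time TEXT, level TEXT, message TEXT)")
setup.close()

# Writer side (what the engine's logging handler does): one row per emitted
# record. isolation_level=None puts the connection in autocommit mode, so
# every INSERT becomes visible to other processes immediately.
writer = sqlite3.connect(ipc_filepath, isolation_level=None)
writer.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?)",
               (time.strftime("%X"), "INFO", "testing connection to the target url"))
writer.close()

# Reader side (what the REST API does when a log endpoint is hit): a separate
# connection simply selects the committed rows; no pipe or shared file
# descriptor is needed between the two processes.
reader = sqlite3.connect(ipc_filepath, isolation_level=None)
for id_, time_, level, message in reader.execute("SELECT id, time, level, message FROM logs"):
    print("%d [%s] [%s] %s" % (id_, time_, level, message))
reader.close()

os.remove(ipc_filepath)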
@@ -13,6 +13,7 @@ import re
 import socket
 import string
 import sys
+import sqlite3
 import threading
 import time
 import urllib2
@@ -1793,29 +1794,21 @@ def _mergeOptions(inputOptions, overrideOptions):
         if hasattr(conf, key) and conf[key] is None:
             conf[key] = value

-# Logger recorder object, which keeps the log structure
 class LogRecorder(logging.StreamHandler):
-    """
-    Logging handler class which only records CUSTOM_LOGGING.PAYLOAD entries
-    to a global list.
-    """
-    loghist = []
-
     def emit(self, record):
         """
-        Simply record the emitted events.
+        Record emitted events to temporary database for asynchronous I/O
+        communication with the parent process
         """
-        self.loghist.append({'levelname': record.levelname,
-                             'text': record.msg % record.args if record.args else record.msg,
-                             'id': len(self.loghist) + 1})
-        if conf.fdLog:
-            # TODO: this is very heavy operation and slows down a lot the
-            # whole execution of the sqlmap engine, find an alternative
-            os.write(conf.fdLog, base64pickle(self.loghist))
+        connection = sqlite3.connect(conf.ipc, isolation_level=None)
+        cursor = connection.cursor()
+        cursor.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?)",
+                       (time.strftime("%X"), record.levelname, record.msg % record.args if record.args else record.msg))
+        cursor.close()
+        connection.close()

 def _setRestAPILog():
-    if hasattr(conf, "fdLog") and conf.fdLog:
+    if hasattr(conf, "ipc"):
         logger.removeHandler(LOGGER_HANDLER)
         LOGGER_RECORDER = LogRecorder()
         logger.addHandler(LOGGER_RECORDER)
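
For readers less familiar with the logging module: LogRecorder above is an ordinary handler, so once _setRestAPILog() attaches it, every record emitted through sqlmap's logger runs emit() and lands in the task database. A rough stand-alone sketch of the same hook-up follows; the logger name, database path and log call are stand-ins for sqlmap's logger, conf.ipc and its real messages.

import logging
import sqlite3
import time

IPC_DATABASE = "/tmp/sqlmapipc-example.db"  # stand-in for conf.ipc

# Created here only to keep the sketch self-contained; in sqlmap the REST API
# process prepares this table in task_new().
setup = sqlite3.connect(IPC_DATABASE, isolation_level=None)
setup.execute("CREATE TABLE IF NOT EXISTS logs(id INTEGER PRIMARY KEY AUTOINCREMENT, time TEXT, level TEXT, message TEXT)")
setup.close()

class SQLiteLogRecorder(logging.StreamHandler):
    def emit(self, record):
        # Open, insert, close for every record, as in the diff above; short-lived
        # autocommit connections avoid holding locks on the shared database.
        connection = sqlite3.connect(IPC_DATABASE, isolation_level=None)
        connection.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?)",
                           (time.strftime("%X"), record.levelname,
                            record.msg % record.args if record.args else record.msg))
        connection.close()

logger = logging.getLogger("example")
logger.setLevel(logging.INFO)
logger.addHandler(SQLiteLogRecorder())

logger.info("testing %s", "the handler")  # becomes one row in the logs table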
@@ -7,6 +7,7 @@ See the file 'doc/COPYING' for copying permission

 import os
 import shutil
+import sqlite3
 import tempfile

 from subprocess import PIPE
@@ -39,7 +40,6 @@ RESTAPI_SERVER_PORT = 8775

 # Local global variables
 adminid = ""
-pipes = dict()
 procs = dict()
 tasks = AttribDict()
@@ -115,6 +115,19 @@ def task_new():
     taskid = hexencode(os.urandom(16))
     tasks[taskid] = init_options()

+    # Initiate the temporary database for asynchronous I/O with the
+    # sqlmap engine (children processes)
+    _, ipc_filepath = tempfile.mkstemp(prefix="sqlmapipc-", suffix=".db", text=False)
+    connection = sqlite3.connect(ipc_filepath, isolation_level=None)
+    cursor = connection.cursor()
+    cursor.execute("DROP TABLE IF EXISTS logs")
+    cursor.execute("CREATE TABLE logs(id INTEGER PRIMARY KEY AUTOINCREMENT, time TEXT, level TEXT, message TEXT)")
+    cursor.close()
+    connection.close()
+
+    # Set the temporary database to use for asynchronous I/O communication
+    tasks[taskid].ipc = ipc_filepath
+
     return jsonize({"taskid": taskid})

 @get("/task/<taskid>/destroy")
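
One detail worth spelling out is how the engine learns the database path at all: scan_start() below serializes the whole per-task options dictionary, now including ipc, with base64pickle() and passes it on the child's command line via --pickled-options, where it is merged back into conf (see the _mergeOptions() hunk above). A rough sketch of that round trip follows; the helper bodies are illustrative guesses at what sqlmap's base64pickle()/base64unpickle() do, and the option values are made up.

import base64
import pickle

# Illustrative stand-ins for the base64pickle()/base64unpickle() helpers
# referenced in the diff; the real implementations live in the sqlmap codebase.
def base64pickle(value):
    return base64.b64encode(pickle.dumps(value, pickle.HIGHEST_PROTOCOL))

def base64unpickle(value):
    return pickle.loads(base64.b64decode(value))

# REST API side: per-task options, including the IPC database path, collapse
# into a single command-line argument for the child process, e.g.
# execute("python sqlmap.py --pickled-options %s" % argument, ...)
options = {"url": "http://192.168.1.121/sqlmap/mysql/get_int.php?id=1",  # made-up target
           "ipc": "/tmp/sqlmapipc-abc123.db"}                            # made-up path
argument = base64pickle(options)

# Engine side: the argument is unpickled back into the options that populate
# conf, which is how conf.ipc becomes visible to LogRecorder.emit().
restored = base64unpickle(argument)
assert restored["ipc"] == options["ipc"]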
@@ -242,7 +255,6 @@ def scan_start(taskid):
     """
     global tasks
     global procs
-    global pipes

     if taskid not in tasks:
         abort(500, "Invalid task ID")
@@ -253,16 +265,11 @@
         tasks[taskid][key] = value

     # Overwrite output directory (oDir) value to a temporary directory
-    tasks[taskid].oDir = tempfile.mkdtemp(prefix="sqlmap-")
+    tasks[taskid].oDir = tempfile.mkdtemp(prefix="sqlmaptask-")

     # Launch sqlmap engine in a separate thread
     logger.debug("starting a scan for task ID %s" % taskid)

-    pipes[taskid] = os.pipe()
-
-    # Provide sqlmap engine with the writable pipe for logging
-    tasks[taskid]["fdLog"] = pipes[taskid][1]
-
     # Launch sqlmap engine
     procs[taskid] = execute("python sqlmap.py --pickled-options %s" % base64pickle(tasks[taskid]), shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=False)
@@ -273,7 +280,6 @@ def scan_output(taskid):
     """
     Read the standard output of sqlmap core execution
     """
-    global pipes
     global tasks

     if taskid not in tasks:
@@ -303,46 +309,51 @@ def scan_delete(taskid):
 @get("/scan/<taskid>/log/<start>/<end>")
 def scan_log_limited(taskid, start, end):
     """
-    Retrieve the log messages
+    Retrieve a subset of log messages
     """
-    log = None
+    json_log_messages = {}

     if taskid not in tasks:
         abort(500, "Invalid task ID")

+    # Temporary "protection" against SQL injection FTW ;)
     if not start.isdigit() or not end.isdigit() or end <= start:
         abort(500, "Invalid start or end value, must be digits")

-    start = max(0, int(start) - 1)
+    start = max(1, int(start))
     end = max(1, int(end))
-    pickledLog = os.read(pipes[taskid][0], 100000)

-    try:
-        log = base64unpickle(pickledLog)
-        log = log[slice(start, end)]
-    except (KeyError, IndexError, TypeError), e:
-        logger.error("handled exception when trying to unpickle logger dictionary in scan_log_limited(): %s" % str(e))
+    # Read a subset of log messages from the temporary I/O database
+    connection = sqlite3.connect(tasks[taskid].ipc, isolation_level=None)
+    cursor = connection.cursor()
+    cursor.execute("SELECT id, time, level, message FROM logs WHERE id >= %d AND id <= %d" % (start, end))
+    db_log_messages = cursor.fetchall()

-    return jsonize({"log": log})
+    for (id_, time_, level, message) in db_log_messages:
+        json_log_messages[id_] = {"time": time_, "level": level, "message": message}
+
+    return jsonize({"log": json_log_messages})

 @get("/scan/<taskid>/log")
 def scan_log(taskid):
     """
     Retrieve the log messages
     """
-    log = None
+    json_log_messages = {}

     if taskid not in tasks:
         abort(500, "Invalid task ID")

-    pickledLog = os.read(pipes[taskid][0], 100000)
+    # Read all log messages from the temporary I/O database
+    connection = sqlite3.connect(tasks[taskid].ipc, isolation_level=None)
+    cursor = connection.cursor()
+    cursor.execute("SELECT id, time, level, message FROM logs")
+    db_log_messages = cursor.fetchall()

-    try:
-        log = base64unpickle(pickledLog)
-    except (KeyError, IndexError, TypeError), e:
-        logger.error("handled exception when trying to unpickle logger dictionary in scan_log(): %s" % str(e))
+    for (id_, time_, level, message) in db_log_messages:
+        json_log_messages[id_] = {"time": time_, "level": level, "message": message}

-    return jsonize({"log": log})
+    return jsonize({"log": json_log_messages})

 # Function to handle files inside the output directory
 @get("/download/<taskid>/<target>/<filename:path>")
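
On the client side, the two log endpoints now return JSON objects keyed by the database id of each entry, which makes incremental polling straightforward: remember the highest id already displayed and skip everything at or below it on the next request. A rough usage sketch in Python 2 (urllib2, matching the codebase); the host and port follow the RESTAPI_SERVER_PORT default, the task id is a placeholder for whatever /task/new returned, and the response shape is assumed from scan_log() above.

import json
import time
import urllib2

RESTAPI = "http://127.0.0.1:8775"            # RESTAPI_SERVER_PORT default
taskid = "0123456789abcdef0123456789abcdef"  # placeholder returned by /task/new

seen = 0

while True:
    # {"log": {"<id>": {"time": ..., "level": ..., "message": ...}, ...}}
    reply = json.load(urllib2.urlopen("%s/scan/%s/log" % (RESTAPI, taskid)))

    # JSON object keys are strings, so turn them back into integers to keep
    # insertion order and to skip entries that were already printed.
    for id_ in sorted(int(key) for key in reply["log"]):
        if id_ > seen:
            entry = reply["log"][str(id_)]
            print("[%s] [%s] %s" % (entry["time"], entry["level"], entry["message"]))
            seen = id_

    time.sleep(5)

The ranged variant /scan/<taskid>/log/<start>/<end> covers the case where only a known slice of the log is wanted.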