Mirror of https://github.com/sqlmapproject/sqlmap.git
Merge branch 'master' of github.com:sqlmapproject/sqlmap
Commit 95b922309c
sqlmap.py

@@ -30,12 +30,14 @@ from lib.core.common import unhandledExceptionMessage
 from lib.core.exception import SqlmapBaseException
 from lib.core.exception import SqlmapSilentQuitException
 from lib.core.exception import SqlmapUserQuitException
+from lib.core.option import initOptions
 from lib.core.option import init
 from lib.core.profiling import profile
 from lib.core.settings import LEGAL_DISCLAIMER
 from lib.core.testing import smokeTest
 from lib.core.testing import liveTest
 from lib.parse.cmdline import cmdLineParser
+from lib.utils.api import setRestAPILog
 from lib.utils.api import StdDbOut

 def modulePath():
@@ -57,19 +59,22 @@ def main():

     # Store original command line options for possible later restoration
     cmdLineOptions.update(cmdLineParser().__dict__)
-    init(cmdLineOptions)
+    initOptions(cmdLineOptions)

     if hasattr(conf, "api"):
         # Overwrite system standard output and standard error to write
         # to an IPC database
         sys.stdout = StdDbOut(conf.taskid, messagetype="stdout")
         sys.stderr = StdDbOut(conf.taskid, messagetype="stderr")
+        setRestAPILog()

     banner()

     dataToStdout("[!] legal disclaimer: %s\n\n" % LEGAL_DISCLAIMER, forceOutput=True)
     dataToStdout("[*] starting at %s\n\n" % time.strftime("%X"), forceOutput=True)

+    init()
+
     if conf.profile:
         profile()
     elif conf.smokeTest:
(live test runner script, filename not shown)

@@ -66,9 +66,10 @@ def main():
     test_counts = []
     attachments = {}

-    command_line = "python /opt/sqlmap/sqlmap.py --live-test"
-    proc = subprocess.Popen(command_line, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    proc = subprocess.Popen("python /opt/sqlmap/sqlmap.py --update", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    proc.wait()
+
+    proc = subprocess.Popen("python /opt/sqlmap/sqlmap.py --live-test", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     proc.wait()
     stdout, stderr = proc.communicate()

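A note on the subprocess calls above: communicate() already waits for the child to exit, and the Python documentation warns that wait() combined with stdout=PIPE/stderr=PIPE can deadlock once the child fills the OS pipe buffer. A minimal sketch of the safer pattern (illustrative only, reusing the same placeholder command path):

# Illustrative: let communicate() drain the pipes and wait for exit,
# instead of calling wait() before communicate()
import subprocess

proc = subprocess.Popen("python /opt/sqlmap/sqlmap.py --live-test", shell=True,
                        stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = proc.communicate()  # waits for termination without risking a pipe deadlock
returncode = proc.returncode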
lib/controller/action.py

@@ -12,6 +12,7 @@ from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.data import paths
+from lib.core.enums import API_CONTENT_TYPE
 from lib.core.exception import SqlmapNoneDataException
 from lib.core.exception import SqlmapUnsupportedDBMSException
 from lib.core.settings import SUPPORTED_DBMS

@@ -77,7 +78,7 @@ def action():
     if conf.getPasswordHashes:
         try:
             conf.dumper.userSettings("database management system users password hashes",
-                                     conf.dbmsHandler.getPasswordHashes(), "password hash")
+                                     conf.dbmsHandler.getPasswordHashes(), "password hash", API_CONTENT_TYPE.PASSWORDS)
         except SqlmapNoneDataException, ex:
             logger.critical(ex)
         except:

@@ -86,7 +87,7 @@ def action():
     if conf.getPrivileges:
         try:
             conf.dumper.userSettings("database management system users privileges",
-                                     conf.dbmsHandler.getPrivileges(), "privilege")
+                                     conf.dbmsHandler.getPrivileges(), "privilege", API_CONTENT_TYPE.PRIVILEGES)
         except SqlmapNoneDataException, ex:
             logger.critical(ex)
         except:

@@ -95,7 +96,7 @@ def action():
     if conf.getRoles:
         try:
             conf.dumper.userSettings("database management system users roles",
-                                     conf.dbmsHandler.getRoles(), "role")
+                                     conf.dbmsHandler.getRoles(), "role", API_CONTENT_TYPE.ROLES)
         except SqlmapNoneDataException, ex:
             logger.critical(ex)
         except:

@@ -111,10 +112,10 @@ def action():
             conf.dumper.dbTables(tableExists(paths.COMMON_TABLES))

     if conf.getSchema:
-        conf.dumper.dbTableColumns(conf.dbmsHandler.getSchema())
+        conf.dumper.dbTableColumns(conf.dbmsHandler.getSchema(), API_CONTENT_TYPE.SCHEMA)

     if conf.getColumns:
-        conf.dumper.dbTableColumns(conf.dbmsHandler.getColumns())
+        conf.dumper.dbTableColumns(conf.dbmsHandler.getColumns(), API_CONTENT_TYPE.COLUMNS)

     if conf.getCount:
         conf.dumper.dbTablesCount(conf.dbmsHandler.getCount())
lib/controller/checks.py

@@ -36,6 +36,7 @@ from lib.core.common import urldecode
 from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
+from lib.core.enums import API_CONTENT_TYPE
 from lib.core.enums import HASHDB_KEYS
 from lib.core.enums import HEURISTIC_TEST
 from lib.core.enums import HTTPMETHOD

@@ -151,9 +152,11 @@ def _showInjections():
     header = "sqlmap identified the following injection points with "
     header += "a total of %d HTTP(s) requests" % kb.testQueryCount

-    data = "".join(set(map(lambda x: _formatInjection(x), kb.injections))).rstrip("\n")
-
-    conf.dumper.string(header, data)
+    if hasattr(conf, "api"):
+        conf.dumper.string("", kb.injections, content_type=API_CONTENT_TYPE.TECHNIQUES)
+    else:
+        data = "".join(set(map(lambda x: _formatInjection(x), kb.injections))).rstrip("\n")
+        conf.dumper.string(header, data)

     if conf.tamper:
         warnMsg = "changes made by tampering scripts are not "
lib/core/common.py

@@ -760,7 +760,8 @@ def dataToStdout(data, forceOutput=False, bold=False, content_type=None, status=
                 message = data

             if hasattr(conf, "api"):
-                sys.stdout.write(message, status=status, content_type=content_type)
+                if content_type and status:
+                    sys.stdout.write(message, status, content_type)
             else:
                 sys.stdout.write(setColor(message, bold))

@@ -772,7 +773,7 @@ def dataToStdout(data, forceOutput=False, bold=False, content_type=None, status=
     if kb.get("multiThreadMode"):
         logging._releaseLock()

-    kb.prependFlag = len(data) == 1 and data not in ('\n', '\r') or len(data) > 2 and data[0] == '\r' and data[-1] != '\n'
+    kb.prependFlag = isinstance(data, basestring) and (len(data) == 1 and data not in ('\n', '\r') or len(data) > 2 and data[0] == '\r' and data[-1] != '\n')

 def dataToTrafficFile(data):
     if not conf.trafficFile:
lib/core/dump.py

@@ -46,7 +46,6 @@ class Dump(object):
     """
     This class defines methods used to parse and output the results
    of SQL injection actions
-
     """

     def __init__(self):

@@ -85,8 +84,8 @@ class Dump(object):
     def getOutputFile(self):
         return self._outputFile

-    def singleString(self, data):
-        self._write(data)
+    def singleString(self, data, content_type=None):
+        self._write(data, content_type=content_type)

     def string(self, header, data, content_type=None, sort=True):
         kb.stickyLevel = None

@@ -161,9 +160,6 @@ class Dump(object):
     def userSettings(self, header, userSettings, subHeader, content_type=None):
         self._areAdmins = set()

-        if userSettings:
-            self._write("%s:" % header)
-
         if isinstance(userSettings, (tuple, list, set)):
             self._areAdmins = userSettings[1]
             userSettings = userSettings[0]

@@ -171,6 +167,13 @@ class Dump(object):
         users = userSettings.keys()
         users.sort(key=lambda x: x.lower() if isinstance(x, basestring) else x)

+        if hasattr(conf, "api"):
+            self._write(userSettings, content_type=content_type)
+            return
+
+        if userSettings:
+            self._write("%s:" % header)
+
         for user in users:
             settings = userSettings[user]

@@ -196,8 +199,12 @@ class Dump(object):
     def dbs(self, dbs):
         self.lister("available databases", dbs, content_type=API_CONTENT_TYPE.DBS)

-    def dbTables(self, dbTables, content_type=API_CONTENT_TYPE.TABLES):
+    def dbTables(self, dbTables):
         if isinstance(dbTables, dict) and len(dbTables) > 0:
+            if hasattr(conf, "api"):
+                self._write(dbTables, content_type=API_CONTENT_TYPE.TABLES)
+                return
+
             maxlength = 0

             for tables in dbTables.values():

@@ -230,12 +237,16 @@ class Dump(object):

             self._write("+%s+\n" % lines)
         elif dbTables is None or len(dbTables) == 0:
-            self.singleString("No tables found")
+            self.singleString("No tables found", content_type=API_CONTENT_TYPE.TABLES)
         else:
-            self.string("tables", dbTables)
+            self.string("tables", dbTables, content_type=API_CONTENT_TYPE.TABLES)

-    def dbTableColumns(self, tableColumns, content_type=API_CONTENT_TYPE.COLUMNS):
+    def dbTableColumns(self, tableColumns, content_type=None):
         if isinstance(tableColumns, dict) and len(tableColumns) > 0:
+            if hasattr(conf, "api"):
+                self._write(tableColumns, content_type=content_type)
+                return
+
             for db, tables in tableColumns.items():
                 if not db:
                     db = "All"

@@ -301,8 +312,12 @@ class Dump(object):
         else:
             self._write("+%s+\n" % lines1)

-    def dbTablesCount(self, dbTables, content_type=API_CONTENT_TYPE.COUNT):
+    def dbTablesCount(self, dbTables):
         if isinstance(dbTables, dict) and len(dbTables) > 0:
+            if hasattr(conf, "api"):
+                self._write(dbTables, content_type=API_CONTENT_TYPE.COUNT)
+                return
+
             maxlength1 = len("Table")
             maxlength2 = len("Entries")

@@ -343,7 +358,7 @@ class Dump(object):
         else:
             logger.error("unable to retrieve the number of entries for any table")

-    def dbTableValues(self, tableValues, content_type=API_CONTENT_TYPE.DUMP_TABLE):
+    def dbTableValues(self, tableValues):
         replication = None
         rtable = None
         dumpFP = None

@@ -356,6 +371,10 @@ class Dump(object):
             db = "All"
         table = tableValues["__infos__"]["table"]

+        if hasattr(conf, "api"):
+            self._write(tableValues, content_type=API_CONTENT_TYPE.DUMP_TABLE)
+            return
+
         if conf.dumpFormat == DUMP_FORMAT.SQLITE:
             replication = Replication("%s%s%s.sqlite3" % (conf.dumpPath, os.sep, unsafeSQLIdentificatorNaming(db)))
         elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):

@@ -549,7 +568,11 @@ class Dump(object):
             dumpFP.close()
         logger.info("table '%s.%s' dumped to %s file '%s'" % (db, table, conf.dumpFormat, dumpFileName))

-    def dbColumns(self, dbColumnsDict, colConsider, dbs, content_type=API_CONTENT_TYPE.COLUMNS):
+    def dbColumns(self, dbColumnsDict, colConsider, dbs):
+        if hasattr(conf, "api"):
+            self._write(dbColumnsDict, content_type=API_CONTENT_TYPE.COLUMNS)
+            return
+
         for column in dbColumnsDict.keys():
             if colConsider == "1":
                 colConsiderStr = "s like '" + column + "' were"
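Every Dump method above now short-circuits when running under the REST API: instead of rendering ASCII tables it hands the raw Python object to self._write() tagged with a content type, and StdDbOut later serializes it with jsonize() into the IPC database. On the client side that object has to be re-rendered; a rough illustration for a tables payload (the dict-of-lists shape is an assumption based on the isinstance(dbTables, dict) checks above, and the sample data is made up):

# Illustrative re-rendering of a TABLES payload on the API client side;
# the sample dict is invented, the real payload is whatever enumeration produced
tables = {"testdb": ["users", "orders"], "information_schema": ["TABLES", "COLUMNS"]}

for db in sorted(tables):
    print("Database: %s" % db)
    for table in sorted(tables[db]):
        print("  [*] %s" % table)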
lib/core/enums.py

@@ -246,29 +246,30 @@ class WEB_API:

 class API_CONTENT_TYPE:
     TECHNIQUES = 0
-    BANNER = 1
-    CURRENT_USER = 2
-    CURRENT_DB = 3
-    HOSTNAME = 4
-    IS_DBA = 5
-    USERS = 6
-    PASSWORDS = 7
-    PRIVILEGES = 8
-    ROLES = 9
-    DBS = 10
-    TABLES = 11
-    COLUMNS = 12
-    SCHEMA = 13
-    COUNT = 14
-    DUMP_TABLE = 15
-    SEARCH = 16
-    SQL_QUERY = 17
-    COMMON_TABLES = 18
-    COMMON_COLUMNS = 19
-    FILE_READ = 20
-    FILE_WRITE = 21
-    OS_CMD = 22
-    REG_READ = 23
+    DBMS_FINGERPRINT = 1
+    BANNER = 2
+    CURRENT_USER = 3
+    CURRENT_DB = 4
+    HOSTNAME = 5
+    IS_DBA = 6
+    USERS = 7
+    PASSWORDS = 8
+    PRIVILEGES = 9
+    ROLES = 10
+    DBS = 11
+    TABLES = 12
+    COLUMNS = 13
+    SCHEMA = 14
+    COUNT = 15
+    DUMP_TABLE = 16
+    SEARCH = 17
+    SQL_QUERY = 18
+    COMMON_TABLES = 19
+    COMMON_COLUMNS = 20
+    FILE_READ = 21
+    FILE_WRITE = 22
+    OS_CMD = 23
+    REG_READ = 24

 class API_CONTENT_STATUS:
     IN_PROGRESS = 0
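Note that inserting DBMS_FINGERPRINT = 1 shifts every following value by one, so raw content_type integers stored or logged before this change no longer line up. Consumers of /scan/<taskid>/data are better off resolving codes through the class attributes than hard-coding numbers; a small reverse-lookup helper (illustrative, not part of the commit, with a trimmed stand-in class so the snippet runs on its own):

# Illustrative reverse lookup for API_CONTENT_TYPE codes; with sqlmap available,
# import the real class instead of the trimmed stand-in below
class API_CONTENT_TYPE:
    TECHNIQUES = 0
    DBMS_FINGERPRINT = 1
    BANNER = 2
    # remaining members omitted here; see the full list above

CONTENT_TYPE_NAMES = dict(
    (value, name)
    for name, value in vars(API_CONTENT_TYPE).items()
    if isinstance(value, int)
)

print(CONTENT_TYPE_NAMES[1])  # "DBMS_FINGERPRINT"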
lib/core/option.py

@@ -136,7 +136,6 @@ from lib.request.httpshandler import HTTPSHandler
 from lib.request.rangehandler import HTTPRangeHandler
 from lib.request.redirecthandler import SmartRedirectHandler
 from lib.request.templates import getPageTemplate
-from lib.utils.api import setRestAPILog
 from lib.utils.crawler import crawl
 from lib.utils.deps import checkDependencies
 from lib.utils.google import Google

@@ -2052,21 +2051,22 @@ def _resolveCrossReferences():
     lib.core.common.getPageTemplate = getPageTemplate
     lib.core.convert.singleTimeWarnMessage = singleTimeWarnMessage

-def init(inputOptions=AttribDict(), overrideOptions=False):
-    """
-    Set attributes into both configuration and knowledge base singletons
-    based upon command line and configuration file options.
-    """
-
+def initOptions(inputOptions=AttribDict(), overrideOptions=False):
     if not inputOptions.disableColoring:
         coloramainit()

     _setConfAttributes()
     _setKnowledgeBaseAttributes()
     _mergeOptions(inputOptions, overrideOptions)
+
+def init():
+    """
+    Set attributes into both configuration and knowledge base singletons
+    based upon command line and configuration file options.
+    """
+
     _useWizardInterface()
     setVerbosity()
-    setRestAPILog()
     _saveCmdline()
     _setRequestFromFile()
     _cleanupOptions()
lib/core/testing.py

@@ -29,6 +29,7 @@ from lib.core.exception import SqlmapBaseException
 from lib.core.exception import SqlmapNotVulnerableException
 from lib.core.log import LOGGER_HANDLER
 from lib.core.option import init
+from lib.core.option import initOptions
 from lib.core.optiondict import optDict
 from lib.core.settings import UNICODE_ENCODING
 from lib.parse.cmdline import cmdLineParser

@@ -243,7 +244,8 @@ def initCase(switches=None):
         if key in cmdLineOptions.__dict__:
             cmdLineOptions.__dict__[key] = value

-    init(cmdLineOptions, True)
+    initOptions(cmdLineOptions, True)
+    init()

 def cleanCase():
     shutil.rmtree(paths.SQLMAP_OUTPUT_PATH, True)
lib/utils/api.py (137 lines changed)

@@ -60,10 +60,10 @@ class Database(object):

     def create(self):
         _, self.database = tempfile.mkstemp(prefix="sqlmapipc-", text=False)
-        logger.info("IPC database is %s" % self.database)
+        logger.debug("IPC database: %s" % self.database)

     def connect(self):
-        self.connection = sqlite3.connect(self.database, timeout=1, isolation_level=None)
+        self.connection = sqlite3.connect(self.database, timeout=3, isolation_level=None)
         self.cursor = self.connection.cursor()

     def disconnect(self):
@@ -132,18 +132,32 @@ class Task(object):
         shutil.rmtree(self.output_directory)

     def engine_start(self):
-        self.process = Popen("python sqlmap.py --pickled-options %s" % base64pickle(self.options), shell=True, stdin=PIPE)
+        self.process = Popen("python sqlmap.py --pickled-options %s" % base64pickle(self.options), shell=True, stdin=PIPE, close_fds=False)

     def engine_stop(self):
         if self.process:
-            self.process.terminate()
+            return self.process.terminate()
+        else:
+            return None

     def engine_kill(self):
         if self.process:
-            self.process.kill()
+            return self.process.kill()
+        else:
+            return None

-    def engine_get_pid(self):
-        return self.processid.pid
+    def engine_get_id(self):
+        if self.process:
+            return self.process.pid
+        else:
+            return None
+
+    def engine_get_returncode(self):
+        self.process.poll()
+        return self.process.returncode
+
+    def engine_has_terminated(self):
+        return isinstance(self.engine_get_returncode(), int)

 # Wrapper functions for sqlmap engine
 class StdDbOut(object):
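engine_has_terminated() checks isinstance(engine_get_returncode(), int) rather than simple truthiness because Popen.returncode stays None while the child runs and becomes 0 (which is falsy) on a clean exit; poll() is what refreshes it. A standalone illustration of that behaviour:

# Illustrative: Popen.returncode is None while running and 0 (falsy) after a clean
# exit, which is why the termination check above uses isinstance(..., int)
import subprocess
import sys
import time

process = subprocess.Popen([sys.executable, "-c", "import time; time.sleep(1)"])
print(process.returncode)                    # None: still running

while process.poll() is None:                # poll() returns None until the child exits
    time.sleep(0.2)

print(process.returncode)                    # 0 after a clean exit
print(isinstance(process.returncode, int))   # True, even though 0 is falsy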
@@ -162,9 +176,13 @@ class StdDbOut(object):

     def write(self, value, status=None, content_type=None):
         if self.messagetype == "stdout":
-            conf.database_cursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)", (self.taskid, status, content_type, jsonize(value)))
+            #conf.database_cursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)",
+            #                             (self.taskid, status, content_type, base64pickle(value)))
+            conf.database_cursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)",
+                                         (self.taskid, status, content_type, jsonize(value)))
         else:
-            conf.database_cursor.execute("INSERT INTO errors VALUES(NULL, ?, ?)", (self.taskid, value))
+            conf.database_cursor.execute("INSERT INTO errors VALUES(NULL, ?, ?)",
+                                         (self.taskid, str(value) if value else ""))

     def flush(self):
         pass
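StdDbOut is what sqlmap.py installs as sys.stdout and sys.stderr in API mode (see the first hunk of this change set), so everything the engine prints becomes rows in the IPC database rather than terminal output; flush() has to exist because callers that flush sys.stdout would otherwise fail. A stripped-down standalone illustration of the same pattern with an in-memory SQLite table (not sqlmap's actual class):

# Illustrative only: a file-like stand-in that mirrors writes into a SQLite table
import sqlite3
import sys

class DbOut(object):
    def __init__(self, taskid, connection):
        self.taskid = taskid
        self.connection = connection
        self.connection.execute("CREATE TABLE IF NOT EXISTS data(taskid TEXT, value TEXT)")

    def write(self, value):
        # everything printed while this object is installed as sys.stdout lands here
        self.connection.execute("INSERT INTO data VALUES(?, ?)", (self.taskid, value))

    def flush(self):
        # replacements for sys.stdout must provide flush(); autocommit needs no work here
        pass

connection = sqlite3.connect(":memory:", isolation_level=None)
original, sys.stdout = sys.stdout, DbOut("demo-task", connection)
print("hello from the engine")
sys.stdout = original
print(connection.execute("SELECT value FROM data").fetchall())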
@@ -257,35 +275,42 @@ def task_new():
     taskid = hexencode(os.urandom(8))
     tasks[taskid] = Task(taskid)
+
+    logger.debug("Created new task ID: %s" % taskid)
     return jsonize({"taskid": taskid})

-@get("/task/<taskid>/destroy")
-def task_destroy(taskid):
+@get("/task/<taskid>/delete")
+def task_delete(taskid):
     """
-    Destroy own task ID
+    Delete own task ID
     """
     if taskid in tasks:
         tasks[taskid].clean_filesystem()
         tasks.pop(taskid)

+        logger.debug("Deleted task ID: %s" % taskid)
         return jsonize({"success": True})
     else:
         abort(500, "Invalid task ID")

-# Admin's methods
-@get("/task/<taskid>/list")
+###################
+# Admin functions #
+###################
+
+@get("/admin/<taskid>/list")
 def task_list(taskid):
     """
-    List all active tasks
+    List task pull
     """
     if is_admin(taskid):
-        return jsonize({"tasks": tasks})
+        logger.debug("Listed task pull")
+        return jsonize({"tasks": tasks, "tasks_num": len(tasks)})
     else:
         abort(401)

-@get("/task/<taskid>/flush")
+@get("/admin/<taskid>/flush")
 def task_flush(taskid):
     """
-    Flush task spool (destroy all tasks)
+    Flush task spool (delete all tasks)
     """
     global tasks

@@ -294,6 +319,7 @@ def task_flush(taskid):
             tasks[task].clean_filesystem()

         tasks = dict()
+        logger.debug("Flushed task pull")
         return jsonize({"success": True})
     else:
         abort(401)

@@ -302,20 +328,7 @@ def task_flush(taskid):
 # sqlmap core interact functions #
 ##################################

-# Admin's methods
-@get("/status/<taskid>")
-def status(taskid):
-    """
-    Verify the status of the API as well as the core
-    """
-
-    if is_admin(taskid):
-        tasks_num = len(tasks)
-        return jsonize({"tasks": tasks_num})
-    else:
-        abort(401)
-
-# Functions to handle options
+# Handle task's options
 @get("/option/<taskid>/list")
 def option_list(taskid):
     """
@@ -324,13 +337,15 @@ def option_list(taskid):
     if taskid not in tasks:
         abort(500, "Invalid task ID")

-    return jsonize(tasks[taskid].get_options())
+    return jsonize({"options": tasks[taskid].get_options()})

 @post("/option/<taskid>/get")
 def option_get(taskid):
     """
     Get the value of an option (command line switch) for a certain task ID
     """
+    global tasks
+
     if taskid not in tasks:
         abort(500, "Invalid task ID")

@@ -339,7 +354,7 @@ def option_get(taskid):
     if option in tasks[taskid]:
         return jsonize({option: tasks[taskid].get_option(option)})
     else:
-        return jsonize({option: None})
+        return jsonize({option: "not set"})

 @post("/option/<taskid>/set")
 def option_set(taskid):

@@ -356,7 +371,7 @@ def option_set(taskid):

     return jsonize({"success": True})

-# Function to handle scans
+# Handle scans
 @post("/scan/<taskid>/start")
 def scan_start(taskid):
     """
@@ -374,13 +389,11 @@ def scan_start(taskid):
     # Overwrite output directory value to a temporary directory
     tasks[taskid].set_output_directory()

-    # Launch sqlmap engine in a separate thread
-    logger.debug("starting a scan for task ID %s" % taskid)
-
-    # Launch sqlmap engine
+    # Launch sqlmap engine in a separate process
     tasks[taskid].engine_start()

-    return jsonize({"success": True})
+    logger.debug("Started scan for task ID %s" % taskid)
+    return jsonize({"success": True, "engineid": tasks[taskid].engine_get_id()})

 @get("/scan/<taskid>/stop")
 def scan_stop(taskid):

@@ -392,7 +405,10 @@ def scan_stop(taskid):
     if taskid not in tasks:
         abort(500, "Invalid task ID")

-    return jsonize({"success": tasks[taskid].engine_stop()})
+    tasks[taskid].engine_stop()
+
+    logger.debug("Stopped scan for task ID %s" % taskid)
+    return jsonize({"success": True})

 @get("/scan/<taskid>/kill")
 def scan_kill(taskid):
|
|||
if taskid not in tasks:
|
||||
abort(500, "Invalid task ID")
|
||||
|
||||
return jsonize({"success": tasks[taskid].engine_kill()})
|
||||
tasks[taskid].engine_kill()
|
||||
|
||||
@get("/scan/<taskid>/delete")
|
||||
def scan_delete(taskid):
|
||||
logger.debug("Killed scan for task ID %s" % taskid)
|
||||
return jsonize({"success": True})
|
||||
|
||||
@get("/scan/<taskid>/status")
|
||||
def scan_status(taskid):
|
||||
"""
|
||||
Delete a scan and corresponding temporary output directory and IPC database
|
||||
Returns status of a scan
|
||||
"""
|
||||
global tasks
|
||||
|
||||
if taskid not in tasks:
|
||||
abort(500, "Invalid task ID")
|
||||
|
||||
scan_stop(taskid)
|
||||
tasks[taskid].clean_filesystem()
|
||||
status = "terminated" if tasks[taskid].engine_has_terminated() is True else "running"
|
||||
|
||||
return jsonize({"success": True})
|
||||
logger.debug("Requested status of scan for task ID %s" % taskid)
|
||||
return jsonize({"status": status, "returncode": tasks[taskid].engine_get_returncode()})
|
||||
|
||||
@get("/scan/<taskid>/data")
|
||||
def scan_data(taskid):
|
||||
|
@@ -436,12 +455,14 @@ def scan_data(taskid):

     # Read all data from the IPC database for the taskid
     for status, content_type, value in db.execute("SELECT status, content_type, value FROM data WHERE taskid = ? ORDER BY id ASC", (taskid,)):
-        json_data_message.append([status, content_type, dejsonize(value)])
+        #json_data_message.append({"status": status, "type": content_type, "value": base64unpickle(value)})
+        json_data_message.append({"status": status, "type": content_type, "value": dejsonize(value)})

     # Read all error messages from the IPC database
     for error in db.execute("SELECT error FROM errors WHERE taskid = ? ORDER BY id ASC", (taskid,)):
         json_errors_message.append(error)

+    logger.debug("Retrieved data and error messages for scan for task ID %s" % taskid)
     return jsonize({"data": json_data_message, "error": json_errors_message})

 # Functions to handle scans' logs

@@ -467,6 +488,7 @@ def scan_log_limited(taskid, start, end):
     for time_, level, message in db.execute("SELECT time, level, message FROM logs WHERE taskid = ? AND id >= ? AND id <= ? ORDER BY id ASC", (taskid, start, end)):
         json_log_messages.append({"time": time_, "level": level, "message": message})

+    logger.debug("Retrieved subset of log messages for scan for task ID %s" % taskid)
     return jsonize({"log": json_log_messages})

 @get("/scan/<taskid>/log")

@@ -485,6 +507,7 @@ def scan_log(taskid):
     for time_, level, message in db.execute("SELECT time, level, message FROM logs WHERE taskid = ? ORDER BY id ASC", (taskid,)):
         json_log_messages.append({"time": time_, "level": level, "message": message})

+    logger.debug("Retrieved log messages for scan for task ID %s" % taskid)
     return jsonize({"log": json_log_messages})

 # Function to handle files inside the output directory
|
|||
if os.path.exists(path):
|
||||
return static_file(filename, root=path)
|
||||
else:
|
||||
abort(500)
|
||||
abort(500, "File does not exist")
|
||||
|
||||
def server(host="0.0.0.0", port=RESTAPI_SERVER_PORT):
|
||||
"""
|
||||
|
@@ -515,24 +538,26 @@ def server(host="0.0.0.0", port=RESTAPI_SERVER_PORT):
     global db

     adminid = hexencode(os.urandom(16))

-    logger.info("Running REST-JSON API server at '%s:%d'.." % (host, port))
-    logger.info("Admin ID: %s" % adminid)
-
     # Initialize IPC database
     db = Database()
     db.initialize()

+    logger.info("running REST-JSON API server at '%s:%d'.." % (host, port))
+    logger.info("the admin task ID is: %s" % adminid)
+
     # Run RESTful API
-    run(host=host, port=port, quiet=False, debug=False)
+    run(host=host, port=port, quiet=True, debug=False)

 def client(host=RESTAPI_SERVER_HOST, port=RESTAPI_SERVER_PORT):
     """
     REST-JSON API client
     """
     addr = "http://%s:%d" % (host, port)
-    logger.info("starting debug REST-JSON client to '%s'..." % addr)
+    logger.info("Starting REST-JSON API client to '%s'..." % addr)

     # TODO: write a simple client with requests, for now use curl from command line
-    logger.error("not yet implemented, use curl from command line instead for now, for example:")
+    logger.error("Not yet implemented, use curl from command line instead for now, for example:")
     print "\n\t$ curl http://%s:%d/task/new" % (host, port)
     print "\t$ curl -H \"Content-Type: application/json\" -X POST -d '{\"url\": \"http://testphp.vulnweb.com/artists.php?artist=1\"}' http://%s:%d/scan/:taskid/start" % (host, port)
     print "\t$ curl http://%s:%d/scan/:taskid/output" % (host, port)
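The client() stub above still points users at curl, and the TODO asks for a requests-based client. A minimal sketch of what such a client could look like against the endpoints defined earlier in this file (illustrative only, not part of the commit; the host and port are placeholders, the JSON shapes follow the handlers above, and the third-party requests library is an extra dependency):

# Illustrative requests-based walkthrough of the REST-JSON API defined above
import time

import requests

SERVER = "http://127.0.0.1:8775"  # placeholder; adjust to match RESTAPI_SERVER_PORT

# Create a task and start a scan against a test target
taskid = requests.get("%s/task/new" % SERVER).json()["taskid"]
started = requests.post("%s/scan/%s/start" % (SERVER, taskid),
                        json={"url": "http://testphp.vulnweb.com/artists.php?artist=1"}).json()
print("engine pid: %s" % started["engineid"])

# Poll /scan/<taskid>/status until the engine process terminates
while requests.get("%s/scan/%s/status" % (SERVER, taskid)).json()["status"] == "running":
    time.sleep(5)

# Fetch structured results, then clean up the task
report = requests.get("%s/scan/%s/data" % (SERVER, taskid)).json()
for entry in report["data"]:
    print("%s (content type %s): %s" % (entry["status"], entry["type"], entry["value"]))

requests.get("%s/task/%s/delete" % (SERVER, taskid))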