#!/usr/bin/env python

"""
$Id$

Copyright (c) 2006-2010 sqlmap developers (http://sqlmap.sourceforge.net/)
See the file 'doc/COPYING' for copying permission
"""
|
2010-05-24 15:00:49 +04:00
|
|
|
import codecs
|
2008-10-15 19:38:22 +04:00
|
|
|
import cookielib
|
2008-12-20 04:54:08 +03:00
|
|
|
import difflib
|
2010-10-17 01:33:15 +04:00
|
|
|
import inspect
|
2008-10-15 19:38:22 +04:00
|
|
|
import logging
|
|
|
|
import os
|
|
|
|
import re
|
2008-12-04 20:40:03 +03:00
|
|
|
import socket
|
2010-10-15 14:36:29 +04:00
|
|
|
import sys
|
2010-11-02 10:32:08 +03:00
|
|
|
import threading
|
2008-10-15 19:38:22 +04:00
|
|
|
import urllib2
|
|
|
|
import urlparse
|
|
|
|
|
2010-11-15 14:34:57 +03:00
|
|
|
from extra.clientform.clientform import ParseResponse
|
2011-02-02 14:49:59 +03:00
|
|
|
from extra.clientform.clientform import ParseError
|
2010-06-01 16:21:10 +04:00
|
|
|
from extra.keepalive import keepalive
|
2010-10-07 16:12:26 +04:00
|
|
|
from extra.xmlobject import xmlobject
|
2010-11-15 15:07:13 +03:00
|
|
|
from lib.controller.checks import checkConnection
|
2011-01-28 19:36:09 +03:00
|
|
|
from lib.core.common import Backend
|
2011-01-20 18:55:50 +03:00
|
|
|
from lib.core.common import extractRegexResult
|
2010-04-16 17:40:02 +04:00
|
|
|
from lib.core.common import getConsoleWidth
|
2010-10-14 15:06:28 +04:00
|
|
|
from lib.core.common import getFileItems
|
2009-04-22 15:48:07 +04:00
|
|
|
from lib.core.common import getFileType
|
2010-02-04 17:50:54 +03:00
|
|
|
from lib.core.common import normalizePath
|
2010-02-04 17:37:00 +03:00
|
|
|
from lib.core.common import ntToPosixSlashes
|
2011-01-08 12:30:10 +03:00
|
|
|
from lib.core.common import openFile
|
2010-03-27 02:23:25 +03:00
|
|
|
from lib.core.common import parseTargetDirect
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.common import parseTargetUrl
|
|
|
|
from lib.core.common import paths
|
|
|
|
from lib.core.common import randomRange
|
2011-01-13 20:36:54 +03:00
|
|
|
from lib.core.common import randomStr
|
2010-11-07 11:11:56 +03:00
|
|
|
from lib.core.common import readCachedFileContent
|
2010-11-04 13:29:40 +03:00
|
|
|
from lib.core.common import readInput
|
2010-10-28 00:39:50 +04:00
|
|
|
from lib.core.common import runningAsAdmin
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.common import sanitizeStr
|
2010-05-28 19:57:43 +04:00
|
|
|
from lib.core.common import UnicodeRawConfigParser
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.data import conf
|
|
|
|
from lib.core.data import kb
|
|
|
|
from lib.core.data import logger
|
|
|
|
from lib.core.data import paths
|
2010-10-21 17:13:12 +04:00
|
|
|
from lib.core.data import queries
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.datatype import advancedDict
|
2010-11-29 04:04:42 +03:00
|
|
|
from lib.core.datatype import injectionDict
|
2011-01-13 20:36:54 +03:00
|
|
|
from lib.core.enums import DBMS
|
2010-11-08 12:44:32 +03:00
|
|
|
from lib.core.enums import HTTPMETHOD
|
2010-12-15 14:30:29 +03:00
|
|
|
from lib.core.enums import PAYLOAD
|
2010-11-08 12:20:02 +03:00
|
|
|
from lib.core.enums import PRIORITY
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.exception import sqlmapFilePathException
|
|
|
|
from lib.core.exception import sqlmapGenericException
|
2009-12-03 01:54:39 +03:00
|
|
|
from lib.core.exception import sqlmapMissingDependence
|
2009-04-22 15:48:07 +04:00
|
|
|
from lib.core.exception import sqlmapMissingMandatoryOptionException
|
|
|
|
from lib.core.exception import sqlmapMissingPrivileges
|
2011-01-20 01:08:56 +03:00
|
|
|
from lib.core.exception import sqlmapSilentQuitException
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.exception import sqlmapSyntaxException
|
|
|
|
from lib.core.exception import sqlmapUnsupportedDBMSException
|
2010-11-04 13:29:40 +03:00
|
|
|
from lib.core.exception import sqlmapUserQuitException
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.optiondict import optDict
|
2011-01-20 14:01:01 +03:00
|
|
|
from lib.core.settings import DEFAULT_PAGE_ENCODING
|
2009-06-11 19:01:48 +04:00
|
|
|
from lib.core.settings import IS_WIN
|
2009-04-22 15:48:07 +04:00
|
|
|
from lib.core.settings import PLATFORM
|
2010-05-20 14:52:14 +04:00
|
|
|
from lib.core.settings import PYVERSION
|
2009-04-28 03:05:11 +04:00
|
|
|
from lib.core.settings import SITE
|
2011-02-10 22:51:34 +03:00
|
|
|
from lib.core.settings import DEFAULT_TOR_PROXY
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.settings import SUPPORTED_DBMS
|
2009-04-22 15:48:07 +04:00
|
|
|
from lib.core.settings import SUPPORTED_OS
|
2009-04-28 03:05:11 +04:00
|
|
|
from lib.core.settings import VERSION_STRING
|
2010-11-29 17:48:07 +03:00
|
|
|
from lib.core.settings import MSSQL_ALIASES
|
|
|
|
from lib.core.settings import MYSQL_ALIASES
|
|
|
|
from lib.core.settings import PGSQL_ALIASES
|
|
|
|
from lib.core.settings import ORACLE_ALIASES
|
|
|
|
from lib.core.settings import SQLITE_ALIASES
|
|
|
|
from lib.core.settings import ACCESS_ALIASES
|
|
|
|
from lib.core.settings import FIREBIRD_ALIASES
|
|
|
|
from lib.core.settings import MAXDB_ALIASES
|
|
|
|
from lib.core.settings import SYBASE_ALIASES
|
2011-02-02 13:10:28 +03:00
|
|
|
from lib.core.settings import BURP_SPLITTER
|
|
|
|
from lib.core.settings import MAX_NUMBER_OF_THREADS
|
2011-01-16 20:52:42 +03:00
|
|
|
from lib.core.settings import TIME_DELAY_CANDIDATES
|
2011-01-21 00:57:54 +03:00
|
|
|
from lib.core.settings import UNKNOWN_DBMS_VERSION
|
2011-01-20 19:07:08 +03:00
|
|
|
from lib.core.settings import WEBSCARAB_SPLITTER
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.update import update
|
|
|
|
from lib.parse.configfile import configFileParser
|
2010-11-28 21:10:54 +03:00
|
|
|
from lib.parse.payloads import loadPayloads
|
2010-11-15 14:34:57 +03:00
|
|
|
from lib.request.connect import Connect as Request
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.request.proxy import ProxyHTTPSHandler
|
2010-12-26 12:33:04 +03:00
|
|
|
from lib.request.basicauthhandler import SmartHTTPBasicAuthHandler
|
2010-01-07 15:59:09 +03:00
|
|
|
from lib.request.certhandler import HTTPSCertAuthHandler
|
2010-11-08 15:26:13 +03:00
|
|
|
from lib.request.rangehandler import HTTPRangeHandler
|
2010-03-15 17:24:43 +03:00
|
|
|
from lib.request.redirecthandler import SmartRedirectHandler
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.utils.google import Google
|
|
|
|
|
2010-06-01 16:21:10 +04:00
|
|
|
# Module-level urllib2 handler instances shared by all HTTP requests.
# __urllib2Opener() assembles these into the installed OpenerDirector;
# authHandler/proxyHandler start as inert BaseHandler placeholders and are
# presumably replaced elsewhere when the corresponding options are set —
# TODO confirm against the rest of this module.
authHandler = urllib2.BaseHandler()
keepAliveHandler = keepalive.HTTPHandler()
proxyHandler = urllib2.BaseHandler()
redirectHandler = SmartRedirectHandler()
rangeHandler = HTTPRangeHandler()
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
def __urllib2Opener():
    """
    This function creates the urllib2 OpenerDirector.

    Builds the global opener from the module-level handlers, adds a cookie
    processor unless --drop-set-cookie was used, and enables the keep-alive
    handler only when it does not conflict with proxy/authentication options.
    """

    global authHandler
    global keepAliveHandler
    global proxyHandler
    global rangeHandler
    global redirectHandler

    debugMsg = "creating HTTP requests opener object"
    logger.debug(debugMsg)

    handlers = [proxyHandler, authHandler, redirectHandler, rangeHandler]

    if not conf.dropSetCookie:
        # Cookie jar is stored on conf so other components can inspect it
        conf.cj = cookielib.LWPCookieJar()
        handlers.append(urllib2.HTTPCookieProcessor(conf.cj))

    # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
    if conf.keepAlive:
        # BUGFIX: corrected grammar of the user-facing warning ("it's" -> "its")
        warnMsg = "persistent HTTP(s) connections, Keep-Alive, has "
        warnMsg += "been disabled because of its incompatibility "

        if conf.proxy:
            warnMsg += "with HTTP(s) proxy"
            logger.warn(warnMsg)
        elif conf.aType:
            warnMsg += "with authentication methods"
            logger.warn(warnMsg)
        else:
            # Keep-alive is safe to enable only when neither proxy nor
            # authentication is in use
            handlers.append(keepAliveHandler)

    opener = urllib2.build_opener(*handlers)
    urllib2.install_opener(opener)
|
|
|
|
|
2008-11-28 01:33:33 +03:00
|
|
|
def __feedTargetsDict(reqFile, addedTargetUrls):
    """
    Parses web scarab and burp logs and adds results to the target url list

    reqFile: path of the log file to parse
    addedTargetUrls: set of urls already queued, used for de-duplication
    (mutated in place)
    """

    def __parseWebScarabLog(content):
        """
        Parses web scarab logs (POST method not supported)
        """

        reqResList = content.split(WEBSCARAB_SPLITTER)

        for request in reqResList:
            # WebScarab conversation summaries carry URL/METHOD/COOKIE lines
            url = extractRegexResult(r"URL: (?P<result>.+?)\n", request, re.I)
            method = extractRegexResult(r"METHOD: (?P<result>.+?)\n", request, re.I)
            cookie = extractRegexResult(r"COOKIE: (?P<result>.+?)\n", request, re.I)

            if not method or not url:
                logger.debug("Invalid log data")
                continue

            if method.upper() == "POST":
                # POST bodies live in separate files WebScarab writes out,
                # so they cannot be reconstructed from the summary log alone
                warnMsg = "POST requests from WebScarab logs aren't supported "
                warnMsg += "as their body content is stored in separate files. "
                warnMsg += "Nevertheless you can use -r to load them individually."
                logger.warning(warnMsg)
                continue

            if not kb.targetUrls or url not in addedTargetUrls:
                kb.targetUrls.add((url, method, None, cookie))
                addedTargetUrls.add(url)

    def __parseBurpLog(content):
        """
        Parses burp logs
        """

        port = None
        scheme = None

        reqResList = content.split(BURP_SPLITTER)

        for request in reqResList:
            # Try once to pick up scheme/port from a timestamped request line
            # (e.g. "12:34:56 https://host:443") and reuse it for later entries
            if scheme is None:
                schemePort = re.search("\d\d[\:|\.]\d\d[\:|\.]\d\d\s+(http[\w]*)\:\/\/.*?\:([\d]+)", request, re.I)

                if schemePort:
                    scheme = schemePort.group(1)
                    port = schemePort.group(2)

            # Skip anything that is not a GET/POST HTTP request
            if not re.search ("^[\n]*(GET|POST).*?\sHTTP\/", request, re.I):
                continue

            # Skip requests for static image resources
            if re.search("^[\n]*(GET|POST).*?\.(gif|jpg|png)\sHTTP\/", request, re.I):
                continue

            getPostReq = False
            url = None
            host = None
            method = None
            data = None       # becomes "" once Content-Length is seen, then accumulates body lines
            cookie = None
            params = False    # True when the request carries testable parameters
            lines = request.split("\n")

            for line in lines:
                if len(line) == 0 or line == "\n":
                    continue

                if line.startswith("GET ") or line.startswith("POST "):
                    # Request line: slice out the path between the method
                    # keyword and the trailing " HTTP/x.y"
                    if line.startswith("GET "):
                        index = 4
                    else:
                        index = 5

                    url = line[index:line.index(" HTTP/")]
                    method = line[:index-1]

                    if "?" in line and "=" in line:
                        params = True

                    getPostReq = True

                # POST parameters
                elif data is not None and params:
                    data += line

                # GET parameters
                elif "?" in line and "=" in line and ": " not in line:
                    params = True

                # Headers
                elif ": " in line:
                    key, value = line.split(": ", 1)

                    # Cookie and Host headers
                    if key.lower() == "cookie":
                        cookie = value
                    elif key.lower() == "host":
                        splitValue = value.split(":")
                        host = splitValue[0]

                        if len(splitValue) > 1:
                            port = splitValue[1]

                            if not scheme and port == "443":
                                scheme = "https"

                    # Avoid to add a static content length header to
                    # conf.httpHeaders and consider the following lines as
                    # POSTed data
                    if key == "Content-Length":
                        data = ""
                        params = True

                    # Avoid proxy and connection type related headers
                    elif key not in ( "Proxy-Connection", "Connection" ):
                        conf.httpHeaders.append((str(key), str(value)))

            # Honour --scope: keep the request only if the host matches
            if conf.scope:
                getPostReq &= re.search(conf.scope, host) is not None

            if getPostReq and (params or cookie):
                if not url.startswith("http"):
                    # Relative request line: rebuild an absolute url and reset
                    # the cached scheme/port so the next entry re-detects them
                    url = "%s://%s:%s%s" % (scheme or "http", host, port or "80", url)
                    scheme = None
                    port = None

                if not kb.targetUrls or url not in addedTargetUrls:
                    kb.targetUrls.add((url, method, data, cookie))
                    addedTargetUrls.add(url)

    fp = openFile(reqFile, "rb")

    # Normalize line endings once so both parsers can split on "\n"
    content = fp.read()
    content = content.replace("\r", "")

    if conf.scope:
        logger.info("using regular expression '%s' for filtering targets" % conf.scope)

    __parseBurpLog(content)
    __parseWebScarabLog(content)
|
2008-11-20 20:56:09 +03:00
|
|
|
|
2010-10-21 17:13:12 +04:00
|
|
|
def __loadQueries():
    """
    Loads queries from 'xml/queries.xml' file.
    """

    xmlFile = xmlobject.XMLFile(path=paths.QUERIES_XML, textfilter=sanitizeStr)

    # Index each <dbms> node by its DBMS name for direct lookup later on
    for dbmsNode in xmlFile.root.dbms:
        queries[dbmsNode.value] = dbmsNode
|
|
|
|
|
2008-11-20 20:56:09 +03:00
|
|
|
def __setMultipleTargets():
|
|
|
|
"""
|
|
|
|
Define a configuration parameter if we are running in multiple target
|
|
|
|
mode.
|
|
|
|
"""
|
|
|
|
|
2008-11-22 04:57:22 +03:00
|
|
|
initialTargetsCount = len(kb.targetUrls)
|
2008-11-28 01:33:33 +03:00
|
|
|
addedTargetUrls = set()
|
2008-11-20 20:56:09 +03:00
|
|
|
|
|
|
|
if not conf.list:
|
|
|
|
return
|
|
|
|
|
2008-11-28 01:33:33 +03:00
|
|
|
debugMsg = "parsing targets list from '%s'" % conf.list
|
|
|
|
logger.debug(debugMsg)
|
|
|
|
|
2008-11-20 20:56:09 +03:00
|
|
|
if not os.path.exists(conf.list):
|
2008-11-28 01:33:33 +03:00
|
|
|
errMsg = "the specified list of targets does not exist"
|
2008-11-20 20:56:09 +03:00
|
|
|
raise sqlmapFilePathException, errMsg
|
|
|
|
|
|
|
|
if os.path.isfile(conf.list):
|
2008-11-28 01:33:33 +03:00
|
|
|
__feedTargetsDict(conf.list, addedTargetUrls)
|
2008-11-22 04:57:22 +03:00
|
|
|
|
2008-11-20 20:56:09 +03:00
|
|
|
elif os.path.isdir(conf.list):
|
|
|
|
files = os.listdir(conf.list)
|
|
|
|
files.sort()
|
|
|
|
|
|
|
|
for reqFile in files:
|
|
|
|
if not re.search("([\d]+)\-request", reqFile):
|
|
|
|
continue
|
|
|
|
|
2008-11-28 01:33:33 +03:00
|
|
|
__feedTargetsDict(os.path.join(conf.list, reqFile), addedTargetUrls)
|
2008-11-22 04:57:22 +03:00
|
|
|
|
2008-11-20 20:56:09 +03:00
|
|
|
else:
|
2008-11-28 01:33:33 +03:00
|
|
|
errMsg = "the specified list of targets is not a file "
|
2008-11-20 20:56:09 +03:00
|
|
|
errMsg += "nor a directory"
|
|
|
|
raise sqlmapFilePathException, errMsg
|
|
|
|
|
2008-11-22 04:57:22 +03:00
|
|
|
updatedTargetsCount = len(kb.targetUrls)
|
|
|
|
|
|
|
|
if updatedTargetsCount > initialTargetsCount:
|
2008-11-28 01:33:33 +03:00
|
|
|
infoMsg = "sqlmap parsed %d " % (updatedTargetsCount - initialTargetsCount)
|
|
|
|
infoMsg += "testable requests from the targets list"
|
2008-11-22 04:57:22 +03:00
|
|
|
logger.info(infoMsg)
|
|
|
|
|
2010-03-16 18:21:42 +03:00
|
|
|
def __setRequestFromFile():
|
|
|
|
"""
|
|
|
|
This function checks if the way to make a HTTP request is through supplied
|
|
|
|
textual file, parses it and saves the information into the knowledge base.
|
|
|
|
"""
|
|
|
|
|
|
|
|
if not conf.requestFile:
|
|
|
|
return
|
2010-07-30 00:01:04 +04:00
|
|
|
|
2010-03-16 18:21:42 +03:00
|
|
|
addedTargetUrls = set()
|
|
|
|
|
|
|
|
conf.requestFile = os.path.expanduser(conf.requestFile)
|
2010-07-30 00:01:04 +04:00
|
|
|
|
2010-03-16 18:21:42 +03:00
|
|
|
infoMsg = "parsing HTTP request from '%s'" % conf.requestFile
|
|
|
|
logger.info(infoMsg)
|
|
|
|
|
|
|
|
if not os.path.isfile(conf.requestFile):
|
|
|
|
errMsg = "the specified HTTP request file "
|
|
|
|
errMsg += "does not exist"
|
|
|
|
raise sqlmapFilePathException, errMsg
|
|
|
|
|
|
|
|
__feedTargetsDict(conf.requestFile, addedTargetUrls)
|
2010-07-30 00:01:04 +04:00
|
|
|
|
2008-11-28 01:33:33 +03:00
|
|
|
def __setGoogleDorking():
    """
    This function checks if the way to request testable hosts is through
    Google dorking then requests to Google the search parameter, parses
    the results and save the testable hosts into the knowledge base.
    """

    if not conf.googleDork:
        return

    global keepAliveHandler
    global proxyHandler

    debugMsg = "initializing Google dorking requests"
    logger.debug(debugMsg)

    logMsg = "first request to Google to get the session cookie"
    logger.info(logMsg)

    # Build a dedicated handler list for the Google requests (separate from
    # the opener installed by __urllib2Opener())
    handlers = [ proxyHandler ]

    # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
    if conf.keepAlive:
        if conf.proxy:
            # Keep-alive cannot be combined with a proxy handler
            warnMsg = "persistent HTTP(s) connections, Keep-Alive, has "
            warnMsg += "been disabled because of it's incompatibility "
            warnMsg += "with HTTP(s) proxy"
            logger.warn(warnMsg)
        else:
            handlers.append(keepAliveHandler)

    googleObj = Google(handlers)
    googleObj.getCookie()

    # Run the dork query; matches is the raw result set returned by Google
    matches = googleObj.search(conf.googleDork)

    if not matches:
        errMsg = "unable to find results for your "
        errMsg += "Google dork expression"
        raise sqlmapGenericException, errMsg

    # Filters the raw matches down to testable urls in kb.targetUrls
    googleObj.getTargetUrls()

    if kb.targetUrls:
        logMsg = "sqlmap got %d results for your " % len(matches)
        logMsg += "Google dork expression, "

        if len(matches) == len(kb.targetUrls):
            logMsg += "all "
        else:
            logMsg += "%d " % len(kb.targetUrls)

        logMsg += "of them are testable targets"
        logger.info(logMsg)
    else:
        errMsg = "sqlmap got %d results " % len(matches)
        errMsg += "for your Google dork expression, but none of them "
        errMsg += "have GET parameters to test for SQL injection"
        raise sqlmapGenericException, errMsg
|
|
|
|
|
2010-11-15 14:34:57 +03:00
|
|
|
def __findPageForms():
    """
    Requests the target page, parses the HTML forms found on it and queues
    each form submission (url, method, data, cookie) as a testable target.
    Used when the --forms switch is provided.
    """

    if not conf.forms:
        return

    # Bail out early if the target is not reachable at all
    if not checkConnection():
        return

    infoMsg = "searching for forms"
    logger.info(infoMsg)

    response, _ = Request.queryPage(response=True)

    try:
        forms = ParseResponse(response, backwards_compat=False)
    except ParseError:
        errMsg = "badly formed HTML at the target url. can't parse forms"
        raise sqlmapGenericException, errMsg

    if forms:
        for form in forms:
            # form.click() yields the request that submitting the form with
            # its default values would produce
            request = form.click()
            url = request.get_full_url()
            method = request.get_method()
            data = request.get_data() if request.has_data() else None

            target = (url, method, data, conf.cookie)
            kb.targetUrls.add(target)
            # Also tracked separately, presumably for later form reporting —
            # TODO confirm how kb.formNames is consumed
            kb.formNames.append(target)
    else:
        errMsg = "there were no forms found at a given target url"
        raise sqlmapGenericException, errMsg
|
2010-11-15 14:34:57 +03:00
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
def __setMetasploit():
    """
    Validates the environment for the takeover (out-of-band) features:
    refuses to run on Windows, requires admin rights for SMB relay, and
    locates a Metasploit Framework 3 installation either at the path given
    via --msf-path or anywhere on the PATH environment variable.

    Raises sqlmapSilentQuitException, sqlmapMissingPrivileges or
    sqlmapFilePathException when a requirement is not met.
    """

    if not conf.osPwn and not conf.osSmb and not conf.osBof:
        return

    debugMsg = "setting the takeover out-of-band functionality"
    logger.debug(debugMsg)

    msfEnvPathExists = False

    if IS_WIN:
        warnMsg = "some sqlmap takeover functionalities are not yet "
        warnMsg += "supported on Windows. Please use Linux in a virtual "
        warnMsg += "machine for out-of-band features."

        logger.critical(warnMsg)

        raise sqlmapSilentQuitException

    if conf.osSmb:
        # SMB relay needs to bind a privileged listener, hence admin rights
        isAdmin = runningAsAdmin()

        if isAdmin is not True:
            errMsg = "you need to run sqlmap as an administrator "
            errMsg += "if you want to perform a SMB relay attack because "
            errMsg += "it will need to listen on a user-specified SMB "
            errMsg += "TCP port for incoming connection attempts"
            raise sqlmapMissingPrivileges, errMsg

    if conf.msfPath:
        # A valid MSF3 installation must expose all four executables
        condition = os.path.exists(normalizePath(conf.msfPath))
        condition &= os.path.exists(normalizePath(os.path.join(conf.msfPath, "msfcli")))
        condition &= os.path.exists(normalizePath(os.path.join(conf.msfPath, "msfconsole")))
        condition &= os.path.exists(normalizePath(os.path.join(conf.msfPath, "msfencode")))
        condition &= os.path.exists(normalizePath(os.path.join(conf.msfPath, "msfpayload")))

        if condition:
            debugMsg = "provided Metasploit Framework 3 path "
            debugMsg += "'%s' is valid" % conf.msfPath
            logger.debug(debugMsg)

            msfEnvPathExists = True
        else:
            warnMsg = "the provided Metasploit Framework 3 path "
            warnMsg += "'%s' is not valid. The cause could " % conf.msfPath
            warnMsg += "be that the path does not exists or that one "
            warnMsg += "or more of the needed Metasploit executables "
            warnMsg += "within msfcli, msfconsole, msfencode and "
            warnMsg += "msfpayload do not exist"
            logger.warn(warnMsg)
    else:
        warnMsg = "you did not provide the local path where Metasploit "
        warnMsg += "Framework 3 is installed"
        logger.warn(warnMsg)

    if not msfEnvPathExists:
        # Fall back to scanning every directory on the PATH variable
        warnMsg = "sqlmap is going to look for Metasploit Framework 3 "
        warnMsg += "installation into the environment paths"
        logger.warn(warnMsg)

        envPaths = os.environ["PATH"]

        if IS_WIN:
            envPaths = envPaths.split(";")
        else:
            envPaths = envPaths.split(":")

        for envPath in envPaths:
            envPath = envPath.replace(";", "")
            condition = os.path.exists(normalizePath(envPath))
            condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfcli")))
            condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfconsole")))
            condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfencode")))
            condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfpayload")))

            if condition:
                infoMsg = "Metasploit Framework 3 has been found "
                infoMsg += "installed in the '%s' path" % envPath
                logger.info(infoMsg)

                msfEnvPathExists = True
                conf.msfPath = envPath

                break

    if not msfEnvPathExists:
        errMsg = "unable to locate Metasploit Framework 3 installation. "
        errMsg += "Get it from http://metasploit.com/framework/download/"
        raise sqlmapFilePathException, errMsg
|
|
|
|
|
|
|
|
def __setWriteFile():
|
|
|
|
if not conf.wFile:
|
|
|
|
return
|
|
|
|
|
|
|
|
debugMsg = "setting the write file functionality"
|
|
|
|
logger.debug(debugMsg)
|
|
|
|
|
|
|
|
if not os.path.exists(conf.wFile):
|
|
|
|
errMsg = "the provided local file '%s' does not exist" % conf.wFile
|
|
|
|
raise sqlmapFilePathException, errMsg
|
|
|
|
|
|
|
|
if not conf.dFile:
|
|
|
|
errMsg = "you did not provide the back-end DBMS absolute path "
|
|
|
|
errMsg += "where you want to write the local file '%s'" % conf.wFile
|
|
|
|
raise sqlmapMissingMandatoryOptionException, errMsg
|
|
|
|
|
|
|
|
conf.wFileType = getFileType(conf.wFile)
|
|
|
|
|
|
|
|
def __setOS():
|
|
|
|
"""
|
|
|
|
Force the back-end DBMS operating system option.
|
|
|
|
"""
|
|
|
|
|
|
|
|
if not conf.os:
|
|
|
|
return
|
|
|
|
|
|
|
|
debugMsg = "forcing back-end DBMS operating system to user defined value"
|
|
|
|
logger.debug(debugMsg)
|
|
|
|
|
|
|
|
conf.os = conf.os.lower()
|
|
|
|
|
|
|
|
if conf.os not in SUPPORTED_OS:
|
|
|
|
errMsg = "you provided an unsupported back-end DBMS operating "
|
|
|
|
errMsg += "system. The supported DBMS operating systems for OS "
|
|
|
|
errMsg += "and file system access are Linux and Windows. "
|
|
|
|
errMsg += "If you do not know the back-end DBMS underlying OS, "
|
|
|
|
errMsg += "do not provide it and sqlmap will fingerprint it for "
|
|
|
|
errMsg += "you."
|
|
|
|
raise sqlmapUnsupportedDBMSException, errMsg
|
|
|
|
|
2010-12-09 16:47:17 +03:00
|
|
|
def __setTechnique():
|
|
|
|
if not isinstance(conf.technique, int):
|
|
|
|
return
|
|
|
|
|
|
|
|
if conf.technique < 0 or conf.technique > 5:
|
|
|
|
errMsg = "the value of --technique must be an integer "
|
|
|
|
errMsg += "between 0 and 5"
|
|
|
|
raise sqlmapSyntaxException, errMsg
|
|
|
|
|
2008-12-09 00:24:24 +03:00
|
|
|
def __setDBMS():
|
2008-10-15 19:38:22 +04:00
|
|
|
"""
|
2008-12-09 00:24:24 +03:00
|
|
|
Force the back-end DBMS option.
|
2008-10-15 19:38:22 +04:00
|
|
|
"""
|
|
|
|
|
|
|
|
if not conf.dbms:
|
|
|
|
return
|
|
|
|
|
|
|
|
debugMsg = "forcing back-end DBMS to user defined value"
|
|
|
|
logger.debug(debugMsg)
|
|
|
|
|
|
|
|
conf.dbms = conf.dbms.lower()
|
2010-04-06 14:15:19 +04:00
|
|
|
firstRegExp = "(%s)" % "|".join([alias for alias in SUPPORTED_DBMS])
|
2011-01-20 02:06:15 +03:00
|
|
|
dbmsRegExp = re.search("%s ([\d\.]+)" % firstRegExp, conf.dbms, re.I)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
if dbmsRegExp:
|
2011-01-28 19:36:09 +03:00
|
|
|
Backend.setVersion(str(dbmsRegExp.group(2)))
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
if conf.dbms not in SUPPORTED_DBMS:
|
|
|
|
errMsg = "you provided an unsupported back-end database management "
|
|
|
|
errMsg += "system. The supported DBMS are MySQL, PostgreSQL, "
|
|
|
|
errMsg += "Microsoft SQL Server and Oracle. If you do not know "
|
|
|
|
errMsg += "the back-end DBMS, do not provide it and sqlmap will "
|
|
|
|
errMsg += "fingerprint it for you."
|
|
|
|
raise sqlmapUnsupportedDBMSException, errMsg
|
|
|
|
|
2010-11-29 17:48:07 +03:00
|
|
|
for aliases in (MSSQL_ALIASES, MYSQL_ALIASES, PGSQL_ALIASES, \
|
|
|
|
ORACLE_ALIASES, SQLITE_ALIASES, ACCESS_ALIASES, \
|
|
|
|
FIREBIRD_ALIASES, MAXDB_ALIASES, SYBASE_ALIASES):
|
|
|
|
if conf.dbms in aliases:
|
|
|
|
conf.dbms = aliases[0]
|
|
|
|
|
|
|
|
break
|
|
|
|
|
2010-10-13 02:45:25 +04:00
|
|
|
def __setTamperingFunctions():
|
|
|
|
"""
|
2010-10-17 01:33:15 +04:00
|
|
|
Loads tampering functions from given script(s)
|
2010-10-13 02:45:25 +04:00
|
|
|
"""
|
2010-10-15 14:36:29 +04:00
|
|
|
|
2010-10-17 01:33:15 +04:00
|
|
|
if conf.tamper:
|
2010-11-07 19:24:44 +03:00
|
|
|
last_priority = PRIORITY.HIGHEST
|
2010-11-04 13:29:40 +03:00
|
|
|
check_priority = True
|
|
|
|
resolve_priorities = False
|
|
|
|
priorities = []
|
|
|
|
|
2010-10-17 01:52:16 +04:00
|
|
|
for tfile in conf.tamper.split(','):
|
2010-10-17 01:33:15 +04:00
|
|
|
found = False
|
2010-10-15 14:36:29 +04:00
|
|
|
|
2010-10-17 01:55:34 +04:00
|
|
|
tfile = tfile.strip()
|
|
|
|
|
2010-10-17 01:33:15 +04:00
|
|
|
if not tfile:
|
2010-10-13 02:45:25 +04:00
|
|
|
continue
|
2010-10-15 14:36:29 +04:00
|
|
|
|
2010-10-17 01:33:15 +04:00
|
|
|
elif not os.path.exists(tfile):
|
|
|
|
errMsg = "tamper script '%s' does not exist" % tfile
|
2010-10-13 02:45:25 +04:00
|
|
|
raise sqlmapFilePathException, errMsg
|
2010-10-15 14:36:29 +04:00
|
|
|
|
2010-10-17 01:33:15 +04:00
|
|
|
elif not tfile.endswith('.py'):
|
|
|
|
errMsg = "tamper script '%s' should have an extension '.py'" % tfile
|
2010-10-13 02:45:25 +04:00
|
|
|
raise sqlmapSyntaxException, errMsg
|
2010-10-14 10:00:10 +04:00
|
|
|
|
2010-10-17 01:33:15 +04:00
|
|
|
dirname, filename = os.path.split(tfile)
|
2010-10-13 02:45:25 +04:00
|
|
|
dirname = os.path.abspath(dirname)
|
2010-10-14 10:00:10 +04:00
|
|
|
|
2010-10-17 01:33:15 +04:00
|
|
|
infoMsg = "loading tamper script '%s'" % filename[:-3]
|
2010-10-14 10:00:10 +04:00
|
|
|
logger.info(infoMsg)
|
|
|
|
|
2010-10-13 02:45:25 +04:00
|
|
|
if not os.path.exists(os.path.join(dirname, '__init__.py')):
|
|
|
|
errMsg = "make sure that there is an empty file '__init__.py' "
|
2010-10-17 01:33:15 +04:00
|
|
|
errMsg += "inside of tamper scripts directory '%s'" % dirname
|
2010-10-13 02:45:25 +04:00
|
|
|
raise sqlmapGenericException, errMsg
|
2010-10-14 10:00:10 +04:00
|
|
|
|
2010-10-13 02:45:25 +04:00
|
|
|
if dirname not in sys.path:
|
|
|
|
sys.path.insert(0, dirname)
|
2010-10-15 14:36:29 +04:00
|
|
|
|
2010-10-13 02:45:25 +04:00
|
|
|
try:
|
|
|
|
module = __import__(filename[:-3])
|
|
|
|
except ImportError, msg:
|
2010-10-17 01:33:15 +04:00
|
|
|
raise sqlmapSyntaxException, "can not import tamper script '%s' (%s)" % (filename[:-3], msg)
|
2010-10-15 14:36:29 +04:00
|
|
|
|
2010-11-04 13:29:40 +03:00
|
|
|
priority = PRIORITY.NORMAL if not hasattr(module, '__priority__') else module.__priority__
|
|
|
|
|
2010-10-13 02:45:25 +04:00
|
|
|
for name, function in inspect.getmembers(module, inspect.isfunction):
|
2010-10-29 20:11:50 +04:00
|
|
|
if name == "tamper" and function.func_code.co_argcount == 1:
|
2010-10-13 02:45:25 +04:00
|
|
|
found = True
|
2010-11-07 19:24:44 +03:00
|
|
|
kb.tamperFunctions.append(function)
|
2010-10-17 01:33:15 +04:00
|
|
|
|
2010-11-07 19:24:44 +03:00
|
|
|
if check_priority and priority > last_priority:
|
|
|
|
message = "it seems that you might have mixed "
|
|
|
|
message += "the order of tamper scripts.\n"
|
|
|
|
message += "Do you want to auto resolve this? [Y/n/q]"
|
2010-11-04 13:29:40 +03:00
|
|
|
test = readInput(message, default="Y")
|
|
|
|
|
|
|
|
if not test or test[0] in ("y", "Y"):
|
|
|
|
resolve_priorities = True
|
|
|
|
elif test[0] in ("n", "N"):
|
|
|
|
resolve_priorities = False
|
|
|
|
elif test[0] in ("q", "Q"):
|
|
|
|
raise sqlmapUserQuitException
|
|
|
|
|
|
|
|
check_priority = False
|
|
|
|
|
|
|
|
priorities.append((priority, function))
|
|
|
|
last_priority = priority
|
2010-11-07 19:24:44 +03:00
|
|
|
|
2010-10-15 14:36:29 +04:00
|
|
|
break
|
|
|
|
|
2010-10-13 02:45:25 +04:00
|
|
|
if not found:
|
2010-10-29 20:11:50 +04:00
|
|
|
raise sqlmapGenericException, "missing function 'tamper(value)' in tamper script '%s'" % tfile
|
2010-10-13 02:45:25 +04:00
|
|
|
|
2010-11-04 13:29:40 +03:00
|
|
|
if resolve_priorities and priorities:
|
2010-11-07 19:24:44 +03:00
|
|
|
priorities.sort(reverse=True)
|
2010-11-04 13:29:40 +03:00
|
|
|
kb.tamperFunctions = []
|
2010-11-07 19:24:44 +03:00
|
|
|
|
2010-11-04 13:29:40 +03:00
|
|
|
for _, function in priorities:
|
|
|
|
kb.tamperFunctions.append(function)
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def __setThreads():
    """
    Ensure that the configured number of threads is a positive integer,
    falling back to single-threaded mode otherwise.
    """

    validThreadCount = isinstance(conf.threads, int) and conf.threads > 0

    if not validThreadCount:
        conf.threads = 1
|
|
|
|
|
|
|
|
def __setHTTPProxy():
|
|
|
|
"""
|
|
|
|
Check and set the HTTP proxy to pass by all HTTP requests.
|
|
|
|
"""
|
|
|
|
|
|
|
|
global proxyHandler
|
|
|
|
|
2010-06-30 15:41:42 +04:00
|
|
|
if not conf.proxy:
|
2010-02-26 13:01:23 +03:00
|
|
|
if conf.hostname in ('localhost', '127.0.0.1') or conf.ignoreProxy:
|
|
|
|
proxyHandler = urllib2.ProxyHandler({})
|
2010-06-30 15:41:42 +04:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
return
|
|
|
|
|
|
|
|
debugMsg = "setting the HTTP proxy to pass by all HTTP requests"
|
|
|
|
logger.debug(debugMsg)
|
|
|
|
|
|
|
|
__proxySplit = urlparse.urlsplit(conf.proxy)
|
|
|
|
__hostnamePort = __proxySplit[1].split(":")
|
|
|
|
|
|
|
|
__scheme = __proxySplit[0]
|
|
|
|
__hostname = __hostnamePort[0]
|
|
|
|
__port = None
|
2010-10-15 14:28:06 +04:00
|
|
|
__proxyString = ""
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
if len(__hostnamePort) == 2:
|
2010-02-10 15:06:23 +03:00
|
|
|
try:
|
|
|
|
__port = int(__hostnamePort[1])
|
|
|
|
except:
|
|
|
|
pass #drops into the next check block
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
if not __scheme or not __hostname or not __port:
|
|
|
|
errMsg = "proxy value must be in format 'http://url:port'"
|
|
|
|
raise sqlmapSyntaxException, errMsg
|
|
|
|
|
2010-08-19 02:45:00 +04:00
|
|
|
if conf.pCred:
|
|
|
|
pCredRegExp = re.search("^(.*?):(.*?)$", conf.pCred)
|
|
|
|
|
|
|
|
if not pCredRegExp:
|
|
|
|
errMsg = "Proxy authentication credentials "
|
|
|
|
errMsg += "value must be in format username:password"
|
|
|
|
raise sqlmapSyntaxException, errMsg
|
2010-11-03 13:08:27 +03:00
|
|
|
|
2010-08-19 02:45:00 +04:00
|
|
|
# Reference: http://stackoverflow.com/questions/34079/how-to-specify-an-authenticated-proxy-for-a-python-http-connection
|
2010-10-15 14:28:06 +04:00
|
|
|
__proxyString = "%s@" % conf.pCred
|
|
|
|
|
|
|
|
__proxyString += "%s:%d" % (__hostname, __port)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
# Workaround for http://bugs.python.org/issue1424152 (urllib/urllib2:
|
|
|
|
# HTTPS over (Squid) Proxy fails) as long as HTTP over SSL requests
|
2009-06-16 19:12:02 +04:00
|
|
|
# can't be tunneled over an HTTP proxy natively by Python (<= 2.5)
|
|
|
|
# urllib2 standard library
|
2010-11-12 17:21:46 +03:00
|
|
|
if PYVERSION >= "2.6":
|
|
|
|
proxyHandler = urllib2.ProxyHandler({"http": __proxyString, "https": __proxyString})
|
|
|
|
elif conf.scheme == "https":
|
|
|
|
proxyHandler = ProxyHTTPSHandler(__proxyString)
|
2008-10-15 19:38:22 +04:00
|
|
|
else:
|
|
|
|
proxyHandler = urllib2.ProxyHandler({"http": __proxyString})
|
|
|
|
|
2010-04-16 16:44:47 +04:00
|
|
|
def __setSafeUrl():
|
|
|
|
"""
|
|
|
|
Check and set the safe URL options.
|
|
|
|
"""
|
|
|
|
if not conf.safUrl:
|
|
|
|
return
|
|
|
|
|
|
|
|
if not re.search("^http[s]*://", conf.safUrl):
|
|
|
|
if ":443/" in conf.safUrl:
|
|
|
|
conf.safUrl = "https://" + conf.safUrl
|
|
|
|
else:
|
|
|
|
conf.safUrl = "http://" + conf.safUrl
|
|
|
|
|
|
|
|
if conf.saFreq <= 0:
|
|
|
|
errMsg = "please provide a valid value (>0) for safe frequency (--safe-freq) while using safe url feature"
|
|
|
|
raise sqlmapSyntaxException, errMsg
|
|
|
|
|
2011-01-21 00:57:54 +03:00
|
|
|
def __setPrefixSuffix():
    """
    Validate the user-supplied payload prefix/suffix pair (--prefix/--suffix)
    and, when both are given, register them as a custom boundary that takes
    precedence over the built-in ones.
    """

    # Both ends of the injected payload must be known: reject a prefix
    # without a suffix and vice versa
    if conf.prefix is not None and conf.suffix is None:
        errMsg = "you specified the payload prefix, but did not provide "
        errMsg += "the payload suffix"
        raise sqlmapSyntaxException, errMsg
    elif conf.prefix is None and conf.suffix is not None:
        errMsg = "you specified the payload suffix, but did not provide "
        errMsg += "the payload prefix"
        raise sqlmapSyntaxException, errMsg

    if conf.prefix is not None and conf.suffix is not None:
        # Create a custom boundary object for user's supplied prefix
        # and suffix
        boundary = advancedDict()

        boundary.level = 1
        boundary.clause = [ 0 ]
        boundary.where = [ 1, 2, 3 ]
        boundary.prefix = conf.prefix
        boundary.suffix = conf.suffix

        # Infer the parameter type (ptype) from quoting and LIKE usage in
        # the user-supplied suffix
        if " like" in boundary.suffix.lower():
            # NOTE(review): ptype is left unset when the suffix contains
            # " like" but neither quote character — confirm downstream
            # code tolerates a missing ptype
            if "'" in boundary.suffix.lower():
                boundary.ptype = 3
            elif '"' in boundary.suffix.lower():
                boundary.ptype = 5
        elif "'" in boundary.suffix:
            boundary.ptype = 2
        elif '"' in boundary.suffix:
            boundary.ptype = 4
        else:
            boundary.ptype = 1

        # Prepend user's provided boundaries to all others boundaries
        conf.boundaries.insert(0, boundary)
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def __setHTTPAuthentication():
    """
    Check and set the HTTP(s) authentication method (Basic, Digest, NTLM or Certificate),
    username and password for first three methods, or key file and certification file for
    certificate authentication
    """

    global authHandler

    # Nothing to do when no authentication option was supplied at all
    if not conf.aType and not conf.aCred and not conf.aCert:
        return

    # Type and credentials must be provided together
    elif conf.aType and not conf.aCred:
        errMsg = "you specified the HTTP authentication type, but "
        errMsg += "did not provide the credentials"
        raise sqlmapSyntaxException, errMsg

    elif not conf.aType and conf.aCred:
        errMsg = "you specified the HTTP authentication credentials, "
        errMsg += "but did not provide the type"
        raise sqlmapSyntaxException, errMsg

    if not conf.aCert:
        # Username/password based authentication (Basic, Digest or NTLM)
        debugMsg = "setting the HTTP authentication type and credentials"
        logger.debug(debugMsg)

        aTypeLower = conf.aType.lower()

        if aTypeLower not in ( "basic", "digest", "ntlm" ):
            errMsg = "HTTP authentication type value must be "
            errMsg += "Basic, Digest or NTLM"
            raise sqlmapSyntaxException, errMsg
        elif aTypeLower in ( "basic", "digest" ):
            regExp = "^(.*?):(.*?)$"
            errMsg = "HTTP %s authentication credentials " % aTypeLower
            errMsg += "value must be in format username:password"
        elif aTypeLower == "ntlm":
            # Matches DOMAIN\username:password; group(1) is the domain part
            regExp = "^(.*?)\\\(.*?):(.*?)$"
            errMsg = "HTTP NTLM authentication credentials value must "
            errMsg += "be in format DOMAIN\username:password"

        aCredRegExp = re.search(regExp, conf.aCred)

        if not aCredRegExp:
            raise sqlmapSyntaxException, errMsg

        # NOTE(review): for NTLM credentials group(1)/group(2) capture the
        # domain and username (not username/password) given the regex above
        # — confirm this is what the NTLM handler expects
        authUsername = aCredRegExp.group(1)
        authPassword = aCredRegExp.group(2)

        passwordMgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        passwordMgr.add_password(None, "%s://%s" % (conf.scheme, conf.hostname), authUsername, authPassword)

        if aTypeLower == "basic":
            authHandler = SmartHTTPBasicAuthHandler(passwordMgr)

        elif aTypeLower == "digest":
            authHandler = urllib2.HTTPDigestAuthHandler(passwordMgr)

        elif aTypeLower == "ntlm":
            # python-ntlm is an optional third-party dependency, imported
            # lazily only when NTLM authentication is actually requested
            try:
                from ntlm import HTTPNtlmAuthHandler
            except ImportError, _:
                errMsg = "sqlmap requires Python NTLM third-party library "
                errMsg += "in order to authenticate via NTLM, "
                errMsg += "http://code.google.com/p/python-ntlm/"
                raise sqlmapMissingDependence, errMsg

            authHandler = HTTPNtlmAuthHandler.HTTPNtlmAuthHandler(passwordMgr)
    else:
        # Client certificate based authentication
        debugMsg = "setting the HTTP(s) authentication certificate"
        logger.debug(debugMsg)

        aCertRegExp = re.search("^(.+?),\s*(.+?)$", conf.aCert)

        if not aCertRegExp:
            errMsg = "HTTP authentication certificate option "
            errMsg += "must be in format key_file,cert_file"
            raise sqlmapSyntaxException, errMsg

        # os.path.expanduser for support of paths with ~
        key_file = os.path.expanduser(aCertRegExp.group(1))
        cert_file = os.path.expanduser(aCertRegExp.group(2))

        for ifile in (key_file, cert_file):
            if not os.path.exists(ifile):
                errMsg = "File '%s' does not exist" % ifile
                raise sqlmapSyntaxException, errMsg

        authHandler = HTTPSCertAuthHandler(key_file, cert_file)
|
2009-12-03 01:54:39 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def __setHTTPMethod():
    """
    Check and set the HTTP method to perform HTTP requests through.
    """

    # Normalize the user-supplied method, defaulting to GET when absent
    method = conf.method.upper() if conf.method else HTTPMETHOD.GET

    if method not in (HTTPMETHOD.GET, HTTPMETHOD.POST):
        warnMsg = "'%s' " % method
        warnMsg += "is an unsupported HTTP method, "
        warnMsg += "setting to default method, %s" % HTTPMETHOD.GET
        logger.warn(warnMsg)

        method = HTTPMETHOD.GET

    conf.method = method

    debugMsg = "setting the HTTP method to %s" % conf.method
    logger.debug(debugMsg)
|
|
|
|
|
2008-12-09 00:24:24 +03:00
|
|
|
def __setHTTPExtraHeaders():
    """
    Append the Host header and any user-supplied extra HTTP headers to
    conf.httpHeaders; when no extra headers were supplied and the header
    list is (nearly) empty, install browser-like Accept-* defaults.
    """

    if conf.hostname:
        conf.httpHeaders.append(("Host", conf.hostname))

    if conf.headers:
        debugMsg = "setting extra HTTP headers"
        logger.debug(debugMsg)

        conf.headers = conf.headers.split("\n")

        for headerValue in conf.headers:
            # FIX: split only on the first ': ' occurrence so header values
            # that themselves contain ': ' no longer raise ValueError on
            # unpacking; lines without the separator are simply skipped
            items = headerValue.split(": ", 1)

            if len(items) != 2:
                continue

            header, value = items

            if header and value:
                conf.httpHeaders.append((header, value))

    elif not conf.httpHeaders or len(conf.httpHeaders) == 1:
        conf.httpHeaders.append(("Accept", "text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"))
        conf.httpHeaders.append(("Accept-Language", "en-us,en;q=0.5"))
        conf.httpHeaders.append(("Accept-Charset", "ISO-8859-15,utf-8;q=0.7,*;q=0.7"))
|
2008-11-15 15:25:19 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def __defaultHTTPUserAgent():
    """
    @return: default sqlmap HTTP User-Agent header
    @rtype: C{str}
    """

    return "%s (%s)" % (VERSION_STRING, SITE)

    # Alternative hard-coded User-Agent strings, kept for reference:

    # Firefox 3 running on Ubuntu 9.04 updated at April 2009
    #return "Mozilla/5.0 (X11; U; Linux i686; en-GB; rv:1.9.0.9) Gecko/2009042113 Ubuntu/9.04 (jaunty) Firefox/3.0.9"

    # Internet Explorer 7.0 running on Windows 2003 Service Pack 2 english
    # updated at March 2009
    #return "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30; .NET CLR 3.0.04506.648; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729)"
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
def __setHTTPUserAgent():
    """
    Set the HTTP User-Agent header.
    Depending on the user options it can be:

        * The default sqlmap string
        * A default value read as user option
        * A random value read from a list of User-Agent headers from a
          file choosed as user option
    """

    # Explicit --user-agent value wins over everything else
    if conf.agent:
        debugMsg = "setting the HTTP User-Agent header"
        logger.debug(debugMsg)

        conf.httpHeaders.append(("User-Agent", conf.agent))
        return

    if not conf.randomAgent:
        # Only add the default User-Agent if none was already set (e.g.
        # through the extra headers option)
        addDefaultUserAgent = True

        for header, _ in conf.httpHeaders:
            if header == "User-Agent":
                addDefaultUserAgent = False
                break

        if addDefaultUserAgent:
            conf.httpHeaders.append(("User-Agent", __defaultHTTPUserAgent()))

        return

    # --random-agent: lazily load the User-Agent list once per run
    if not kb.userAgents:
        debugMsg = "loading random HTTP User-Agent header(s) from "
        debugMsg += "file '%s'" % paths.USER_AGENTS
        logger.debug(debugMsg)

        try:
            kb.userAgents = getFileItems(paths.USER_AGENTS)
        except IOError:
            # Fall back to the default sqlmap User-Agent when the list
            # file cannot be read
            warnMsg = "unable to read HTTP User-Agent header "
            warnMsg += "file '%s'" % paths.USER_AGENTS
            logger.warn(warnMsg)

            conf.httpHeaders.append(("User-Agent", __defaultHTTPUserAgent()))

            return

    __count = len(kb.userAgents)

    if __count == 1:
        __userAgent = kb.userAgents[0]
    else:
        # Pick a random entry from the loaded list
        __userAgent = kb.userAgents[randomRange(stop=__count-1)]

    __userAgent = sanitizeStr(__userAgent)
    conf.httpHeaders.append(("User-Agent", __userAgent))

    logMsg = "fetched random HTTP User-Agent header from "
    logMsg += "file '%s': %s" % (paths.USER_AGENTS, __userAgent)
    logger.info(logMsg)
|
|
|
|
|
|
|
|
def __setHTTPReferer():
    """
    Set the HTTP Referer
    """

    if not conf.referer:
        return

    debugMsg = "setting the HTTP Referer header"
    logger.debug(debugMsg)

    conf.httpHeaders.append(("Referer", conf.referer))
|
|
|
|
|
|
|
|
def __setHTTPCookies():
    """
    Set the HTTP Cookie header
    """

    if not conf.cookie:
        return

    debugMsg = "setting the HTTP Cookie header"
    logger.debug(debugMsg)

    conf.httpHeaders.append(("Connection", "Keep-Alive"))
    conf.httpHeaders.append(("Cookie", conf.cookie))
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2008-12-04 20:40:03 +03:00
|
|
|
def __setHTTPTimeout():
    """
    Set the HTTP timeout.

    Coerces conf.timeout to a float, enforces a 3-second lower bound
    (defaulting to 30 seconds when unset) and applies the value as the
    process-wide default socket timeout.
    """

    if conf.timeout:
        debugMsg = "setting the HTTP timeout"
        logger.debug(debugMsg)

        conf.timeout = float(conf.timeout)

        if conf.timeout < 3.0:
            # FIX: message was ungrammatical ("will going to reset it")
            warnMsg = "the minimum HTTP timeout is 3 seconds, sqlmap "
            warnMsg += "is going to reset it"
            logger.warn(warnMsg)

            conf.timeout = 3.0
    else:
        conf.timeout = 30.0

    # Applied globally so that every newly created socket honours it
    socket.setdefaulttimeout(conf.timeout)
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def __cleanupOptions():
    """
    Cleanup configuration attributes.
    """

    debugMsg = "cleaning up configuration parameters"
    logger.debug(debugMsg)

    width = getConsoleWidth()
    conf.progressWidth = width - 26 if conf.eta else width - 46

    if conf.testParameter:
        conf.testParameter = conf.testParameter.replace(" ", "").split(",")
    else:
        conf.testParameter = []

    # Strip whitespace out of the comma-separated enumeration options
    for key in ("db", "tbl", "col", "user"):
        if conf[key]:
            conf[key] = conf[key].replace(" ", "")

    if conf.delay:
        conf.delay = float(conf.delay)

    # Normalize all user-supplied file system paths to POSIX style slashes
    for key in ("rFile", "wFile", "dFile", "msfPath", "tmpPath"):
        if conf[key]:
            conf[key] = ntToPosixSlashes(normalizePath(conf[key]))

    if conf.googleDork or conf.list or conf.forms:
        conf.multipleTargets = True

    if conf.optimize:
        #conf.predictOutput = True
        conf.keepAlive = True
        conf.nullConnection = not conf.textOnly
        conf.threads = 4 if conf.threads < 4 else conf.threads

    if conf.tor:
        conf.proxy = DEFAULT_TOR_PROXY
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def __setConfAttributes():
    """
    This function set some needed attributes into the configuration
    singleton.
    """

    debugMsg = "initializing the configuration"
    logger.debug(debugMsg)

    # Attributes that simply start out unset
    for key in ("cj", "dbmsConnector", "dbmsHandler", "dumpPath",
                "hostname", "loggedToOut", "outputPath", "path",
                "port", "scheme", "sessionFP", "trafficFP", "wFileType"):
        conf[key] = None

    # Attributes with non-None initial values (fresh containers per call)
    conf.boundaries = []
    conf.httpHeaders = []
    conf.multipleTargets = False
    conf.paramDict = {}
    conf.parameters = {}
    conf.redirectHandled = False
    conf.start = True
    conf.tests = []
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2010-12-18 13:02:01 +03:00
|
|
|
def __setKnowledgeBaseAttributes(flushAll=True):
    """
    This function set some needed attributes into the knowledge base
    singleton.

    When flushAll is False, attributes that should survive between
    targets (keywords, tamper functions, target URLs, etc.) are left
    untouched.
    """

    debugMsg = "initializing the knowledge base"
    logger.debug(debugMsg)

    kb.absFilePaths = set()
    kb.authHeader = None
    kb.bannerFp = advancedDict()

    # Brute-force (common tables/columns) bookkeeping
    kb.brute = advancedDict()
    kb.brute.tables = []
    kb.brute.columns = []

    # Response/regex caches
    kb.cache = advancedDict()
    kb.cache.content = {}
    kb.cache.regex = {}
    kb.cache.stdev = {}

    kb.commonOutputs = None

    kb.data = advancedDict()

    # Active back-end DBMS fingerprint
    kb.dbms = None
    kb.dbmsVersion = [ UNKNOWN_DBMS_VERSION ]

    kb.delayCandidates = TIME_DELAY_CANDIDATES * [0]
    kb.dep = None
    kb.docRoot = None
    kb.dynamicMarkings = []
    kb.endDetection = False
    kb.httpErrorCodes = {}
    kb.errorIsNone = True
    kb.formNames = []
    kb.headersCount = 0
    kb.headersFp = {}
    kb.hintValue = None
    kb.htmlFp = []
    kb.injection = injectionDict()
    kb.injections = []

    # Locks shared between worker threads
    kb.locks = advancedDict()
    kb.locks.cacheLock = threading.Lock()
    kb.locks.logLock = threading.Lock()

    kb.matchRatio = None
    kb.nullConnection = None
    kb.pageTemplate = None
    kb.pageTemplates = dict()
    kb.originalPage = None

    # Back-end DBMS underlying operating system fingerprint via banner (-b)
    # parsing
    kb.os = None
    kb.osVersion = None
    kb.osSP = None

    kb.pageEncoding = DEFAULT_PAGE_ENCODING
    kb.pageStable = None
    kb.partRun = None
    kb.proxyAuthHeader = None
    kb.queryCounter = 0
    kb.redirectSetCookie = None
    kb.responseTimes = []
    kb.resumedQueries = {}
    kb.retriesCount = 0
    kb.skipOthersDbms = None
    kb.suppressSession = False
    kb.technique = None
    kb.testMode = False
    kb.testQueryCount = 0
    kb.threadContinue = True
    kb.threadException = False
    kb.threadData = {}

    # Random per-run markers used to delimit payload output in responses
    kb.misc = advancedDict()
    kb.misc.delimiter = randomStr(length=6)
    kb.misc.start = ":%s:" % randomStr(length=3, lowercase=True)
    kb.misc.stop = ":%s:" % randomStr(length=3, lowercase=True)
    kb.misc.space = ":%s:" % randomStr(length=1, lowercase=True)
    kb.misc.forcedDbms = None

    if flushAll:
        kb.keywords = set(getFileItems(paths.SQL_KEYWORDS))
        kb.tamperFunctions = []
        kb.targetUrls = set()
        kb.testedParams = set()
        kb.userAgents = None
        kb.wordlist = None
|
2010-12-18 13:02:01 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def __saveCmdline():
    """
    Saves the command line options on a sqlmap configuration INI file
    Format.
    """

    if not conf.saveCmdline:
        return

    debugMsg = "saving command line options on a sqlmap configuration INI file"
    logger.debug(debugMsg)

    config = UnicodeRawConfigParser()
    userOpts = {}

    # One INI section per option family known to the option dictionary
    for family in optDict.keys():
        userOpts[family] = []

    # Group every configured option under its family together with its
    # declared datatype
    for option, value in conf.items():
        for family, optionData in optDict.items():
            if option in optionData:
                userOpts[family].append((option, value, optionData[option]))

    for family, optionData in userOpts.items():
        config.add_section(family)

        optionData.sort()

        for option, value, datatype in optionData:
            if isinstance(datatype, (list, tuple, set)):
                datatype = datatype[0]

            # Unset options are serialized with a type-appropriate default
            if value is None:
                if datatype == "boolean":
                    value = "False"
                elif datatype in ( "integer", "float" ):
                    if option in ( "threads", "verbose" ):
                        value = "1"
                    elif option == "timeout":
                        value = "10"
                    else:
                        value = "0"
                elif datatype == "string":
                    value = ""

            # Indent continuation lines so multi-line values survive the
            # INI round-trip
            if isinstance(value, basestring):
                value = value.replace("\n", "\n ")

            config.set(family, option, value)

    confFP = openFile(paths.SQLMAP_CONFIG, "wb")
    config.write(confFP)

    infoMsg = "saved command line options on '%s' configuration file" % paths.SQLMAP_CONFIG
    logger.info(infoMsg)
|
|
|
|
|
|
|
|
def __setVerbosity():
    """
    This function set the verbosity of sqlmap output messages.
    """

    if conf.verbose is None:
        conf.verbose = 1

    conf.verbose = int(conf.verbose)

    level = None

    if conf.verbose == 0:
        level = logging.CRITICAL
    elif conf.verbose == 1:
        level = logging.INFO
    elif conf.verbose > 2 and conf.eta:
        # Verbosity is capped at 2 when the ETA feature is in use
        conf.verbose = 2
        level = logging.DEBUG
    elif conf.verbose == 2:
        level = logging.DEBUG
    elif conf.verbose == 3:
        level = 9
    elif conf.verbose == 4:
        level = 8
    elif conf.verbose >= 5:
        level = 7

    if level is not None:
        logger.setLevel(level)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
def __mergeOptions(inputOptions):
    """
    Merge command line options with configuration file options.

    @param inputOptions: optparse object with command line options.
    @type inputOptions: C{instance}
    """

    if inputOptions.configFile:
        configFileParser(inputOptions.configFile)

    # Support both dict-like objects and plain optparse instances
    if hasattr(inputOptions, "items"):
        inputOptionsItems = inputOptions.items()
    else:
        inputOptionsItems = inputOptions.__dict__.items()

    for key, value in inputOptionsItems:
        if key not in conf:
            conf[key] = value
        elif (conf[key] is False and value is True) or value not in (None, False):
            # Command line values override configuration file values
            conf[key] = value
|
|
|
|
|
2010-11-08 14:22:47 +03:00
|
|
|
def __setTrafficOutputFP():
    # Open the requested HTTP traffic log file for reading and writing
    if not conf.trafficFile:
        return

    conf.trafficFP = openFile(conf.trafficFile, "w+")
|
2010-11-08 14:22:47 +03:00
|
|
|
|
2010-04-26 15:23:12 +04:00
|
|
|
def __basicOptionValidation():
    """
    Validate mutually dependent command line option values, raising
    sqlmapSyntaxException on the first invalid value or combination found.
    """

    def _syntaxError(errMsg):
        # Every validation failure is reported the same way
        raise sqlmapSyntaxException(errMsg)

    def _positiveInt(value):
        # True only when the option was provided as an integer value
        # greater than zero
        return value is not None and isinstance(value, int) and value > 0

    if conf.limitStart is not None and not _positiveInt(conf.limitStart):
        _syntaxError("value for --start (limitStart) option must be an integer value greater than zero (>0)")

    if conf.limitStop is not None and not _positiveInt(conf.limitStop):
        _syntaxError("value for --stop (limitStop) option must be an integer value greater than zero (>0)")

    # Range sanity: a valid --stop must lie strictly beyond a valid --start
    if _positiveInt(conf.limitStart) and conf.limitStop is not None \
       and isinstance(conf.limitStop, int) and conf.limitStop <= conf.limitStart:
        _syntaxError("value for --start (limitStart) option must be smaller than value for --stop (limitStop) option")

    if _positiveInt(conf.firstChar) and conf.lastChar is not None \
       and isinstance(conf.lastChar, int) and conf.lastChar < conf.firstChar:
        _syntaxError("value for --first (firstChar) option must be smaller than or equal to value for --last (lastChar) option")

    if conf.cpuThrottle is not None and isinstance(conf.cpuThrottle, int) \
       and (conf.cpuThrottle > 100 or conf.cpuThrottle < 0):
        _syntaxError("value for --cpu-throttle (cpuThrottle) option must be in range [0,100]")

    # Mutually exclusive switch combinations
    if conf.textOnly and conf.nullConnection:
        _syntaxError("switch --text-only is incompatible with switch --null-connection")

    if conf.data and conf.nullConnection:
        _syntaxError("switch --data is incompatible with switch --null-connection")

    if conf.predictOutput and conf.threads > 1:
        _syntaxError("switch --predict-output is incompatible with switch --threads")

    if conf.threads > MAX_NUMBER_OF_THREADS:
        _syntaxError("maximum number of used threads is %d avoiding possible connection issues" % MAX_NUMBER_OF_THREADS)

    # --forms works solely against a single -u/--url target
    if conf.forms and not conf.url:
        _syntaxError("switch --forms requires usage of -u (--url) switch")

    if conf.forms and (conf.list or conf.direct or conf.requestFile or conf.googleDork):
        _syntaxError("switch --forms is compatible only with -u (--url) target switch")

    if conf.timeSec < 1:
        _syntaxError("value for --time-sec option must be an integer greater than 0")
|
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def init(inputOptions=None):
    """
    Set attributes into both configuration and knowledge base singletons
    based upon command line and configuration file options.

    @param inputOptions: optparse-like object (or advancedDict) holding
        the command line options; a fresh advancedDict is used when None.
    @type inputOptions: C{instance}
    """

    # NOTE: the previous default argument advancedDict() was evaluated once
    # at definition time and hence shared across calls; create a fresh
    # instance per call instead (None-sentinel idiom)
    if inputOptions is None:
        inputOptions = advancedDict()

    # Basic setup: populate conf/kb singletons and merge/clean user options
    __setConfAttributes()
    __setKnowledgeBaseAttributes()
    __mergeOptions(inputOptions)
    __setVerbosity()
    __saveCmdline()
    __cleanupOptions()
    __basicOptionValidation()
    __setRequestFromFile()
    __setMultipleTargets()
    __setTamperingFunctions()
    __setTrafficOutputFP()

    parseTargetUrl()
    parseTargetDirect()

    # HTTP-related setup is needed only when a remote target is involved
    if conf.url or conf.list or conf.requestFile or conf.googleDork or conf.liveTest:
        __setHTTPTimeout()
        __setHTTPExtraHeaders()
        __setHTTPCookies()
        __setHTTPReferer()
        __setHTTPUserAgent()
        __setHTTPMethod()
        __setHTTPAuthentication()
        __setHTTPProxy()
        __setSafeUrl()
        __setGoogleDorking()
        __urllib2Opener()
        __findPageForms()
        __setDBMS()
        __setTechnique()

    __setThreads()
    __setOS()
    __setWriteFile()
    __setMetasploit()

    loadPayloads()
    __setPrefixSuffix()
    update()
    __loadQueries()
|