#!/usr/bin/env python

"""
$Id$

Copyright (c) 2006-2011 sqlmap developers (http://www.sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

import codecs
import cookielib
import difflib
import inspect
import logging
import os
import re
import socket
import sys
import threading
import urllib2
import urlparse

import lib.core.common
import lib.core.threads

from extra.keepalive import keepalive
from extra.oset.pyoset import oset
from extra.socks import socks
from lib.controller.checks import checkConnection
from lib.core.common import Backend
from lib.core.common import dataToStdout
from lib.core.common import getPublicTypeMembers
from lib.core.common import extractRegexResult
from lib.core.common import filterStringValue
from lib.core.common import findPageForms
from lib.core.common import getConsoleWidth
from lib.core.common import getFileItems
from lib.core.common import getFileType
from lib.core.common import normalizePath
from lib.core.common import ntToPosixSlashes
from lib.core.common import openFile
from lib.core.common import parseTargetDirect
from lib.core.common import parseTargetUrl
from lib.core.common import paths
from lib.core.common import randomRange
from lib.core.common import randomStr
from lib.core.common import readCachedFileContent
from lib.core.common import readInput
from lib.core.common import runningAsAdmin
from lib.core.common import sanitizeStr
from lib.core.common import setOptimize
from lib.core.common import UnicodeRawConfigParser
from lib.core.convert import urldecode
from lib.core.convert import urlencode
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.data import paths
from lib.core.data import queries
from lib.core.datatype import AttribDict
from lib.core.datatype import InjectionDict
from lib.core.defaults import defaults
from lib.core.enums import DBMS
from lib.core.enums import HTTPHEADER
from lib.core.enums import HTTPMETHOD
from lib.core.enums import MOBILES
from lib.core.enums import PAYLOAD
from lib.core.enums import PRIORITY
from lib.core.enums import REFLECTIVE_COUNTER
from lib.core.exception import sqlmapConnectionException
from lib.core.exception import sqlmapFilePathException
from lib.core.exception import sqlmapGenericException
from lib.core.exception import sqlmapMissingDependence
from lib.core.exception import sqlmapMissingMandatoryOptionException
from lib.core.exception import sqlmapMissingPrivileges
from lib.core.exception import sqlmapSilentQuitException
from lib.core.exception import sqlmapSyntaxException
from lib.core.exception import sqlmapUnsupportedDBMSException
from lib.core.exception import sqlmapUserQuitException
from lib.core.optiondict import optDict
from lib.core.settings import CODECS_LIST_PAGE
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
from lib.core.settings import DEFAULT_PAGE_ENCODING
from lib.core.settings import DEFAULT_TOR_SOCKS_PORT
from lib.core.settings import GENERAL_IP_ADDRESS_REGEX
from lib.core.settings import IS_WIN
from lib.core.settings import PLATFORM
from lib.core.settings import PYVERSION
from lib.core.settings import SITE
from lib.core.settings import DBMS_DICT
from lib.core.settings import SUPPORTED_DBMS
from lib.core.settings import SUPPORTED_OS
from lib.core.settings import VERSION_STRING
from lib.core.settings import MSSQL_ALIASES
from lib.core.settings import MYSQL_ALIASES
from lib.core.settings import PGSQL_ALIASES
from lib.core.settings import ORACLE_ALIASES
from lib.core.settings import SQLITE_ALIASES
from lib.core.settings import ACCESS_ALIASES
from lib.core.settings import FIREBIRD_ALIASES
from lib.core.settings import MAXDB_ALIASES
from lib.core.settings import SYBASE_ALIASES
from lib.core.settings import DB2_ALIASES
from lib.core.settings import BURP_REQUEST_REGEX
from lib.core.settings import LOCALHOST
from lib.core.settings import MAX_NUMBER_OF_THREADS
from lib.core.settings import PARAMETER_SPLITTING_REGEX
from lib.core.settings import TIME_DELAY_CANDIDATES
from lib.core.settings import UNENCODED_ORIGINAL_VALUE
from lib.core.settings import UNION_CHAR_REGEX
from lib.core.settings import UNKNOWN_DBMS_VERSION
from lib.core.settings import WEBSCARAB_SPLITTER
from lib.core.threads import getCurrentThreadData
from lib.core.update import update
from lib.parse.configfile import configFileParser
from lib.parse.payloads import loadPayloads
from lib.request.connect import Connect as Request
from lib.request.proxy import ProxyHTTPSHandler
from lib.request.basicauthhandler import SmartHTTPBasicAuthHandler
from lib.request.certhandler import HTTPSCertAuthHandler
from lib.request.rangehandler import HTTPRangeHandler
from lib.request.redirecthandler import SmartRedirectHandler
from lib.request.templates import getPageTemplate
from lib.utils.crawler import Crawler
from lib.utils.deps import checkDependencies
from lib.utils.google import Google
from xml.etree.ElementTree import ElementTree

authHandler = urllib2.BaseHandler()
keepAliveHandler = keepalive.HTTPHandler()
proxyHandler = urllib2.BaseHandler()
redirectHandler = SmartRedirectHandler()
rangeHandler = HTTPRangeHandler()

def __urllib2Opener():
"""
This function creates the urllib2 OpenerDirector.
"""
global authHandler
global keepAliveHandler
global proxyHandler
global rangeHandler
global redirectHandler
debugMsg = "creating HTTP requests opener object"
logger.debug(debugMsg)
handlers = [proxyHandler, authHandler, redirectHandler, rangeHandler]
if not conf.dropSetCookie:
conf.cj = cookielib.LWPCookieJar()
handlers.append(urllib2.HTTPCookieProcessor(conf.cj))
# Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
if conf.keepAlive:
warnMsg = "persistent HTTP(s) connections, Keep-Alive, have "
warnMsg += "been disabled because of their incompatibility "
if conf.proxy:
warnMsg += "with HTTP(s) proxy"
logger.warn(warnMsg)
elif conf.aType:
warnMsg += "with authentication methods"
logger.warn(warnMsg)
else:
handlers.append(keepAliveHandler)
opener = urllib2.build_opener(*handlers)
urllib2.install_opener(opener)
def __feedTargetsDict(reqFile, addedTargetUrls):
"""
Parses WebScarab and Burp logs and adds the results to the target URL list
"""
def __parseWebScarabLog(content):
"""
Parses WebScarab logs (POST method not supported)
"""
reqResList = content.split(WEBSCARAB_SPLITTER)
getPostReq = False
for request in reqResList:
url = extractRegexResult(r"URL: (?P<result>.+?)\n", request, re.I)
method = extractRegexResult(r"METHOD: (?P<result>.+?)\n", request, re.I)
cookie = extractRegexResult(r"COOKIE: (?P<result>.+?)\n", request, re.I)
getPostReq = True
if not method or not url:
logger.debug("not a valid WebScarab log entry")
continue
if method.upper() == "POST":
warnMsg = "POST requests from WebScarab logs aren't supported "
warnMsg += "as their body content is stored in separate files. "
warnMsg += "Nevertheless you can use -r to load them individually."
logger.warning(warnMsg)
continue
if conf.scope:
getPostReq &= re.search(conf.scope, url, re.I) is not None
if getPostReq:
if not kb.targetUrls or url not in addedTargetUrls:
kb.targetUrls.add((url, method, None, cookie))
addedTargetUrls.add(url)
def __parseBurpLog(content):
"""
Parses Burp logs
"""
port = None
scheme = None
reqResList = re.findall(BURP_REQUEST_REGEX, content, re.I | re.S)
if not reqResList:
reqResList = [content]
for request in reqResList:
if scheme is None:
schemePort = re.search("(http[\w]*)\:\/\/.*?\:([\d]+).+?={10,}", request, re.I | re.S)
if schemePort:
scheme = schemePort.group(1)
port = schemePort.group(2)
if not re.search("^[\n]*(GET|POST).*?\sHTTP\/", request, re.I | re.M):
continue
if re.search("^[\n]*(GET|POST).*?\.(gif|jpg|png)\sHTTP\/", request, re.I | re.M):
continue
getPostReq = False
url = None
host = None
method = None
data = None
cookie = None
params = False
lines = request.split("\n")
for line in lines:
if len(line) == 0 or line == "\n":
if method == HTTPMETHOD.POST and data is None:
data = ""
params = True
elif (line.startswith("GET ") or line.startswith("POST ")) and " HTTP/" in line:
if line.startswith("GET "):
index = 4
else:
index = 5
url = line[index:line.index(" HTTP/")]
method = line[:index-1]
if "?" in line and "=" in line:
params = True
getPostReq = True
# POST parameters
elif data is not None and params:
data += line
# GET parameters
elif "?" in line and "=" in line and ": " not in line:
params = True
# Headers
elif ": " in line:
key, value = line.split(": ", 1)
# Cookie and Host headers
if key.lower() == "cookie":
cookie = value
elif key.lower() == "host":
if '://' in value:
scheme, value = value.split('://')[:2]
splitValue = value.split(":")
host = splitValue[0]
if len(splitValue) > 1:
port = filterStringValue(splitValue[1], '[0-9]')
# Avoid adding a static Content-Length header to
# conf.httpHeaders and consider the following lines as
# POSTed data
if key == HTTPHEADER.CONTENT_LENGTH:
params = True
# Avoid proxy and connection type related headers
elif key not in ( HTTPHEADER.PROXY_CONNECTION, HTTPHEADER.CONNECTION ):
conf.httpHeaders.append((str(key), str(value)))
if conf.scope:
getPostReq &= re.search(conf.scope, host) is not None
if getPostReq and (params or cookie):
if not port and isinstance(scheme, basestring) and scheme.lower() == "https":
port = "443"
elif not scheme and port == "443":
scheme = "https"
if not url.startswith("http"):
url = "%s://%s:%s%s" % (scheme or "http", host, port or "80", url)
scheme = None
port = None
if not kb.targetUrls or url not in addedTargetUrls:
kb.targetUrls.add((url, method, urldecode(data) if data and urlencode(DEFAULT_GET_POST_DELIMITER, None) not in data else data, cookie))
addedTargetUrls.add(url)
fp = openFile(reqFile, "rb")
content = fp.read()
content = content.replace("\r", "")
if conf.scope:
logger.info("using regular expression '%s' for filtering targets" % conf.scope)
__parseBurpLog(content)
__parseWebScarabLog(content)
def __loadQueries():
"""
Loads queries from 'xml/queries.xml' file.
"""
def iterate(node, retVal=None):
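"""
Recursively converts an XML node and its children into nested DictObject
instances keyed by tag name, copying any XML attributes along the way.
"""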
class DictObject(object):
def __init__(self):
self.__dict__ = {}
def __contains__(self, name):
return name in self.__dict__
if retVal is None:
retVal = DictObject()
for child in node.findall("*"):
instance = DictObject()
retVal.__dict__[child.tag] = instance
if child.attrib:
instance.__dict__.update(child.attrib)
else:
iterate(child, instance)
return retVal
tree = ElementTree()
tree.parse(paths.QUERIES_XML)
for node in tree.findall("*"):
queries[node.attrib['value']] = iterate(node)
def __setMultipleTargets():
"""
Define a configuration parameter if we are running in multiple target
mode.
"""
initialTargetsCount = len(kb.targetUrls)
addedTargetUrls = set()
if not conf.logFile:
return
debugMsg = "parsing targets list from '%s'" % conf.logFile
logger.debug(debugMsg)
if not os.path.exists(conf.logFile):
errMsg = "the specified list of targets does not exist"
raise sqlmapFilePathException, errMsg
if os.path.isfile(conf.logFile):
__feedTargetsDict(conf.logFile, addedTargetUrls)
elif os.path.isdir(conf.logFile):
files = os.listdir(conf.logFile)
files.sort()
for reqFile in files:
if not re.search("([\d]+)\-request", reqFile):
continue
__feedTargetsDict(os.path.join(conf.logFile, reqFile), addedTargetUrls)
else:
errMsg = "the specified list of targets is neither a file "
errMsg += "nor a directory"
raise sqlmapFilePathException, errMsg
updatedTargetsCount = len(kb.targetUrls)
if updatedTargetsCount > initialTargetsCount:
infoMsg = "sqlmap parsed %d " % (updatedTargetsCount - initialTargetsCount)
infoMsg += "testable requests from the targets list"
logger.info(infoMsg)
def __setRequestFromFile():
"""
This function checks if the way to make an HTTP request is through a
supplied textual file, parses it and saves the information into the
knowledge base.
"""
if not conf.requestFile:
return
addedTargetUrls = set()
conf.requestFile = os.path.expanduser(conf.requestFile)
infoMsg = "parsing HTTP request from '%s'" % conf.requestFile
logger.info(infoMsg)
if not os.path.isfile(conf.requestFile):
errMsg = "the specified HTTP request file "
errMsg += "does not exist"
raise sqlmapFilePathException, errMsg
__feedTargetsDict(conf.requestFile, addedTargetUrls)
def __setCrawler():
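"""
Starts the crawler when a crawling depth (conf.crawlDepth) is provided,
collecting additional target URLs from the target site.
"""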
if not conf.crawlDepth:
return
crawler = Crawler()
crawler.getTargetUrls()
def __setGoogleDorking():
"""
This function checks if the way to request testable hosts is through
Google dorking, then sends the search request to Google, parses the
results and saves the testable hosts into the knowledge base.
"""
if not conf.googleDork:
return
global keepAliveHandler
global proxyHandler
debugMsg = "initializing Google dorking requests"
logger.debug(debugMsg)
infoMsg = "first request to Google to get the session cookie"
logger.info(infoMsg)
handlers = [ proxyHandler ]
# Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
if conf.keepAlive:
if conf.proxy:
warnMsg = "persistent HTTP(s) connections, Keep-Alive, has "
warnMsg += "been disabled because of it's incompatibility "
warnMsg += "with HTTP(s) proxy"
logger.warn(warnMsg)
else:
handlers.append(keepAliveHandler)
googleObj = Google(handlers)
googleObj.getCookie()
def search():
matches = googleObj.search(conf.googleDork)
if not matches:
errMsg = "unable to find results for your "
errMsg += "Google dork expression"
raise sqlmapGenericException, errMsg
googleObj.getTargetUrls()
return matches
while True:
matches = search()
if kb.targetUrls:
infoMsg = "sqlmap got %d results for your " % len(matches)
infoMsg += "Google dork expression, "
if len(matches) == len(kb.targetUrls):
infoMsg += "all "
else:
infoMsg += "%d " % len(kb.targetUrls)
infoMsg += "of them are testable targets"
logger.info(infoMsg)
break
else:
message = "sqlmap got %d results " % len(matches)
message += "for your Google dork expression, but none of them "
message += "have GET parameters to test for SQL injection. "
message += "Do you want to skip to the next result page? [Y/n]"
test = readInput(message, default="Y")
if test[0] in ("n", "N"):
raise sqlmapSilentQuitException
else:
conf.googlePage += 1
def __setBulkMultipleTargets():
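"""
Parses the file provided via conf.bulkFile and adds every listed URL
that carries GET parameters to kb.targetUrls.
"""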
if not conf.bulkFile:
return
conf.bulkFile = os.path.expanduser(conf.bulkFile)
infoMsg = "parsing multiple targets list from '%s'" % conf.bulkFile
logger.info(infoMsg)
if not os.path.isfile(conf.bulkFile):
errMsg = "the specified bulk file "
errMsg += "does not exist"
raise sqlmapFilePathException, errMsg
f = open(conf.bulkFile, 'r')
for line in f.xreadlines():
if re.search(r"[^ ]+\?(.+)", line, re.I):
kb.targetUrls.add((line.strip(), None, None, None))
f.close()
def __findPageForms():
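"""
Retrieves the target page and parses the forms found on it as additional
testable requests when conf.forms is set (and crawling is not used).
"""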
if not conf.forms or conf.crawlDepth:
return
if not checkConnection():
return
infoMsg = "searching for forms"
logger.info(infoMsg)
page, _ = Request.queryPage(content=True)
findPageForms(page, conf.url, True, True)
def __setMetasploit():
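"""
Checks the prerequisites for the out-of-band takeover functionality
(conf.osPwn, conf.osSmb, conf.osBof) and locates a valid Metasploit
Framework installation, either at conf.msfPath or in the environment PATH.
"""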
if not conf.osPwn and not conf.osSmb and not conf.osBof:
return
debugMsg = "setting the takeover out-of-band functionality"
logger.debug(debugMsg)
msfEnvPathExists = False
if IS_WIN:
warnMsg = "some sqlmap takeover functionalities are not yet "
warnMsg += "supported on Windows. Please use Linux in a virtual "
warnMsg += "machine for out-of-band features."
logger.critical(warnMsg)
raise sqlmapSilentQuitException
if conf.osSmb:
isAdmin = runningAsAdmin()
if isAdmin is not True:
errMsg = "you need to run sqlmap as an administrator "
errMsg += "if you want to perform an SMB relay attack because "
errMsg += "it will need to listen on a user-specified SMB "
errMsg += "TCP port for incoming connection attempts"
raise sqlmapMissingPrivileges, errMsg
if conf.msfPath:
condition = False
for path in [conf.msfPath, os.path.join(conf.msfPath, 'bin')]:
condition = os.path.exists(normalizePath(path))
condition &= os.path.exists(normalizePath(os.path.join(path, "msfcli")))
condition &= os.path.exists(normalizePath(os.path.join(path, "msfconsole")))
condition &= os.path.exists(normalizePath(os.path.join(path, "msfencode")))
condition &= os.path.exists(normalizePath(os.path.join(path, "msfpayload")))
if condition:
conf.msfPath = path
break
if condition:
debugMsg = "provided Metasploit Framework path "
debugMsg += "'%s' is valid" % conf.msfPath
logger.debug(debugMsg)
msfEnvPathExists = True
else:
warnMsg = "the provided Metasploit Framework path "
warnMsg += "'%s' is not valid. The cause could " % conf.msfPath
warnMsg += "be that the path does not exists or that one "
warnMsg += "or more of the needed Metasploit executables "
warnMsg += "within msfcli, msfconsole, msfencode and "
warnMsg += "msfpayload do not exist"
logger.warn(warnMsg)
else:
warnMsg = "you did not provide the local path where Metasploit "
warnMsg += "Framework is installed"
logger.warn(warnMsg)
if not msfEnvPathExists:
warnMsg = "sqlmap is going to look for Metasploit Framework "
warnMsg += "installation into the environment paths"
logger.warn(warnMsg)
envPaths = os.environ["PATH"]
if IS_WIN:
envPaths = envPaths.split(";")
else:
envPaths = envPaths.split(":")
for envPath in envPaths:
envPath = envPath.replace(";", "")
condition = os.path.exists(normalizePath(envPath))
condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfcli")))
condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfconsole")))
condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfencode")))
condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfpayload")))
if condition:
infoMsg = "Metasploit Framework has been found "
infoMsg += "installed in the '%s' path" % envPath
logger.info(infoMsg)
msfEnvPathExists = True
conf.msfPath = envPath
break
if not msfEnvPathExists:
errMsg = "unable to locate Metasploit Framework installation. "
errMsg += "Get it from http://metasploit.com/framework/download/"
raise sqlmapFilePathException, errMsg
def __setWriteFile():
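"""
Checks the file write options (conf.wFile and conf.dFile) and determines
the type of the local file to be written to the back-end DBMS file system.
"""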
if not conf.wFile:
return
debugMsg = "setting the write file functionality"
logger.debug(debugMsg)
if not os.path.exists(conf.wFile):
errMsg = "the provided local file '%s' does not exist" % conf.wFile
raise sqlmapFilePathException, errMsg
if not conf.dFile:
errMsg = "you did not provide the back-end DBMS absolute path "
errMsg += "where you want to write the local file '%s'" % conf.wFile
raise sqlmapMissingMandatoryOptionException, errMsg
conf.wFileType = getFileType(conf.wFile)
def __setOS():
"""
Force the back-end DBMS operating system option.
"""
if not conf.os:
return
if conf.os.lower() not in SUPPORTED_OS:
errMsg = "you provided an unsupported back-end DBMS operating "
errMsg += "system. The supported DBMS operating systems for OS "
errMsg += "and file system access are %s. " % ', '.join([o.capitalize() for o in SUPPORTED_OS])
errMsg += "If you do not know the back-end DBMS underlying OS, "
errMsg += "do not provide it and sqlmap will fingerprint it for "
errMsg += "you."
raise sqlmapUnsupportedDBMSException, errMsg
debugMsg = "forcing back-end DBMS operating system to user defined "
debugMsg += "value '%s'" % conf.os
logger.debug(debugMsg)
Backend.setOs(conf.os)
def __setTechnique():
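"""
Validates the letters provided with --technique against the supported
payload techniques and maps them to their internal enumeration values.
"""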
validTechniques = sorted(getPublicTypeMembers(PAYLOAD.TECHNIQUE), key=lambda x: x[1])
validLetters = map(lambda x: x[0][0].upper(), validTechniques)
if conf.tech and isinstance(conf.tech, basestring):
_ = []
for letter in conf.tech.upper():
if letter not in validLetters:
errMsg = "value for --technique must be a string composed "
2011-04-07 14:40:58 +04:00
errMsg += "by the letters %s. Refer to the " % ", ".join(validLetters)
2011-04-07 14:37:48 +04:00
errMsg += "user's manual for details"
raise sqlmapSyntaxException, errMsg
for validTech, validInt in validTechniques:
if letter == validTech[0]:
_.append(validInt)
break
conf.tech = _
if len(conf.tech) > 0:
# TODO: consider MySQL/PHP/ASP/web backdoor case where stacked
# queries is technically not necessary
if any(map(lambda x: conf.__getitem__(x), ['rFile', 'wFile', \
'osCmd', 'osShell', 'osPwn', 'osSmb', 'osBof', 'regRead', \
'regAdd', 'regDel'])) and PAYLOAD.TECHNIQUE.STACKED not in conf.tech:
errMsg = "value for --technique must include stacked queries "
2011-04-07 14:00:47 +04:00
errMsg += "technique (S) when you want to access the file "
errMsg += "system, takeover the operating system or access "
errMsg += "Windows registry hives"
raise sqlmapSyntaxException, errMsg
def __setDBMS():
"""
Force the back-end DBMS option.
"""
if not conf.dbms:
return
debugMsg = "forcing back-end DBMS to user defined value"
logger.debug(debugMsg)
conf.dbms = conf.dbms.lower()
regex = re.search("%s ([\d\.]+)" % ("(%s)" % "|".join([alias for alias in SUPPORTED_DBMS])), conf.dbms, re.I)
if regex:
conf.dbms = regex.group(1)
Backend.setVersion(regex.group(2))
if conf.dbms not in SUPPORTED_DBMS:
errMsg = "you provided an unsupported back-end database management "
errMsg += "system. The supported DBMS are %s. " % ', '.join([d for d in DBMS_DICT])
errMsg += "If you do not know the back-end DBMS, do not provide "
errMsg += "it and sqlmap will fingerprint it for you."
raise sqlmapUnsupportedDBMSException, errMsg
for aliases in (MSSQL_ALIASES, MYSQL_ALIASES, PGSQL_ALIASES, ORACLE_ALIASES, \
SQLITE_ALIASES, ACCESS_ALIASES, FIREBIRD_ALIASES, \
MAXDB_ALIASES, SYBASE_ALIASES, DB2_ALIASES):
if conf.dbms in aliases:
conf.dbms = aliases[0]
break
def __setTamperingFunctions():
"""
Loads tampering functions from given script(s)
"""
if conf.tamper:
last_priority = PRIORITY.HIGHEST
check_priority = True
resolve_priorities = False
priorities = []
for tfile in re.split(PARAMETER_SPLITTING_REGEX, conf.tamper):
found = False
tfile = tfile.strip()
if not tfile:
continue
elif os.path.exists(os.path.join(paths.SQLMAP_TAMPER_PATH, tfile if tfile.endswith('.py') else "%s.py" % tfile)):
tfile = os.path.join(paths.SQLMAP_TAMPER_PATH, tfile if tfile.endswith('.py') else "%s.py" % tfile)
elif not os.path.exists(tfile):
errMsg = "tamper script '%s' does not exist" % tfile
raise sqlmapFilePathException, errMsg
elif not tfile.endswith('.py'):
errMsg = "tamper script '%s' should have an extension '.py'" % tfile
raise sqlmapSyntaxException, errMsg
dirname, filename = os.path.split(tfile)
2010-10-13 02:45:25 +04:00
dirname = os.path.abspath(dirname)
infoMsg = "loading tamper script '%s'" % filename[:-3]
logger.info(infoMsg)
if not os.path.exists(os.path.join(dirname, '__init__.py')):
errMsg = "make sure that there is an empty file '__init__.py' "
errMsg += "inside of the tamper scripts directory '%s'" % dirname
raise sqlmapGenericException, errMsg
if dirname not in sys.path:
sys.path.insert(0, dirname)
try:
module = __import__(filename[:-3])
except ImportError, msg:
raise sqlmapSyntaxException, "can not import tamper script '%s' (%s)" % (filename[:-3], msg)
priority = PRIORITY.NORMAL if not hasattr(module, '__priority__') else module.__priority__
for name, function in inspect.getmembers(module, inspect.isfunction):
if name == "tamper" and function.func_code.co_argcount == 1:
found = True
kb.tamperFunctions.append(function)
if check_priority and priority > last_priority:
message = "it seems that you might have mixed "
message += "the order of tamper scripts.\n"
message += "Do you want to auto resolve this? [Y/n/q] "
test = readInput(message, default="Y")
if not test or test[0] in ("y", "Y"):
resolve_priorities = True
elif test[0] in ("n", "N"):
resolve_priorities = False
elif test[0] in ("q", "Q"):
raise sqlmapUserQuitException
check_priority = False
priorities.append((priority, function))
last_priority = priority
break
elif name == "dependencies":
function()
if not found:
raise sqlmapGenericException, "missing function 'tamper(value)' in tamper script '%s'" % tfile
if resolve_priorities and priorities:
priorities.sort(reverse=True)
kb.tamperFunctions = []
for _, function in priorities:
kb.tamperFunctions.append(function)
def __setThreads():
if not isinstance(conf.threads, int) or conf.threads <= 0:
conf.threads = 1
def __setDNSCache():
"""
Makes a cached version of socket._getaddrinfo to avoid subsequent DNS requests.
"""
def _getaddrinfo(*args, **kwargs):
if args in kb.cache:
return kb.cache[args]
else:
kb.cache[args] = socket._getaddrinfo(*args, **kwargs)
return kb.cache[args]
if not hasattr(socket, '_getaddrinfo'):
socket._getaddrinfo = socket.getaddrinfo
socket.getaddrinfo = _getaddrinfo
def __setHTTPProxy():
"""
Check and set the HTTP proxy through which all HTTP requests will pass.
"""
global proxyHandler
if not conf.proxy:
if conf.hostname in ('localhost', '127.0.0.1') or conf.ignoreProxy:
proxyHandler = urllib2.ProxyHandler({})
return
debugMsg = "setting the HTTP proxy to pass by all HTTP requests"
logger.debug(debugMsg)
__proxySplit = urlparse.urlsplit(conf.proxy)
__hostnamePort = __proxySplit[1].split(":")
__scheme = __proxySplit[0]
__hostname = __hostnamePort[0]
__port = None
__proxyString = ""
if len(__hostnamePort) == 2:
try:
__port = int(__hostnamePort[1])
except:
pass #drops into the next check block
if not __scheme or not __hostname or not __port:
errMsg = "proxy value must be in format 'http://url:port'"
raise sqlmapSyntaxException, errMsg
if conf.pCred:
pCredRegExp = re.search("^(.*?):(.*?)$", conf.pCred)
if not pCredRegExp:
errMsg = "proxy authentication credentials "
errMsg += "value must be in format username:password"
raise sqlmapSyntaxException, errMsg
# Reference: http://stackoverflow.com/questions/34079/how-to-specify-an-authenticated-proxy-for-a-python-http-connection
__proxyString = "%s@" % conf.pCred
__proxyString += "%s:%d" % (__hostname, __port)
# Workaround for http://bugs.python.org/issue1424152 (urllib/urllib2:
# HTTPS over (Squid) Proxy fails) as long as HTTP over SSL requests
# can't be tunneled over an HTTP proxy natively by Python (<= 2.5)
# urllib2 standard library
if PYVERSION >= "2.6":
proxyHandler = urllib2.ProxyHandler({"http": __proxyString, "https": __proxyString})
elif conf.scheme == "https":
proxyHandler = ProxyHTTPSHandler(__proxyString)
else:
proxyHandler = urllib2.ProxyHandler({"http": __proxyString})
def __setSafeUrl():
"""
Check and set the safe URL options.
"""
if not conf.safUrl:
return
if not re.search("^http[s]*://", conf.safUrl):
if ":443/" in conf.safUrl:
conf.safUrl = "https://" + conf.safUrl
else:
conf.safUrl = "http://" + conf.safUrl
if conf.saFreq <= 0:
errMsg = "please provide a valid value (>0) for safe frequency (--safe-freq) while using safe url feature"
raise sqlmapSyntaxException, errMsg
def __setPrefixSuffix():
if conf.prefix is not None and conf.suffix is None:
errMsg = "you specified the payload prefix, but did not provide "
errMsg += "the payload suffix"
raise sqlmapSyntaxException, errMsg
elif conf.prefix is None and conf.suffix is not None:
errMsg = "you specified the payload suffix, but did not provide "
errMsg += "the payload prefix"
raise sqlmapSyntaxException, errMsg
if conf.prefix is not None and conf.suffix is not None:
# Create a custom boundary object for user's supplied prefix
# and suffix
boundary = AttribDict()
boundary.level = 1
boundary.clause = [ 0 ]
boundary.where = [ 1, 2, 3 ]
boundary.prefix = conf.prefix
boundary.suffix = conf.suffix
if " like" in boundary.suffix.lower():
if "'" in boundary.suffix.lower():
boundary.ptype = 3
elif '"' in boundary.suffix.lower():
boundary.ptype = 5
elif "'" in boundary.suffix:
boundary.ptype = 2
elif '"' in boundary.suffix:
boundary.ptype = 4
else:
boundary.ptype = 1
# user who provides --prefix/--suffix does not want other boundaries
# to be tested for
conf.boundaries = [ boundary ]
def __setHTTPAuthentication():
"""
Check and set the HTTP(s) authentication method (Basic, Digest, NTLM or
Certificate), username and password for the first three methods, or key
file and certificate file for certificate authentication
"""
global authHandler
if not conf.aType and not conf.aCred and not conf.aCert:
return
elif conf.aType and not conf.aCred:
errMsg = "you specified the HTTP authentication type, but "
errMsg += "did not provide the credentials"
raise sqlmapSyntaxException, errMsg
elif not conf.aType and conf.aCred:
errMsg = "you specified the HTTP authentication credentials, "
errMsg += "but did not provide the type"
raise sqlmapSyntaxException, errMsg
if not conf.aCert:
debugMsg = "setting the HTTP authentication type and credentials"
logger.debug(debugMsg)
aTypeLower = conf.aType.lower()
if aTypeLower not in ( "basic", "digest", "ntlm" ):
errMsg = "HTTP authentication type value must be "
errMsg += "Basic, Digest or NTLM"
raise sqlmapSyntaxException, errMsg
elif aTypeLower in ( "basic", "digest" ):
regExp = "^(.*?):(.*?)$"
errMsg = "HTTP %s authentication credentials " % aTypeLower
errMsg += "value must be in format username:password"
elif aTypeLower == "ntlm":
regExp = "^(.*\\\\.*):(.*?)$"
errMsg = "HTTP NTLM authentication credentials value must "
errMsg += "be in format DOMAIN\username:password"
aCredRegExp = re.search(regExp, conf.aCred)
if not aCredRegExp:
raise sqlmapSyntaxException, errMsg
authUsername = aCredRegExp.group(1)
authPassword = aCredRegExp.group(2)
passwordMgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
passwordMgr.add_password(None, "%s://%s" % (conf.scheme, conf.hostname), authUsername, authPassword)
if aTypeLower == "basic":
authHandler = SmartHTTPBasicAuthHandler(passwordMgr)
elif aTypeLower == "digest":
authHandler = urllib2.HTTPDigestAuthHandler(passwordMgr)
elif aTypeLower == "ntlm":
try:
from ntlm import HTTPNtlmAuthHandler
except ImportError, _:
errMsg = "sqlmap requires Python NTLM third-party library "
errMsg += "in order to authenticate via NTLM, "
errMsg += "http://code.google.com/p/python-ntlm/"
raise sqlmapMissingDependence, errMsg
authHandler = HTTPNtlmAuthHandler.HTTPNtlmAuthHandler(passwordMgr)
else:
debugMsg = "setting the HTTP(s) authentication certificate"
logger.debug(debugMsg)
aCertRegExp = re.search("^(.+?),\s*(.+?)$", conf.aCert)
if not aCertRegExp:
errMsg = "HTTP authentication certificate option "
errMsg += "must be in format key_file,cert_file"
raise sqlmapSyntaxException, errMsg
# os.path.expanduser for support of paths with ~
key_file = os.path.expanduser(aCertRegExp.group(1))
cert_file = os.path.expanduser(aCertRegExp.group(2))
for ifile in (key_file, cert_file):
if not os.path.exists(ifile):
errMsg = "file '%s' does not exist" % ifile
raise sqlmapSyntaxException, errMsg
authHandler = HTTPSCertAuthHandler(key_file, cert_file)
def __setHTTPMethod():
"""
Check and set the HTTP method to perform HTTP requests through.
"""
if conf.data:
conf.method = HTTPMETHOD.POST
else:
conf.method = HTTPMETHOD.GET
debugMsg = "setting the HTTP method to %s" % conf.method
logger.debug(debugMsg)
def __setHTTPExtraHeaders():
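"""
Appends any extra HTTP headers provided with --headers (newline separated
"Name: value" pairs) to conf.httpHeaders, otherwise appends a few default
headers. Cache-invalidating headers are always added.
"""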
if conf.headers:
debugMsg = "setting extra HTTP headers"
logger.debug(debugMsg)
conf.headers = conf.headers.split("\n")
for headerValue in conf.headers:
header, value = headerValue.split(": ")
if header and value:
conf.httpHeaders.append((header, value))
elif not conf.httpHeaders or len(conf.httpHeaders) == 1:
conf.httpHeaders.append((HTTPHEADER.ACCEPT_LANGUAGE, "en-us,en;q=0.5"))
if not conf.charset:
conf.httpHeaders.append((HTTPHEADER.ACCEPT_CHARSET, "ISO-8859-15,utf-8;q=0.7,*;q=0.7"))
else:
conf.httpHeaders.append((HTTPHEADER.ACCEPT_CHARSET, "%s;q=0.7,*;q=0.1" % conf.charset))
# Invalidating any caching mechanism in between
# Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
conf.httpHeaders.append((HTTPHEADER.CACHE_CONTROL, "no-cache,no-store"))
conf.httpHeaders.append((HTTPHEADER.PRAGMA, "no-cache"))
def __defaultHTTPUserAgent():
"""
@return: default sqlmap HTTP User-Agent header
@rtype: C{str}
"""
return "%s (%s)" % (VERSION_STRING, SITE)
# Firefox 3 running on Ubuntu 9.04 updated at April 2009
#return "Mozilla/5.0 (X11; U; Linux i686; en-GB; rv:1.9.0.9) Gecko/2009042113 Ubuntu/9.04 (jaunty) Firefox/3.0.9"
# Internet Explorer 7.0 running on Windows 2003 Service Pack 2 english
# updated at March 2009
#return "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30; .NET CLR 3.0.04506.648; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729)"
def __setHTTPUserAgent():
"""
Set the HTTP User-Agent header.
Depending on the user options it can be:
* The default sqlmap string
* A default value read as user option
* A random value read from a list of User-Agent headers from a
file chosen as user option
"""
if conf.mobile:
message = "which smartphone do you want sqlmap to imitate "
message += "through HTTP User-Agent header?\n"
items = sorted(getPublicTypeMembers(MOBILES, True))
for count in xrange(len(items)):
item = items[count]
message += "[%d] %s%s\n" % (count + 1, item[:item.find(';')], " (default)" if item == MOBILES.IPHONE else "")
test = readInput(message.rstrip('\n'), default=items.index(MOBILES.IPHONE) + 1)
try:
item = items[int(test) - 1]
except:
item = MOBILES.IPHONE
item = item[item.find(';') + 1:]
conf.httpHeaders.append(("User-Agent", item))
elif conf.agent:
debugMsg = "setting the HTTP User-Agent header"
logger.debug(debugMsg)
conf.httpHeaders.append(("User-Agent", conf.agent))
elif not conf.randomAgent:
addDefaultUserAgent = True
for header, _ in conf.httpHeaders:
if header == "User-Agent":
addDefaultUserAgent = False
break
if addDefaultUserAgent:
conf.httpHeaders.append(("User-Agent", __defaultHTTPUserAgent()))
else:
if not kb.userAgents:
debugMsg = "loading random HTTP User-Agent header(s) from "
debugMsg += "file '%s'" % paths.USER_AGENTS
logger.debug(debugMsg)
try:
kb.userAgents = getFileItems(paths.USER_AGENTS)
except IOError:
warnMsg = "unable to read HTTP User-Agent header "
warnMsg += "file '%s'" % paths.USER_AGENTS
logger.warn(warnMsg)
conf.httpHeaders.append((HTTPHEADER.USER_AGENT, __defaultHTTPUserAgent()))
return
count = len(kb.userAgents)
if count == 1:
userAgent = kb.userAgents[0]
else:
userAgent = kb.userAgents[randomRange(stop=count-1)]
userAgent = sanitizeStr(userAgent)
conf.httpHeaders.append((HTTPHEADER.USER_AGENT, userAgent))
infoMsg = "fetched random HTTP User-Agent header from "
infoMsg += "file '%s': %s" % (paths.USER_AGENTS, userAgent)
logger.info(infoMsg)
def __setHTTPReferer():
"""
Set the HTTP Referer
"""
if conf.referer:
debugMsg = "setting the HTTP Referer header"
logger.debug(debugMsg)
conf.httpHeaders.append((HTTPHEADER.REFERER, conf.referer))
def __setHTTPCookies():
"""
Set the HTTP Cookie header
"""
if conf.cookie:
debugMsg = "setting the HTTP Cookie header"
logger.debug(debugMsg)
conf.httpHeaders.append((HTTPHEADER.COOKIE, conf.cookie))
def __setHTTPTimeout():
"""
Set the HTTP timeout
"""
if conf.timeout:
debugMsg = "setting the HTTP timeout"
logger.debug(debugMsg)
conf.timeout = float(conf.timeout)
if conf.timeout < 3.0:
warnMsg = "the minimum HTTP timeout is 3 seconds, sqlmap "
warnMsg += "is going to reset it"
logger.warn(warnMsg)
conf.timeout = 3.0
else:
conf.timeout = 30.0
socket.setdefaulttimeout(conf.timeout)
def __checkDependencies():
"""
Checks for missing dependencies.
"""
if conf.dependencies:
checkDependencies()
def __cleanupOptions():
"""
Cleanup configuration attributes.
"""
debugMsg = "cleaning up configuration parameters"
logger.debug(debugMsg)
width = getConsoleWidth()
if conf.eta:
conf.progressWidth = width-26
else:
conf.progressWidth = width-46
if conf.testParameter:
conf.testParameter = urldecode(conf.testParameter)
conf.testParameter = conf.testParameter.replace(" ", "")
conf.testParameter = re.split(PARAMETER_SPLITTING_REGEX, conf.testParameter)
else:
conf.testParameter = []
if conf.user:
conf.user = conf.user.replace(" ", "")
if conf.rParam:
conf.rParam = conf.rParam.replace(" ", "")
conf.rParam = re.split(PARAMETER_SPLITTING_REGEX, conf.rParam)
else:
conf.rParam = []
if conf.skip:
conf.skip = conf.skip.replace(" ", "")
conf.skip = re.split(PARAMETER_SPLITTING_REGEX, conf.skip)
else:
conf.skip = []
if conf.delay:
conf.delay = float(conf.delay)
if conf.rFile:
conf.rFile = ntToPosixSlashes(normalizePath(conf.rFile))
if conf.wFile:
conf.wFile = ntToPosixSlashes(normalizePath(conf.wFile))
if conf.dFile:
conf.dFile = ntToPosixSlashes(normalizePath(conf.dFile))
if conf.msfPath:
conf.msfPath = ntToPosixSlashes(normalizePath(conf.msfPath))
if conf.tmpPath:
conf.tmpPath = ntToPosixSlashes(normalizePath(conf.tmpPath))
if conf.googleDork or conf.logFile or conf.bulkFile or conf.forms or conf.crawlDepth:
conf.multipleTargets = True
if conf.optimize:
setOptimize()
if conf.data:
if re.search(r'%[0-9a-f]{2}', conf.data, re.I):
original = conf.data
class _(unicode): pass
conf.data = _(urldecode(conf.data) if conf.data and urlencode(DEFAULT_GET_POST_DELIMITER, None) not in conf.data else conf.data)
setattr(conf.data, UNENCODED_ORIGINAL_VALUE, original)
else:
conf.data = urldecode(conf.data) if conf.data and urlencode(DEFAULT_GET_POST_DELIMITER, None) not in conf.data else conf.data
if conf.os:
conf.os = conf.os.capitalize()
if conf.dbms:
conf.dbms = conf.dbms.capitalize()
if conf.testFilter:
if not any([char in conf.testFilter for char in ('.', ')', '(', ']', '[')]):
conf.testFilter = conf.testFilter.replace('*', '.*')
if conf.timeSec not in kb.explicitSettings:
if conf.tor:
conf.timeSec = 2 * conf.timeSec
kb.adjustTimeDelay = False
warnMsg = "increasing default value for "
warnMsg += "--time-sec to %d because " % conf.timeSec
warnMsg += "--tor switch was provided"
logger.warn(warnMsg)
else:
kb.adjustTimeDelay = True
else:
kb.adjustTimeDelay = False
if conf.code:
conf.code = int(conf.code)
if conf.csvDel:
conf.csvDel = conf.csvDel.decode('string_escape') # e.g. '\\t' -> '\t'
threadData = getCurrentThreadData()
threadData.reset()
def __setConfAttributes():
"""
This function sets some needed attributes into the configuration
singleton.
"""
debugMsg = "initializing the configuration"
logger.debug(debugMsg)
conf.boundaries = []
conf.cj = None
conf.dbmsConnector = None
conf.dbmsHandler = None
conf.dumpPath = None
conf.hashDB = None
conf.hashDBFile = None
conf.httpHeaders = []
conf.hostname = None
conf.multipleTargets = False
conf.outputPath = None
conf.paramDict = {}
conf.parameters = {}
conf.path = None
conf.port = None
conf.resultsFilename = None
conf.resultsFP = None
conf.scheme = None
conf.sessionFP = None
conf.start = True
conf.tests = []
conf.trafficFP = None
conf.wFileType = None
def __setKnowledgeBaseAttributes(flushAll=True):
"""
This function sets some needed attributes into the knowledge base
singleton.
"""
debugMsg = "initializing the knowledge base"
logger.debug(debugMsg)
kb.absFilePaths = set()
kb.adjustTimeDelay = False
kb.alwaysRedirect = None
kb.alwaysRefresh = None
kb.arch = None
kb.authHeader = None
kb.bannerFp = AttribDict()
kb.brute = AttribDict({'tables':[], 'columns':[]})
kb.bruteMode = False
kb.cache = AttribDict()
kb.cache.content = {}
kb.cache.regex = {}
kb.cache.stdev = {}
kb.commonOutputs = None
kb.data = AttribDict()
kb.dataOutputFlag = False
# Active back-end DBMS fingerprint
kb.dbms = None
kb.dbmsVersion = [ UNKNOWN_DBMS_VERSION ]
kb.delayCandidates = TIME_DELAY_CANDIDATES * [0]
kb.dep = None
kb.docRoot = None
kb.dumpMode = False
kb.dynamicMarkings = []
kb.dynamicParameters = False
kb.endDetection = False
kb.explicitSettings = set()
kb.errorIsNone = True
kb.forcedDbms = None
kb.headersFp = {}
kb.heuristicTest = None
kb.hintValue = None
kb.htmlFp = []
kb.httpErrorCodes = {}
kb.inferenceMode = False
kb.ignoreTimeout = False
kb.injection = InjectionDict()
kb.injections = []
kb.lastParserStatus = None
kb.locks = AttribDict()
kb.locks.cacheLock = threading.Lock()
kb.locks.logLock = threading.Lock()
kb.locks.ioLock = threading.Lock()
kb.locks.countLock = threading.Lock()
kb.matchRatio = None
kb.multiThreadMode = False
kb.nullConnection = None
kb.pageTemplate = None
kb.pageTemplates = dict()
kb.previousMethod = None
kb.orderByColumns = None
kb.originalPage = None
kb.originalTimeDelay = None
# Back-end DBMS underlying operating system fingerprint via banner (-b)
# parsing
kb.os = None
kb.osVersion = None
kb.osSP = None
kb.pageEncoding = DEFAULT_PAGE_ENCODING
kb.pageStable = None
kb.partRun = None
kb.processResponseCounter = 0
kb.proxyAuthHeader = None
kb.queryCounter = 0
kb.redirectSetCookie = None
kb.reflectiveMechanism = True
kb.reflectiveCounters = {REFLECTIVE_COUNTER.MISS:0, REFLECTIVE_COUNTER.HIT:0}
kb.responseTimes = []
kb.resumedQueries = {}
kb.safeCharEncode = False
kb.singleLogFlags = set()
kb.skipOthersDbms = None
kb.stickyFlag = False
kb.suppressSession = False
kb.suppressResumeInfo = False
kb.technique = None
kb.testMode = False
kb.testQueryCount = 0
kb.threadContinue = True
kb.threadException = False
kb.uChar = "NULL"
kb.xpCmdshellAvailable = False
kb.chars = AttribDict()
kb.chars.delimiter = randomStr(length=6, lowercase=True)
kb.chars.start = ":%s:" % randomStr(length=3, lowercase=True)
kb.chars.stop = ":%s:" % randomStr(length=3, lowercase=True)
kb.chars.at = ":%s:" % randomStr(length=1, lowercase=True)
kb.chars.space = ":%s:" % randomStr(length=1, lowercase=True)
kb.chars.dollar = ":%s:" % randomStr(length=1, lowercase=True)
if flushAll:
kb.keywords = set(getFileItems(paths.SQL_KEYWORDS))
kb.scanOnlyGoogleGETs = None
kb.tamperFunctions = []
kb.targetUrls = oset()
kb.testedParams = set()
kb.userAgents = None
kb.vainRun = True
kb.wordlist = None
def __useWizardInterface():
"""
Presents a simple wizard interface for beginner users
"""
if not conf.wizard:
return
logger.info("starting wizard interface")
while True:
while not conf.url:
message = "Please enter full target URL (-u): "
conf.url = readInput(message, default=None)
message = "POST data (--data) [Enter for None]: "
conf.data = readInput(message, default=None)
if filter(lambda x: '=' in str(x), [conf.url, conf.data]) or '*' in conf.url:
break
else:
warnMsg = "no GET and/or POST parameter(s) found for testing "
warnMsg += "(e.g. GET parameter 'id' in 'www.site.com/index.php?id=1')"
logger.critical(warnMsg)
if conf.crawlDepth or conf.forms:
break
else:
conf.url = conf.data = None
choice = None
while choice is None or choice not in ("", "1", "2", "3"):
message = "Injection difficulty (--level/--risk). Please choose:\n"
message += "[1] Normal (default)\n[2] Medium\n[3] Hard"
choice = readInput(message, default='1')
if choice == '2':
conf.risk = 2
conf.level = 3
elif choice == '3':
conf.risk = 3
conf.level = 5
else:
conf.risk = 1
conf.level = 1
choice = None
while choice is None or choice not in ("", "1", "2", "3"):
message = "Enumeration (--banner/--current-user/etc). Please choose:\n"
message += "[1] Basic (default)\n[2] Smart\n[3] All"
choice = readInput(message, default='1')
if choice == '2':
map(lambda x: conf.__setitem__(x, True), ['getBanner', 'getCurrentUser', 'getCurrentDb', 'isDba', 'getUsers', 'getDbs', 'getTables', 'getSchema', 'excludeSysDbs'])
elif choice == '3':
map(lambda x: conf.__setitem__(x, True), ['getBanner', 'getCurrentUser', 'getCurrentDb', 'isDba', 'getUsers', 'getPasswordHashes', 'getPrivileges', 'getRoles', 'dumpAll'])
else:
map(lambda x: conf.__setitem__(x, True), ['getBanner', 'getCurrentUser', 'getCurrentDb', 'isDba'])
logger.debug("muting sqlmap.. it will do the magic for you")
conf.verbose = 0
conf.batch = True
conf.threads = 4
dataToStdout("\nsqlmap is running, please wait..\n\n")
def __saveCmdline():
"""
Saves the command line options to a sqlmap configuration INI file.
"""
if not conf.saveCmdline:
return
debugMsg = "saving command line options on a sqlmap configuration INI file"
logger.debug(debugMsg)
config = UnicodeRawConfigParser()
userOpts = {}
for family in optDict.keys():
userOpts[family] = []
for option, value in conf.items():
for family, optionData in optDict.items():
if option in optionData:
userOpts[family].append((option, value, optionData[option]))
for family, optionData in userOpts.items():
config.add_section(family)
optionData.sort()
for option, value, datatype in optionData:
if isinstance(datatype, (list, tuple, set)):
datatype = datatype[0]
if value is None:
if datatype == "boolean":
value = "False"
elif datatype in ( "integer", "float" ):
if option in ( "threads", "verbose" ):
value = "1"
elif option == "timeout":
value = "10"
else:
value = "0"
elif datatype == "string":
value = ""
if isinstance(value, basestring):
value = value.replace("\n", "\n ")
config.set(family, option, value)
confFP = openFile(paths.SQLMAP_CONFIG, "wb")
config.write(confFP)
infoMsg = "saved command line options to '%s' configuration file" % paths.SQLMAP_CONFIG
logger.info(infoMsg)
def __setVerbosity():
"""
This function sets the verbosity of sqlmap output messages.
"""
if conf.verbose is None:
conf.verbose = 1
conf.verbose = int(conf.verbose)
if conf.verbose == 0:
logger.setLevel(logging.ERROR)
elif conf.verbose == 1:
logger.setLevel(logging.INFO)
elif conf.verbose > 2 and conf.eta:
conf.verbose = 2
logger.setLevel(logging.DEBUG)
elif conf.verbose == 2:
logger.setLevel(logging.DEBUG)
elif conf.verbose == 3:
logger.setLevel(9)
elif conf.verbose == 4:
2008-10-15 19:38:22 +04:00
logger.setLevel(8)
elif conf.verbose >= 5:
logger.setLevel(7)
def __mergeOptions(inputOptions, overrideOptions):
"""
Merge command line options with configuration file and default options.
@param inputOptions: optparse object with command line options.
@type inputOptions: C{instance}
"""
if inputOptions.configFile:
configFileParser(inputOptions.configFile)
if hasattr(inputOptions, "items"):
inputOptionsItems = inputOptions.items()
else:
inputOptionsItems = inputOptions.__dict__.items()
for key, value in inputOptionsItems:
if key not in conf or value not in (None, False) or overrideOptions:
conf[key] = value
for key, value in conf.items():
if value:
kb.explicitSettings.add(key)
for key, value in defaults.items():
if not conf[key]:
2008-10-15 19:38:22 +04:00
conf[key] = value
def __setTrafficOutputFP():
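"""
Opens the file provided via conf.trafficFile for logging the complete
HTTP traffic generated during the run.
"""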
if conf.trafficFile:
infoMsg = "setting file for logging HTTP traffic"
logger.info(infoMsg)
conf.trafficFP = openFile(conf.trafficFile, "w+")
def __setTorSocksProxySettings():
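"""
Routes urllib2 traffic through a local Tor SOCKS4 proxy (on the default
Tor SOCKS port) when the --tor switch is provided.
"""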
if not conf.tor:
return
infoMsg = "setting Tor SOCKS proxy settings"
logger.info(infoMsg)
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS4, LOCALHOST, DEFAULT_TOR_SOCKS_PORT)
socks.wrapmodule(urllib2)
def __checkTor():
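"""
Verifies that traffic really goes through the Tor network by requesting
the official Tor check page when --check-tor is provided.
"""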
if conf.checkTor:
infoMsg = "checking Tor connection"
logger.info(infoMsg)
page, _, _ = Request.getPage(url="https://check.torproject.org/", raise404=False)
if not page or 'Congratulations' not in page:
errMsg = "it seems that Tor is not properly set"
2011-10-25 21:37:43 +04:00
raise sqlmapConnectionException, errMsg
else:
infoMsg = "Tor is properly being used"
logger.info(infoMsg)
def __basicOptionValidation():
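"""
Performs basic sanity checks of the provided options, raising a syntax
exception for out-of-range values and incompatible switch combinations.
"""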
if conf.limitStart is not None and not (isinstance(conf.limitStart, int) and conf.limitStart > 0):
errMsg = "value for --start (limitStart) option must be an integer value greater than zero (>0)"
raise sqlmapSyntaxException, errMsg
if conf.limitStop is not None and not (isinstance(conf.limitStop, int) and conf.limitStop > 0):
errMsg = "value for --stop (limitStop) option must be an integer value greater than zero (>0)"
raise sqlmapSyntaxException, errMsg
if conf.limitStart is not None and isinstance(conf.limitStart, int) and conf.limitStart > 0 and \
conf.limitStop is not None and isinstance(conf.limitStop, int) and conf.limitStop < conf.limitStart:
errMsg = "value for --start (limitStart) option must be smaller or equal than value for --stop (limitStop) option"
raise sqlmapSyntaxException, errMsg
if conf.firstChar is not None and isinstance(conf.firstChar, int) and conf.firstChar > 0 and \
conf.lastChar is not None and isinstance(conf.lastChar, int) and conf.lastChar < conf.firstChar:
errMsg = "value for --first (firstChar) option must be smaller than or equal to value for --last (lastChar) option"
raise sqlmapSyntaxException, errMsg
if conf.cpuThrottle is not None and isinstance(conf.cpuThrottle, int) and (conf.cpuThrottle > 100 or conf.cpuThrottle < 0):
errMsg = "value for --cpu-throttle (cpuThrottle) option must be in range [0,100]"
raise sqlmapSyntaxException, errMsg
if conf.textOnly and conf.nullConnection:
errMsg = "switch --text-only is incompatible with switch --null-connection"
raise sqlmapSyntaxException, errMsg
if conf.titles and conf.nullConnection:
errMsg = "switch --titles is incompatible with switch --null-connection"
raise sqlmapSyntaxException, errMsg
if conf.data and conf.nullConnection:
errMsg = "switch --data is incompatible with switch --null-connection"
raise sqlmapSyntaxException, errMsg
if conf.string and conf.nullConnection:
errMsg = "switch --string is incompatible with switch --null-connection"
raise sqlmapSyntaxException, errMsg
if conf.regexp and conf.nullConnection:
errMsg = "switch --regexp is incompatible with switch --null-connection"
raise sqlmapSyntaxException, errMsg
if conf.predictOutput and (conf.threads > 1 or conf.optimize):
errMsg = "switch --predict-output is incompatible with switch --threads and -o"
raise sqlmapSyntaxException, errMsg
if conf.threads > MAX_NUMBER_OF_THREADS:
errMsg = "maximum number of used threads is %d avoiding possible connection issues" % MAX_NUMBER_OF_THREADS
raise sqlmapSyntaxException, errMsg
if conf.forms and not conf.url:
errMsg = "switch --forms requires usage of -u (--url) switch"
raise sqlmapSyntaxException, errMsg
if conf.tor and conf.ignoreProxy:
errMsg = "switch --tor is incompatible with switch --ignore-proxy"
raise sqlmapSyntaxException, errMsg
if conf.tor and conf.proxy:
errMsg = "switch --tor is incompatible with switch --proxy"
raise sqlmapSyntaxException, errMsg
if conf.checkTor and not (conf.tor or conf.proxy):
errMsg = "switch --check-tor requires usage of switch --tor (or --proxy with HTTP proxy address using Tor)"
raise sqlmapSyntaxException, errMsg
if conf.skip and conf.testParameter:
errMsg = "switch --skip is incompatible with switch -p"
raise sqlmapSyntaxException, errMsg
if conf.mobile and conf.agent:
errMsg = "switch --mobile is incompatible with switch --user-agent"
raise sqlmapSyntaxException, errMsg
if conf.proxy and conf.ignoreProxy:
errMsg = "switch --proxy is incompatible with switch --ignore-proxy"
raise sqlmapSyntaxException, errMsg
if conf.forms and any([conf.logFile, conf.bulkFile, conf.direct, conf.requestFile, conf.googleDork]):
errMsg = "switch --forms is compatible only with -u (--url) target switch"
raise sqlmapSyntaxException, errMsg
if conf.timeSec < 1:
errMsg = "value for --time-sec option must be an integer greater than 0"
raise sqlmapSyntaxException, errMsg
if conf.uChar and not re.match(UNION_CHAR_REGEX, conf.uChar):
errMsg = "value for --union-char option must be an alpha-numeric value (e.g. 1)"
raise sqlmapSyntaxException, errMsg
if isinstance(conf.uCols, basestring):
if not conf.uCols.isdigit() and ("-" not in conf.uCols or len(conf.uCols.split("-")) != 2):
errMsg = "value for --union-cols must be a range with hyphon "
errMsg += "(e.g. 1-10) or integer value (e.g. 5)"
raise sqlmapSyntaxException, errMsg
if conf.charset:
try:
codecs.lookup(conf.charset)
except LookupError:
errMsg = "unknown charset '%s'. Please visit " % conf.charset
errMsg += "'%s' to get the full list of " % CODECS_LIST_PAGE
errMsg += "supported charsets"
raise sqlmapSyntaxException, errMsg
def __resolveCrossReferences():
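"""
Assigns late-bound references used by other modules to avoid circular
imports.
"""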
lib.core.threads.readInput = readInput
lib.core.common.getPageTemplate = getPageTemplate
def init(inputOptions=AttribDict(), overrideOptions=False):
"""
Set attributes into both configuration and knowledge base singletons
based upon command line and configuration file options.
"""
__setConfAttributes()
__setKnowledgeBaseAttributes()
__mergeOptions(inputOptions, overrideOptions)
__useWizardInterface()
__setVerbosity()
__saveCmdline()
__setRequestFromFile()
__cleanupOptions()
__checkDependencies()
__basicOptionValidation()
__setTorSocksProxySettings()
__setMultipleTargets()
__setTamperingFunctions()
__setTrafficOutputFP()
__resolveCrossReferences()
parseTargetUrl()
parseTargetDirect()
if any([conf.url, conf.logFile, conf.bulkFile, conf.requestFile, conf.googleDork, conf.liveTest]):
__setHTTPTimeout()
__setHTTPExtraHeaders()
__setHTTPCookies()
__setHTTPReferer()
__setHTTPUserAgent()
__setHTTPMethod()
__setHTTPAuthentication()
__setHTTPProxy()
__setDNSCache()
__setSafeUrl()
__setGoogleDorking()
__setBulkMultipleTargets()
__urllib2Opener()
__checkTor()
__setCrawler()
__findPageForms()
__setDBMS()
__setTechnique()
__setThreads()
__setOS()
__setWriteFile()
__setMetasploit()
loadPayloads()
__setPrefixSuffix()
update()
__loadQueries()