#!/usr/bin/env python

"""
$Id$

This file is part of the sqlmap project, http://sqlmap.sourceforge.net.

Copyright (c) 2007-2010 Bernardo Damele A. G. <bernardo.damele@gmail.com>
Copyright (c) 2006 Daniele Bellucci <daniele.bellucci@gmail.com>

sqlmap is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation version 2 of the License.

sqlmap is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.

You should have received a copy of the GNU General Public License along
with sqlmap; if not, write to the Free Software Foundation, Inc., 51
Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
|
|
|
|
|
|
|
|
import cookielib
|
2009-04-22 15:48:07 +04:00
|
|
|
import ctypes
|
2008-12-20 04:54:08 +03:00
|
|
|
import difflib
|
2008-10-15 19:38:22 +04:00
|
|
|
import logging
|
|
|
|
import os
|
|
|
|
import re
|
2008-12-04 20:40:03 +03:00
|
|
|
import socket
|
2008-10-15 19:38:22 +04:00
|
|
|
import urllib2
|
|
|
|
import urlparse
|
|
|
|
|
2009-01-04 01:59:22 +03:00
|
|
|
from ConfigParser import ConfigParser
|
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
from lib.core.common import getFileType
|
2010-02-04 17:50:54 +03:00
|
|
|
from lib.core.common import normalizePath
|
2010-02-04 17:37:00 +03:00
|
|
|
from lib.core.common import ntToPosixSlashes
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.common import parseTargetUrl
|
|
|
|
from lib.core.common import paths
|
|
|
|
from lib.core.common import randomRange
|
|
|
|
from lib.core.common import sanitizeStr
|
|
|
|
from lib.core.data import conf
|
|
|
|
from lib.core.data import kb
|
|
|
|
from lib.core.data import logger
|
|
|
|
from lib.core.data import paths
|
|
|
|
from lib.core.datatype import advancedDict
|
|
|
|
from lib.core.exception import sqlmapFilePathException
|
|
|
|
from lib.core.exception import sqlmapGenericException
|
2009-12-03 01:54:39 +03:00
|
|
|
from lib.core.exception import sqlmapMissingDependence
|
2009-04-22 15:48:07 +04:00
|
|
|
from lib.core.exception import sqlmapMissingMandatoryOptionException
|
|
|
|
from lib.core.exception import sqlmapMissingPrivileges
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.exception import sqlmapSyntaxException
|
|
|
|
from lib.core.exception import sqlmapUnsupportedDBMSException
|
|
|
|
from lib.core.optiondict import optDict
|
|
|
|
from lib.core.settings import MSSQL_ALIASES
|
|
|
|
from lib.core.settings import MYSQL_ALIASES
|
2009-07-09 15:05:24 +04:00
|
|
|
from lib.core.settings import PGSQL_ALIASES
|
|
|
|
from lib.core.settings import ORACLE_ALIASES
|
2009-06-11 19:01:48 +04:00
|
|
|
from lib.core.settings import IS_WIN
|
2009-04-22 15:48:07 +04:00
|
|
|
from lib.core.settings import PLATFORM
|
2009-04-28 03:05:11 +04:00
|
|
|
from lib.core.settings import SITE
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.settings import SUPPORTED_DBMS
|
2009-04-22 15:48:07 +04:00
|
|
|
from lib.core.settings import SUPPORTED_OS
|
2009-04-28 03:05:11 +04:00
|
|
|
from lib.core.settings import VERSION_STRING
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.update import update
|
|
|
|
from lib.parse.configfile import configFileParser
|
|
|
|
from lib.parse.queriesfile import queriesParser
|
|
|
|
from lib.request.proxy import ProxyHTTPSHandler
|
2010-01-07 15:59:09 +03:00
|
|
|
from lib.request.certhandler import HTTPSCertAuthHandler
|
2010-03-15 17:24:43 +03:00
|
|
|
from lib.request.redirecthandler import SmartRedirectHandler
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.utils.google import Google
|
|
|
|
|
|
|
|
# Module-level HTTP handlers shared by the option-setting functions below.
# __setHTTPAuthentication() and __setHTTPProxy() replace these placeholder
# BaseHandler instances with concrete handlers before __urllib2Opener()
# builds and installs the global opener.
authHandler = urllib2.BaseHandler()
proxyHandler = urllib2.BaseHandler()
redirectHandler = SmartRedirectHandler()
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
def __urllib2Opener():
    """
    Build and install the global urllib2 OpenerDirector used for all
    HTTP requests.
    """

    global authHandler
    global proxyHandler
    global redirectHandler

    logger.debug("creating HTTP requests opener object")

    handlers = [proxyHandler, authHandler]

    if not conf.dropSetCookie:
        # keep session cookies between requests unless the user asked to
        # drop incoming Set-Cookie headers
        conf.cj = cookielib.LWPCookieJar()
        handlers.append(urllib2.HTTPCookieProcessor(conf.cj))
        handlers.append(redirectHandler)

    urllib2.install_opener(urllib2.build_opener(*handlers))
|
|
|
|
|
2008-11-28 01:33:33 +03:00
|
|
|
def __feedTargetsDict(reqFile, addedTargetUrls):
    """
    Parse a textual file of raw HTTP requests (e.g. a proxy log) and feed
    each testable GET/POST request into kb.targetUrls.

    @param reqFile: path of the file containing the raw HTTP request(s)
    @param addedTargetUrls: set collecting URLs added so far, used to
           avoid duplicate entries across multiple parsed files
    """

    fp = open(reqFile, "r")

    try:
        fread = fp.read()
    finally:
        # always release the file handle, even if reading fails
        fp.close()

    fread = fread.replace("\r", "")

    # requests inside a log file are separated by a line of '=' characters
    reqResList = fread.split("======================================================")

    port = None
    scheme = None

    if conf.scope:
        logger.info("using regular expression '%s' for filtering targets" % conf.scope)

    for request in reqResList:
        if scheme is None:
            # try to infer scheme and port from a timestamped log line
            # like "hh:mm:ss http://host:port"
            schemePort = re.search(r"\d\d[\:|\.]\d\d[\:|\.]\d\d\s+(http[\w]*)\:\/\/.*?\:([\d]+)", request, re.I)

            if schemePort:
                scheme = schemePort.group(1)
                port = schemePort.group(2)

        # only GET/POST requests are of interest
        if not re.search(r"^[\n]*(GET|POST).*?\sHTTP\/", request, re.I):
            continue

        # skip requests for static images
        if re.search(r"^[\n]*(GET|POST).*?\.(gif|jpg|png)\sHTTP\/", request, re.I):
            continue

        getPostReq = False
        url = None
        host = None
        method = None
        data = None
        cookie = None
        params = False
        lines = request.split("\n")

        for line in lines:
            if len(line) == 0 or line == "\n":
                continue

            if line.startswith("GET ") or line.startswith("POST "):
                if line.startswith("GET "):
                    index = 4
                else:
                    index = 5

                url = line[index:line.index(" HTTP/")]
                method = line[:index-1]

                if "?" in line and "=" in line:
                    params = True

                getPostReq = True

            # GET parameters
            elif "?" in line and "=" in line and ": " not in line:
                data = line
                params = True

            # Cookie and Host headers
            elif ": " in line:
                key, value = line.split(": ", 1)

                if key.lower() == "cookie":
                    cookie = value
                elif key.lower() == "host":
                    host = value

            # POST parameters
            elif method is not None and method == "POST" and "=" in line:
                data = line
                params = True

        if conf.scope:
            # filter out hosts not matching the user-provided regular
            # expression; guard against requests with no Host header
            getPostReq &= host is not None and re.search(conf.scope, host) is not None

        if getPostReq and params:
            if not url.startswith("http"):
                url = "%s://%s:%s%s" % (scheme or "http", host, port or "80", url)
                scheme = None
                port = None

            if not kb.targetUrls or url not in addedTargetUrls:
                kb.targetUrls.add(( url, method, data, cookie ))
                addedTargetUrls.add(url)
|
2008-11-20 20:56:09 +03:00
|
|
|
|
|
|
|
def __setMultipleTargets():
|
|
|
|
"""
|
|
|
|
Define a configuration parameter if we are running in multiple target
|
|
|
|
mode.
|
|
|
|
"""
|
|
|
|
|
2008-11-22 04:57:22 +03:00
|
|
|
initialTargetsCount = len(kb.targetUrls)
|
2008-11-28 01:33:33 +03:00
|
|
|
addedTargetUrls = set()
|
2008-11-20 20:56:09 +03:00
|
|
|
|
|
|
|
if not conf.list:
|
|
|
|
return
|
|
|
|
|
2008-11-28 01:33:33 +03:00
|
|
|
debugMsg = "parsing targets list from '%s'" % conf.list
|
|
|
|
logger.debug(debugMsg)
|
|
|
|
|
2008-11-20 20:56:09 +03:00
|
|
|
if not os.path.exists(conf.list):
|
2008-11-28 01:33:33 +03:00
|
|
|
errMsg = "the specified list of targets does not exist"
|
2008-11-20 20:56:09 +03:00
|
|
|
raise sqlmapFilePathException, errMsg
|
|
|
|
|
|
|
|
if os.path.isfile(conf.list):
|
2008-11-28 01:33:33 +03:00
|
|
|
__feedTargetsDict(conf.list, addedTargetUrls)
|
2008-11-22 04:57:22 +03:00
|
|
|
|
2008-11-20 20:56:09 +03:00
|
|
|
elif os.path.isdir(conf.list):
|
|
|
|
files = os.listdir(conf.list)
|
|
|
|
files.sort()
|
|
|
|
|
|
|
|
for reqFile in files:
|
|
|
|
if not re.search("([\d]+)\-request", reqFile):
|
|
|
|
continue
|
|
|
|
|
2008-11-28 01:33:33 +03:00
|
|
|
__feedTargetsDict(os.path.join(conf.list, reqFile), addedTargetUrls)
|
2008-11-22 04:57:22 +03:00
|
|
|
|
2008-11-20 20:56:09 +03:00
|
|
|
else:
|
2008-11-28 01:33:33 +03:00
|
|
|
errMsg = "the specified list of targets is not a file "
|
2008-11-20 20:56:09 +03:00
|
|
|
errMsg += "nor a directory"
|
|
|
|
raise sqlmapFilePathException, errMsg
|
|
|
|
|
2008-11-22 04:57:22 +03:00
|
|
|
updatedTargetsCount = len(kb.targetUrls)
|
|
|
|
|
|
|
|
if updatedTargetsCount > initialTargetsCount:
|
2008-11-28 01:33:33 +03:00
|
|
|
infoMsg = "sqlmap parsed %d " % (updatedTargetsCount - initialTargetsCount)
|
|
|
|
infoMsg += "testable requests from the targets list"
|
2008-11-22 04:57:22 +03:00
|
|
|
logger.info(infoMsg)
|
|
|
|
|
2010-03-16 18:21:42 +03:00
|
|
|
def __setRequestFromFile():
|
|
|
|
"""
|
|
|
|
This function checks if the way to make a HTTP request is through supplied
|
|
|
|
textual file, parses it and saves the information into the knowledge base.
|
|
|
|
"""
|
|
|
|
|
|
|
|
if not conf.requestFile:
|
|
|
|
return
|
|
|
|
|
|
|
|
addedTargetUrls = set()
|
|
|
|
|
|
|
|
conf.requestFile = os.path.expanduser(conf.requestFile)
|
|
|
|
|
|
|
|
infoMsg = "parsing HTTP request from '%s'" % conf.requestFile
|
|
|
|
logger.info(infoMsg)
|
|
|
|
|
|
|
|
if not os.path.isfile(conf.requestFile):
|
|
|
|
errMsg = "the specified HTTP request file "
|
|
|
|
errMsg += "does not exist"
|
|
|
|
raise sqlmapFilePathException, errMsg
|
|
|
|
|
|
|
|
__feedTargetsDict(conf.requestFile, addedTargetUrls)
|
|
|
|
|
2008-11-28 01:33:33 +03:00
|
|
|
def __setGoogleDorking():
|
|
|
|
"""
|
|
|
|
This function checks if the way to request testable hosts is through
|
|
|
|
Google dorking then requests to Google the search parameter, parses
|
|
|
|
the results and save the testable hosts into the knowledge base.
|
|
|
|
"""
|
|
|
|
|
|
|
|
global proxyHandler
|
|
|
|
|
|
|
|
if not conf.googleDork:
|
|
|
|
return
|
|
|
|
|
|
|
|
debugMsg = "initializing Google dorking requests"
|
|
|
|
logger.debug(debugMsg)
|
|
|
|
|
|
|
|
logMsg = "first request to Google to get the session cookie"
|
|
|
|
logger.info(logMsg)
|
|
|
|
|
|
|
|
googleObj = Google(proxyHandler)
|
|
|
|
googleObj.getCookie()
|
|
|
|
|
|
|
|
matches = googleObj.search(conf.googleDork)
|
|
|
|
|
|
|
|
if not matches:
|
|
|
|
errMsg = "unable to find results for your "
|
|
|
|
errMsg += "Google dork expression"
|
|
|
|
raise sqlmapGenericException, errMsg
|
|
|
|
|
|
|
|
googleObj.getTargetUrls()
|
|
|
|
|
|
|
|
if kb.targetUrls:
|
|
|
|
logMsg = "sqlmap got %d results for your " % len(matches)
|
|
|
|
logMsg += "Google dork expression, "
|
|
|
|
|
|
|
|
if len(matches) == len(kb.targetUrls):
|
|
|
|
logMsg += "all "
|
|
|
|
else:
|
|
|
|
logMsg += "%d " % len(kb.targetUrls)
|
|
|
|
|
|
|
|
logMsg += "of them are testable targets"
|
|
|
|
logger.info(logMsg)
|
|
|
|
else:
|
|
|
|
errMsg = "sqlmap got %d results " % len(matches)
|
|
|
|
errMsg += "for your Google dork expression, but none of them "
|
|
|
|
errMsg += "have GET parameters to test for SQL injection"
|
|
|
|
raise sqlmapGenericException, errMsg
|
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
def __setMetasploit():
|
|
|
|
if not conf.osPwn and not conf.osSmb and not conf.osBof:
|
|
|
|
return
|
|
|
|
|
2009-06-11 19:01:48 +04:00
|
|
|
debugMsg = "setting the takeover out-of-band functionality"
|
|
|
|
logger.debug(debugMsg)
|
|
|
|
|
|
|
|
msfEnvPathExists = False
|
|
|
|
|
2010-01-02 05:02:12 +03:00
|
|
|
if IS_WIN:
|
2009-06-11 19:01:48 +04:00
|
|
|
warnMsg = "Metasploit's msfconsole and msfcli are not supported "
|
|
|
|
warnMsg += "on the native Windows Ruby interpreter. Please "
|
|
|
|
warnMsg += "install Metasploit, Python interpreter and sqlmap on "
|
|
|
|
warnMsg += "Cygwin or use Linux in VMWare to use sqlmap takeover "
|
|
|
|
warnMsg += "out-of-band features. sqlmap will now continue "
|
|
|
|
warnMsg += "without calling any takeover feature"
|
|
|
|
logger.warn(warnMsg)
|
|
|
|
|
|
|
|
conf.osPwn = None
|
|
|
|
conf.osSmb = None
|
|
|
|
conf.osBof = None
|
|
|
|
|
|
|
|
return
|
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
if conf.osSmb:
|
|
|
|
isAdmin = False
|
|
|
|
|
2009-05-01 01:42:54 +04:00
|
|
|
if "linux" in PLATFORM or "darwin" in PLATFORM:
|
|
|
|
isAdmin = os.geteuid()
|
2009-04-22 15:48:07 +04:00
|
|
|
|
2009-05-01 01:42:54 +04:00
|
|
|
if isinstance(isAdmin, (int, float, long)) and isAdmin == 0:
|
2009-04-22 15:48:07 +04:00
|
|
|
isAdmin = True
|
|
|
|
|
2010-01-02 05:02:12 +03:00
|
|
|
elif IS_WIN:
|
2009-05-01 01:42:54 +04:00
|
|
|
isAdmin = ctypes.windll.shell32.IsUserAnAdmin()
|
2009-04-22 15:48:07 +04:00
|
|
|
|
2009-05-01 01:42:54 +04:00
|
|
|
if isinstance(isAdmin, (int, float, long)) and isAdmin == 1:
|
2009-04-22 15:48:07 +04:00
|
|
|
isAdmin = True
|
|
|
|
|
|
|
|
else:
|
|
|
|
warnMsg = "sqlmap is not able to check if you are running it "
|
|
|
|
warnMsg += "as an Administrator accout on this platform. "
|
|
|
|
warnMsg += "sqlmap will assume that you are an Administrator "
|
|
|
|
warnMsg += "which is mandatory for the SMB relay attack to "
|
|
|
|
warnMsg += "work properly"
|
|
|
|
logger.warn(warnMsg)
|
|
|
|
|
|
|
|
isAdmin = True
|
|
|
|
|
2009-06-11 19:01:48 +04:00
|
|
|
if isAdmin is not True:
|
|
|
|
errMsg = "you need to run sqlmap as an Administrator/root "
|
2009-04-22 15:48:07 +04:00
|
|
|
errMsg += "user if you want to perform a SMB relay attack "
|
|
|
|
errMsg += "because it will need to listen on a user-specified "
|
|
|
|
errMsg += "SMB TCP port for incoming connection attempts"
|
|
|
|
raise sqlmapMissingPrivileges, errMsg
|
|
|
|
|
|
|
|
if conf.msfPath:
|
2010-02-04 17:50:54 +03:00
|
|
|
condition = os.path.exists(normalizePath(conf.msfPath))
|
|
|
|
condition &= os.path.exists(normalizePath(os.path.join(conf.msfPath, "msfcli")))
|
|
|
|
condition &= os.path.exists(normalizePath(os.path.join(conf.msfPath, "msfconsole")))
|
|
|
|
condition &= os.path.exists(normalizePath(os.path.join(conf.msfPath, "msfencode")))
|
|
|
|
condition &= os.path.exists(normalizePath(os.path.join(conf.msfPath, "msfpayload")))
|
2009-04-22 15:48:07 +04:00
|
|
|
|
|
|
|
if condition:
|
|
|
|
debugMsg = "provided Metasploit Framework 3 path "
|
|
|
|
debugMsg += "'%s' is valid" % conf.msfPath
|
|
|
|
logger.debug(debugMsg)
|
|
|
|
|
|
|
|
msfEnvPathExists = True
|
|
|
|
else:
|
|
|
|
warnMsg = "the provided Metasploit Framework 3 path "
|
|
|
|
warnMsg += "'%s' is not valid. The cause could " % conf.msfPath
|
|
|
|
warnMsg += "be that the path does not exists or that one "
|
|
|
|
warnMsg += "or more of the needed Metasploit executables "
|
|
|
|
warnMsg += "within msfcli, msfconsole, msfencode and "
|
|
|
|
warnMsg += "msfpayload do not exist"
|
|
|
|
logger.warn(warnMsg)
|
|
|
|
else:
|
|
|
|
warnMsg = "you did not provide the local path where Metasploit "
|
|
|
|
warnMsg += "Framework 3 is installed"
|
|
|
|
logger.warn(warnMsg)
|
|
|
|
|
2010-01-02 05:02:12 +03:00
|
|
|
if not msfEnvPathExists:
|
2009-04-22 15:48:07 +04:00
|
|
|
warnMsg = "sqlmap is going to look for Metasploit Framework 3 "
|
|
|
|
warnMsg += "installation into the environment paths"
|
|
|
|
logger.warn(warnMsg)
|
|
|
|
|
|
|
|
envPaths = os.environ["PATH"]
|
|
|
|
|
2010-01-02 05:02:12 +03:00
|
|
|
if IS_WIN:
|
2009-04-22 15:48:07 +04:00
|
|
|
envPaths = envPaths.split(";")
|
|
|
|
else:
|
|
|
|
envPaths = envPaths.split(":")
|
|
|
|
|
|
|
|
for envPath in envPaths:
|
2009-05-13 00:24:00 +04:00
|
|
|
envPath = envPath.replace(";", "")
|
2010-02-04 17:50:54 +03:00
|
|
|
condition = os.path.exists(normalizePath(envPath))
|
|
|
|
condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfcli")))
|
|
|
|
condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfconsole")))
|
|
|
|
condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfencode")))
|
|
|
|
condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfpayload")))
|
2009-04-22 15:48:07 +04:00
|
|
|
|
|
|
|
if condition:
|
|
|
|
infoMsg = "Metasploit Framework 3 has been found "
|
|
|
|
infoMsg += "installed in the '%s' path" % envPath
|
|
|
|
logger.info(infoMsg)
|
|
|
|
|
|
|
|
msfEnvPathExists = True
|
|
|
|
conf.msfPath = envPath
|
|
|
|
|
|
|
|
break
|
|
|
|
|
2010-01-02 05:02:12 +03:00
|
|
|
if not msfEnvPathExists:
|
2009-04-22 15:48:07 +04:00
|
|
|
errMsg = "unable to locate Metasploit Framework 3 installation. "
|
|
|
|
errMsg += "Get it from http://metasploit.com/framework/download/"
|
|
|
|
raise sqlmapFilePathException, errMsg
|
|
|
|
|
|
|
|
def __setWriteFile():
|
|
|
|
if not conf.wFile:
|
|
|
|
return
|
|
|
|
|
|
|
|
debugMsg = "setting the write file functionality"
|
|
|
|
logger.debug(debugMsg)
|
|
|
|
|
|
|
|
if not os.path.exists(conf.wFile):
|
|
|
|
errMsg = "the provided local file '%s' does not exist" % conf.wFile
|
|
|
|
raise sqlmapFilePathException, errMsg
|
|
|
|
|
|
|
|
if not conf.dFile:
|
|
|
|
errMsg = "you did not provide the back-end DBMS absolute path "
|
|
|
|
errMsg += "where you want to write the local file '%s'" % conf.wFile
|
|
|
|
raise sqlmapMissingMandatoryOptionException, errMsg
|
|
|
|
|
|
|
|
conf.wFileType = getFileType(conf.wFile)
|
|
|
|
|
2008-12-22 00:39:53 +03:00
|
|
|
def __setUnionTech():
    """
    Validate and normalize the UNION query SQL injection detection
    technique, falling back to 'NULL' when unset or invalid.
    """

    if conf.uTech is None:
        # default technique when none was supplied
        conf.uTech = "NULL"
        return

    logger.debug("setting the UNION query SQL injection detection technique")

    uTechOriginal = conf.uTech
    conf.uTech = conf.uTech.lower()

    if not conf.uTech or conf.uTech in ( "null", "orderby" ):
        debugMsg = "setting UNION query detection technique to "
        debugMsg += "'%s'" % uTechOriginal
        logger.debug(debugMsg)
    else:
        infoMsg = "resetting the UNION query detection technique to "
        infoMsg += "'NULL', '%s' is not a valid technique" % uTechOriginal
        logger.info(infoMsg)

        conf.uTech = "NULL"
|
|
|
|
|
2009-04-22 15:48:07 +04:00
|
|
|
def __setOS():
|
|
|
|
"""
|
|
|
|
Force the back-end DBMS operating system option.
|
|
|
|
"""
|
|
|
|
|
|
|
|
if not conf.os:
|
|
|
|
return
|
|
|
|
|
|
|
|
debugMsg = "forcing back-end DBMS operating system to user defined value"
|
|
|
|
logger.debug(debugMsg)
|
|
|
|
|
|
|
|
conf.os = conf.os.lower()
|
|
|
|
|
|
|
|
if conf.os not in SUPPORTED_OS:
|
|
|
|
errMsg = "you provided an unsupported back-end DBMS operating "
|
|
|
|
errMsg += "system. The supported DBMS operating systems for OS "
|
|
|
|
errMsg += "and file system access are Linux and Windows. "
|
|
|
|
errMsg += "If you do not know the back-end DBMS underlying OS, "
|
|
|
|
errMsg += "do not provide it and sqlmap will fingerprint it for "
|
|
|
|
errMsg += "you."
|
|
|
|
raise sqlmapUnsupportedDBMSException, errMsg
|
|
|
|
|
2008-12-09 00:24:24 +03:00
|
|
|
def __setDBMS():
|
2008-10-15 19:38:22 +04:00
|
|
|
"""
|
2008-12-09 00:24:24 +03:00
|
|
|
Force the back-end DBMS option.
|
2008-10-15 19:38:22 +04:00
|
|
|
"""
|
|
|
|
|
|
|
|
if not conf.dbms:
|
|
|
|
return
|
|
|
|
|
|
|
|
debugMsg = "forcing back-end DBMS to user defined value"
|
|
|
|
logger.debug(debugMsg)
|
|
|
|
|
|
|
|
conf.dbms = conf.dbms.lower()
|
2009-07-09 15:05:24 +04:00
|
|
|
firstRegExp = "(%s|%s|%s|%s)" % ("|".join([alias for alias in MSSQL_ALIASES]),
|
|
|
|
"|".join([alias for alias in MYSQL_ALIASES]),
|
|
|
|
"|".join([alias for alias in PGSQL_ALIASES]),
|
|
|
|
"|".join([alias for alias in ORACLE_ALIASES]))
|
2008-10-15 19:38:22 +04:00
|
|
|
dbmsRegExp = re.search("%s ([\d\.]+)" % firstRegExp, conf.dbms)
|
|
|
|
|
|
|
|
if dbmsRegExp:
|
2009-04-22 15:48:07 +04:00
|
|
|
conf.dbms = dbmsRegExp.group(1)
|
|
|
|
kb.dbmsVersion = [ dbmsRegExp.group(2) ]
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
if conf.dbms not in SUPPORTED_DBMS:
|
|
|
|
errMsg = "you provided an unsupported back-end database management "
|
|
|
|
errMsg += "system. The supported DBMS are MySQL, PostgreSQL, "
|
|
|
|
errMsg += "Microsoft SQL Server and Oracle. If you do not know "
|
|
|
|
errMsg += "the back-end DBMS, do not provide it and sqlmap will "
|
|
|
|
errMsg += "fingerprint it for you."
|
|
|
|
raise sqlmapUnsupportedDBMSException, errMsg
|
|
|
|
|
|
|
|
def __setThreads():
    """
    Ensure the number of threads is a positive integer, defaulting to 1.
    """

    validThreads = isinstance(conf.threads, int) and conf.threads > 0

    if not validThreads:
        conf.threads = 1
|
|
|
|
|
|
|
|
def __setHTTPProxy():
|
|
|
|
"""
|
|
|
|
Check and set the HTTP proxy to pass by all HTTP requests.
|
|
|
|
"""
|
|
|
|
|
|
|
|
global proxyHandler
|
|
|
|
|
2010-02-26 13:01:23 +03:00
|
|
|
if not conf.proxy:
|
|
|
|
if conf.hostname in ('localhost', '127.0.0.1') or conf.ignoreProxy:
|
|
|
|
proxyHandler = urllib2.ProxyHandler({})
|
2008-10-15 19:38:22 +04:00
|
|
|
return
|
|
|
|
|
|
|
|
debugMsg = "setting the HTTP proxy to pass by all HTTP requests"
|
|
|
|
logger.debug(debugMsg)
|
|
|
|
|
|
|
|
__proxySplit = urlparse.urlsplit(conf.proxy)
|
|
|
|
__hostnamePort = __proxySplit[1].split(":")
|
|
|
|
|
|
|
|
__scheme = __proxySplit[0]
|
|
|
|
__hostname = __hostnamePort[0]
|
|
|
|
__port = None
|
|
|
|
|
|
|
|
if len(__hostnamePort) == 2:
|
2010-02-10 15:06:23 +03:00
|
|
|
try:
|
|
|
|
__port = int(__hostnamePort[1])
|
|
|
|
except:
|
|
|
|
pass #drops into the next check block
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
if not __scheme or not __hostname or not __port:
|
|
|
|
errMsg = "proxy value must be in format 'http://url:port'"
|
|
|
|
raise sqlmapSyntaxException, errMsg
|
|
|
|
|
|
|
|
__proxyString = "%s:%d" % (__hostname, __port)
|
|
|
|
|
|
|
|
# Workaround for http://bugs.python.org/issue1424152 (urllib/urllib2:
|
|
|
|
# HTTPS over (Squid) Proxy fails) as long as HTTP over SSL requests
|
2009-06-16 19:12:02 +04:00
|
|
|
# can't be tunneled over an HTTP proxy natively by Python (<= 2.5)
|
|
|
|
# urllib2 standard library
|
2008-10-15 19:38:22 +04:00
|
|
|
if conf.scheme == "https":
|
|
|
|
proxyHandler = ProxyHTTPSHandler(__proxyString)
|
|
|
|
else:
|
|
|
|
proxyHandler = urllib2.ProxyHandler({"http": __proxyString})
|
|
|
|
|
|
|
|
def __setHTTPAuthentication():
    """
    Check and set the HTTP(s) authentication method (Basic, Digest, NTLM or Certificate),
    username and password for first three methods, or key file and certification file for
    certificate authentication
    """

    global authHandler

    # Nothing to do when no authentication option was supplied at all
    if not conf.aType and not conf.aCred and not conf.aCert:
        return

    # Type and credentials must be provided together
    elif conf.aType and not conf.aCred:
        errMsg = "you specified the HTTP authentication type, but "
        errMsg += "did not provide the credentials"
        raise sqlmapSyntaxException, errMsg

    elif not conf.aType and conf.aCred:
        errMsg = "you specified the HTTP authentication credentials, "
        errMsg += "but did not provide the type"
        raise sqlmapSyntaxException, errMsg

    if not conf.aCert:
        # Username/password based authentication (Basic, Digest or NTLM)
        debugMsg = "setting the HTTP authentication type and credentials"
        logger.debug(debugMsg)

        aTypeLower = conf.aType.lower()

        if aTypeLower not in ( "basic", "digest", "ntlm" ):
            errMsg = "HTTP authentication type value must be "
            errMsg += "Basic, Digest or NTLM"
            raise sqlmapSyntaxException, errMsg

        # Credentials are expected in 'username:password' format
        aCredRegExp = re.search("^(.*?)\:(.*?)$", conf.aCred)

        if not aCredRegExp:
            errMsg = "HTTP authentication credentials value must be "
            errMsg += "in format username:password"
            raise sqlmapSyntaxException, errMsg

        authUsername = aCredRegExp.group(1)
        authPassword = aCredRegExp.group(2)

        passwordMgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        passwordMgr.add_password(None, "%s://%s" % (conf.scheme, conf.hostname), authUsername, authPassword)

        if aTypeLower == "basic":
            authHandler = urllib2.HTTPBasicAuthHandler(passwordMgr)

        elif aTypeLower == "digest":
            authHandler = urllib2.HTTPDigestAuthHandler(passwordMgr)

        elif aTypeLower == "ntlm":
            # NTLM support relies on the third-party python-ntlm library;
            # imported lazily so the dependency is only needed when used
            try:
                from ntlm import HTTPNtlmAuthHandler
            except ImportError, _:
                errMsg = "sqlmap requires Python NTLM third-party library "
                errMsg += "in order to authenticate via NTLM, "
                errMsg += "http://code.google.com/p/python-ntlm/"
                raise sqlmapMissingDependence, errMsg

            authHandler = HTTPNtlmAuthHandler.HTTPNtlmAuthHandler(passwordMgr)
    else:
        # Client certificate based authentication
        debugMsg = "setting the HTTP(s) authentication certificate"
        logger.debug(debugMsg)

        # Option is expected in 'key_file,cert_file' format
        aCertRegExp = re.search("^(.+?),\s*(.+?)$", conf.aCert)

        if not aCertRegExp:
            errMsg = "HTTP authentication certificate option "
            errMsg += "must be in format key_file,cert_file"
            raise sqlmapSyntaxException, errMsg

        #os.path.expanduser for support of paths with ~
        key_file = os.path.expanduser(aCertRegExp.group(1))
        cert_file = os.path.expanduser(aCertRegExp.group(2))

        for ifile in (key_file, cert_file):
            if not os.path.exists(ifile):
                errMsg = "File '%s' does not exist" % ifile
                raise sqlmapSyntaxException, errMsg

        authHandler = HTTPSCertAuthHandler(key_file, cert_file)
|
2009-12-03 01:54:39 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def __setHTTPMethod():
    """
    Check and set the HTTP method to perform HTTP requests through.
    """

    if not conf.method:
        conf.method = "GET"
    else:
        conf.method = conf.method.upper()

        if conf.method not in ("GET", "POST"):
            # unsupported value: warn and fall back to GET
            warnMsg = "'%s' " % conf.method
            warnMsg += "is an unsupported HTTP method, "
            warnMsg += "setting to default method, GET"
            logger.warn(warnMsg)

            conf.method = "GET"

    logger.debug("setting the HTTP method to %s" % conf.method)
|
|
|
|
|
2008-12-09 00:24:24 +03:00
|
|
|
def __setHTTPExtraHeaders():
    """
    Append the user-provided extra HTTP headers to conf.httpHeaders, or a
    default set of Accept* headers when none were provided.
    """

    if conf.hostname:
        conf.httpHeaders.append(("Host", conf.hostname))

    if conf.headers:
        debugMsg = "setting extra HTTP headers"
        logger.debug(debugMsg)

        conf.headers = conf.headers.split("\n")

        for headerValue in conf.headers:
            # split on the first ': ' only, so header values that
            # themselves contain ': ' are preserved intact
            header, value = headerValue.split(": ", 1)

            if header and value:
                conf.httpHeaders.append((header, value))
    else:
        conf.httpHeaders.append(("Accept", "text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"))
        conf.httpHeaders.append(("Accept-Language", "en-us,en;q=0.5"))
        conf.httpHeaders.append(("Accept-Charset", "ISO-8859-15,utf-8;q=0.7,*;q=0.7"))
|
2008-11-15 15:25:19 +03:00
|
|
|
|
2008-10-15 19:38:22 +04:00
|
|
|
def __defaultHTTPUserAgent():
    """
    Build the default sqlmap HTTP User-Agent header value.

    @return: default sqlmap HTTP User-Agent header
    @rtype: C{str}
    """

    return "%s (%s)" % (VERSION_STRING, SITE)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
def __setHTTPUserAgent():
    """
    Set the HTTP User-Agent header.
    Depending on the user options it can be:

        * The default sqlmap string
        * A default value read as user option
        * A random value read from a list of User-Agent headers from a
          file choosed as user option
    """

    if conf.agent:
        debugMsg = "setting the HTTP User-Agent header"
        logger.debug(debugMsg)

        conf.httpHeaders.append(("User-Agent", conf.agent))
        return

    if not conf.userAgentsFile:
        conf.httpHeaders.append(("User-Agent", __defaultHTTPUserAgent()))
        return

    debugMsg = "fetching random HTTP User-Agent header from "
    debugMsg += "file '%s'" % conf.userAgentsFile
    logger.debug(debugMsg)

    try:
        fd = open(conf.userAgentsFile, "r")
    except IOError:
        warnMsg = "unable to read HTTP User-Agent header "
        warnMsg += "file '%s'" % conf.userAgentsFile
        logger.warn(warnMsg)

        conf.httpHeaders.append(("User-Agent", __defaultHTTPUserAgent()))
        return

    try:
        __userAgents = fd.readlines()
    finally:
        # release the file handle even if reading fails
        fd.close()

    __count = len(__userAgents)

    if __count == 0:
        # empty file: fall back to the default header instead of
        # crashing on the random pick below
        warnMsg = "the HTTP User-Agent header file '%s' is empty" % conf.userAgentsFile
        logger.warn(warnMsg)

        conf.httpHeaders.append(("User-Agent", __defaultHTTPUserAgent()))
        return

    if __count == 1:
        __userAgent = __userAgents[0]
    else:
        __userAgent = __userAgents[randomRange(stop=__count)]

    __userAgent = sanitizeStr(__userAgent)
    conf.httpHeaders.append(("User-Agent", __userAgent))

    logMsg = "fetched random HTTP User-Agent header from "
    logMsg += "file '%s': %s" % (conf.userAgentsFile, __userAgent)
    logger.info(logMsg)
|
|
|
|
|
|
|
|
def __setHTTPReferer():
    """
    Set the HTTP Referer
    """

    if not conf.referer:
        return

    debugMsg = "setting the HTTP Referer header"
    logger.debug(debugMsg)

    conf.httpHeaders.append(("Referer", conf.referer))
def __setHTTPCookies():
    """
    Set the HTTP Cookie header
    """

    if not conf.cookie:
        return

    debugMsg = "setting the HTTP Cookie header"
    logger.debug(debugMsg)

    # A persistent connection is requested alongside the cookie
    conf.httpHeaders.append(("Connection", "Keep-Alive"))
    conf.httpHeaders.append(("Cookie", conf.cookie))
def __setHTTPTimeout():
    """
    Set the HTTP timeout
    """

    if conf.timeout:
        debugMsg = "setting the HTTP timeout"
        logger.debug(debugMsg)

        conf.timeout = float(conf.timeout)

        # Enforce a sane lower bound so requests do not time out prematurely
        if conf.timeout < 3.0:
            warnMsg = "the minimum HTTP timeout is 3 seconds, sqlmap "
            warnMsg += "will reset it"
            logger.warn(warnMsg)

            conf.timeout = 3.0
    else:
        # Default timeout when the user did not provide one
        conf.timeout = 30.0

    socket.setdefaulttimeout(conf.timeout)
def __cleanupOptions():
    """
    Cleanup configuration attributes.
    """

    debugMsg = "cleaning up configuration parameters"
    logger.debug(debugMsg)

    if conf.testParameter:
        conf.testParameter = conf.testParameter.replace(" ", "")
        conf.testParameter = conf.testParameter.split(",")
    else:
        conf.testParameter = []

    # Strip spaces from comma separated enumeration targets; the original
    # repeated the same stanza once per option
    for option in ("db", "tbl", "col", "user"):
        value = getattr(conf, option)

        if value:
            setattr(conf, option, value.replace(" ", ""))

    if conf.delay:
        conf.delay = float(conf.delay)

    # Normalize user supplied file system paths to a POSIX style form
    for option in ("rFile", "wFile", "dFile", "msfPath", "tmpPath"):
        value = getattr(conf, option)

        if value:
            setattr(conf, option, normalizePath(ntToPosixSlashes(value)))

    if conf.googleDork or conf.list:
        conf.multipleTargets = True
def __setConfAttributes():
    """
    This function set some needed attributes into the configuration
    singleton.
    """

    debugMsg = "initializing the configuration"
    logger.debug(debugMsg)

    conf.cj              = None
    conf.dbmsHandler     = None
    conf.dumpPath        = None
    conf.httpHeaders     = []
    conf.hostname        = None
    conf.loggedToOut     = None
    conf.matchRatio      = None
    conf.md5hash         = None
    conf.multipleTargets = False
    conf.outputPath      = None
    conf.paramDict       = {}
    conf.parameters      = {}
    conf.path            = None
    conf.port            = None
    conf.redirectHandled = False
    conf.retriesCount    = 0
    conf.scheme          = None
    conf.seqMatcher      = difflib.SequenceMatcher(None)
    conf.seqLock         = None
    conf.sessionFP       = None
    conf.start           = True
    conf.threadContinue  = True
    conf.threadException = False
    conf.wFileType       = None

    # The progress bar needs to be wider when the ETA feature is enabled
    if conf.eta:
        conf.progressWidth = 54
    else:
        conf.progressWidth = 34
def __setKnowledgeBaseAttributes():
    """
    This function set some needed attributes into the knowledge base
    singleton.
    """

    debugMsg = "initializing the knowledge base"
    logger.debug(debugMsg)

    kb.absFilePaths   = set()
    kb.bannerFp       = advancedDict()
    kb.data           = advancedDict()

    # Basic back-end DBMS fingerprint
    kb.dbms           = None
    kb.dbmsDetected   = False

    # Active (extensive) back-end DBMS fingerprint
    kb.dbmsVersion    = [ "Unknown" ]

    kb.dep            = None
    kb.docRoot        = None
    kb.headersCount   = 0
    kb.headersFp      = {}
    kb.htmlFp         = []
    kb.injParameter   = None
    kb.injPlace       = None
    kb.injType        = None

    # Back-end DBMS underlying operating system fingerprint via banner (-b)
    # parsing
    kb.os             = None
    kb.osVersion      = None
    kb.osSP           = None

    kb.parenthesis    = None
    kb.resumedQueries = {}
    kb.stackedTest    = None
    kb.targetUrls     = set()
    kb.timeTest       = None
    kb.unionComment   = ""
    kb.unionCount     = None
    kb.unionPosition  = None
    kb.unionNegative  = False
    kb.unionFalseCond = False
def __saveCmdline():
    """
    Saves the command line options on a sqlmap configuration INI file
    format.
    """

    if not conf.saveCmdline:
        return

    debugMsg = "saving command line options on a sqlmap configuration INI file"
    logger.debug(debugMsg)

    config = ConfigParser()
    userOpts = {}

    for family in optDict.keys():
        userOpts[family] = []

    # Map each configuration option back to its option family
    for option, value in conf.items():
        for family, optionData in optDict.items():
            if option in optionData:
                userOpts[family].append((option, value, optionData[option]))

    for family, optionData in userOpts.items():
        config.add_section(family)

        optionData.sort()

        for option, value, datatype in optionData:
            # Unset options are written out with a type-appropriate default
            if value is None:
                if datatype == "boolean":
                    value = "False"
                elif datatype in ( "integer", "float" ):
                    if option in ( "threads", "verbose" ):
                        value = "1"
                    elif option == "timeout":
                        value = "10"
                    else:
                        value = "0"
                elif datatype == "string":
                    value = ""

            if isinstance(value, str):
                # Indent continuation lines so ConfigParser can parse
                # multi-line values back correctly
                value = value.replace("\n", "\n ")

            config.set(family, option, value)

    confFP = open(paths.SQLMAP_CONFIG, "wb")

    try:
        config.write(confFP)
    finally:
        # The original code never closed this descriptor (resource leak)
        confFP.close()

    infoMsg = "saved command line options on '%s' configuration file" % paths.SQLMAP_CONFIG
    logger.info(infoMsg)
def __setVerbosity():
    """
    This function set the verbosity of sqlmap output messages.
    """

    if conf.verbose is None:
        conf.verbose = 1

    conf.verbose = int(conf.verbose)

    # When the ETA feature is enabled, a verbosity above 2 would interfere
    # with the progress bar output, so it is capped at DEBUG level
    if conf.verbose > 2 and conf.eta:
        conf.verbose = 2
        logger.setLevel(logging.DEBUG)
    elif conf.verbose == 1:
        logger.setLevel(logging.INFO)
    elif conf.verbose == 2:
        logger.setLevel(logging.DEBUG)
    elif conf.verbose == 3:
        # Custom level below DEBUG used for payload messages
        logger.setLevel(9)
    elif conf.verbose >= 4:
        # Custom level used for HTTP traffic messages
        logger.setLevel(8)
def __mergeOptions(inputOptions):
    """
    Merge command line options with configuration file options.

    @param inputOptions: optparse object with command line options.
    @type inputOptions: C{instance}
    """

    if inputOptions.configFile:
        configFileParser(inputOptions.configFile)

    # optparse.Values instances expose options via __dict__, while
    # dict-like containers provide items() directly
    if hasattr(inputOptions, "items"):
        inputOptionsItems = inputOptions.items()
    else:
        inputOptionsItems = inputOptions.__dict__.items()

    for key, value in inputOptionsItems:
        # Command line values take precedence over configuration file ones;
        # 'in' replaces the deprecated dict.has_key() of the original
        if key not in conf or conf[key] is None or value is not None:
            conf[key] = value
def init(inputOptions=None):
    """
    Set attributes into both configuration and knowledge base singletons
    based upon command line and configuration file options.

    @param inputOptions: optparse object (or advancedDict) with command
    line options, defaults to an empty advancedDict.
    @type inputOptions: C{instance}
    """

    # The original signature used a mutable default argument
    # (inputOptions=advancedDict()), which is shared across calls; a None
    # sentinel with a fresh instance per call is equivalent and safe
    if inputOptions is None:
        inputOptions = advancedDict()

    # NOTE: the call order below is significant (e.g. options must be
    # merged and cleaned before the target URL is parsed, and the HTTP
    # headers must be set before the urllib2 opener is built)
    __mergeOptions(inputOptions)
    __setVerbosity()
    __saveCmdline()
    __setConfAttributes()
    __setKnowledgeBaseAttributes()
    __cleanupOptions()

    __setRequestFromFile()

    parseTargetUrl()

    __setHTTPTimeout()
    __setHTTPCookies()
    __setHTTPReferer()
    __setHTTPUserAgent()
    __setHTTPExtraHeaders()
    __setHTTPMethod()
    __setHTTPAuthentication()
    __setHTTPProxy()
    __setThreads()
    __setDBMS()
    __setOS()
    __setUnionTech()
    __setWriteFile()
    __setMetasploit()
    __setGoogleDorking()
    __setMultipleTargets()
    __urllib2Opener()

    update()
    queriesParser()