#!/usr/bin/env python

"""
$Id$

Copyright (c) 2006-2011 sqlmap developers (http://sqlmap.sourceforge.net/)
See the file 'doc/COPYING' for copying permission
"""

import codecs
import cookielib
import difflib
import inspect
import logging
import os
import re
import socket
import sys
import threading
import urllib2
import urlparse

from extra.clientform.clientform import ParseResponse
from extra.clientform.clientform import ParseError
from extra.keepalive import keepalive
from extra.oset.pyoset import oset
from extra.socks import socks
from extra.xmlobject import xmlobject
from lib.controller.checks import checkConnection
from lib.core.common import Backend
from lib.core.common import dataToStdout
from lib.core.common import getPublicTypeMembers
from lib.core.common import extractRegexResult
from lib.core.common import filterStringValue
from lib.core.common import getConsoleWidth
from lib.core.common import getFileItems
from lib.core.common import getFileType
from lib.core.common import normalizePath
from lib.core.common import ntToPosixSlashes
from lib.core.common import openFile
from lib.core.common import parseTargetDirect
from lib.core.common import parseTargetUrl
from lib.core.common import paths
from lib.core.common import randomRange
from lib.core.common import randomStr
from lib.core.common import readCachedFileContent
from lib.core.common import readInput
from lib.core.common import runningAsAdmin
from lib.core.common import sanitizeStr
from lib.core.common import UnicodeRawConfigParser
from lib.core.convert import urldecode
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.data import paths
from lib.core.data import queries
from lib.core.datatype import advancedDict
from lib.core.datatype import injectionDict
from lib.core.enums import DBMS
from lib.core.enums import HTTPHEADER
from lib.core.enums import HTTPMETHOD
from lib.core.enums import MOBILES
from lib.core.enums import PAYLOAD
from lib.core.enums import PRIORITY
from lib.core.exception import sqlmapConnectionException
from lib.core.exception import sqlmapFilePathException
from lib.core.exception import sqlmapGenericException
from lib.core.exception import sqlmapMissingDependence
from lib.core.exception import sqlmapMissingMandatoryOptionException
from lib.core.exception import sqlmapMissingPrivileges
from lib.core.exception import sqlmapSilentQuitException
from lib.core.exception import sqlmapSyntaxException
from lib.core.exception import sqlmapUnsupportedDBMSException
from lib.core.exception import sqlmapUserQuitException
from lib.core.optiondict import optDict
from lib.core.settings import CODECS_LIST_PAGE
from lib.core.settings import DEFAULT_PAGE_ENCODING
from lib.core.settings import DEFAULT_TOR_PORTS
from lib.core.settings import IS_WIN
from lib.core.settings import PLATFORM
from lib.core.settings import PYVERSION
from lib.core.settings import SITE
from lib.core.settings import DBMS_DICT
from lib.core.settings import SUPPORTED_DBMS
from lib.core.settings import SUPPORTED_OS
from lib.core.settings import VERSION_STRING
from lib.core.settings import MSSQL_ALIASES
from lib.core.settings import MYSQL_ALIASES
from lib.core.settings import PGSQL_ALIASES
from lib.core.settings import ORACLE_ALIASES
from lib.core.settings import SQLITE_ALIASES
from lib.core.settings import ACCESS_ALIASES
from lib.core.settings import FIREBIRD_ALIASES
from lib.core.settings import MAXDB_ALIASES
from lib.core.settings import SYBASE_ALIASES
from lib.core.settings import BURP_SPLITTER
from lib.core.settings import LOCALHOST
from lib.core.settings import MAX_NUMBER_OF_THREADS
from lib.core.settings import TIME_DEFAULT_DELAY
from lib.core.settings import TIME_DELAY_CANDIDATES
from lib.core.settings import UNKNOWN_DBMS_VERSION
from lib.core.settings import WEBSCARAB_SPLITTER
from lib.core.update import update
from lib.parse.configfile import configFileParser
from lib.parse.payloads import loadPayloads
from lib.request.connect import Connect as Request
from lib.request.proxy import ProxyHTTPSHandler
from lib.request.basicauthhandler import SmartHTTPBasicAuthHandler
from lib.request.certhandler import HTTPSCertAuthHandler
from lib.request.rangehandler import HTTPRangeHandler
from lib.request.redirecthandler import SmartRedirectHandler
from lib.utils.google import Google

authHandler = urllib2.BaseHandler()
keepAliveHandler = keepalive.HTTPHandler()
proxyHandler = urllib2.BaseHandler()
redirectHandler = SmartRedirectHandler()
rangeHandler = HTTPRangeHandler()

def __urllib2Opener():
    """
    This function creates the urllib2 OpenerDirector.
    """

    global authHandler
    global keepAliveHandler
    global proxyHandler
    global rangeHandler
    global redirectHandler

    debugMsg = "creating HTTP requests opener object"
    logger.debug(debugMsg)

    handlers = [proxyHandler, authHandler, redirectHandler, rangeHandler]

    if not conf.dropSetCookie:
        conf.cj = cookielib.LWPCookieJar()
        handlers.append(urllib2.HTTPCookieProcessor(conf.cj))

    # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
    if conf.keepAlive:
        warnMsg = "persistent HTTP(s) connections, Keep-Alive, have "
        warnMsg += "been disabled because of their incompatibility "

        if conf.proxy:
            warnMsg += "with HTTP(s) proxy"
            logger.warn(warnMsg)
        elif conf.aType:
            warnMsg += "with authentication methods"
            logger.warn(warnMsg)
        else:
            handlers.append(keepAliveHandler)

    opener = urllib2.build_opener(*handlers)
    urllib2.install_opener(opener)
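
# Note: urllib2.install_opener() makes the opener above global, so every
# urllib2.urlopen() call issued by the request machinery is routed through
# the configured proxy, authentication, redirect, range and (optionally)
# cookie and keep-alive handlers. Illustrative sketch only, with a
# hypothetical target URL (not part of the original flow):
#
#   __urllib2Opener()
#   urllib2.urlopen("http://www.example.com/page.php?id=1")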

def __feedTargetsDict(reqFile, addedTargetUrls):
    """
    Parses WebScarab and Burp logs and adds results to the target URL list
    """

    def __parseWebScarabLog(content):
        """
        Parses WebScarab logs (POST requests are not supported)
        """

        reqResList = content.split(WEBSCARAB_SPLITTER)
        getPostReq = False

        for request in reqResList:
            url = extractRegexResult(r"URL: (?P<result>.+?)\n", request, re.I)
            method = extractRegexResult(r"METHOD: (?P<result>.+?)\n", request, re.I)
            cookie = extractRegexResult(r"COOKIE: (?P<result>.+?)\n", request, re.I)
            getPostReq = True

            if not method or not url:
                logger.debug("not a valid WebScarab log entry")
                continue

            if method.upper() == "POST":
                warnMsg = "POST requests from WebScarab logs aren't supported "
                warnMsg += "as their body content is stored in separate files. "
                warnMsg += "Nevertheless you can use -r to load them individually."
                logger.warning(warnMsg)
                continue

            if conf.scope:
                getPostReq &= re.search(conf.scope, url) is not None

            if getPostReq:
                if not kb.targetUrls or url not in addedTargetUrls:
                    kb.targetUrls.add((url, method, None, cookie))
                    addedTargetUrls.add(url)
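
    # For reference, a WebScarab conversation entry carries the fields
    # matched by the regular expressions above, roughly in this shape
    # (illustrative sketch with hypothetical values):
    #
    #   URL: http://www.example.com/page.php?id=1
    #   METHOD: GET
    #   COOKIE: PHPSESSID=deadbeef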

    def __parseBurpLog(content):
        """
        Parses Burp logs
        """

        port = None
        scheme = None

        reqResList = content.split(BURP_SPLITTER)

        for request in reqResList:
            if scheme is None:
                schemePort = re.search("\d\d[\:|\.]\d\d[\:|\.]\d\d\s+(http[\w]*)\:\/\/.*?\:([\d]+)", request, re.I)

                if schemePort:
                    scheme = schemePort.group(1)
                    port = schemePort.group(2)

            if not re.search("^[\n]*(GET|POST).*?\sHTTP\/", request, re.I):
                continue

            if re.search("^[\n]*(GET|POST).*?\.(gif|jpg|png)\sHTTP\/", request, re.I):
                continue

            getPostReq = False
            url = None
            host = None
            method = None
            data = None
            cookie = None
            params = False
            lines = request.split("\n")

            for line in lines:
                if len(line) == 0 or line == "\n":
                    if method == HTTPMETHOD.POST and data is None:
                        data = ""
                        params = True

                elif (line.startswith("GET ") or line.startswith("POST ")) and " HTTP/" in line:
                    if line.startswith("GET "):
                        index = 4
                    else:
                        index = 5

                    url = line[index:line.index(" HTTP/")]
                    method = line[:index-1]

                    if "?" in line and "=" in line:
                        params = True

                    getPostReq = True

                # POST parameters
                elif data is not None and params:
                    data += line

                # GET parameters
                elif "?" in line and "=" in line and ": " not in line:
                    params = True

                # Headers
                elif ": " in line:
                    key, value = line.split(": ", 1)

                    # Cookie and Host headers
                    if key.lower() == "cookie":
                        cookie = value
                    elif key.lower() == "host":
                        if '://' in value:
                            scheme, value = value.split('://')[:2]

                        splitValue = value.split(":")
                        host = splitValue[0]

                        if len(splitValue) > 1:
                            port = filterStringValue(splitValue[1], '[0-9]')

                    # Avoid adding a static Content-Length header to
                    # conf.httpHeaders and consider the following lines as
                    # POSTed data
                    if key == "Content-Length":
                        params = True

                    # Avoid proxy and connection type related headers
                    elif key not in ( "Proxy-Connection", "Connection" ):
                        conf.httpHeaders.append((str(key), str(value)))

            if conf.scope:
                getPostReq &= re.search(conf.scope, host) is not None

            if getPostReq and (params or cookie):
                if not port and isinstance(scheme, basestring) and scheme.lower() == "https":
                    port = "443"
                elif not scheme and port == "443":
                    scheme = "https"

                if not url.startswith("http"):
                    url = "%s://%s:%s%s" % (scheme or "http", host, port or "80", url)
                    scheme = None
                    port = None

                if not kb.targetUrls or url not in addedTargetUrls:
                    kb.targetUrls.add((url, method, urldecode(data), cookie))
                    addedTargetUrls.add(url)
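
    # For reference, a Burp proxy log entry is a raw HTTP request preceded by
    # a timestamped scheme://host:port banner line, matching the parsing
    # above -- an illustrative sketch with hypothetical values:
    #
    #   12:34:56 http://www.example.com:80
    #   GET /page.php?id=1 HTTP/1.1
    #   Host: www.example.com
    #   Cookie: PHPSESSID=deadbeef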

    fp = openFile(reqFile, "rb")

    content = fp.read()
    content = content.replace("\r", "")

    if conf.scope:
        logger.info("using regular expression '%s' for filtering targets" % conf.scope)

    __parseBurpLog(content)
    __parseWebScarabLog(content)

def __loadQueries():
    """
    Loads queries from 'xml/queries.xml' file.
    """

    for node in xmlobject.XMLFile(path=paths.QUERIES_XML, textfilter=sanitizeStr).root.dbms:
        queries[node.value] = node
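
# queries.xml is organized per DBMS: each <dbms value="..."> node is keyed
# above by its value attribute, so code elsewhere can look up the query set
# of the identified back-end. A rough sketch of the layout, not the
# authoritative schema:
#
#   <root>
#       <dbms value="MySQL">
#           <!-- query definitions -->
#       </dbms>
#   </root>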

def __setMultipleTargets():
    """
    Define a configuration parameter if we are running in multiple target
    mode.
    """

    initialTargetsCount = len(kb.targetUrls)
    addedTargetUrls = set()

    if not conf.logFile:
        return

    debugMsg = "parsing targets list from '%s'" % conf.logFile
    logger.debug(debugMsg)

    if not os.path.exists(conf.logFile):
        errMsg = "the specified list of targets does not exist"
        raise sqlmapFilePathException, errMsg

    if os.path.isfile(conf.logFile):
        __feedTargetsDict(conf.logFile, addedTargetUrls)

    elif os.path.isdir(conf.logFile):
        files = os.listdir(conf.logFile)
        files.sort()

        for reqFile in files:
            if not re.search("([\d]+)\-request", reqFile):
                continue

            __feedTargetsDict(os.path.join(conf.logFile, reqFile), addedTargetUrls)

    else:
        errMsg = "the specified list of targets is neither a file "
        errMsg += "nor a directory"
        raise sqlmapFilePathException, errMsg

    updatedTargetsCount = len(kb.targetUrls)

    if updatedTargetsCount > initialTargetsCount:
        infoMsg = "sqlmap parsed %d " % (updatedTargetsCount - initialTargetsCount)
        infoMsg += "testable requests from the targets list"
        logger.info(infoMsg)

def __setRequestFromFile():
    """
    This function checks if the way to make an HTTP request is through a
    supplied textual file, parses it and saves the information into the
    knowledge base.
    """

    if not conf.requestFile:
        return

    addedTargetUrls = set()

    conf.requestFile = os.path.expanduser(conf.requestFile)

    infoMsg = "parsing HTTP request from '%s'" % conf.requestFile
    logger.info(infoMsg)

    if not os.path.isfile(conf.requestFile):
        errMsg = "the specified HTTP request file "
        errMsg += "does not exist"
        raise sqlmapFilePathException, errMsg

    __feedTargetsDict(conf.requestFile, addedTargetUrls)

def __setGoogleDorking():
    """
    This function checks if the way to request testable hosts is through
    Google dorking, then requests to Google the search parameter, parses
    the results and saves the testable hosts into the knowledge base.
    """

    if not conf.googleDork:
        return

    global keepAliveHandler
    global proxyHandler

    debugMsg = "initializing Google dorking requests"
    logger.debug(debugMsg)

    infoMsg = "first request to Google to get the session cookie"
    logger.info(infoMsg)

    handlers = [ proxyHandler ]

    # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
    if conf.keepAlive:
        if conf.proxy:
            warnMsg = "persistent HTTP(s) connections, Keep-Alive, have "
            warnMsg += "been disabled because of their incompatibility "
            warnMsg += "with HTTP(s) proxy"
            logger.warn(warnMsg)
        else:
            handlers.append(keepAliveHandler)

    googleObj = Google(handlers)
    googleObj.getCookie()

    matches = googleObj.search(conf.googleDork)

    if not matches:
        errMsg = "unable to find results for your "
        errMsg += "Google dork expression"
        raise sqlmapGenericException, errMsg

    googleObj.getTargetUrls()

    if kb.targetUrls:
        infoMsg = "sqlmap got %d results for your " % len(matches)
        infoMsg += "Google dork expression, "

        if len(matches) == len(kb.targetUrls):
            infoMsg += "all "
        else:
            infoMsg += "%d " % len(kb.targetUrls)

        infoMsg += "of them are testable targets"
        logger.info(infoMsg)
    else:
        errMsg = "sqlmap got %d results " % len(matches)
        errMsg += "for your Google dork expression, but none of them "
        errMsg += "have GET parameters to test for SQL injection"
        raise sqlmapGenericException, errMsg

def __setBulkMultipleTargets():
    if not conf.bulkFile:
        return

    conf.bulkFile = os.path.expanduser(conf.bulkFile)

    infoMsg = "parsing multiple targets list from '%s'" % conf.bulkFile
    logger.info(infoMsg)

    if not os.path.isfile(conf.bulkFile):
        errMsg = "the specified bulk file "
        errMsg += "does not exist"
        raise sqlmapFilePathException, errMsg

    f = open(conf.bulkFile, 'r')

    for line in f.xreadlines():
        if re.search(r"[^ ]+\?(.+)", line, re.I):
            # strip the trailing newline so the stored URL is clean
            kb.targetUrls.add((line.strip(), None, None, None))

    f.close()
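
# An illustrative bulk file: one full URL per line, where only lines that
# carry a query string match the "[^ ]+\?(.+)" filter above (hypothetical
# targets):
#
#   http://www.example.com/vuln.php?id=1
#   http://www.example.org/list.asp?cat=2&name=foo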

def __findPageForms():
    if not conf.forms:
        return

    if not checkConnection():
        return

    infoMsg = "searching for forms"
    logger.info(infoMsg)

    response, _ = Request.queryPage(response=True)

    if response is None or isinstance(response, basestring):
        errMsg = "can't do form parsing as no valid response "
        errMsg += "object found. please check previous log messages "
        errMsg += "for connection issues"
        raise sqlmapGenericException, errMsg

    try:
        forms = ParseResponse(response, backwards_compat=False)
    except ParseError:
        errMsg = "badly formed HTML at the target url. can't parse forms"
        raise sqlmapGenericException, errMsg

    if forms:
        for form in forms:
            for control in form.controls:
                if hasattr(control, 'items'):
                    # if the control has selectable items, select the
                    # first one that is not disabled
                    for item in control.items:
                        if not item.disabled:
                            item.selected = True
                            break

            request = form.click()
            url = urldecode(request.get_full_url(), kb.pageEncoding)
            method = request.get_method()
            data = urldecode(request.get_data(), kb.pageEncoding) if request.has_data() else None

            if not data and method and method.upper() == HTTPMETHOD.POST:
                debugMsg = "invalid POST form with blank data detected"
                logger.debug(debugMsg)
                continue

            target = (url, method, data, conf.cookie)
            kb.targetUrls.add(target)
            kb.formNames.append(target)
    else:
        errMsg = "there were no forms found at the given target url"
        raise sqlmapGenericException, errMsg

def __setMetasploit():
    if not conf.osPwn and not conf.osSmb and not conf.osBof:
        return

    debugMsg = "setting the takeover out-of-band functionality"
    logger.debug(debugMsg)

    msfEnvPathExists = False

    if IS_WIN:
        warnMsg = "some sqlmap takeover functionalities are not yet "
        warnMsg += "supported on Windows. Please use Linux in a virtual "
        warnMsg += "machine for out-of-band features."
        logger.critical(warnMsg)

        raise sqlmapSilentQuitException

    if conf.osSmb:
        isAdmin = runningAsAdmin()

        if isAdmin is not True:
            errMsg = "you need to run sqlmap as an administrator "
            errMsg += "if you want to perform a SMB relay attack because "
            errMsg += "it will need to listen on a user-specified SMB "
            errMsg += "TCP port for incoming connection attempts"
            raise sqlmapMissingPrivileges, errMsg

    if conf.msfPath:
        condition = False

        for path in [conf.msfPath, os.path.join(conf.msfPath, 'bin')]:
            condition = os.path.exists(normalizePath(path))
            condition &= os.path.exists(normalizePath(os.path.join(path, "msfcli")))
            condition &= os.path.exists(normalizePath(os.path.join(path, "msfconsole")))
            condition &= os.path.exists(normalizePath(os.path.join(path, "msfencode")))
            condition &= os.path.exists(normalizePath(os.path.join(path, "msfpayload")))

            if condition:
                conf.msfPath = path
                break

        if condition:
            debugMsg = "provided Metasploit Framework 3 path "
            debugMsg += "'%s' is valid" % conf.msfPath
            logger.debug(debugMsg)

            msfEnvPathExists = True
        else:
            warnMsg = "the provided Metasploit Framework 3 path "
            warnMsg += "'%s' is not valid. The cause could " % conf.msfPath
            warnMsg += "be that the path does not exist or that one "
            warnMsg += "or more of the needed Metasploit executables "
            warnMsg += "among msfcli, msfconsole, msfencode and "
            warnMsg += "msfpayload do not exist"
            logger.warn(warnMsg)
    else:
        warnMsg = "you did not provide the local path where Metasploit "
        warnMsg += "Framework 3 is installed"
        logger.warn(warnMsg)

    if not msfEnvPathExists:
        warnMsg = "sqlmap is going to look for Metasploit Framework 3 "
        warnMsg += "installation in the environment paths"
        logger.warn(warnMsg)

        envPaths = os.environ["PATH"]

        if IS_WIN:
            envPaths = envPaths.split(";")
        else:
            envPaths = envPaths.split(":")

        for envPath in envPaths:
            envPath = envPath.replace(";", "")
            condition = os.path.exists(normalizePath(envPath))
            condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfcli")))
            condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfconsole")))
            condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfencode")))
            condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfpayload")))

            if condition:
                infoMsg = "Metasploit Framework 3 has been found "
                infoMsg += "installed in the '%s' path" % envPath
                logger.info(infoMsg)

                msfEnvPathExists = True
                conf.msfPath = envPath

                break

    if not msfEnvPathExists:
        errMsg = "unable to locate Metasploit Framework 3 installation. "
        errMsg += "Get it from http://metasploit.com/framework/download/"
        raise sqlmapFilePathException, errMsg

def __setWriteFile():
    if not conf.wFile:
        return

    debugMsg = "setting the write file functionality"
    logger.debug(debugMsg)

    if not os.path.exists(conf.wFile):
        errMsg = "the provided local file '%s' does not exist" % conf.wFile
        raise sqlmapFilePathException, errMsg

    if not conf.dFile:
        errMsg = "you did not provide the back-end DBMS absolute path "
        errMsg += "where you want to write the local file '%s'" % conf.wFile
        raise sqlmapMissingMandatoryOptionException, errMsg

    conf.wFileType = getFileType(conf.wFile)

def __setOS():
    """
    Force the back-end DBMS operating system option.
    """

    if not conf.os:
        return

    if conf.os.lower() not in SUPPORTED_OS:
        errMsg = "you provided an unsupported back-end DBMS operating "
        errMsg += "system. The supported DBMS operating systems for OS "
        errMsg += "and file system access are %s. " % ', '.join([o.capitalize() for o in SUPPORTED_OS])
        errMsg += "If you do not know the back-end DBMS underlying OS, "
        errMsg += "do not provide it and sqlmap will fingerprint it for "
        errMsg += "you."
        raise sqlmapUnsupportedDBMSException, errMsg

    debugMsg = "forcing back-end DBMS operating system to user defined "
    debugMsg += "value '%s'" % conf.os
    logger.debug(debugMsg)

    Backend.setOs(conf.os)

def __setTechnique():
    validTechniques = sorted(getPublicTypeMembers(PAYLOAD.TECHNIQUE), key=lambda x: x[1])
    validLetters = map(lambda x: x[0][0].upper(), validTechniques)

    if conf.tech and isinstance(conf.tech, basestring):
        selTechniques = []

        for letter in conf.tech.upper():
            if letter not in validLetters:
                errMsg = "value for --technique must be a string composed "
                errMsg += "of the letters %s. Refer to the " % ", ".join(validLetters)
                errMsg += "user's manual for details"
                raise sqlmapSyntaxException, errMsg

            for validTech, validInt in validTechniques:
                if letter == validTech[0]:
                    selTechniques.append(validInt)
                    break

        conf.tech = selTechniques

    if len(conf.tech) > 0:
        # TODO: consider MySQL/PHP/ASP/web backdoor case where stacked
        # queries is technically not necessary
        if any(map(lambda x: conf.__getitem__(x), ['rFile', 'wFile', \
            'osCmd', 'osShell', 'osPwn', 'osSmb', 'osBof', 'regRead', \
            'regAdd', 'regDel'])) and PAYLOAD.TECHNIQUE.STACKED not in conf.tech:
            errMsg = "value for --technique must include stacked queries "
            errMsg += "technique (S) when you want to access the file "
            errMsg += "system, take over the operating system or access "
            errMsg += "Windows registry hives"
            raise sqlmapSyntaxException, errMsg
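
# The accepted letters are the first characters of the PAYLOAD.TECHNIQUE
# member names (typically B, E, U, S and T for boolean-based blind,
# error-based, UNION query, stacked queries and time-based blind), so e.g.
# "--technique ES" would select only the error-based and stacked queries
# techniques (illustrative, the exact set depends on the enum).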

def __setDBMS():
    """
    Force the back-end DBMS option.
    """

    if not conf.dbms:
        return

    debugMsg = "forcing back-end DBMS to user defined value"
    logger.debug(debugMsg)

    conf.dbms = conf.dbms.lower()
    regex = re.search("%s ([\d\.]+)" % ("(%s)" % "|".join([alias for alias in SUPPORTED_DBMS])), conf.dbms, re.I)

    if regex:
        conf.dbms = regex.group(1)
        Backend.setVersion(regex.group(2))
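
    # e.g. a --dbms value like "mysql 5.0" is split by the regex above into
    # the DBMS name ("mysql") and the version string ("5.0") handed to
    # Backend.setVersion() (illustrative values)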

    if conf.dbms not in SUPPORTED_DBMS:
        errMsg = "you provided an unsupported back-end database management "
        errMsg += "system. The supported DBMS are %s. " % ', '.join([d for d in DBMS_DICT])
        errMsg += "If you do not know the back-end DBMS, do not provide "
        errMsg += "it and sqlmap will fingerprint it for you."
        raise sqlmapUnsupportedDBMSException, errMsg

    for aliases in (MSSQL_ALIASES, MYSQL_ALIASES, PGSQL_ALIASES, \
                    ORACLE_ALIASES, SQLITE_ALIASES, ACCESS_ALIASES, \
                    FIREBIRD_ALIASES, MAXDB_ALIASES, SYBASE_ALIASES):
        if conf.dbms in aliases:
            conf.dbms = aliases[0]

            break

def __setTamperingFunctions():
    """
    Loads tampering functions from given script(s)
    """

    if conf.tamper:
        last_priority = PRIORITY.HIGHEST
        check_priority = True
        resolve_priorities = False
        priorities = []

        for tfile in re.split(r'[,|;]', conf.tamper):
            found = False

            tfile = tfile.strip()

            if not tfile:
                continue

            elif os.path.exists(os.path.join(paths.SQLMAP_TAMPER_PATH, tfile if tfile.endswith('.py') else "%s.py" % tfile)):
                tfile = os.path.join(paths.SQLMAP_TAMPER_PATH, tfile if tfile.endswith('.py') else "%s.py" % tfile)

            elif not os.path.exists(tfile):
                errMsg = "tamper script '%s' does not exist" % tfile
                raise sqlmapFilePathException, errMsg

            elif not tfile.endswith('.py'):
                errMsg = "tamper script '%s' should have a '.py' extension" % tfile
                raise sqlmapSyntaxException, errMsg

            dirname, filename = os.path.split(tfile)
            dirname = os.path.abspath(dirname)

            infoMsg = "loading tamper script '%s'" % filename[:-3]
            logger.info(infoMsg)

            if not os.path.exists(os.path.join(dirname, '__init__.py')):
                errMsg = "make sure that there is an empty file '__init__.py' "
                errMsg += "inside of tamper scripts directory '%s'" % dirname
                raise sqlmapGenericException, errMsg

            if dirname not in sys.path:
                sys.path.insert(0, dirname)

            try:
                module = __import__(filename[:-3])
            except ImportError, msg:
                raise sqlmapSyntaxException, "cannot import tamper script '%s' (%s)" % (filename[:-3], msg)

            priority = PRIORITY.NORMAL if not hasattr(module, '__priority__') else module.__priority__

            for name, function in inspect.getmembers(module, inspect.isfunction):
                if name == "tamper" and function.func_code.co_argcount == 1:
                    found = True
                    kb.tamperFunctions.append(function)

                    if check_priority and priority > last_priority:
                        message = "it seems that you might have mixed "
                        message += "the order of tamper scripts.\n"
                        message += "Do you want to auto resolve this? [Y/n/q]"
                        test = readInput(message, default="Y")

                        if not test or test[0] in ("y", "Y"):
                            resolve_priorities = True
                        elif test[0] in ("n", "N"):
                            resolve_priorities = False
                        elif test[0] in ("q", "Q"):
                            raise sqlmapUserQuitException

                        check_priority = False

                    priorities.append((priority, function))
                    last_priority = priority

                    break

            if not found:
                raise sqlmapGenericException, "missing function 'tamper(value)' in tamper script '%s'" % tfile

        if resolve_priorities and priorities:
            priorities.sort(reverse=True)
            kb.tamperFunctions = []

            for _, function in priorities:
                kb.tamperFunctions.append(function)
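
# A minimal tamper script satisfying the loader above -- a module exposing
# tamper(value) with exactly one argument and an optional __priority__
# attribute (illustrative sketch, not one of the bundled scripts):
#
#   from lib.core.enums import PRIORITY
#
#   __priority__ = PRIORITY.NORMAL
#
#   def tamper(value):
#       # e.g. replace each space with an inline comment
#       return value.replace(" ", "/**/") if value else value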

def __setThreads():
    if not isinstance(conf.threads, int) or conf.threads <= 0:
        conf.threads = 1

def __setDNSCache():
    """
    Makes a cached version of socket.getaddrinfo to avoid subsequent DNS
    requests for already resolved hosts.
    """

    def _getaddrinfo(*args, **kwargs):
        if args in kb.cache:
            return kb.cache[args]
        else:
            kb.cache[args] = socket._getaddrinfo(*args, **kwargs)
            return kb.cache[args]

    if not hasattr(socket, '_getaddrinfo'):
        socket._getaddrinfo = socket.getaddrinfo
        socket.getaddrinfo = _getaddrinfo
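
# After the monkey patch above, repeated lookups of the same host are served
# from kb.cache instead of hitting the resolver again, e.g. (illustrative,
# hypothetical host):
#
#   socket.getaddrinfo("www.example.com", 80)   # first call resolves via DNS
#   socket.getaddrinfo("www.example.com", 80)   # second call is a cache hit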

def __setHTTPProxy():
    """
    Check and set the HTTP proxy to pass all HTTP requests through.
    """

    global proxyHandler

    if not conf.proxy:
        if conf.hostname in ('localhost', '127.0.0.1') or conf.ignoreProxy:
            proxyHandler = urllib2.ProxyHandler({})

        return

    debugMsg = "setting the HTTP proxy to pass all HTTP requests through"
    logger.debug(debugMsg)

    __proxySplit = urlparse.urlsplit(conf.proxy)
    __hostnamePort = __proxySplit[1].split(":")

    __scheme = __proxySplit[0]
    __hostname = __hostnamePort[0]
    __port = None
    __proxyString = ""

    if len(__hostnamePort) == 2:
        try:
            __port = int(__hostnamePort[1])
        except:
            pass # drops into the next check block

    if not __scheme or not __hostname or not __port:
        errMsg = "proxy value must be in format 'http://url:port'"
        raise sqlmapSyntaxException, errMsg

    if conf.pCred:
        pCredRegExp = re.search("^(.*?):(.*?)$", conf.pCred)

        if not pCredRegExp:
            errMsg = "proxy authentication credentials "
            errMsg += "value must be in format username:password"
            raise sqlmapSyntaxException, errMsg

        # Reference: http://stackoverflow.com/questions/34079/how-to-specify-an-authenticated-proxy-for-a-python-http-connection
        __proxyString = "%s@" % conf.pCred

    __proxyString += "%s:%d" % (__hostname, __port)

    # Workaround for http://bugs.python.org/issue1424152 (urllib/urllib2:
    # HTTPS over (Squid) Proxy fails) as long as HTTP over SSL requests
    # can't be tunneled over an HTTP proxy natively by Python (<= 2.5)
    # urllib2 standard library
    if PYVERSION >= "2.6":
        proxyHandler = urllib2.ProxyHandler({"http": __proxyString, "https": __proxyString})
    elif conf.scheme == "https":
        proxyHandler = ProxyHTTPSHandler(__proxyString)
    else:
        proxyHandler = urllib2.ProxyHandler({"http": __proxyString})
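
# e.g. "--proxy http://127.0.0.1:8080" yields the proxy string
# "127.0.0.1:8080" above, and adding "--proxy-cred user:pass" prepends the
# credentials, giving "user:pass@127.0.0.1:8080" (illustrative values)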

def __setSafeUrl():
    """
    Check and set the safe URL options.
    """

    if not conf.safUrl:
        return

    if not re.search("^http[s]*://", conf.safUrl):
        if ":443/" in conf.safUrl:
            conf.safUrl = "https://" + conf.safUrl
        else:
            conf.safUrl = "http://" + conf.safUrl

    if conf.saFreq <= 0:
        errMsg = "please provide a valid value (>0) for safe frequency "
        errMsg += "(--safe-freq) when using the safe URL feature"
        raise sqlmapSyntaxException, errMsg

def __setPrefixSuffix():
    if conf.prefix is not None and conf.suffix is None:
        errMsg = "you specified the payload prefix, but did not provide "
        errMsg += "the payload suffix"
        raise sqlmapSyntaxException, errMsg
    elif conf.prefix is None and conf.suffix is not None:
        errMsg = "you specified the payload suffix, but did not provide "
        errMsg += "the payload prefix"
        raise sqlmapSyntaxException, errMsg

    if conf.prefix is not None and conf.suffix is not None:
        # Create a custom boundary object for user's supplied prefix
        # and suffix
        boundary = advancedDict()

        boundary.level = 1
        boundary.clause = [ 0 ]
        boundary.where = [ 1, 2, 3 ]
        boundary.prefix = conf.prefix
        boundary.suffix = conf.suffix

        if " like" in boundary.suffix.lower():
            if "'" in boundary.suffix.lower():
                boundary.ptype = 3
            elif '"' in boundary.suffix.lower():
                boundary.ptype = 5
        elif "'" in boundary.suffix:
            boundary.ptype = 2
        elif '"' in boundary.suffix:
            boundary.ptype = 4
        else:
            boundary.ptype = 1

        # Prepend user's provided boundaries to all other boundaries
        conf.boundaries.insert(0, boundary)
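
# e.g. with --prefix "')" and --suffix "AND ('abc'='abc" the injected
# payloads are wrapped roughly as: <value>') <payload> AND ('abc'='abc --
# the quote characters in the suffix also drive the ptype detection above
# (illustrative values)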

def __setHTTPAuthentication():
    """
    Check and set the HTTP(s) authentication method (Basic, Digest, NTLM or
    Certificate), username and password for the first three methods, or key
    file and certificate file for certificate authentication
    """

    global authHandler

    if not conf.aType and not conf.aCred and not conf.aCert:
        return

    elif conf.aType and not conf.aCred:
        errMsg = "you specified the HTTP authentication type, but "
        errMsg += "did not provide the credentials"
        raise sqlmapSyntaxException, errMsg

    elif not conf.aType and conf.aCred:
        errMsg = "you specified the HTTP authentication credentials, "
        errMsg += "but did not provide the type"
        raise sqlmapSyntaxException, errMsg

    if not conf.aCert:
        debugMsg = "setting the HTTP authentication type and credentials"
        logger.debug(debugMsg)

        aTypeLower = conf.aType.lower()

        if aTypeLower not in ( "basic", "digest", "ntlm" ):
            errMsg = "HTTP authentication type value must be "
            errMsg += "Basic, Digest or NTLM"
            raise sqlmapSyntaxException, errMsg
        elif aTypeLower in ( "basic", "digest" ):
            regExp = "^(.*?):(.*?)$"
            errMsg = "HTTP %s authentication credentials " % aTypeLower
            errMsg += "value must be in format username:password"
        elif aTypeLower == "ntlm":
            regExp = "^(.*?)\\\(.*?):(.*?)$"
            errMsg = "HTTP NTLM authentication credentials value must "
            errMsg += "be in format DOMAIN\username:password"

        aCredRegExp = re.search(regExp, conf.aCred)

        if not aCredRegExp:
            raise sqlmapSyntaxException, errMsg

        authUsername = aCredRegExp.group(1)
        authPassword = aCredRegExp.group(2)

        passwordMgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        passwordMgr.add_password(None, "%s://%s" % (conf.scheme, conf.hostname), authUsername, authPassword)

        if aTypeLower == "basic":
            authHandler = SmartHTTPBasicAuthHandler(passwordMgr)

        elif aTypeLower == "digest":
            authHandler = urllib2.HTTPDigestAuthHandler(passwordMgr)

        elif aTypeLower == "ntlm":
            try:
                from ntlm import HTTPNtlmAuthHandler
            except ImportError, _:
                errMsg = "sqlmap requires Python NTLM third-party library "
                errMsg += "in order to authenticate via NTLM, "
                errMsg += "http://code.google.com/p/python-ntlm/"
                raise sqlmapMissingDependence, errMsg

            authHandler = HTTPNtlmAuthHandler.HTTPNtlmAuthHandler(passwordMgr)
    else:
        debugMsg = "setting the HTTP(s) authentication certificate"
        logger.debug(debugMsg)

        aCertRegExp = re.search("^(.+?),\s*(.+?)$", conf.aCert)

        if not aCertRegExp:
            errMsg = "HTTP authentication certificate option "
            errMsg += "must be in format key_file,cert_file"
            raise sqlmapSyntaxException, errMsg

        # os.path.expanduser for support of paths with ~
        key_file = os.path.expanduser(aCertRegExp.group(1))
        cert_file = os.path.expanduser(aCertRegExp.group(2))

        for ifile in (key_file, cert_file):
            if not os.path.exists(ifile):
                errMsg = "file '%s' does not exist" % ifile
                raise sqlmapSyntaxException, errMsg

        authHandler = HTTPSCertAuthHandler(key_file, cert_file)
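
# Accepted option shapes handled above (illustrative values):
#
#   --auth-type Basic  --auth-cred "user:pass"
#   --auth-type NTLM   --auth-cred "DOMAIN\user:pass"
#   --auth-cert "/path/to/key.pem,/path/to/cert.pem"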

def __setHTTPMethod():
    """
    Check and set the HTTP method to perform HTTP requests through.
    """

    if conf.data:
        conf.method = HTTPMETHOD.POST
    else:
        conf.method = HTTPMETHOD.GET

    debugMsg = "setting the HTTP method to %s" % conf.method
    logger.debug(debugMsg)

def __setHTTPExtraHeaders():
    if conf.headers:
        debugMsg = "setting extra HTTP headers"
        logger.debug(debugMsg)

        conf.headers = conf.headers.split("\n")

        for headerValue in conf.headers:
            # split only on the first ": " so header values containing
            # the separator themselves are preserved
            header, value = headerValue.split(": ", 1)

            if header and value:
                conf.httpHeaders.append((header, value))

    elif not conf.httpHeaders or len(conf.httpHeaders) == 1:
        conf.httpHeaders.append(("Accept-Language", "en-us,en;q=0.5"))
        conf.httpHeaders.append(("Accept-Charset", "ISO-8859-15,utf-8;q=0.7,*;q=0.7"))

        # Invalidating any caching mechanism in between
        # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
        conf.httpHeaders.append(("Cache-Control", "no-cache,no-store"))
        conf.httpHeaders.append(("Pragma", "no-cache"))

def __defaultHTTPUserAgent():
    """
    @return: default sqlmap HTTP User-Agent header

    @rtype: C{str}
    """

    return "%s (%s)" % (VERSION_STRING, SITE)

    # Firefox 3 running on Ubuntu 9.04 updated at April 2009
    #return "Mozilla/5.0 (X11; U; Linux i686; en-GB; rv:1.9.0.9) Gecko/2009042113 Ubuntu/9.04 (jaunty) Firefox/3.0.9"

    # Internet Explorer 7.0 running on Windows 2003 Service Pack 2 english
    # updated at March 2009
    #return "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30; .NET CLR 3.0.04506.648; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729)"

def __setHTTPUserAgent():
    """
    Set the HTTP User-Agent header.
    Depending on the user options it can be:

        * The default sqlmap string
        * A default value read as user option
        * A random value read from a list of User-Agent headers from a
          file chosen as user option
    """

    if conf.mobile:
        message = "which smartphone do you want sqlmap to imitate "
        message += "through HTTP User-Agent header?\n"
        items = sorted(getPublicTypeMembers(MOBILES, True))

        for count in xrange(len(items)):
            item = items[count]
            message += "[%d] %s%s\n" % (count + 1, item[:item.find(';')], " (default)" if item == MOBILES.IPHONE else "")

        test = readInput(message.rstrip('\n'), default=items.index(MOBILES.IPHONE) + 1)

        try:
            item = items[int(test) - 1]
        except:
            item = MOBILES.IPHONE

        item = item[item.find(';') + 1:]

        conf.httpHeaders.append(("User-Agent", item))

    elif conf.agent:
        debugMsg = "setting the HTTP User-Agent header"
        logger.debug(debugMsg)

        conf.httpHeaders.append(("User-Agent", conf.agent))

    elif not conf.randomAgent:
        addDefaultUserAgent = True

        for header, _ in conf.httpHeaders:
            if header == "User-Agent":
                addDefaultUserAgent = False
                break

        if addDefaultUserAgent:
            conf.httpHeaders.append(("User-Agent", __defaultHTTPUserAgent()))

    else:
        if not kb.userAgents:
            debugMsg = "loading random HTTP User-Agent header(s) from "
            debugMsg += "file '%s'" % paths.USER_AGENTS
            logger.debug(debugMsg)

            try:
                kb.userAgents = getFileItems(paths.USER_AGENTS)
            except IOError:
                warnMsg = "unable to read HTTP User-Agent header "
                warnMsg += "file '%s'" % paths.USER_AGENTS
                logger.warn(warnMsg)

                conf.httpHeaders.append((HTTPHEADER.USER_AGENT, __defaultHTTPUserAgent()))
                return

        count = len(kb.userAgents)

        if count == 1:
            userAgent = kb.userAgents[0]
        else:
            userAgent = kb.userAgents[randomRange(stop=count-1)]

        userAgent = sanitizeStr(userAgent)
        conf.httpHeaders.append((HTTPHEADER.USER_AGENT, userAgent))

        infoMsg = "fetched random HTTP User-Agent header from "
        infoMsg += "file '%s': %s" % (paths.USER_AGENTS, userAgent)
        logger.info(infoMsg)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
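
# NOTE: a minimal sketch (hypothetical helper, not part of sqlmap's API) of
# the random User-Agent pick above, in isolation. A single-entry list is
# used as-is, otherwise randomRange() draws an index in [0, len(agents) - 1].
# Defined only for illustration and never called.
def __exampleRandomUserAgentPick(agents):
    if len(agents) == 1:
        return agents[0]
    else:
        return agents[randomRange(stop=len(agents) - 1)]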

def __setHTTPReferer():
    """
    Set the HTTP Referer
    """

    if conf.referer:
        debugMsg = "setting the HTTP Referer header"
        logger.debug(debugMsg)

        conf.httpHeaders.append((HTTPHEADER.REFERER, conf.referer))

def __setHTTPCookies():
    """
    Set the HTTP Cookie header
    """

    if conf.cookie:
        debugMsg = "setting the HTTP Cookie header"
        logger.debug(debugMsg)

        conf.httpHeaders.append((HTTPHEADER.COOKIE, conf.cookie))
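
# NOTE: the setters above only append (name, value) tuples to
# conf.httpHeaders; a minimal sketch (hypothetical helper, never called) of
# the linear scan used over that structure, as in __setHTTPUserAgent():
def __exampleHasHeader(httpHeaders, name):
    # httpHeaders is assumed to be a list of (name, value) tuples
    for header, _ in httpHeaders:
        if header == name:
            return True
    return False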

def __setHTTPTimeout():
    """
    Set the HTTP timeout
    """

    if conf.timeout:
        debugMsg = "setting the HTTP timeout"
        logger.debug(debugMsg)

        conf.timeout = float(conf.timeout)

        if conf.timeout < 3.0:
            warnMsg = "the minimum HTTP timeout is 3 seconds, sqlmap "
            warnMsg += "is going to reset it"
            logger.warn(warnMsg)

            conf.timeout = 3.0
    else:
        conf.timeout = 30.0

    socket.setdefaulttimeout(conf.timeout)
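
# NOTE: a minimal sketch (hypothetical helper, never called) of the timeout
# policy above as a pure function: user values are coerced to float and
# clamped to the 3 second minimum, with 30 seconds as the default.
def __exampleNormalizeTimeout(value):
    if value:
        return max(float(value), 3.0)
    else:
        return 30.0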

def __cleanupOptions():
    """
    Cleanup configuration attributes.
    """

    debugMsg = "cleaning up configuration parameters"
    logger.debug(debugMsg)

    width = getConsoleWidth()

    if conf.eta:
        conf.progressWidth = width-26
    else:
        conf.progressWidth = width-46

    if conf.testParameter:
        conf.testParameter = urldecode(conf.testParameter)
        conf.testParameter = conf.testParameter.replace(" ", "")
        conf.testParameter = conf.testParameter.split(",")
    else:
        conf.testParameter = []

    if conf.user:
        conf.user = conf.user.replace(" ", "")

    if conf.delay:
        conf.delay = float(conf.delay)

    if conf.rFile:
        conf.rFile = ntToPosixSlashes(normalizePath(conf.rFile))

    if conf.wFile:
        conf.wFile = ntToPosixSlashes(normalizePath(conf.wFile))

    if conf.dFile:
        conf.dFile = ntToPosixSlashes(normalizePath(conf.dFile))

    if conf.msfPath:
        conf.msfPath = ntToPosixSlashes(normalizePath(conf.msfPath))

    if conf.tmpPath:
        conf.tmpPath = ntToPosixSlashes(normalizePath(conf.tmpPath))

    if conf.googleDork or conf.logFile or conf.bulkFile or conf.forms:
        conf.multipleTargets = True

    if conf.optimize:
        #conf.predictOutput = True
        conf.keepAlive = True
        conf.nullConnection = not conf.textOnly
        conf.threads = 3 if conf.threads < 3 else conf.threads

    if conf.data:
        conf.data = urldecode(conf.data)

    if conf.os:
        conf.os = conf.os.capitalize()

    if conf.dbms:
        conf.dbms = conf.dbms.capitalize()

    if conf.optimize and any([conf.data, conf.textOnly]):
        conf.nullConnection = False

        debugMsg = "turning off --null-connection switch used indirectly by switch -o"
        logger.debug(debugMsg)

    # to distinguish explicit usage of --time-sec
    if conf.timeSec is None:
        if conf.tor:
            conf.timeSec = 2 * TIME_DEFAULT_DELAY
            kb.adjustTimeDelay = False

            warnMsg = "increasing default value for "
            warnMsg += "--time-sec to %d because " % conf.timeSec
            warnMsg += "--tor switch was provided"
            logger.warn(warnMsg)
        else:
            conf.timeSec = TIME_DEFAULT_DELAY
            kb.adjustTimeDelay = True
    else:
        kb.adjustTimeDelay = False
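
# NOTE: a minimal sketch (hypothetical helper, never called) of the
# -p/--testParameter cleanup above: the raw value is URL-decoded, stripped
# of spaces and split on commas, e.g. "id,%20cat" -> ["id", "cat"].
def __exampleCleanupTestParameter(value):
    if value:
        return urldecode(value).replace(" ", "").split(",")
    else:
        return []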

def __setConfAttributes():
    """
    This function sets some needed attributes into the configuration
    singleton.
    """

    debugMsg = "initializing the configuration"
    logger.debug(debugMsg)

    conf.boundaries = []
    conf.cj = None
    conf.dbmsConnector = None
    conf.dbmsHandler = None
    conf.dumpPath = None
    conf.httpHeaders = []
    conf.hostname = None
    conf.multipleTargets = False
    conf.outputPath = None
    conf.paramDict = {}
    conf.parameters = {}
    conf.path = None
    conf.port = None
    conf.resultsFilename = None
    conf.resultsFP = None
    conf.scheme = None
    conf.sessionFP = None
    conf.start = True
    conf.tests = []
    conf.trafficFP = None
    conf.wFileType = None

def __setKnowledgeBaseAttributes(flushAll=True):
    """
    This function sets some needed attributes into the knowledge base
    singleton.
    """

    debugMsg = "initializing the knowledge base"
    logger.debug(debugMsg)

    kb.absFilePaths = set()
    kb.adjustTimeDelay = False
    kb.alwaysRedirect = None
    kb.arch = None
    kb.authHeader = None
    kb.bannerFp = advancedDict()

    kb.brute = advancedDict({'tables':[], 'columns':[]})
    kb.bruteMode = False

    kb.cache = advancedDict()
    kb.cache.content = {}
    kb.cache.regex = {}
    kb.cache.stdev = {}

    kb.commonOutputs = None
    kb.data = advancedDict()
    kb.dataOutputFlag = False

    # Active back-end DBMS fingerprint
    kb.dbms = None
    kb.dbmsVersion = [ UNKNOWN_DBMS_VERSION ]

    kb.delayCandidates = TIME_DELAY_CANDIDATES * [0]
    kb.dep = None
    kb.docRoot = None
    kb.dynamicMarkings = []
    kb.endDetection = False
    kb.httpErrorCodes = {}
    kb.errorIsNone = True
    kb.formNames = []
    kb.headersCount = 0
    kb.headersFp = {}
    kb.hintValue = None
    kb.htmlFp = []
    kb.ignoreTimeout = False
    kb.injection = injectionDict()
    kb.injections = []

    kb.locks = advancedDict()
    kb.locks.cacheLock = threading.Lock()
    kb.locks.logLock = threading.Lock()

    kb.matchRatio = None
    kb.nullConnection = None
    kb.pageTemplate = None
    kb.pageTemplates = dict()
    kb.originalPage = None

    # Back-end DBMS underlying operating system fingerprint via banner (-b)
    # parsing
    kb.os = None
    kb.osVersion = None
    kb.osSP = None

    kb.pageEncoding = DEFAULT_PAGE_ENCODING
    kb.pageStable = None
    kb.partRun = None
    kb.proxyAuthHeader = None
    kb.queryCounter = 0
    kb.redirectSetCookie = None
    kb.responseTimes = []
    kb.resumedQueries = {}
    kb.singleLogFlags = set()
    kb.skipOthersDbms = None
    kb.suppressSession = False
    kb.suppressResumeInfo = False
    kb.technique = None
    kb.testMode = False
    kb.testQueryCount = 0
    kb.threadContinue = True
    kb.threadException = False
    kb.threadData = {}
    kb.uChar = "NULL"
    kb.xpCmdshellAvailable = False

    kb.misc = advancedDict()
    kb.misc.delimiter = randomStr(length=6, lowercase=True)
    kb.misc.start = ":%s:" % randomStr(length=3, lowercase=True)
    kb.misc.stop = ":%s:" % randomStr(length=3, lowercase=True)
    kb.misc.space = ":%s:" % randomStr(length=1, lowercase=True)
    kb.misc.dollar = ":%s:" % randomStr(length=1, lowercase=True)
    kb.misc.forcedDbms = None

    if flushAll:
        kb.keywords = set(getFileItems(paths.SQL_KEYWORDS))
        kb.tamperFunctions = []
        kb.targetUrls = oset()
        kb.testedParams = set()
        kb.userAgents = None
        kb.vainRun = True
        kb.wordlist = None
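
# NOTE: the kb.misc.* values above are per-run random markers, e.g.
# kb.misc.start/kb.misc.stop might come out as ":abc:"/":xyz:". A minimal
# sketch (hypothetical helper, never called) of the construction:
def __exampleMakeMarker(length=3):
    return ":%s:" % randomStr(length=length, lowercase=True)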

def __useWizardInterface():
    """
    Presents simple wizard interface for beginner users
    """

    if not conf.wizard:
        return

    logger.info("starting wizard interface")

    while True:
        while not conf.url:
            message = "Please enter full target URL (-u): "
            conf.url = readInput(message, default=None)

        message = "POST data (--data) [Enter for None]: "
        conf.data = readInput(message, default=None)

        if filter(lambda x: '=' in str(x), [conf.url, conf.data]) or '*' in conf.url:
            break
        else:
            conf.url = conf.data = None
            warnMsg = "no testable GET and/or POST parameter(s) found "
            warnMsg += "(e.g. GET parameter 'id' in 'www.site.com/index.php?id=1')"
            logger.critical(warnMsg)

    choice = None

    while choice is None or choice not in ("", "1", "2", "3"):
        message = "Injection difficulty (--level/--risk). Please choose:\n"
        message += "[1] Normal (default)\n[2] Medium\n[3] Hard"
        choice = readInput(message, default='1')

        if choice == '2':
            conf.risk = 2
            conf.level = 3
        elif choice == '3':
            conf.risk = 3
            conf.level = 5
        else:
            conf.risk = 1
            conf.level = 1

    choice = None

    while choice is None or choice not in ("", "1", "2", "3"):
        message = "Enumeration (--banner/--current-user/etc). Please choose:\n"
        message += "[1] Basic (default)\n[2] Smart\n[3] All"
        choice = readInput(message, default='1')

        if choice == '2':
            map(lambda x: conf.__setitem__(x, True), ['getBanner', 'getCurrentUser', 'getCurrentDb', 'isDba', 'getUsers', 'getDbs', 'getTables', 'excludeSysDbs'])
        elif choice == '3':
            map(lambda x: conf.__setitem__(x, True), ['getBanner', 'getCurrentUser', 'getCurrentDb', 'isDba', 'getUsers', 'getPasswordHashes', 'getPrivileges', 'getRoles', 'dumpAll'])
        else:
            map(lambda x: conf.__setitem__(x, True), ['getBanner', 'getCurrentUser', 'getCurrentDb', 'isDba'])

    conf.batch = True
    conf.threads = 4

    logger.debug("muting sqlmap.. it will do the magic for you")
    conf.verbose = 0

    dataToStdout("\nsqlmap is running, please wait..\n\n")
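
# NOTE: the wizard's difficulty-to-(risk, level) mapping from the branches
# above, spelled out as a table (illustrative constant, nothing references it):
_EXAMPLE_WIZARD_DIFFICULTY = {
    '1': (1, 1),   # Normal (default)
    '2': (2, 3),   # Medium
    '3': (3, 5),   # Hard
}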

def __saveCmdline():
    """
    Saves the command line options to a sqlmap configuration INI file.
    """

    if not conf.saveCmdline:
        return

    debugMsg = "saving command line options to a sqlmap configuration INI file"
    logger.debug(debugMsg)

    config = UnicodeRawConfigParser()
    userOpts = {}

    for family in optDict.keys():
        userOpts[family] = []

    for option, value in conf.items():
        for family, optionData in optDict.items():
            if option in optionData:
                userOpts[family].append((option, value, optionData[option]))

    for family, optionData in userOpts.items():
        config.add_section(family)

        optionData.sort()

        for option, value, datatype in optionData:
            if isinstance(datatype, (list, tuple, set)):
                datatype = datatype[0]

            if value is None:
                if datatype == "boolean":
                    value = "False"
                elif datatype in ( "integer", "float" ):
                    if option in ( "threads", "verbose" ):
                        value = "1"
                    elif option == "timeout":
                        value = "10"
                    else:
                        value = "0"
                elif datatype == "string":
                    value = ""

            if isinstance(value, basestring):
                value = value.replace("\n", "\n ")

            config.set(family, option, value)

    confFP = openFile(paths.SQLMAP_CONFIG, "wb")
    config.write(confFP)

    infoMsg = "saved command line options to '%s' configuration file" % paths.SQLMAP_CONFIG
    logger.info(infoMsg)
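
# NOTE: a minimal sketch (hypothetical helper, never called) of the default
# substitution rule above for unset (None) options written to the INI file:
def __exampleDefaultIniValue(option, datatype):
    if datatype == "boolean":
        return "False"
    elif datatype in ("integer", "float"):
        if option in ("threads", "verbose"):
            return "1"
        elif option == "timeout":
            return "10"
        else:
            return "0"
    elif datatype == "string":
        return ""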

def __setVerbosity():
    """
    This function sets the verbosity of sqlmap output messages.
    """

    if conf.verbose is None:
        conf.verbose = 1

    conf.verbose = int(conf.verbose)

    if conf.verbose == 0:
        logger.setLevel(logging.ERROR)
    elif conf.verbose == 1:
        logger.setLevel(logging.INFO)
    elif conf.verbose > 2 and conf.eta:
        conf.verbose = 2
        logger.setLevel(logging.DEBUG)
    elif conf.verbose == 2:
        logger.setLevel(logging.DEBUG)
    elif conf.verbose == 3:
        logger.setLevel(9)
    elif conf.verbose == 4:
        logger.setLevel(8)
    elif conf.verbose >= 5:
        logger.setLevel(7)
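
# NOTE: the -v to logging-level mapping implemented above, spelled out as a
# table (illustrative constant, nothing references it); 9/8/7 are sqlmap's
# custom numeric levels below logging.DEBUG:
_EXAMPLE_VERBOSITY_LEVELS = {
    0: logging.ERROR,
    1: logging.INFO,
    2: logging.DEBUG,   # also forced when conf.eta and conf.verbose > 2
    3: 9,
    4: 8,
    5: 7,               # and any higher value
}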

def __mergeOptions(inputOptions, overrideOptions):
    """
    Merge command line options with configuration file options.

    @param inputOptions: optparse object with command line options.
    @type inputOptions: C{instance}
    """

    if inputOptions.configFile:
        configFileParser(inputOptions.configFile)

    if hasattr(inputOptions, "items"):
        inputOptionsItems = inputOptions.items()
    else:
        inputOptionsItems = inputOptions.__dict__.items()

    for key, value in inputOptionsItems:
        if key not in conf or (conf[key] is False and value is True) or \
            value not in (None, False) or overrideOptions:
            conf[key] = value
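
# NOTE: a minimal sketch (hypothetical helper, never called) of the merge
# precedence above as a predicate: a command line value wins when the key is
# new, when it turns a False flag on, when it is set (not None/False), or
# when overrideOptions is forced.
def __exampleShouldOverride(options, key, value, overrideOptions):
    return key not in options or (options[key] is False and value is True) or \
        value not in (None, False) or overrideOptions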

def __setTrafficOutputFP():
    if conf.trafficFile:
        conf.trafficFP = openFile(conf.trafficFile, "w+")

def __setTorProxySettings():
    if not conf.tor:
        return

    infoMsg = "setting Tor proxy settings"
    logger.info(infoMsg)

    found = None
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    for port in DEFAULT_TOR_PORTS:
        try:
            s.connect((LOCALHOST, port))
            found = port
            break
        except socket.error:
            pass

    s.close()

    if found:
        conf.proxy = "http://%s:%d" % (LOCALHOST, found)
    else:
        errMsg = "can't establish connection with the Tor proxy. "
        errMsg += "please make sure that you have "
        errMsg += "some kind of Vidalia/Privoxy/Polipo "
        errMsg += "Tor proxy bundle installed for "
        errMsg += "you to be able to successfully use "
        errMsg += "--tor switch "

        if IS_WIN:
            errMsg += "(e.g. https://www.torproject.org/projects/vidalia.html.en)"
        else:
            errMsg += "(e.g. http://www.coresec.org/2011/04/24/sqlmap-with-tor/)"

        raise sqlmapConnectionException, errMsg
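
# NOTE: a minimal sketch (hypothetical helper, never called) of the Tor port
# probe above in isolation: the first locally listening candidate port wins,
# None means nothing was found.
def __exampleFindListeningPort(host, ports):
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        for port in ports:
            try:
                s.connect((host, port))
                return port
            except socket.error:
                pass
    finally:
        s.close()
    return None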

def __basicOptionValidation():
    if conf.limitStart is not None and not (isinstance(conf.limitStart, int) and conf.limitStart > 0):
        errMsg = "value for --start (limitStart) option must be an integer value greater than zero (>0)"
        raise sqlmapSyntaxException, errMsg

    if conf.limitStop is not None and not (isinstance(conf.limitStop, int) and conf.limitStop > 0):
        errMsg = "value for --stop (limitStop) option must be an integer value greater than zero (>0)"
        raise sqlmapSyntaxException, errMsg

    if conf.limitStart is not None and isinstance(conf.limitStart, int) and conf.limitStart > 0 and \
       conf.limitStop is not None and isinstance(conf.limitStop, int) and conf.limitStop <= conf.limitStart:
        errMsg = "value for --start (limitStart) option must be smaller than value for --stop (limitStop) option"
        raise sqlmapSyntaxException, errMsg

    if conf.firstChar is not None and isinstance(conf.firstChar, int) and conf.firstChar > 0 and \
       conf.lastChar is not None and isinstance(conf.lastChar, int) and conf.lastChar < conf.firstChar:
        errMsg = "value for --first (firstChar) option must be smaller than or equal to value for --last (lastChar) option"
        raise sqlmapSyntaxException, errMsg

    if conf.cpuThrottle is not None and isinstance(conf.cpuThrottle, int) and (conf.cpuThrottle > 100 or conf.cpuThrottle < 0):
        errMsg = "value for --cpu-throttle (cpuThrottle) option must be in range [0,100]"
        raise sqlmapSyntaxException, errMsg

    if conf.textOnly and conf.nullConnection:
        errMsg = "switch --text-only is incompatible with switch --null-connection"
        raise sqlmapSyntaxException, errMsg

    if conf.data and conf.nullConnection:
        errMsg = "switch --data is incompatible with switch --null-connection"
        raise sqlmapSyntaxException, errMsg

    if conf.predictOutput and conf.threads > 1:
        errMsg = "switch --predict-output is incompatible with switch --threads"
        raise sqlmapSyntaxException, errMsg

    if conf.threads > MAX_NUMBER_OF_THREADS:
        errMsg = "maximum number of used threads is %d avoiding possible connection issues" % MAX_NUMBER_OF_THREADS
        raise sqlmapSyntaxException, errMsg

    if conf.forms and not conf.url:
        errMsg = "switch --forms requires usage of -u (--url) switch"
        raise sqlmapSyntaxException, errMsg

    if conf.tor and conf.ignoreProxy:
        errMsg = "switch --tor is incompatible with switch --ignore-proxy"
        raise sqlmapSyntaxException, errMsg

    if conf.tor and conf.proxy:
        errMsg = "switch --tor is incompatible with switch --proxy"
        raise sqlmapSyntaxException, errMsg

    if conf.mobile and conf.agent:
        errMsg = "switch --mobile is incompatible with switch --user-agent"
        raise sqlmapSyntaxException, errMsg

    if conf.proxy and conf.ignoreProxy:
        errMsg = "switch --proxy is incompatible with switch --ignore-proxy"
        raise sqlmapSyntaxException, errMsg

    if conf.forms and any([conf.logFile, conf.bulkFile, conf.direct, conf.requestFile, conf.googleDork]):
        errMsg = "switch --forms is compatible only with -u (--url) target switch"
        raise sqlmapSyntaxException, errMsg

    if conf.timeSec < 1:
        errMsg = "value for --time-sec option must be an integer greater than 0"
        raise sqlmapSyntaxException, errMsg

    if isinstance(conf.uCols, basestring) and ("-" not in conf.uCols or len(conf.uCols.split("-")) != 2):
        errMsg = "value for --union-cols must be a range with hyphen (e.g. 1-10)"
        raise sqlmapSyntaxException, errMsg

    if conf.charset:
        try:
            codecs.lookup(conf.charset)
        except LookupError:
            errMsg = "unknown charset '%s'. please visit page " % conf.charset
            errMsg += "'%s' " % CODECS_LIST_PAGE
            errMsg += "to get the full list of supported charsets"
            raise sqlmapSyntaxException, errMsg
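
# NOTE: a minimal sketch (hypothetical helper, never called) of the
# --union-cols format check above as a predicate: the value has to be a
# hyphen separated range such as "1-10".
def __exampleValidUnionCols(value):
    return isinstance(value, basestring) and "-" in value and len(value.split("-")) == 2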

def init(inputOptions=advancedDict(), overrideOptions=False):
    """
    Set attributes into both configuration and knowledge base singletons
    based upon command line and configuration file options.
    """

    __setConfAttributes()
    __setKnowledgeBaseAttributes()
    __mergeOptions(inputOptions, overrideOptions)
    __useWizardInterface()
    __setVerbosity()
    __saveCmdline()
    __setRequestFromFile()
    __cleanupOptions()
    __basicOptionValidation()
    __setTorProxySettings()
    __setMultipleTargets()
    __setTamperingFunctions()
    __setTrafficOutputFP()

    parseTargetUrl()
    parseTargetDirect()

    if any([conf.url, conf.logFile, conf.bulkFile, conf.requestFile, conf.googleDork, conf.liveTest]):
        __setHTTPTimeout()
        __setHTTPExtraHeaders()
        __setHTTPCookies()
        __setHTTPReferer()
        __setHTTPUserAgent()
        __setHTTPMethod()
        __setHTTPAuthentication()
        __setHTTPProxy()
        __setDNSCache()
        __setSafeUrl()
        __setGoogleDorking()
        __setBulkMultipleTargets()
        __urllib2Opener()
        __findPageForms()
        __setDBMS()
        __setTechnique()

    __setThreads()
    __setOS()
    __setWriteFile()
    __setMetasploit()

    loadPayloads()
    __setPrefixSuffix()
    update()
    __loadQueries()