2013-02-14 15:32:17 +04:00
|
|
|
#!/usr/bin/env python
|
2008-10-15 19:38:22 +04:00
|
|
|
|
|
|
|
"""
|
2016-01-06 02:06:12 +03:00
|
|
|
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
|
2010-10-15 03:18:29 +04:00
|
|
|
See the file 'doc/COPYING' for copying permission
|
2008-10-15 19:38:22 +04:00
|
|
|
"""
|
|
|
|
|
|
|
|
import cookielib
|
2013-02-21 14:14:57 +04:00
|
|
|
import glob
|
2010-10-17 01:33:15 +04:00
|
|
|
import inspect
|
2008-10-15 19:38:22 +04:00
|
|
|
import logging
|
2015-07-26 17:19:44 +03:00
|
|
|
import httplib
|
2008-10-15 19:38:22 +04:00
|
|
|
import os
|
2014-04-06 20:05:43 +04:00
|
|
|
import random
|
2008-10-15 19:38:22 +04:00
|
|
|
import re
|
2008-12-04 20:40:03 +03:00
|
|
|
import socket
|
2012-10-18 13:11:20 +04:00
|
|
|
import string
|
2010-10-15 14:36:29 +04:00
|
|
|
import sys
|
2015-01-13 12:33:51 +03:00
|
|
|
import tempfile
|
2010-11-02 10:32:08 +03:00
|
|
|
import threading
|
2013-01-09 19:10:26 +04:00
|
|
|
import time
|
2008-10-15 19:38:22 +04:00
|
|
|
import urllib2
|
|
|
|
import urlparse
|
|
|
|
|
2013-10-17 18:54:53 +04:00
|
|
|
import lib.controller.checks
|
2011-06-07 13:50:00 +04:00
|
|
|
import lib.core.common
|
|
|
|
import lib.core.threads
|
2012-07-31 13:03:44 +04:00
|
|
|
import lib.core.convert
|
2013-08-20 21:35:49 +04:00
|
|
|
import lib.request.connect
|
2015-11-08 18:37:46 +03:00
|
|
|
import lib.utils.search
|
2011-06-07 13:50:00 +04:00
|
|
|
|
2010-11-15 15:07:13 +03:00
|
|
|
from lib.controller.checks import checkConnection
|
2011-01-28 19:36:09 +03:00
|
|
|
from lib.core.common import Backend
|
2012-07-12 18:30:35 +04:00
|
|
|
from lib.core.common import boldifyMessage
|
2013-09-12 01:17:18 +04:00
|
|
|
from lib.core.common import checkFile
|
2011-04-01 18:55:39 +04:00
|
|
|
from lib.core.common import dataToStdout
|
2011-04-07 14:00:47 +04:00
|
|
|
from lib.core.common import getPublicTypeMembers
|
2015-09-10 16:55:49 +03:00
|
|
|
from lib.core.common import getSafeExString
|
2011-01-20 18:55:50 +03:00
|
|
|
from lib.core.common import extractRegexResult
|
2011-04-01 16:09:11 +04:00
|
|
|
from lib.core.common import filterStringValue
|
2011-10-29 12:32:24 +04:00
|
|
|
from lib.core.common import findPageForms
|
2010-04-16 17:40:02 +04:00
|
|
|
from lib.core.common import getConsoleWidth
|
2010-10-14 15:06:28 +04:00
|
|
|
from lib.core.common import getFileItems
|
2009-04-22 15:48:07 +04:00
|
|
|
from lib.core.common import getFileType
|
2012-07-13 12:28:03 +04:00
|
|
|
from lib.core.common import getUnicode
|
2012-06-14 17:38:53 +04:00
|
|
|
from lib.core.common import isListLike
|
2010-02-04 17:50:54 +03:00
|
|
|
from lib.core.common import normalizePath
|
2010-02-04 17:37:00 +03:00
|
|
|
from lib.core.common import ntToPosixSlashes
|
2011-01-08 12:30:10 +03:00
|
|
|
from lib.core.common import openFile
|
2010-03-27 02:23:25 +03:00
|
|
|
from lib.core.common import parseTargetDirect
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.common import parseTargetUrl
|
|
|
|
from lib.core.common import paths
|
2011-01-13 20:36:54 +03:00
|
|
|
from lib.core.common import randomStr
|
2014-04-06 19:23:13 +04:00
|
|
|
from lib.core.common import readCachedFileContent
|
2010-11-04 13:29:40 +03:00
|
|
|
from lib.core.common import readInput
|
2012-03-08 14:19:34 +04:00
|
|
|
from lib.core.common import resetCookieJar
|
2010-10-28 00:39:50 +04:00
|
|
|
from lib.core.common import runningAsAdmin
|
2015-01-21 11:26:30 +03:00
|
|
|
from lib.core.common import safeExpandUser
|
2011-07-25 15:05:49 +04:00
|
|
|
from lib.core.common import setOptimize
|
2013-01-17 15:03:02 +04:00
|
|
|
from lib.core.common import setPaths
|
2012-07-31 13:03:44 +04:00
|
|
|
from lib.core.common import singleTimeWarnMessage
|
2010-05-28 19:57:43 +04:00
|
|
|
from lib.core.common import UnicodeRawConfigParser
|
2012-07-31 13:03:44 +04:00
|
|
|
from lib.core.common import urldecode
|
2013-01-09 16:34:45 +04:00
|
|
|
from lib.core.convert import base64unpickle
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.data import conf
|
|
|
|
from lib.core.data import kb
|
|
|
|
from lib.core.data import logger
|
2013-04-10 21:33:31 +04:00
|
|
|
from lib.core.data import mergedOptions
|
2010-10-21 17:13:12 +04:00
|
|
|
from lib.core.data import queries
|
2011-07-08 10:02:31 +04:00
|
|
|
from lib.core.datatype import AttribDict
|
|
|
|
from lib.core.datatype import InjectionDict
|
2011-06-16 15:42:13 +04:00
|
|
|
from lib.core.defaults import defaults
|
2012-08-21 13:19:15 +04:00
|
|
|
from lib.core.dicts import DBMS_DICT
|
2012-10-22 16:13:30 +04:00
|
|
|
from lib.core.dicts import DUMP_REPLACEMENTS
|
2012-10-09 17:19:47 +04:00
|
|
|
from lib.core.enums import ADJUST_TIME_DELAY
|
2013-03-13 00:16:44 +04:00
|
|
|
from lib.core.enums import AUTH_TYPE
|
2011-12-26 16:24:39 +04:00
|
|
|
from lib.core.enums import CUSTOM_LOGGING
|
2012-11-28 13:58:18 +04:00
|
|
|
from lib.core.enums import DUMP_FORMAT
|
2013-03-20 14:10:24 +04:00
|
|
|
from lib.core.enums import HTTP_HEADER
|
2010-11-08 12:44:32 +03:00
|
|
|
from lib.core.enums import HTTPMETHOD
|
2011-04-29 23:27:23 +04:00
|
|
|
from lib.core.enums import MOBILES
|
2014-04-25 11:17:10 +04:00
|
|
|
from lib.core.enums import OPTION_TYPE
|
2010-12-15 14:30:29 +03:00
|
|
|
from lib.core.enums import PAYLOAD
|
2010-11-08 12:20:02 +03:00
|
|
|
from lib.core.enums import PRIORITY
|
2012-11-28 13:59:15 +04:00
|
|
|
from lib.core.enums import PROXY_TYPE
|
2011-05-30 13:46:32 +04:00
|
|
|
from lib.core.enums import REFLECTIVE_COUNTER
|
2012-10-05 12:24:09 +04:00
|
|
|
from lib.core.enums import WIZARD
|
2012-12-06 17:14:19 +04:00
|
|
|
from lib.core.exception import SqlmapConnectionException
|
|
|
|
from lib.core.exception import SqlmapFilePathException
|
|
|
|
from lib.core.exception import SqlmapGenericException
|
2014-11-10 15:41:53 +03:00
|
|
|
from lib.core.exception import SqlmapInstallationException
|
2012-12-06 17:14:19 +04:00
|
|
|
from lib.core.exception import SqlmapMissingDependence
|
|
|
|
from lib.core.exception import SqlmapMissingMandatoryOptionException
|
|
|
|
from lib.core.exception import SqlmapMissingPrivileges
|
2015-09-25 16:23:42 +03:00
|
|
|
from lib.core.exception import SqlmapNoneDataException
|
2012-12-06 17:14:19 +04:00
|
|
|
from lib.core.exception import SqlmapSilentQuitException
|
|
|
|
from lib.core.exception import SqlmapSyntaxException
|
2014-11-24 12:13:56 +03:00
|
|
|
from lib.core.exception import SqlmapSystemException
|
2012-12-06 17:14:19 +04:00
|
|
|
from lib.core.exception import SqlmapUnsupportedDBMSException
|
|
|
|
from lib.core.exception import SqlmapUserQuitException
|
2012-07-10 05:54:37 +04:00
|
|
|
from lib.core.log import FORMATTER
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.optiondict import optDict
|
2012-10-30 21:38:10 +04:00
|
|
|
from lib.core.settings import BURP_REQUEST_REGEX
|
2013-11-08 12:23:38 +04:00
|
|
|
from lib.core.settings import BURP_XML_HISTORY_REGEX
|
2011-05-18 03:03:31 +04:00
|
|
|
from lib.core.settings import CODECS_LIST_PAGE
|
2012-04-11 02:26:42 +04:00
|
|
|
from lib.core.settings import CRAWL_EXCLUDE_EXTENSIONS
|
2012-11-28 14:41:39 +04:00
|
|
|
from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
|
2014-08-30 23:34:23 +04:00
|
|
|
from lib.core.settings import DBMS_ALIASES
|
2011-01-20 14:01:01 +03:00
|
|
|
from lib.core.settings import DEFAULT_PAGE_ENCODING
|
2011-12-14 14:19:45 +04:00
|
|
|
from lib.core.settings import DEFAULT_TOR_HTTP_PORTS
|
2011-11-24 01:39:53 +04:00
|
|
|
from lib.core.settings import DEFAULT_TOR_SOCKS_PORT
|
2013-09-24 23:44:59 +04:00
|
|
|
from lib.core.settings import DUMMY_URL
|
2015-08-13 18:21:36 +03:00
|
|
|
from lib.core.settings import IGNORE_SAVE_OPTIONS
|
2012-11-28 14:41:39 +04:00
|
|
|
from lib.core.settings import INJECT_HERE_MARK
|
2009-06-11 19:01:48 +04:00
|
|
|
from lib.core.settings import IS_WIN
|
2013-06-11 00:14:45 +04:00
|
|
|
from lib.core.settings import KB_CHARS_BOUNDARY_CHAR
|
2014-11-05 12:56:30 +03:00
|
|
|
from lib.core.settings import KB_CHARS_LOW_FREQUENCY_ALPHABET
|
2012-10-30 21:38:10 +04:00
|
|
|
from lib.core.settings import LOCALHOST
|
2013-03-04 21:05:40 +04:00
|
|
|
from lib.core.settings import MAX_CONNECT_RETRIES
|
2012-10-30 21:38:10 +04:00
|
|
|
from lib.core.settings import MAX_NUMBER_OF_THREADS
|
2012-02-07 14:46:55 +04:00
|
|
|
from lib.core.settings import NULL
|
2012-10-30 21:38:10 +04:00
|
|
|
from lib.core.settings import PARAMETER_SPLITTING_REGEX
|
2013-02-26 15:50:43 +04:00
|
|
|
from lib.core.settings import PROBLEMATIC_CUSTOM_INJECTION_PATTERNS
|
2009-04-28 03:05:11 +04:00
|
|
|
from lib.core.settings import SITE
|
2015-11-17 10:52:24 +03:00
|
|
|
from lib.core.settings import SOCKET_PRE_CONNECT_QUEUE_SIZE
|
2014-04-25 11:17:10 +04:00
|
|
|
from lib.core.settings import SQLMAP_ENVIRONMENT_PREFIX
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.settings import SUPPORTED_DBMS
|
2009-04-22 15:48:07 +04:00
|
|
|
from lib.core.settings import SUPPORTED_OS
|
2011-01-16 20:52:42 +03:00
|
|
|
from lib.core.settings import TIME_DELAY_CANDIDATES
|
2011-10-10 01:21:41 +04:00
|
|
|
from lib.core.settings import UNION_CHAR_REGEX
|
2011-01-21 00:57:54 +03:00
|
|
|
from lib.core.settings import UNKNOWN_DBMS_VERSION
|
2012-10-30 21:38:10 +04:00
|
|
|
from lib.core.settings import URI_INJECTABLE_REGEX
|
|
|
|
from lib.core.settings import VERSION_STRING
|
2011-01-20 19:07:08 +03:00
|
|
|
from lib.core.settings import WEBSCARAB_SPLITTER
|
2011-11-23 18:26:40 +04:00
|
|
|
from lib.core.threads import getCurrentThreadData
|
2016-05-16 16:37:49 +03:00
|
|
|
from lib.core.threads import setDaemon
|
2008-10-15 19:38:22 +04:00
|
|
|
from lib.core.update import update
|
|
|
|
from lib.parse.configfile import configFileParser
|
2015-02-15 19:31:35 +03:00
|
|
|
from lib.parse.payloads import loadBoundaries
|
2010-11-28 21:10:54 +03:00
|
|
|
from lib.parse.payloads import loadPayloads
|
2014-07-03 00:27:51 +04:00
|
|
|
from lib.parse.sitemap import parseSitemap
|
2012-09-25 12:17:25 +04:00
|
|
|
from lib.request.basic import checkCharEncoding
|
2010-11-15 14:34:57 +03:00
|
|
|
from lib.request.connect import Connect as Request
|
2012-04-04 16:27:24 +04:00
|
|
|
from lib.request.dns import DNSServer
|
2010-12-26 12:33:04 +03:00
|
|
|
from lib.request.basicauthhandler import SmartHTTPBasicAuthHandler
|
2012-06-04 23:46:28 +04:00
|
|
|
from lib.request.httpshandler import HTTPSHandler
|
2013-09-12 01:17:18 +04:00
|
|
|
from lib.request.pkihandler import HTTPSPKIAuthHandler
|
2010-11-08 15:26:13 +03:00
|
|
|
from lib.request.rangehandler import HTTPRangeHandler
|
2010-03-15 17:24:43 +03:00
|
|
|
from lib.request.redirecthandler import SmartRedirectHandler
|
2011-06-07 13:50:00 +04:00
|
|
|
from lib.request.templates import getPageTemplate
|
2013-01-09 18:22:21 +04:00
|
|
|
from lib.utils.crawler import crawl
|
2011-06-13 23:00:27 +04:00
|
|
|
from lib.utils.deps import checkDependencies
|
2015-11-08 18:37:46 +03:00
|
|
|
from lib.utils.search import search
|
2014-04-07 22:04:07 +04:00
|
|
|
from lib.utils.purge import purge
|
2012-07-14 19:01:04 +04:00
|
|
|
from thirdparty.keepalive import keepalive
|
|
|
|
from thirdparty.oset.pyoset import oset
|
|
|
|
from thirdparty.socks import socks
|
2011-06-22 18:33:52 +04:00
|
|
|
from xml.etree.ElementTree import ElementTree
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2010-06-01 16:21:10 +04:00
|
|
|
# Module-level urllib2 handler singletons, shared across the whole session.
# They are instantiated once here and (presumably) wired into an opener
# elsewhere in this module -- TODO confirm against the rest of the file.
authHandler = urllib2.BaseHandler()            # placeholder; replaced with a real auth handler when --auth-* options are used
httpsHandler = HTTPSHandler()                  # custom HTTPS handling (project lib.request.httpshandler)
keepAliveHandler = keepalive.HTTPHandler()     # persistent HTTP connections (thirdparty.keepalive)
proxyHandler = urllib2.ProxyHandler()          # HTTP(S) proxy support
redirectHandler = SmartRedirectHandler()       # project redirect handling (lib.request.redirecthandler)
rangeHandler = HTTPRangeHandler()              # HTTP Range header support (lib.request.rangehandler)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _feedTargetsDict(reqFile, addedTargetUrls):
    """
    Parses web scarab and burp logs and adds results to the target URL list

    reqFile: path of the log/request file to parse
    addedTargetUrls: set (mutated in place) of URLs already queued, used
                     for de-duplication across multiple parsed files

    Raises SqlmapSystemException when the file can't be read and
    SqlmapGenericException when no usable request was extracted.
    """

    def _parseWebScarabLog(content):
        """
        Parses web scarab logs (POST method not supported)
        """

        # Each WebScarab conversation is separated by a fixed splitter token
        reqResList = content.split(WEBSCARAB_SPLITTER)

        for request in reqResList:
            # WebScarab stores metadata as "KEY: value" lines
            url = extractRegexResult(r"URL: (?P<result>.+?)\n", request, re.I)
            method = extractRegexResult(r"METHOD: (?P<result>.+?)\n", request, re.I)
            cookie = extractRegexResult(r"COOKIE: (?P<result>.+?)\n", request, re.I)

            if not method or not url:
                logger.debug("not a valid WebScarab log data")
                continue

            if method.upper() == HTTPMETHOD.POST:
                # POST bodies live in separate files WebScarab manages, so
                # they can't be reconstructed from the log alone
                warnMsg = "POST requests from WebScarab logs aren't supported "
                warnMsg += "as their body content is stored in separate files. "
                warnMsg += "Nevertheless you can use -r to load them individually."
                logger.warning(warnMsg)
                continue

            # Honor --scope regex filtering; then de-duplicate by URL
            if not(conf.scope and not re.search(conf.scope, url, re.I)):
                if not kb.targets or url not in addedTargetUrls:
                    kb.targets.add((url, method, None, cookie, None))
                    addedTargetUrls.add(url)

    def _parseBurpLog(content):
        """
        Parses burp logs
        """

        if not re.search(BURP_REQUEST_REGEX, content, re.I | re.S):
            if re.search(BURP_XML_HISTORY_REGEX, content, re.I | re.S):
                # Burp XML proxy history: requests are base64-encoded items
                reqResList = []

                for match in re.finditer(BURP_XML_HISTORY_REGEX, content, re.I | re.S):
                    port, request = match.groups()
                    request = request.decode("base64")
                    # Re-attach the port to the Host header when missing so
                    # the reconstructed URL targets the right service
                    _ = re.search(r"%s:.+" % re.escape(HTTP_HEADER.HOST), request)
                    if _:
                        host = _.group(0).strip()
                        if not re.search(r":\d+\Z", host):
                            request = request.replace(host, "%s:%d" % (host, int(port)))
                    reqResList.append(request)
            else:
                # Fallback: treat the whole file as a single raw request
                reqResList = [content]
        else:
            # Plain Burp log format: iterate matched request sections lazily
            reqResList = re.finditer(BURP_REQUEST_REGEX, content, re.I | re.S)

        for match in reqResList:
            # reqResList holds either plain strings or regex match objects
            request = match if isinstance(match, basestring) else match.group(0)
            request = re.sub(r"\A[^\w]+", "", request)

            # Burp log section separators ("====...") carry scheme and port
            schemePort = re.search(r"(http[\w]*)\:\/\/.*?\:([\d]+).+?={10,}", request, re.I | re.S)

            if schemePort:
                scheme = schemePort.group(1)
                port = schemePort.group(2)
            else:
                scheme, port = None, None

            # Skip anything that doesn't start with a known HTTP method line
            if not re.search(r"^[\n]*(%s).*?\sHTTP\/" % "|".join(getPublicTypeMembers(HTTPMETHOD, True)), request, re.I | re.M):
                continue

            # Skip GETs for static resources (images, CSS, ...)
            if re.search(r"^[\n]*%s.*?\.(%s)\sHTTP\/" % (HTTPMETHOD.GET, "|".join(CRAWL_EXCLUDE_EXTENSIONS)), request, re.I | re.M):
                continue

            # Per-request parser state
            getPostReq = False
            url = None
            host = None
            method = None
            data = None
            cookie = None
            params = False
            newline = None
            lines = request.split('\n')
            headers = []

            for index in xrange(len(lines)):
                line = lines[index]

                # Trailing empty line ends the request
                if not line.strip() and index == len(lines) - 1:
                    break

                # Remember the line ending style so POST data is rebuilt verbatim
                newline = "\r\n" if line.endswith('\r') else '\n'
                line = line.strip('\r')
                # Only the first request line can match "METHOD url HTTP/x.y"
                match = re.search(r"\A(%s) (.+) HTTP/[\d.]+\Z" % "|".join(getPublicTypeMembers(HTTPMETHOD, True)), line) if not method else None

                # Blank line after headers of a non-GET request: body follows
                if len(line.strip()) == 0 and method and method != HTTPMETHOD.GET and data is None:
                    data = ""
                    params = True

                elif match:
                    method = match.group(1)
                    url = match.group(2)

                    if any(_ in line for _ in ('?', '=', CUSTOM_INJECTION_MARK_CHAR)):
                        params = True

                    getPostReq = True

                # POST parameters
                elif data is not None and params:
                    data += "%s%s" % (line, newline)

                # GET parameters
                elif "?" in line and "=" in line and ": " not in line:
                    params = True

                # Headers
                elif re.search(r"\A\S+:", line):
                    key, value = line.split(":", 1)
                    value = value.strip().replace("\r", "").replace("\n", "")

                    # Cookie and Host headers
                    if key.upper() == HTTP_HEADER.COOKIE.upper():
                        cookie = value
                    elif key.upper() == HTTP_HEADER.HOST.upper():
                        if '://' in value:
                            scheme, value = value.split('://')[:2]
                        splitValue = value.split(":")
                        host = splitValue[0]

                        if len(splitValue) > 1:
                            # Keep only digits from an explicit ":port" suffix
                            port = filterStringValue(splitValue[1], "[0-9]")

                    # Avoid to add a static content length header to
                    # headers and consider the following lines as
                    # POSTed data
                    if key.upper() == HTTP_HEADER.CONTENT_LENGTH.upper():
                        params = True

                    # Avoid proxy and connection type related headers
                    elif key not in (HTTP_HEADER.PROXY_CONNECTION, HTTP_HEADER.CONNECTION):
                        headers.append((getUnicode(key), getUnicode(value)))

                    # Injection marker inside a header value also makes the
                    # request testable (ignoring known false-positive patterns)
                    if CUSTOM_INJECTION_MARK_CHAR in re.sub(PROBLEMATIC_CUSTOM_INJECTION_PATTERNS, "", value or ""):
                        params = True

            data = data.rstrip("\r\n") if data else data

            # Queue the target only when there is something to test
            if getPostReq and (params or cookie):
                # Infer scheme/port from each other when only one is known
                if not port and isinstance(scheme, basestring) and scheme.lower() == "https":
                    port = "443"
                elif not scheme and port == "443":
                    scheme = "https"

                if conf.forceSSL:
                    scheme = "https"
                    port = port or "443"

                if not host:
                    errMsg = "invalid format of a request file"
                    raise SqlmapSyntaxException, errMsg

                if not url.startswith("http"):
                    url = "%s://%s:%s%s" % (scheme or "http", host, port or "80", url)
                    scheme = None
                    port = None

                # Honor --scope regex filtering; then de-duplicate by URL
                if not(conf.scope and not re.search(conf.scope, url, re.I)):
                    if not kb.targets or url not in addedTargetUrls:
                        kb.targets.add((url, conf.method or method, data, cookie, tuple(headers)))
                        addedTargetUrls.add(url)

    checkFile(reqFile)
    try:
        with openFile(reqFile, "rb") as f:
            content = f.read()
    except (IOError, OSError, MemoryError), ex:
        errMsg = "something went wrong while trying "
        errMsg += "to read the content of file '%s' ('%s')" % (reqFile, getSafeExString(ex))
        raise SqlmapSystemException(errMsg)

    if conf.scope:
        logger.info("using regular expression '%s' for filtering targets" % conf.scope)

    # Try both known formats; each parser silently skips content it
    # doesn't recognize
    _parseBurpLog(content)
    _parseWebScarabLog(content)

    if not addedTargetUrls:
        errMsg = "unable to find usable request(s) "
        errMsg += "in provided file ('%s')" % reqFile
        raise SqlmapGenericException(errMsg)
|
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _loadQueries():
    """
    Loads queries from 'xml/queries.xml' file.

    Populates the module-level 'queries' dictionary, keyed by each DBMS
    node's 'value' attribute, with nested attribute-access objects that
    mirror the XML structure.

    Raises SqlmapInstallationException when the XML file can't be parsed.
    """

    def iterate(node, retVal=None):
        # Recursively convert an XML subtree into an object whose
        # attributes mirror child tags and their XML attributes.
        class DictObject(object):
            def __init__(self):
                self.__dict__ = {}

            def __contains__(self, name):
                # Allows callers to use "name in obj" membership tests
                return name in self.__dict__

        if retVal is None:
            retVal = DictObject()

        for child in node.findall("*"):
            instance = DictObject()
            # Child tag becomes an attribute on the parent object
            retVal.__dict__[child.tag] = instance
            if child.attrib:
                # Leaf-like node: expose XML attributes directly
                instance.__dict__.update(child.attrib)
            else:
                # Container node: recurse into its children
                iterate(child, instance)

        return retVal

    tree = ElementTree()
    try:
        tree.parse(paths.QUERIES_XML)
    except Exception, ex:
        errMsg = "something appears to be wrong with "
        errMsg += "the file '%s' ('%s'). Please make " % (paths.QUERIES_XML, getSafeExString(ex))
        errMsg += "sure that you haven't made any changes to it"
        raise SqlmapInstallationException, errMsg

    # Top-level nodes are per-DBMS entries identified by their 'value' attribute
    for node in tree.findall("*"):
        queries[node.attrib['value']] = iterate(node)
|
2010-10-21 17:13:12 +04:00
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setMultipleTargets():
    """
    Define a configuration parameter if we are running in multiple target
    mode.

    Parses the file or directory given via conf.logFile and feeds every
    usable request into kb.targets (via _feedTargetsDict). No-op when
    conf.logFile is not set.

    Raises SqlmapFilePathException when conf.logFile is neither an
    existing file nor a directory.
    """

    initialTargetsCount = len(kb.targets)
    addedTargetUrls = set()

    if not conf.logFile:
        return

    debugMsg = "parsing targets list from '%s'" % conf.logFile
    logger.debug(debugMsg)

    if not os.path.exists(conf.logFile):
        errMsg = "the specified list of targets does not exist"
        raise SqlmapFilePathException(errMsg)

    if os.path.isfile(conf.logFile):
        _feedTargetsDict(conf.logFile, addedTargetUrls)

    elif os.path.isdir(conf.logFile):
        files = os.listdir(conf.logFile)
        # Deterministic processing order regardless of filesystem ordering
        files.sort()

        for reqFile in files:
            # Only Burp-style "<number>-request" files are of interest.
            # NOTE: pattern made a raw string (was "([\d]+)\-request") --
            # non-raw "\d"/"\-" escapes trigger invalid-escape warnings and
            # are a future SyntaxError; semantics are unchanged.
            if not re.search(r"(\d+)-request", reqFile):
                continue

            _feedTargetsDict(os.path.join(conf.logFile, reqFile), addedTargetUrls)

    else:
        errMsg = "the specified list of targets is not a file "
        errMsg += "nor a directory"
        raise SqlmapFilePathException(errMsg)

    updatedTargetsCount = len(kb.targets)

    if updatedTargetsCount > initialTargetsCount:
        infoMsg = "sqlmap parsed %d " % (updatedTargetsCount - initialTargetsCount)
        infoMsg += "(parameter unique) requests from the "
        infoMsg += "targets list ready to be tested"
        logger.info(infoMsg)
|
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _adjustLoggingFormatter():
    """
    Solves problem of line deletition caused by overlapping logging messages
    and retrieved data info in inference mode
    """

    # Idempotency guard: '_format' only exists once we've already patched
    if hasattr(FORMATTER, '_format'):
        return

    def patchedFormat(record):
        # Run the original formatter first, then post-process its output
        result = boldifyMessage(FORMATTER._format(record))
        if kb.get("prependFlag"):
            # Push the message onto a fresh line (one-shot flag)
            result = "\n%s" % result
            kb.prependFlag = False
        return result

    # Stash the original under '_format' and install the wrapper in its place
    FORMATTER._format = FORMATTER.format
    FORMATTER.format = patchedFormat
|
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setRequestFromFile():
    """
    This function checks if the way to make a HTTP request is through supplied
    textual file, parses it and saves the information into the knowledge base.
    """

    # Nothing to do unless -r/--requestfile was supplied
    if not conf.requestFile:
        return

    conf.requestFile = safeExpandUser(conf.requestFile)
    addedTargetUrls = set()

    logger.info("parsing HTTP request from '%s'" % conf.requestFile)

    if not os.path.isfile(conf.requestFile):
        raise SqlmapFilePathException("the specified HTTP request file does not exist")

    # Delegate actual parsing/queueing to the shared log parser
    _feedTargetsDict(conf.requestFile, addedTargetUrls)
|
2010-07-30 00:01:04 +04:00
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setCrawler():
    # Crawling is only active when --crawl was given
    if not conf.crawlDepth:
        return

    if not any((conf.bulkFile, conf.sitemapUrl)):
        # Single starting point: the main target URL
        crawl(conf.url)
    else:
        # Multiple starting points from a bulk file or a sitemap
        targets = getFileItems(conf.bulkFile) if conf.bulkFile else parseSitemap(conf.sitemapUrl)
        total = len(targets)

        for index, target in enumerate(targets):
            try:
                crawl(target)

                if conf.verbose in (1, 2):
                    # Inline progress indicator (carriage-return overwrite)
                    visited = index + 1
                    status = "%d/%d links visited (%d%%)" % (visited, total, round(100.0 * visited / total))
                    dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True)
            except Exception as ex:
                # A failing start page shouldn't abort the remaining ones
                errMsg = "problem occurred while crawling at '%s' ('%s')" % (target, getSafeExString(ex))
                logger.error(errMsg)
|
2011-06-20 15:32:30 +04:00
|
|
|
|
2015-11-08 18:37:46 +03:00
|
|
|
def _doSearch():
    """
    This function performs search dorking, parses results
    and saves the testable hosts into the knowledge base.

    No-op unless conf.googleDork is set. Keeps fetching result pages until
    at least one testable target is found or the user gives up.

    Raises SqlmapGenericException when a page yields no results at all and
    SqlmapSilentQuitException when the user declines to continue paging.
    """

    if not conf.googleDork:
        return

    # Tri-state: None = not asked yet, then True/False per user's answer
    kb.data.onlyGETs = None

    def retrieve():
        # Fetch one page of dork results and queue the testable links;
        # returns the raw link list so the caller can report statistics
        links = search(conf.googleDork)

        if not links:
            errMsg = "unable to find results for your "
            errMsg += "search dork expression"
            raise SqlmapGenericException(errMsg)

        for link in links:
            link = urldecode(link)
            if re.search(r"(.*?)\?(.+)", link):
                # Link carries a query string -- directly testable
                kb.targets.add((link, conf.method, conf.data, conf.cookie, None))
            elif re.search(URI_INJECTABLE_REGEX, link, re.I):
                # URI itself looks injectable; optionally ask the user
                # whether parameter-less results should be included.
                # NOTE(review): the "not conf.googleDork" conditions below can
                # never trigger here (the function returned early without a
                # dork) -- presumably dead guards; confirm before removing.
                if kb.data.onlyGETs is None and conf.data is None and not conf.googleDork:
                    message = "do you want to scan only results containing GET parameters? [Y/n] "
                    test = readInput(message, default="Y")
                    kb.data.onlyGETs = test.lower() != 'n'
                if not kb.data.onlyGETs or conf.googleDork:
                    kb.targets.add((link, conf.method, conf.data, conf.cookie, None))

        return links

    while True:
        links = retrieve()

        if kb.targets:
            # At least one testable target found -- report and stop paging
            infoMsg = "sqlmap got %d results for your " % len(links)
            infoMsg += "search dork expression, "

            if len(links) == len(kb.targets):
                infoMsg += "all "
            else:
                infoMsg += "%d " % len(kb.targets)

            infoMsg += "of them are testable targets"
            logger.info(infoMsg)
            break

        else:
            # No testable target on this page -- offer to try the next one
            message = "sqlmap got %d results " % len(links)
            message += "for your search dork expression, but none of them "
            message += "have GET parameters to test for SQL injection. "
            message += "Do you want to skip to the next result page? [Y/n]"
            test = readInput(message, default="Y")

            if test[0] in ("n", "N"):
                raise SqlmapSilentQuitException
            else:
                conf.googlePage += 1
|
2008-11-28 01:33:33 +03:00
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setBulkMultipleTargets():
    """
    Parses the file supplied via option -m/--bulk-file and registers every
    usable line (URL with a query string or an explicit injection marker)
    as a scan target in kb.targets.
    """
    if not conf.bulkFile:
        return

    conf.bulkFile = safeExpandUser(conf.bulkFile)

    logger.info("parsing multiple targets list from '%s'" % conf.bulkFile)

    if not os.path.isfile(conf.bulkFile):
        raise SqlmapFilePathException("the specified bulk file does not exist")

    usable = False

    for candidate in getFileItems(conf.bulkFile):
        # a line is usable when it carries GET parameters or a custom injection mark
        if re.match(r"[^ ]+\?(.+)", candidate, re.I) or CUSTOM_INJECTION_MARK_CHAR in candidate:
            usable = True
            kb.targets.add((candidate.strip(), conf.method, conf.data, conf.cookie, None))

    # with --forms or --crawl the targets can still be discovered later
    if not (usable or conf.forms or conf.crawlDepth):
        logger.warn("no usable links found (with GET parameters)")
|
|
|
|
|
|
|
|
def _setSitemapTargets():
    """
    Parses the sitemap given via option --sitemap and registers every URL
    carrying GET parameters as a scan target in kb.targets.
    """
    if not conf.sitemapUrl:
        return

    logger.info("parsing sitemap '%s'" % conf.sitemapUrl)

    usable = False

    for url in parseSitemap(conf.sitemapUrl):
        # only URLs with a query string are directly testable
        if re.match(r"[^ ]+\?(.+)", url, re.I):
            usable = True
            kb.targets.add((url.strip(), None, None, None, None))

    # with --forms or --crawl the targets can still be discovered later
    if not (usable or conf.forms or conf.crawlDepth):
        logger.warn("no usable links found (with GET parameters)")
|
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _findPageForms():
    """
    Searches for HTML forms (--forms) on the target page(s) and registers
    them as additional scan targets.
    """

    # --crawl does its own form discovery, hence the early exit
    if not conf.forms or conf.crawlDepth:
        return

    # no point in searching when the direct target is not reachable
    if conf.url and not checkConnection():
        return

    infoMsg = "searching for forms"
    logger.info(infoMsg)

    if not any((conf.bulkFile, conf.googleDork, conf.sitemapUrl)):
        # single target case - inspect just the main page
        page, _ = Request.queryPage(content=True)
        findPageForms(page, conf.url, True, True)
    else:
        # multi-target case - collect candidate URLs from the chosen source
        if conf.bulkFile:
            targets = getFileItems(conf.bulkFile)
        elif conf.sitemapUrl:
            targets = parseSitemap(conf.sitemapUrl)
        elif conf.googleDork:
            # dork results were stored in kb.targets; take them over and
            # let form parsing repopulate the set
            targets = [_[0] for _ in kb.targets]
            kb.targets.clear()

        for i in xrange(len(targets)):
            try:
                target = targets[i]
                page, _, _ = Request.getPage(url=target.strip(), crawling=True, raise404=False)
                findPageForms(page, target, False, True)

                if conf.verbose in (1, 2):
                    status = '%d/%d links visited (%d%%)' % (i + 1, len(targets), round(100.0 * (i + 1) / len(targets)))
                    dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True)
            except KeyboardInterrupt:
                # user interrupt stops visiting further links, keeps found forms
                break
            except Exception, ex:
                # a failing target must not abort processing of the remaining ones
                errMsg = "problem occurred while searching for forms at '%s' ('%s')" % (target, getSafeExString(ex))
                logger.error(errMsg)
|
2010-11-15 14:34:57 +03:00
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setDBMSAuthentication():
    """
    Check and set the DBMS authentication credentials to run statements as
    another user, not the session user
    """
    if not conf.dbmsCred:
        return

    logger.debug("setting the DBMS authentication credentials")

    match = re.search("^(.+?):(.*?)$", conf.dbmsCred)

    if match is None:
        raise SqlmapSyntaxException("DBMS authentication credentials value must be in format username:password")

    conf.dbmsUsername, conf.dbmsPassword = match.group(1), match.group(2)
|
2012-07-02 03:22:34 +04:00
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setMetasploit():
    """
    Validates and auto-discovers the Metasploit Framework installation used
    by the takeover (--os-pwn/--os-smb/--os-bof) functionality. Sets
    conf.msfPath and kb.oldMsf (True for pre-msfvenom installations).
    """
    if not conf.osPwn and not conf.osSmb and not conf.osBof:
        return

    debugMsg = "setting the takeover out-of-band functionality"
    logger.debug(debugMsg)

    msfEnvPathExists = False

    if IS_WIN:
        # pywin32 is a hard requirement for the Windows takeover machinery
        try:
            import win32file
        except ImportError:
            errMsg = "sqlmap requires third-party module 'pywin32' "
            errMsg += "in order to use Metasploit functionalities on "
            errMsg += "Windows. You can download it from "
            errMsg += "'http://sourceforge.net/projects/pywin32/files/pywin32/'"
            raise SqlmapMissingDependence(errMsg)

        if not conf.msfPath:
            # best-effort lookup of the MSF install location via the registry
            def _(key, value):
                retVal = None

                try:
                    from _winreg import ConnectRegistry, OpenKey, QueryValueEx, HKEY_LOCAL_MACHINE
                    _ = ConnectRegistry(None, HKEY_LOCAL_MACHINE)
                    _ = OpenKey(_, key)
                    retVal = QueryValueEx(_, value)[0]
                except:
                    logger.debug("unable to identify Metasploit installation path via registry key")

                return retVal

            conf.msfPath = _(r"SOFTWARE\Rapid7\Metasploit", "Location")

            if conf.msfPath:
                conf.msfPath = os.path.join(conf.msfPath, "msf3")

    if conf.osSmb:
        isAdmin = runningAsAdmin()

        # the SMB relay attack needs to bind a listening port
        if not isAdmin:
            errMsg = "you need to run sqlmap as an administrator "
            errMsg += "if you want to perform a SMB relay attack because "
            errMsg += "it will need to listen on a user-specified SMB "
            errMsg += "TCP port for incoming connection attempts"
            raise SqlmapMissingPrivileges(errMsg)

    if conf.msfPath:
        # probe the given path (and its bin/ subdirectory) for MSF executables
        for path in (conf.msfPath, os.path.join(conf.msfPath, "bin")):
            if any(os.path.exists(normalizePath(os.path.join(path, _))) for _ in ("msfcli", "msfconsole")):
                msfEnvPathExists = True

                # distinguish new (msfvenom) from old (msfencode/msfpayload) layouts
                if all(os.path.exists(normalizePath(os.path.join(path, _))) for _ in ("msfvenom",)):
                    kb.oldMsf = False
                elif all(os.path.exists(normalizePath(os.path.join(path, _))) for _ in ("msfencode", "msfpayload")):
                    kb.oldMsf = True
                else:
                    msfEnvPathExists = False

                conf.msfPath = path
                break

        if msfEnvPathExists:
            debugMsg = "provided Metasploit Framework path "
            debugMsg += "'%s' is valid" % conf.msfPath
            logger.debug(debugMsg)
        else:
            warnMsg = "the provided Metasploit Framework path "
            warnMsg += "'%s' is not valid. The cause could " % conf.msfPath
            warnMsg += "be that the path does not exists or that one "
            warnMsg += "or more of the needed Metasploit executables "
            warnMsg += "within msfcli, msfconsole, msfencode and "
            warnMsg += "msfpayload do not exist"
            logger.warn(warnMsg)
    else:
        warnMsg = "you did not provide the local path where Metasploit "
        warnMsg += "Framework is installed"
        logger.warn(warnMsg)

    if not msfEnvPathExists:
        # fall back to scanning the PATH environment variable
        warnMsg = "sqlmap is going to look for Metasploit Framework "
        warnMsg += "installation inside the environment path(s)"
        logger.warn(warnMsg)

        envPaths = os.environ.get("PATH", "").split(";" if IS_WIN else ":")

        for envPath in envPaths:
            envPath = envPath.replace(";", "")

            if any(os.path.exists(normalizePath(os.path.join(envPath, _))) for _ in ("msfcli", "msfconsole")):
                msfEnvPathExists = True

                if all(os.path.exists(normalizePath(os.path.join(envPath, _))) for _ in ("msfvenom",)):
                    kb.oldMsf = False
                elif all(os.path.exists(normalizePath(os.path.join(envPath, _))) for _ in ("msfencode", "msfpayload")):
                    kb.oldMsf = True
                else:
                    msfEnvPathExists = False

                if msfEnvPathExists:
                    infoMsg = "Metasploit Framework has been found "
                    infoMsg += "installed in the '%s' path" % envPath
                    logger.info(infoMsg)

                    conf.msfPath = envPath

                    break

    if not msfEnvPathExists:
        errMsg = "unable to locate Metasploit Framework installation. "
        errMsg += "You can get it at 'http://www.metasploit.com/download/'"
        raise SqlmapFilePathException(errMsg)
|
2009-04-22 15:48:07 +04:00
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setWriteFile():
    """
    Validates the options used for writing a local file onto the back-end
    DBMS file system (--file-write/--file-dest) and records the file type.
    """
    if not conf.wFile:
        return

    logger.debug("setting the write file functionality")

    if not os.path.exists(conf.wFile):
        raise SqlmapFilePathException("the provided local file '%s' does not exist" % conf.wFile)

    if not conf.dFile:
        errMsg = "you did not provide the back-end DBMS absolute path "
        errMsg += "where you want to write the local file '%s'" % conf.wFile
        raise SqlmapMissingMandatoryOptionException(errMsg)

    conf.wFileType = getFileType(conf.wFile)
|
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setOS():
    """
    Force the back-end DBMS operating system option.
    """
    if not conf.os:
        return

    if conf.os.lower() not in SUPPORTED_OS:
        msg = "you provided an unsupported back-end DBMS operating "
        msg += "system. The supported DBMS operating systems for OS "
        msg += "and file system access are %s. " % ', '.join(o.capitalize() for o in SUPPORTED_OS)
        msg += "If you do not know the back-end DBMS underlying OS, "
        msg += "do not provide it and sqlmap will fingerprint it for "
        msg += "you."
        raise SqlmapUnsupportedDBMSException(msg)

    logger.debug("forcing back-end DBMS operating system to user defined "
                 "value '%s'" % conf.os)

    Backend.setOs(conf.os)
|
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setTechnique():
    """
    Translates the user supplied --technique letters (e.g. "BEU") into the
    corresponding list of PAYLOAD.TECHNIQUE enumeration values.
    """
    validTechniques = sorted(getPublicTypeMembers(PAYLOAD.TECHNIQUE), key=lambda x: x[1])
    validLetters = [_[0][0].upper() for _ in validTechniques]

    if conf.tech and isinstance(conf.tech, basestring):
        selected = []

        for letter in conf.tech.upper():
            # reject anything outside of the known technique initials
            if letter not in validLetters:
                errMsg = "value for --technique must be a string composed "
                errMsg += "by the letters %s. Refer to the " % ", ".join(validLetters)
                errMsg += "user's manual for details"
                raise SqlmapSyntaxException(errMsg)

            # map the initial back to its enumeration value
            for validTech, validInt in validTechniques:
                if letter == validTech[0]:
                    selected.append(validInt)
                    break

        conf.tech = selected
|
2011-04-06 18:41:44 +04:00
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setDBMS():
    """
    Force the back-end DBMS option.
    """
    if not conf.dbms:
        return

    logger.debug("forcing back-end DBMS to user defined value")

    conf.dbms = conf.dbms.lower()

    # an optional trailing version (e.g. "mysql 5.0") is split off and stored
    regex = re.search(r"(%s) ([\d\.]+)" % "|".join(alias for alias in SUPPORTED_DBMS), conf.dbms, re.I)

    if regex:
        conf.dbms = regex.group(1)
        Backend.setVersion(regex.group(2))

    if conf.dbms not in SUPPORTED_DBMS:
        errMsg = "you provided an unsupported back-end database management "
        errMsg += "system. Supported DBMSes are as follows: %s. " % ', '.join(sorted(_ for _ in DBMS_DICT))
        errMsg += "If you do not know the back-end DBMS, do not provide "
        errMsg += "it and sqlmap will fingerprint it for you."
        raise SqlmapUnsupportedDBMSException(errMsg)

    # normalize any recognized alias to its canonical DBMS name
    for dbms, aliases in DBMS_ALIASES:
        if conf.dbms in aliases:
            conf.dbms = dbms
            break
|
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setTamperingFunctions():
    """
    Loads tampering functions from given script(s)
    """

    if conf.tamper:
        last_priority = PRIORITY.HIGHEST
        check_priority = True
        resolve_priorities = False
        priorities = []

        for tfile in re.split(PARAMETER_SPLITTING_REGEX, conf.tamper):
            found = False

            tfile = tfile.strip()

            if not tfile:
                continue

            # bare script names are resolved against the bundled tamper directory
            elif os.path.exists(os.path.join(paths.SQLMAP_TAMPER_PATH, tfile if tfile.endswith('.py') else "%s.py" % tfile)):
                tfile = os.path.join(paths.SQLMAP_TAMPER_PATH, tfile if tfile.endswith('.py') else "%s.py" % tfile)

            elif not os.path.exists(tfile):
                errMsg = "tamper script '%s' does not exist" % tfile
                raise SqlmapFilePathException(errMsg)

            elif not tfile.endswith('.py'):
                errMsg = "tamper script '%s' should have an extension '.py'" % tfile
                raise SqlmapSyntaxException(errMsg)

            dirname, filename = os.path.split(tfile)
            dirname = os.path.abspath(dirname)

            infoMsg = "loading tamper script '%s'" % filename[:-3]
            logger.info(infoMsg)

            # the script's directory must be importable as a package
            if not os.path.exists(os.path.join(dirname, '__init__.py')):
                errMsg = "make sure that there is an empty file '__init__.py' "
                errMsg += "inside of tamper scripts directory '%s'" % dirname
                raise SqlmapGenericException(errMsg)

            if dirname not in sys.path:
                sys.path.insert(0, dirname)

            try:
                module = __import__(filename[:-3].encode(sys.getfilesystemencoding()))
            except (ImportError, SyntaxError), msg:
                raise SqlmapSyntaxException("cannot import tamper script '%s' (%s)" % (filename[:-3], msg))

            # scripts without an explicit __priority__ default to NORMAL
            priority = PRIORITY.NORMAL if not hasattr(module, '__priority__') else module.__priority__

            for name, function in inspect.getmembers(module, inspect.isfunction):
                # accept only the conventional tamper(payload, **kwargs) entry point
                if name == "tamper" and inspect.getargspec(function).args and inspect.getargspec(function).keywords == "kwargs":
                    found = True
                    kb.tamperFunctions.append(function)
                    function.func_name = module.__name__

                    # warn once when the user-given order contradicts script priorities
                    if check_priority and priority > last_priority:
                        message = "it appears that you might have mixed "
                        message += "the order of tamper scripts. "
                        message += "Do you want to auto resolve this? [Y/n/q] "
                        test = readInput(message, default="Y")

                        if not test or test[0] in ("y", "Y"):
                            resolve_priorities = True
                        elif test[0] in ("n", "N"):
                            resolve_priorities = False
                        elif test[0] in ("q", "Q"):
                            raise SqlmapUserQuitException

                        check_priority = False

                    priorities.append((priority, function))
                    last_priority = priority

                    break
                elif name == "dependencies":
                    # lets the script announce/validate its own requirements
                    function()

            if not found:
                errMsg = "missing function 'tamper(payload, **kwargs)' "
                errMsg += "in tamper script '%s'" % tfile
                raise SqlmapGenericException(errMsg)

        if kb.tamperFunctions and len(kb.tamperFunctions) > 3:
            warnMsg = "using too many tamper scripts is usually not "
            warnMsg += "a good idea"
            logger.warning(warnMsg)

        # on user consent, reorder the registered functions by priority
        if resolve_priorities and priorities:
            priorities.sort(reverse=True)
            kb.tamperFunctions = []

            for _, function in priorities:
                kb.tamperFunctions.append(function)
|
|
|
|
|
2013-02-21 14:14:57 +04:00
|
|
|
def _setWafFunctions():
    """
    Loads WAF/IDS/IPS detecting functions from script(s)
    """

    if conf.identifyWaf:
        for found in glob.glob(os.path.join(paths.SQLMAP_WAF_PATH, "*.py")):
            dirname, filename = os.path.split(found)
            dirname = os.path.abspath(dirname)

            if filename == "__init__.py":
                continue

            debugMsg = "loading WAF script '%s'" % filename[:-3]
            logger.debug(debugMsg)

            if dirname not in sys.path:
                sys.path.insert(0, dirname)

            try:
                # drop a previously imported module of the same name so each
                # WAF script is loaded fresh (names may clash between runs)
                if filename[:-3] in sys.modules:
                    del sys.modules[filename[:-3]]
                module = __import__(filename[:-3])
            except ImportError, msg:
                raise SqlmapSyntaxException("cannot import WAF script '%s' (%s)" % (filename[:-3], msg))

            _ = dict(inspect.getmembers(module))
            if "detect" not in _:
                errMsg = "missing function 'detect(get_page)' "
                errMsg += "in WAF script '%s'" % found
                raise SqlmapGenericException(errMsg)
            else:
                # register (detect function, product name) - product defaults
                # to the script's base name when __product__ is absent
                kb.wafFunctions.append((_["detect"], _.get("__product__", filename[:-3])))
|
2013-02-21 14:14:57 +04:00
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setThreads():
    """
    Ensures conf.threads holds a sane positive integer (defaults to 1).
    """
    if not (isinstance(conf.threads, int) and conf.threads > 0):
        conf.threads = 1
|
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setDNSCache():
    """
    Makes a cached version of socket.getaddrinfo to avoid subsequent DNS
    requests for the same host.

    The original socket.getaddrinfo is preserved as socket._getaddrinfo
    (the presence of that attribute also guards against double patching).
    """

    def _getaddrinfo(*args, **kwargs):
        # include keyword arguments in the cache key; keying on the
        # positional arguments alone would return a stale result for a
        # call that differs only in its keyword arguments
        key = (args, frozenset(kwargs.items()))

        if key not in kb.cache:
            kb.cache[key] = socket._getaddrinfo(*args, **kwargs)

        return kb.cache[key]

    if not hasattr(socket, "_getaddrinfo"):
        socket._getaddrinfo = socket.getaddrinfo
        socket.getaddrinfo = _getaddrinfo
|
|
|
|
|
2015-11-17 01:46:10 +03:00
|
|
|
def _setSocketPreConnect():
    """
    Makes a pre-connect version of socket.connect

    A daemon thread keeps a small pool of already-connected sockets per
    (family, type, proto, address) key; the patched socket.connect grabs
    one from the pool when available, avoiding connection setup latency.
    """

    if conf.disablePrecon:
        return

    def _():
        # background filler: tops up each pool to SOCKET_PRE_CONNECT_QUEUE_SIZE
        while kb.threadContinue and not conf.disablePrecon:
            try:
                for key in socket._ready:
                    if len(socket._ready[key]) < SOCKET_PRE_CONNECT_QUEUE_SIZE:
                        family, type, proto, address = key
                        s = socket.socket(family, type, proto)
                        s._connect(address)
                        with kb.locks.socket:
                            socket._ready[key].append(s._sock)
            except KeyboardInterrupt:
                break
            except:
                # best-effort warm-up; any failure just retries on the next pass
                pass
            finally:
                time.sleep(0.01)

    def connect(self, address):
        # replacement for socket.socket.connect - reuse a pre-connected
        # socket when one is pooled for this destination
        found = False

        key = (self.family, self.type, self.proto, address)
        with kb.locks.socket:
            if key not in socket._ready:
                socket._ready[key] = []
            if len(socket._ready[key]) > 0:
                self._sock = socket._ready[key].pop(0)
                found = True

        if not found:
            self._connect(address)

    # patch only once; socket.socket._connect keeps the original method
    if not hasattr(socket.socket, "_connect"):
        socket._ready = {}
        socket.socket._connect = socket.socket.connect
        socket.socket.connect = connect

    thread = threading.Thread(target=_)
    setDaemon(thread)
    thread.start()
|
|
|
|
|
2015-11-08 18:37:46 +03:00
|
|
|
def _setHTTPHandlers():
    """
    Check and set the HTTP/SOCKS proxy for all HTTP requests.

    Also (re)builds the global urllib2 opener with the proxy, auth,
    redirect, range, HTTPS, cookie and keep-alive handlers as configured.
    """
    global proxyHandler

    # clear handler methods possibly cached by a previous invocation
    for _ in ("http", "https"):
        if hasattr(proxyHandler, "%s_open" % _):
            delattr(proxyHandler, "%s_open" % _)

    if conf.proxyList is not None:
        if not conf.proxyList:
            errMsg = "list of usable proxies is exhausted"
            raise SqlmapNoneDataException(errMsg)

        # consume the next proxy from the user supplied list
        conf.proxy = conf.proxyList[0]
        conf.proxyList = conf.proxyList[1:]

        infoMsg = "loading proxy '%s' from a supplied proxy list file" % conf.proxy
        logger.info(infoMsg)

    elif not conf.proxy:
        if conf.hostname in ("localhost", "127.0.0.1") or conf.ignoreProxy:
            # explicitly bypass any environment-level proxy for local targets
            proxyHandler.proxies = {}

    if conf.proxy:
        debugMsg = "setting the HTTP/SOCKS proxy for all HTTP requests"
        logger.debug(debugMsg)

        try:
            _ = urlparse.urlsplit(conf.proxy)
        except Exception, ex:
            errMsg = "invalid proxy address '%s' ('%s')" % (conf.proxy, getSafeExString(ex))
            raise SqlmapSyntaxException, errMsg

        hostnamePort = _.netloc.split(":")

        scheme = _.scheme.upper()
        hostname = hostnamePort[0]
        port = None
        username = None
        password = None

        if len(hostnamePort) == 2:
            try:
                port = int(hostnamePort[1])
            except:
                pass  # drops into the next check block

        # scheme must be one of PROXY_TYPE and both hostname/port must parse
        if not all((scheme, hasattr(PROXY_TYPE, scheme), hostname, port)):
            errMsg = "proxy value must be in format '(%s)://address:port'" % "|".join(_[0].lower() for _ in getPublicTypeMembers(PROXY_TYPE))
            raise SqlmapSyntaxException(errMsg)

        if conf.proxyCred:
            _ = re.search("^(.*?):(.*?)$", conf.proxyCred)
            if not _:
                errMsg = "proxy authentication credentials "
                errMsg += "value must be in format username:password"
                raise SqlmapSyntaxException(errMsg)
            else:
                username = _.group(1)
                password = _.group(2)

        if scheme in (PROXY_TYPE.SOCKS4, PROXY_TYPE.SOCKS5):
            # SOCKS proxying is done by wrapping urllib2's sockets, not by
            # a urllib2 proxy handler
            proxyHandler.proxies = {}

            socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if scheme == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, hostname, port, username=username, password=password)
            socks.wrapmodule(urllib2)
        else:
            socks.unwrapmodule(urllib2)

            if conf.proxyCred:
                # Reference: http://stackoverflow.com/questions/34079/how-to-specify-an-authenticated-proxy-for-a-python-http-connection
                proxyString = "%s@" % conf.proxyCred
            else:
                proxyString = ""

            proxyString += "%s:%d" % (hostname, port)
            proxyHandler.proxies = {"http": proxyString, "https": proxyString}

        proxyHandler.__init__(proxyHandler.proxies)

    debugMsg = "creating HTTP requests opener object"
    logger.debug(debugMsg)

    handlers = filter(None, [proxyHandler if proxyHandler.proxies else None, authHandler, redirectHandler, rangeHandler, httpsHandler])

    if not conf.dropSetCookie:
        if not conf.loadCookies:
            conf.cj = cookielib.CookieJar()
        else:
            # --load-cookies expects a Mozilla/Netscape formatted cookie file
            conf.cj = cookielib.MozillaCookieJar()
            resetCookieJar(conf.cj)

        handlers.append(urllib2.HTTPCookieProcessor(conf.cj))

    # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
    if conf.keepAlive:
        warnMsg = "persistent HTTP(s) connections, Keep-Alive, has "
        warnMsg += "been disabled because of its incompatibility "

        if conf.proxy:
            warnMsg += "with HTTP(s) proxy"
            logger.warn(warnMsg)
        elif conf.authType:
            warnMsg += "with authentication methods"
            logger.warn(warnMsg)
        else:
            handlers.append(keepAliveHandler)

    opener = urllib2.build_opener(*handlers)
    urllib2.install_opener(opener)
|
|
|
|
|
2015-04-22 17:28:54 +03:00
|
|
|
def _setSafeVisit():
    """
    Check and set the safe visit options.

    Either parses a raw HTTP request file (--safe-req) into kb.safeReq
    (method, url, headers, post), or normalizes --safe-url; in both cases
    --safe-freq must be a positive value.
    """
    if not any((conf.safeUrl, conf.safeReqFile)):
        return

    if conf.safeReqFile:
        checkFile(conf.safeReqFile)

        raw = readCachedFileContent(conf.safeReqFile)
        # request line must look like e.g. "GET /path HTTP/1.1"
        match = re.search(r"\A([A-Z]+) ([^ ]+) HTTP/[0-9.]+\Z", raw[:raw.find('\n')])

        if match:
            kb.safeReq.method = match.group(1)
            kb.safeReq.url = match.group(2)
            kb.safeReq.headers = {}

            # consume "Name: value" header lines until the first non-header line
            for line in raw[raw.find('\n') + 1:].split('\n'):
                line = line.strip()
                if line and ':' in line:
                    key, value = line.split(':', 1)
                    value = value.strip()
                    kb.safeReq.headers[key] = value
                    if key == HTTP_HEADER.HOST:
                        # turn the Host header into an absolute base URL
                        # (":443" is taken as a hint for https)
                        if not value.startswith("http"):
                            scheme = "http"
                            if value.endswith(":443"):
                                scheme = "https"
                            value = "%s://%s" % (scheme, value)
                        kb.safeReq.url = urlparse.urljoin(value, kb.safeReq.url)
                else:
                    break

            post = None

            # the request body follows the first blank line (CRLF or LF style)
            if '\r\n\r\n' in raw:
                post = raw[raw.find('\r\n\r\n') + 4:]
            elif '\n\n' in raw:
                post = raw[raw.find('\n\n') + 2:]

            if post and post.strip():
                kb.safeReq.post = post
            else:
                kb.safeReq.post = None
        else:
            errMsg = "invalid format of a safe request file"
            raise SqlmapSyntaxException, errMsg
    else:
        # prepend a scheme when the user omitted it
        if not re.search("^http[s]*://", conf.safeUrl):
            if ":443/" in conf.safeUrl:
                conf.safeUrl = "https://" + conf.safeUrl
            else:
                conf.safeUrl = "http://" + conf.safeUrl

    if conf.safeFreq <= 0:
        errMsg = "please provide a valid value (>0) for safe frequency (--safe-freq) while using safe visit features"
        raise SqlmapSyntaxException(errMsg)
|
2010-04-16 16:44:47 +04:00
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setPrefixSuffix():
    """
    Builds a single custom payload boundary from the user-supplied
    --prefix/--suffix values and makes it the only boundary tested.
    """

    if conf.prefix is not None and conf.suffix is not None:
        # Create a custom boundary object for user's supplied prefix
        # and suffix
        boundary = AttribDict()

        boundary.level = 1
        boundary.clause = [0]
        boundary.where = [1, 2, 3]
        boundary.prefix = conf.prefix
        boundary.suffix = conf.suffix

        suffix = boundary.suffix

        # Derive parenthesis/quote type from the shape of the suffix
        if " like" in suffix.lower():
            if "'" in suffix.lower():
                boundary.ptype = 3
            elif '"' in suffix.lower():
                boundary.ptype = 5
        elif "'" in suffix:
            boundary.ptype = 2
        elif '"' in suffix:
            boundary.ptype = 4
        else:
            boundary.ptype = 1

        # user who provides --prefix/--suffix does not want other boundaries
        # to be tested for
        conf.boundaries = [boundary]
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setAuthCred():
    """
    Adds authentication credentials (if any) for current target to the password manager
    (used by connection handler)
    """

    required = (conf.scheme, conf.hostname, conf.port, conf.authUsername, conf.authPassword)

    if kb.passwordMgr and all(_ is not None for _ in required):
        uri = "%s://%s:%d" % (conf.scheme, conf.hostname, conf.port)
        kb.passwordMgr.add_password(None, uri, conf.authUsername, conf.authPassword)
2012-12-06 17:14:19 +04:00
|
|
|
def _setHTTPAuthentication():
    """
    Check and set the HTTP(s) authentication method (Basic, Digest, NTLM or PKI),
    username and password for first three methods, or PEM private key file for
    PKI authentication
    """

    global authHandler

    # Nothing to do when no authentication option was provided at all
    if not conf.authType and not conf.authCred and not conf.authFile:
        return

    # A key file alone implies PKI authentication
    if conf.authFile and not conf.authType:
        conf.authType = AUTH_TYPE.PKI

    elif conf.authType and not conf.authCred and not conf.authFile:
        errMsg = "you specified the HTTP authentication type, but "
        errMsg += "did not provide the credentials"
        raise SqlmapSyntaxException(errMsg)

    elif not conf.authType and conf.authCred:
        errMsg = "you specified the HTTP authentication credentials, "
        errMsg += "but did not provide the type"
        raise SqlmapSyntaxException(errMsg)

    elif (conf.authType or "").lower() not in (AUTH_TYPE.BASIC, AUTH_TYPE.DIGEST, AUTH_TYPE.NTLM, AUTH_TYPE.PKI):
        errMsg = "HTTP authentication type value must be "
        errMsg += "Basic, Digest, NTLM or PKI"
        raise SqlmapSyntaxException(errMsg)

    if not conf.authFile:
        # Credential-based authentication (Basic/Digest/NTLM)
        debugMsg = "setting the HTTP authentication type and credentials"
        logger.debug(debugMsg)

        aTypeLower = conf.authType.lower()

        # Choose the credential-parsing regex (and its failure message)
        # per authentication type; NTLM additionally requires a DOMAIN part
        if aTypeLower in (AUTH_TYPE.BASIC, AUTH_TYPE.DIGEST):
            regExp = "^(.*?):(.*?)$"
            errMsg = "HTTP %s authentication credentials " % aTypeLower
            errMsg += "value must be in format 'username:password'"
        elif aTypeLower == AUTH_TYPE.NTLM:
            regExp = "^(.*\\\\.*):(.*?)$"
            errMsg = "HTTP NTLM authentication credentials value must "
            errMsg += "be in format 'DOMAIN\username:password'"
        elif aTypeLower == AUTH_TYPE.PKI:
            # PKI with --auth-cred (instead of --auth-pki) is a usage error
            errMsg = "HTTP PKI authentication require "
            errMsg += "usage of option `--auth-pki`"
            raise SqlmapSyntaxException(errMsg)

        aCredRegExp = re.search(regExp, conf.authCred)

        if not aCredRegExp:
            raise SqlmapSyntaxException(errMsg)

        conf.authUsername = aCredRegExp.group(1)
        conf.authPassword = aCredRegExp.group(2)

        kb.passwordMgr = urllib2.HTTPPasswordMgrWithDefaultRealm()

        # Register the parsed credentials with the password manager
        _setAuthCred()

        if aTypeLower == AUTH_TYPE.BASIC:
            authHandler = SmartHTTPBasicAuthHandler(kb.passwordMgr)

        elif aTypeLower == AUTH_TYPE.DIGEST:
            authHandler = urllib2.HTTPDigestAuthHandler(kb.passwordMgr)

        elif aTypeLower == AUTH_TYPE.NTLM:
            # NTLM support depends on the optional python-ntlm package
            try:
                from ntlm import HTTPNtlmAuthHandler
            except ImportError:
                errMsg = "sqlmap requires Python NTLM third-party library "
                errMsg += "in order to authenticate via NTLM, "
                errMsg += "http://code.google.com/p/python-ntlm/"
                raise SqlmapMissingDependence(errMsg)

            authHandler = HTTPNtlmAuthHandler.HTTPNtlmAuthHandler(kb.passwordMgr)
    else:
        # PKI authentication via PEM private key file (--auth-file/--auth-pki)
        debugMsg = "setting the HTTP(s) authentication PEM private key"
        logger.debug(debugMsg)

        _ = safeExpandUser(conf.authFile)
        checkFile(_)
        authHandler = HTTPSPKIAuthHandler(_)
2012-12-06 17:14:19 +04:00
|
|
|
def _setHTTPExtraHeaders():
    """
    Registers extra HTTP headers supplied by the user (option --headers),
    or appends browser-like defaults when no custom headers were given.
    """

    if conf.headers:
        debugMsg = "setting extra HTTP headers"
        logger.debug(debugMsg)

        # Headers may be delimited by real newlines or by literal "\n" sequences
        delimiter = "\n" if "\n" in conf.headers else "\\n"
        conf.headers = conf.headers.split(delimiter)

        for entry in conf.headers:
            if not entry.strip():
                continue

            if ':' not in entry:
                errMsg = "invalid header value: %s. Valid header format is 'name:value'" % repr(entry).lstrip('u')
                raise SqlmapSyntaxException(errMsg)

            name, value = entry.split(":", 1)
            name = name.lstrip()
            value = value.lstrip()

            # Silently skip entries with an empty name or value
            if name and value:
                conf.httpHeaders.append((name, value))

    elif not conf.requestFile and len(conf.httpHeaders or []) < 2:
        conf.httpHeaders.append((HTTP_HEADER.ACCEPT_LANGUAGE, "en-us,en;q=0.5"))

        if conf.charset:
            conf.httpHeaders.append((HTTP_HEADER.ACCEPT_CHARSET, "%s;q=0.7,*;q=0.1" % conf.charset))
        else:
            conf.httpHeaders.append((HTTP_HEADER.ACCEPT_CHARSET, "ISO-8859-15,utf-8;q=0.7,*;q=0.7"))

        # Invalidating any caching mechanism in between
        # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
        conf.httpHeaders.append((HTTP_HEADER.CACHE_CONTROL, "no-cache,no-store"))
        conf.httpHeaders.append((HTTP_HEADER.PRAGMA, "no-cache"))
2012-12-06 17:14:19 +04:00
|
|
|
def _defaultHTTPUserAgent():
    """
    Returns the default sqlmap HTTP User-Agent header value.

    @return: default sqlmap HTTP User-Agent header
    @rtype: C{str}
    """

    # NOTE: removed long-dead commented-out alternative User-Agent returns
    # (Firefox 3 / MSIE 7 strings from 2009) that only obscured the function
    return "%s (%s)" % (VERSION_STRING, SITE)
2012-12-06 17:14:19 +04:00
|
|
|
def _setHTTPUserAgent():
    """
    Set the HTTP User-Agent header.
    Depending on the user options it can be:

        * The default sqlmap string
        * A default value read as user option
        * A random value read from a list of User-Agent headers from a
          file choosed as user option
    """

    if conf.mobile:
        message = "which smartphone do you want sqlmap to imitate "
        message += "through HTTP User-Agent header?\n"
        items = sorted(getPublicTypeMembers(MOBILES, True))

        for count, item in enumerate(items):
            message += "[%d] %s%s\n" % (count + 1, item[0], " (default)" if item == MOBILES.IPHONE else "")

        test = readInput(message.rstrip('\n'), default=items.index(MOBILES.IPHONE) + 1)

        try:
            item = items[int(test) - 1]
        except:
            # Any invalid/non-numeric answer falls back to the default phone
            item = MOBILES.IPHONE

        conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, item[1]))

    elif conf.agent:
        debugMsg = "setting the HTTP User-Agent header"
        logger.debug(debugMsg)

        conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, conf.agent))

    elif not conf.randomAgent:
        # BUGFIX: the original used '_' both as a "not found" flag and as the
        # loop variable iterating header values, so after a non-matching loop
        # the flag held the *last header value* (and an empty value would have
        # silently suppressed the default User-Agent). Use an explicit check.
        if not any(header == HTTP_HEADER.USER_AGENT for header, _ in conf.httpHeaders):
            conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, _defaultHTTPUserAgent()))

    else:
        # Random User-Agent (--random-agent): lazily load the wordlist once
        if not kb.userAgents:
            debugMsg = "loading random HTTP User-Agent header(s) from "
            debugMsg += "file '%s'" % paths.USER_AGENTS
            logger.debug(debugMsg)

            try:
                kb.userAgents = getFileItems(paths.USER_AGENTS)
            except IOError:
                warnMsg = "unable to read HTTP User-Agent header "
                warnMsg += "file '%s'" % paths.USER_AGENTS
                logger.warn(warnMsg)

                # Fall back to the default agent when the file is unreadable
                conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, _defaultHTTPUserAgent()))
                return

        userAgent = random.sample(kb.userAgents or [_defaultHTTPUserAgent()], 1)[0]

        infoMsg = "fetched random HTTP User-Agent header from "
        infoMsg += "file '%s': '%s'" % (paths.USER_AGENTS, userAgent)
        logger.info(infoMsg)

        conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, userAgent))
2012-12-06 17:14:19 +04:00
|
|
|
def _setHTTPReferer():
    """
    Set the HTTP Referer
    """

    if not conf.referer:
        return

    logger.debug("setting the HTTP Referer header")
    conf.httpHeaders.append((HTTP_HEADER.REFERER, conf.referer))
2015-03-20 02:56:36 +03:00
|
|
|
def _setHTTPHost():
    """
    Set the HTTP Host
    """

    if not conf.host:
        return

    logger.debug("setting the HTTP Host header")
    conf.httpHeaders.append((HTTP_HEADER.HOST, conf.host))
2012-12-06 17:14:19 +04:00
|
|
|
def _setHTTPCookies():
    """
    Set the HTTP Cookie header
    """

    if not conf.cookie:
        return

    logger.debug("setting the HTTP Cookie header")
    conf.httpHeaders.append((HTTP_HEADER.COOKIE, conf.cookie))
2012-12-06 17:14:19 +04:00
|
|
|
def _setHTTPTimeout():
    """
    Set the HTTP timeout (option --timeout), enforcing a 3 second minimum
    and defaulting to 30 seconds, then applies it as the global socket
    default timeout.
    """

    if conf.timeout:
        debugMsg = "setting the HTTP timeout"
        logger.debug(debugMsg)

        conf.timeout = float(conf.timeout)

        if conf.timeout < 3.0:
            # BUGFIX: original warning read "sqlmap will going to reset it"
            warnMsg = "the minimum HTTP timeout is 3 seconds, sqlmap "
            warnMsg += "will reset it"
            logger.warn(warnMsg)

            conf.timeout = 3.0
    else:
        conf.timeout = 30.0

    socket.setdefaulttimeout(conf.timeout)
2012-12-06 17:14:19 +04:00
|
|
|
def _checkDependencies():
    """
    Checks for missing dependencies.
    """

    if not conf.dependencies:
        return

    checkDependencies()
2015-01-13 12:33:51 +03:00
|
|
|
def _createTemporaryDirectory():
    """
    Creates temporary directory for this run.

    Raises SqlmapSystemException when the system temporary location can not
    be accessed or created (e.g. disk full or permission problem).
    """

    try:
        if not os.path.isdir(tempfile.gettempdir()):
            os.makedirs(tempfile.gettempdir())
    # BUGFIX: os.makedirs() raises OSError (not IOError) on Python 2,
    # so the original 'except IOError' let such failures escape unhandled
    except (OSError, IOError) as ex:
        errMsg = "there has been a problem while accessing "
        errMsg += "system's temporary directory location(s) ('%s'). Please " % getSafeExString(ex)
        errMsg += "make sure that there is enough disk space left. If problem persists, "
        errMsg += "try to set environment variable 'TEMP' to a location "
        errMsg += "writeable by the current user"
        raise SqlmapSystemException(errMsg)

    # Use a dedicated per-process "sqlmap..." subdirectory unless one is
    # already in effect
    if "sqlmap" not in (tempfile.tempdir or ""):
        tempfile.tempdir = tempfile.mkdtemp(prefix="sqlmap", suffix=str(os.getpid()))

    kb.tempDir = tempfile.tempdir

    if not os.path.isdir(tempfile.tempdir):
        os.makedirs(tempfile.tempdir)
2012-12-06 17:14:19 +04:00
|
|
|
def _cleanupOptions():
    """
    Cleanup configuration attributes.

    Normalizes, splits and type-converts user-supplied option values in the
    ``conf`` singleton before the run starts.
    """

    debugMsg = "cleaning up configuration parameters"
    logger.debug(debugMsg)

    width = getConsoleWidth()

    if conf.eta:
        conf.progressWidth = width - 26
    else:
        conf.progressWidth = width - 46

    # Expand '~' in every path-like option value
    for key, value in conf.items():
        if value and any(key.endswith(_) for _ in ("Path", "File", "Dir")):
            conf[key] = safeExpandUser(value)

    if conf.testParameter:
        conf.testParameter = urldecode(conf.testParameter)
        conf.testParameter = conf.testParameter.replace(" ", "")
        conf.testParameter = re.split(PARAMETER_SPLITTING_REGEX, conf.testParameter)
    else:
        conf.testParameter = []

    if conf.user:
        conf.user = conf.user.replace(" ", "")

    if conf.rParam:
        conf.rParam = conf.rParam.replace(" ", "")
        conf.rParam = re.split(PARAMETER_SPLITTING_REGEX, conf.rParam)
    else:
        conf.rParam = []

    if conf.paramDel and '\\' in conf.paramDel:
        conf.paramDel = conf.paramDel.decode("string_escape")

    if conf.skip:
        conf.skip = conf.skip.replace(" ", "")
        conf.skip = re.split(PARAMETER_SPLITTING_REGEX, conf.skip)
    else:
        conf.skip = []

    if conf.cookie:
        conf.cookie = re.sub(r"[\r\n]", "", conf.cookie)

    if conf.delay:
        conf.delay = float(conf.delay)

    if conf.rFile:
        conf.rFile = ntToPosixSlashes(normalizePath(conf.rFile))

    if conf.wFile:
        conf.wFile = ntToPosixSlashes(normalizePath(conf.wFile))

    if conf.dFile:
        conf.dFile = ntToPosixSlashes(normalizePath(conf.dFile))

    if conf.sitemapUrl and not conf.sitemapUrl.lower().startswith("http"):
        conf.sitemapUrl = "http%s://%s" % ('s' if conf.forceSSL else '', conf.sitemapUrl)

    if conf.msfPath:
        conf.msfPath = ntToPosixSlashes(normalizePath(conf.msfPath))

    if conf.tmpPath:
        conf.tmpPath = ntToPosixSlashes(normalizePath(conf.tmpPath))

    if any((conf.googleDork, conf.logFile, conf.bulkFile, conf.sitemapUrl, conf.forms, conf.crawlDepth)):
        conf.multipleTargets = True

    if conf.optimize:
        setOptimize()

    # BUGFIX: original called re.sub(..., conf.data, re.I) passing re.I (== 2)
    # as the positional 'count' argument (limiting replacement to two
    # case-sensitive occurrences) instead of as a flag; use inline (?i)
    markRegex = r"(?i)%s" % INJECT_HERE_MARK.replace(" ", r"[^A-Za-z]*")

    if conf.data:
        conf.data = re.sub(markRegex, CUSTOM_INJECTION_MARK_CHAR, conf.data)

    if conf.url:
        conf.url = re.sub(markRegex, CUSTOM_INJECTION_MARK_CHAR, conf.url)

    if conf.os:
        conf.os = conf.os.capitalize()

    if conf.dbms:
        conf.dbms = conf.dbms.capitalize()

    if conf.testFilter:
        conf.testFilter = conf.testFilter.strip('*+')
        conf.testFilter = re.sub(r"([^.])([*+])", r"\g<1>.\g<2>", conf.testFilter)

        try:
            re.compile(conf.testFilter)
        except re.error:
            # Not a valid regex - treat the filter as a literal string
            conf.testFilter = re.escape(conf.testFilter)

    if conf.testSkip:
        conf.testSkip = conf.testSkip.strip('*+')
        conf.testSkip = re.sub(r"([^.])([*+])", r"\g<1>.\g<2>", conf.testSkip)

        try:
            re.compile(conf.testSkip)
        except re.error:
            conf.testSkip = re.escape(conf.testSkip)

    if "timeSec" not in kb.explicitSettings:
        if conf.tor:
            # Tor adds latency - double the blind time delay
            conf.timeSec = 2 * conf.timeSec
            kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE

            warnMsg = "increasing default value for "
            warnMsg += "option '--time-sec' to %d because " % conf.timeSec
            warnMsg += "switch '--tor' was provided"
            logger.warn(warnMsg)
    else:
        kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE

    if conf.retries:
        conf.retries = min(conf.retries, MAX_CONNECT_RETRIES)

    if conf.code:
        conf.code = int(conf.code)

    if conf.csvDel:
        conf.csvDel = conf.csvDel.decode("string_escape")  # e.g. '\\t' -> '\t'

    if conf.torPort and isinstance(conf.torPort, basestring) and conf.torPort.isdigit():
        conf.torPort = int(conf.torPort)

    # NOTE: original normalized conf.torType twice; duplicate removed
    if conf.torType:
        conf.torType = conf.torType.upper()

    if conf.outputDir:
        paths.SQLMAP_OUTPUT_PATH = os.path.realpath(os.path.expanduser(conf.outputDir))
        setPaths()

    if conf.string:
        try:
            conf.string = conf.string.decode("unicode_escape")
        except:
            # Fall back to replacing escaped whitespace sequences one by one
            charset = string.whitespace.replace(" ", "")
            for _ in charset:
                conf.string = conf.string.replace(_.encode("string_escape"), _)

    if conf.getAll:
        map(lambda x: conf.__setitem__(x, True), WIZARD.ALL)

    if conf.noCast:
        for _ in DUMP_REPLACEMENTS.keys():
            del DUMP_REPLACEMENTS[_]

    if conf.dumpFormat:
        conf.dumpFormat = conf.dumpFormat.upper()

    if conf.col:
        conf.col = re.sub(r"\s*,\s*", ",", conf.col)

    if conf.excludeCol:
        conf.excludeCol = re.sub(r"\s*,\s*", ",", conf.excludeCol)

    if conf.binaryFields:
        conf.binaryFields = re.sub(r"\s*,\s*", ",", conf.binaryFields)

    threadData = getCurrentThreadData()
    threadData.reset()
2015-07-26 17:18:41 +03:00
|
|
|
def _dirtyPatches():
    """
    Place for "dirty" Python related patches
    """

    # to accept overly long result lines (e.g. SQLi results in HTTP header responses)
    httplib._MAXLINE = 1024 * 1024
2012-12-06 17:14:19 +04:00
|
|
|
def _purgeOutput():
    """
    Safely removes (purges) output directory.
    """

    if not conf.purgeOutput:
        return

    purge(paths.SQLMAP_OUTPUT_PATH)
2012-12-06 17:14:19 +04:00
|
|
|
def _setConfAttributes():
    """
    This function set some needed attributes into the configuration
    singleton.
    """

    logger.debug("initializing the configuration")

    # Scalars that start out unset
    conf.authUsername = conf.authPassword = None
    conf.cj = None
    conf.dbmsConnector = conf.dbmsHandler = None
    conf.dnsServer = None
    conf.dumpPath = None
    conf.hashDB = conf.hashDBFile = None
    conf.hostname = None
    conf.outputPath = None
    conf.path = conf.port = None
    conf.proxyList = None
    conf.resultsFilename = conf.resultsFP = None
    conf.scheme = None
    conf.trafficFP = None
    conf.wFileType = None

    # Boolean flags
    conf.ipv6 = False
    conf.multipleTargets = False

    # Fresh containers
    conf.boundaries = []
    conf.httpHeaders = []
    conf.paramDict = {}
    conf.parameters = {}
    conf.tests = []
2012-12-06 17:14:19 +04:00
|
|
|
def _setKnowledgeBaseAttributes(flushAll=True):
    """
    This function set some needed attributes into the knowledge base
    singleton. When flushAll is False, attributes that must survive
    between multiple targets (e.g. loaded user agents, tamper functions,
    target queue) are kept as they are.
    """

    debugMsg = "initializing the knowledge base"
    logger.debug(debugMsg)

    kb.absFilePaths = set()
    kb.adjustTimeDelay = None
    kb.alerted = False
    kb.alwaysRefresh = None
    kb.arch = None
    kb.authHeader = None
    kb.bannerFp = AttribDict()
    kb.binaryField = False

    kb.brute = AttribDict({"tables": [], "columns": []})
    kb.bruteMode = False

    # Per-run caches (page content, compiled regexes, statistics)
    kb.cache = AttribDict()
    kb.cache.content = {}
    kb.cache.regex = {}
    kb.cache.stdev = {}

    # Randomized marker strings used to delimit payload output in responses
    kb.chars = AttribDict()
    kb.chars.delimiter = randomStr(length=6, lowercase=True)
    kb.chars.start = "%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, randomStr(length=3, alphabet=KB_CHARS_LOW_FREQUENCY_ALPHABET), KB_CHARS_BOUNDARY_CHAR)
    kb.chars.stop = "%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, randomStr(length=3, alphabet=KB_CHARS_LOW_FREQUENCY_ALPHABET), KB_CHARS_BOUNDARY_CHAR)
    kb.chars.at, kb.chars.space, kb.chars.dollar, kb.chars.hash_ = ("%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, _, KB_CHARS_BOUNDARY_CHAR) for _ in randomStr(length=4, lowercase=True))

    kb.columnExistsChoice = None
    kb.commonOutputs = None
    kb.counters = {}
    kb.data = AttribDict()
    kb.dataOutputFlag = False

    # Active back-end DBMS fingerprint
    kb.dbms = None
    kb.dbmsVersion = [UNKNOWN_DBMS_VERSION]

    kb.delayCandidates = TIME_DELAY_CANDIDATES * [0]
    kb.dep = None
    kb.dnsMode = False
    kb.dnsTest = None
    kb.docRoot = None
    kb.dumpTable = None
    kb.dumpKeyboardInterrupt = False
    kb.dynamicMarkings = []
    kb.dynamicParameter = False
    kb.endDetection = False
    kb.explicitSettings = set()
    kb.extendTests = None
    kb.errorChunkLength = None
    kb.errorIsNone = True
    kb.falsePositives = []
    kb.fileReadMode = False
    kb.followSitemapRecursion = None
    kb.forcedDbms = None
    kb.forcePartialUnion = False
    kb.forceWhere = None
    kb.futileUnion = None
    kb.headersFp = {}
    kb.heuristicDbms = None
    kb.heuristicMode = False
    kb.heuristicTest = None
    kb.hintValue = None
    kb.htmlFp = []
    kb.httpErrorCodes = {}
    kb.inferenceMode = False
    kb.ignoreCasted = None
    kb.ignoreNotFound = False
    kb.ignoreTimeout = False
    kb.injection = InjectionDict()
    kb.injections = []
    kb.laggingChecked = False
    kb.lastParserStatus = None

    # Locks shared between worker threads
    kb.locks = AttribDict()
    for _ in ("cache", "count", "index", "io", "limit", "log", "socket", "redirect", "request", "value"):
        kb.locks[_] = threading.Lock()

    kb.matchRatio = None
    kb.maxConnectionsFlag = False
    kb.mergeCookies = None
    kb.multiThreadMode = False
    kb.negativeLogic = False
    kb.nullConnection = None
    kb.oldMsf = None
    kb.orderByColumns = None
    kb.originalCode = None
    kb.originalPage = None
    kb.originalPageTime = None
    kb.originalTimeDelay = None
    kb.originalUrls = dict()

    # Back-end DBMS underlying operating system fingerprint via banner (-b)
    # parsing
    kb.os = None
    kb.osVersion = None
    kb.osSP = None

    kb.pageCompress = True
    kb.pageTemplate = None
    kb.pageTemplates = dict()
    kb.pageEncoding = DEFAULT_PAGE_ENCODING
    kb.pageStable = None
    kb.partRun = None
    kb.permissionFlag = False
    kb.postHint = None
    kb.postSpaceToPlus = False
    kb.postUrlEncode = True
    kb.prependFlag = False
    kb.processResponseCounter = 0
    kb.previousMethod = None
    kb.processUserMarks = None
    kb.proxyAuthHeader = None
    kb.queryCounter = 0
    kb.redirectChoice = None
    kb.reflectiveMechanism = True
    kb.reflectiveCounters = {REFLECTIVE_COUNTER.MISS: 0, REFLECTIVE_COUNTER.HIT: 0}
    kb.requestCounter = 0
    kb.resendPostOnRedirect = None
    kb.responseTimes = {}
    kb.responseTimeMode = None
    kb.responseTimePayload = None
    kb.resumeValues = True
    kb.safeCharEncode = False
    kb.safeReq = AttribDict()
    kb.singleLogFlags = set()
    kb.reduceTests = None
    kb.tlsSNI = {}
    kb.stickyDBMS = False
    kb.stickyLevel = None
    kb.storeCrawlingChoice = None
    kb.storeHashesChoice = None
    kb.suppressResumeInfo = False
    kb.technique = None
    kb.tempDir = None
    kb.testMode = False
    kb.testOnlyCustom = False
    kb.testQueryCount = 0
    kb.testType = None
    kb.threadContinue = True
    kb.threadException = False
    kb.tableExistsChoice = None
    kb.timeValidCharsRun = 0
    kb.uChar = NULL
    kb.unionDuplicates = False
    kb.xpCmdshellAvailable = False

    # Attributes reset only on a full flush (not between multiple targets)
    if flushAll:
        kb.headerPaths = {}
        kb.keywords = set(getFileItems(paths.SQL_KEYWORDS))
        kb.passwordMgr = None
        kb.skipVulnHost = None
        kb.tamperFunctions = []
        kb.targets = oset()
        kb.testedParams = set()
        kb.userAgents = None
        kb.vainRun = True
        kb.vulnHosts = set()
        kb.wafFunctions = []
        kb.wordlists = None
2012-12-06 17:14:19 +04:00
|
|
|
def _useWizardInterface():
    """
    Presents simple wizard interface for beginner users

    Interactively collects target URL, optional request body, the
    level/risk profile and the enumeration scope, then mutes output
    and enables batch mode so sqlmap can run unattended.
    """

    if not conf.wizard:
        return

    logger.info("starting wizard interface")

    while not conf.url:
        message = "Please enter full target URL (-u): "
        conf.url = readInput(message, default=None)

    # Fixed: original used "... else conf.method" which made the conditional a
    # no-op (both branches yielded conf.method), so for GET requests the prompt
    # wrongly said "GET data" instead of falling back to POST
    message = "%s data (--data) [Enter for None]: " % ((conf.method if conf.method != HTTPMETHOD.GET else None) or HTTPMETHOD.POST)
    conf.data = readInput(message, default=None)

    if not (filter(lambda _: '=' in unicode(_), (conf.url, conf.data)) or '*' in conf.url):
        # Same fix as above: fall back to POST when the method is GET
        warnMsg = "no GET and/or %s parameter(s) found for testing " % ((conf.method if conf.method != HTTPMETHOD.GET else None) or HTTPMETHOD.POST)
        warnMsg += "(e.g. GET parameter 'id' in 'http://www.site.com/vuln.php?id=1'). "
        if not conf.crawlDepth and not conf.forms:
            warnMsg += "Will search for forms"
            conf.forms = True
        logger.warn(warnMsg)

    choice = None

    while choice is None or choice not in ("", "1", "2", "3"):
        message = "Injection difficulty (--level/--risk). Please choose:\n"
        message += "[1] Normal (default)\n[2] Medium\n[3] Hard"
        choice = readInput(message, default='1')

        if choice == '2':
            conf.risk = 2
            conf.level = 3
        elif choice == '3':
            conf.risk = 3
            conf.level = 5
        else:
            conf.risk = 1
            conf.level = 1

    if not conf.getAll:
        choice = None

        while choice is None or choice not in ("", "1", "2", "3"):
            message = "Enumeration (--banner/--current-user/etc). Please choose:\n"
            message += "[1] Basic (default)\n[2] Intermediate\n[3] All"
            choice = readInput(message, default='1')

            if choice == '2':
                map(lambda x: conf.__setitem__(x, True), WIZARD.INTERMEDIATE)
            elif choice == '3':
                map(lambda x: conf.__setitem__(x, True), WIZARD.ALL)
            else:
                map(lambda x: conf.__setitem__(x, True), WIZARD.BASIC)

    logger.debug("muting sqlmap.. it will do the magic for you")
    conf.verbose = 0

    conf.batch = True
    conf.threads = 4

    dataToStdout("\nsqlmap is running, please wait..\n\n")
|
2011-03-29 15:42:55 +04:00
|
|
|
|
2015-08-14 23:49:32 +03:00
|
|
|
def _saveConfig():
    """
    Saves the command line options to a sqlmap configuration INI file
    """

    if not conf.saveConfig:
        return

    logger.debug("saving command line options to a sqlmap configuration INI file")

    config = UnicodeRawConfigParser()

    # bucket every recognized option (with its value and declared datatype)
    # under its option family
    userOpts = dict((family, []) for family in optDict.keys())

    for option, value in conf.items():
        for family, optionData in optDict.items():
            if option in optionData:
                userOpts[family].append((option, value, optionData[option]))

    for family, optionData in userOpts.items():
        config.add_section(family)

        optionData.sort()

        for option, value, datatype in optionData:
            if datatype and isListLike(datatype):
                datatype = datatype[0]

            if option in IGNORE_SAVE_OPTIONS:
                continue

            # unset options are serialized with a type-appropriate default
            if value is None:
                if datatype == OPTION_TYPE.BOOLEAN:
                    value = "False"
                elif datatype in (OPTION_TYPE.INTEGER, OPTION_TYPE.FLOAT):
                    value = str(defaults[option]) if option in defaults else "0"
                elif datatype == OPTION_TYPE.STRING:
                    value = ""

            if isinstance(value, basestring):
                # indent continuation lines so multi-line values stay parseable
                value = value.replace("\n", "\n ")

            config.set(family, option, value)

    confFP = openFile(conf.saveConfig, "wb")

    try:
        config.write(confFP)
    except IOError as ex:
        errMsg = "something went wrong while trying "
        errMsg += "to write to the configuration file '%s' ('%s')" % (conf.saveConfig, getSafeExString(ex))
        raise SqlmapSystemException(errMsg)

    infoMsg = "saved command line options to the configuration file '%s'" % conf.saveConfig
    logger.info(infoMsg)
|
|
|
|
|
2012-12-17 15:29:33 +04:00
|
|
|
def setVerbosity():
    """
    This function set the verbosity of sqlmap output messages.
    """

    if conf.verbose is None:
        conf.verbose = 1

    conf.verbose = int(conf.verbose)

    # per-payload verbosity interferes with the ETA progress bar
    if conf.verbose > 2 and conf.eta:
        conf.verbose = 2

    levels = {
        0: logging.ERROR,
        1: logging.INFO,
        2: logging.DEBUG,
        3: CUSTOM_LOGGING.PAYLOAD,
        4: CUSTOM_LOGGING.TRAFFIC_OUT,
    }

    if conf.verbose in levels:
        logger.setLevel(levels[conf.verbose])
    elif conf.verbose >= 5:
        logger.setLevel(CUSTOM_LOGGING.TRAFFIC_IN)
|
2008-10-15 19:38:22 +04:00
|
|
|
|
2016-02-16 11:56:53 +03:00
|
|
|
def _normalizeOptions(inputOptions):
    """
    Sets proper option types
    """

    # flatten the per-family option/datatype mapping into one lookup table
    types_ = {}
    for group in optDict.keys():
        types_.update(optDict[group])

    casts = {
        OPTION_TYPE.BOOLEAN: (bool, False),
        OPTION_TYPE.INTEGER: (int, 0),
        OPTION_TYPE.FLOAT: (float, 0.0),
    }

    for key in inputOptions:
        if key not in types_:
            continue

        value = inputOptions[key]
        if value is None:
            continue

        type_ = types_[key]
        if type_ and isinstance(type_, tuple):
            type_ = type_[0]

        if type_ in casts:
            cast, fallback = casts[type_]
            try:
                value = cast(value)
            except (TypeError, ValueError):
                # uncastable values degrade to the type's neutral default
                value = fallback

        inputOptions[key] = value
|
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _mergeOptions(inputOptions, overrideOptions):
    """
    Merge command line options with configuration file and default options.

    @param inputOptions: optparse object with command line options.
    @type inputOptions: C{instance}
    """

    # NOTE(review): base64unpickle deserializes arbitrary pickled data -
    # only safe when '--pickled-options' comes from a trusted source
    if inputOptions.pickledOptions:
        try:
            inputOptions = base64unpickle(inputOptions.pickledOptions)
            _normalizeOptions(inputOptions)
        except Exception, ex:
            errMsg = "provided invalid value '%s' for option '--pickled-options'" % inputOptions.pickledOptions
            errMsg += " ('%s')" % ex if ex.message else ""
            raise SqlmapSyntaxException(errMsg)

    if inputOptions.configFile:
        configFileParser(inputOptions.configFile)

    # inputOptions may be a dict-like object or an optparse namespace
    if hasattr(inputOptions, "items"):
        inputOptionsItems = inputOptions.items()
    else:
        inputOptionsItems = inputOptions.__dict__.items()

    # command line values overwrite existing conf entries unless they are
    # unset (None/False) - except when overrideOptions forces them through
    for key, value in inputOptionsItems:
        if key not in conf or value not in (None, False) or overrideOptions:
            conf[key] = value

    # remember which options were explicitly provided (before defaults kick in)
    for key, value in conf.items():
        if value is not None:
            kb.explicitSettings.add(key)

    # fill remaining unset options with hard-coded defaults
    for key, value in defaults.items():
        if hasattr(conf, key) and conf[key] is None:
            conf[key] = value

    # uppercase-name -> canonical option name lookup table
    lut = {}
    for group in optDict.keys():
        lut.update((_.upper(), _) for _ in optDict[group])

    # environment variables prefixed with SQLMAP_ map onto known options
    envOptions = {}
    for key, value in os.environ.items():
        if key.upper().startswith(SQLMAP_ENVIRONMENT_PREFIX):
            _ = key[len(SQLMAP_ENVIRONMENT_PREFIX):].upper()
            if _ in lut:
                envOptions[lut[_]] = value

    # environment-supplied values are applied last, overwriting conf
    if envOptions:
        _normalizeOptions(envOptions)
        for key, value in envOptions.items():
            conf[key] = value

    mergedOptions.update(conf)
|
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setTrafficOutputFP():
    """Opens the HTTP traffic log file (option '--traffic-file'), if requested."""

    if not conf.trafficFile:
        return

    logger.info("setting file for logging HTTP traffic")

    conf.trafficFP = openFile(conf.trafficFile, "w+")
|
2010-11-08 14:22:47 +03:00
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setDNSServer():
    """
    Sets up the DNS server instance used for DNS exfiltration (--dns-domain).

    Raises SqlmapMissingPrivileges when not running with administrator
    rights (port 53 is privileged) and SqlmapGenericException when the
    server fails to start.
    """

    if not conf.dnsName:
        return

    logger.info("setting up DNS server instance")

    if not runningAsAdmin():
        errMsg = "you need to run sqlmap as an administrator "
        errMsg += "if you want to perform a DNS data exfiltration attack "
        errMsg += "as it will need to listen on privileged UDP port 53 "
        errMsg += "for incoming address resolution attempts"
        raise SqlmapMissingPrivileges(errMsg)

    try:
        conf.dnsServer = DNSServer()
        conf.dnsServer.run()
    except socket.error as msg:
        errMsg = "there was an error while setting up "
        errMsg += "DNS server instance ('%s')" % msg
        raise SqlmapGenericException(errMsg)
|
2012-03-31 16:08:27 +04:00
|
|
|
|
2013-08-12 16:25:51 +04:00
|
|
|
def _setProxyList():
    """Populates conf.proxyList from the file given via option '--proxy-file'."""

    if not conf.proxyFile:
        return

    conf.proxyList = []

    content = readCachedFileContent(conf.proxyFile)
    # accepts entries like "host:port", "http://host:port", "socks5://host:port";
    # scheme defaults to http when omitted
    for match in re.finditer(r"(?i)((http[^:]*|socks[^:]*)://)?([\w.]+):(\d+)", content):
        _, scheme, host, port = match.groups()
        conf.proxyList.append("%s://%s:%s" % (scheme or "http", host, port))
|
2013-08-12 16:25:51 +04:00
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setTorProxySettings():
    """Dispatches Tor proxy setup based on the chosen '--tor-type'."""

    if not conf.tor:
        return

    if conf.torType == PROXY_TYPE.HTTP:
        _setTorHttpProxySettings()
    else:
        _setTorSocksProxySettings()
|
2011-12-16 03:19:55 +04:00
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setTorHttpProxySettings():
    """
    Probes candidate local ports for a listening Tor HTTP proxy and points
    conf.proxy at the first one that accepts a connection.

    Raises:
        SqlmapConnectionException: if none of the candidate ports is reachable.
    """

    infoMsg = "setting Tor HTTP proxy settings"
    logger.info(infoMsg)

    found = None

    for port in (DEFAULT_TOR_HTTP_PORTS if not conf.torPort else (conf.torPort,)):
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            s.connect((LOCALHOST, port))
            found = port
        except socket.error:
            pass
        finally:
            # Fixed: original closed only the last created socket after the
            # loop, leaking one socket per unsuccessful probe
            s.close()
        if found:
            break

    if found:
        conf.proxy = "http://%s:%d" % (LOCALHOST, found)
    else:
        errMsg = "can't establish connection with the Tor proxy. "
        errMsg += "Please make sure that you have Vidalia, Privoxy or "
        errMsg += "Polipo bundle installed for you to be able to "
        errMsg += "successfully use switch '--tor' "

        raise SqlmapConnectionException(errMsg)

    if not conf.checkTor:
        warnMsg = "use switch '--check-tor' at "
        warnMsg += "your own convenience when accessing "
        warnMsg += "Tor anonymizing network because of "
        warnMsg += "known issues with default settings of various 'bundles' "
        warnMsg += "(e.g. Vidalia)"
        logger.warn(warnMsg)
|
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _setTorSocksProxySettings():
    """Routes urllib2 traffic through the local Tor SOCKS proxy."""

    infoMsg = "setting Tor SOCKS proxy settings"
    logger.info(infoMsg)

    # Has to be SOCKS5 to prevent DNS leaks (http://en.wikipedia.org/wiki/Tor_%28anonymity_network%29)
    proxyType = socks.PROXY_TYPE_SOCKS5 if conf.torType == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4
    socks.setdefaultproxy(proxyType, LOCALHOST, conf.torPort or DEFAULT_TOR_SOCKS_PORT)
    socks.wrapmodule(urllib2)
|
|
|
|
|
2015-03-24 17:25:16 +03:00
|
|
|
def _checkWebSocket():
    """
    Ensures the third-party module 'websocket-client' is importable when
    the target URL uses a WebSocket scheme (ws:// or wss://).

    Raises:
        SqlmapMissingDependence: if the module is not installed.
    """

    if conf.url and conf.url.startswith(("ws:/", "wss:/")):
        try:
            from websocket import ABNF
        except ImportError:
            # Fixed typo in the original user-facing message ("funcionality")
            errMsg = "sqlmap requires third-party module 'websocket-client' "
            errMsg += "in order to use WebSocket functionality"
            raise SqlmapMissingDependence(errMsg)
|
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _checkTor():
    """Verifies via check.torproject.org that traffic really goes through Tor."""

    if not conf.checkTor:
        return

    logger.info("checking Tor connection")

    try:
        page, _, _ = Request.getPage(url="https://check.torproject.org/", raise404=False)
    except SqlmapConnectionException:
        page = None

    if page and 'Congratulations' in page:
        logger.info("Tor is properly being used")
    else:
        errMsg = "it appears that Tor is not properly set. Please try using options '--tor-type' and/or '--tor-port'"
        raise SqlmapConnectionException(errMsg)
|
2011-10-25 21:37:43 +04:00
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _basicOptionValidation():
    """
    Checks provided options for basic sanity and mutual compatibility,
    raising SqlmapSyntaxException (or SqlmapFilePathException for a
    missing cookies file) on the first violation found.
    """

    if conf.limitStart is not None and not (isinstance(conf.limitStart, int) and conf.limitStart > 0):
        errMsg = "value for option '--start' (limitStart) must be an integer value greater than zero (>0)"
        raise SqlmapSyntaxException(errMsg)

    if conf.limitStop is not None and not (isinstance(conf.limitStop, int) and conf.limitStop > 0):
        errMsg = "value for option '--stop' (limitStop) must be an integer value greater than zero (>0)"
        raise SqlmapSyntaxException(errMsg)

    if conf.level is not None and not (isinstance(conf.level, int) and conf.level >= 1 and conf.level <= 5):
        errMsg = "value for option '--level' must be an integer value from range [1, 5]"
        raise SqlmapSyntaxException(errMsg)

    if conf.risk is not None and not (isinstance(conf.risk, int) and conf.risk >= 1 and conf.risk <= 3):
        errMsg = "value for option '--risk' must be an integer value from range [1, 3]"
        raise SqlmapSyntaxException(errMsg)

    if isinstance(conf.limitStart, int) and conf.limitStart > 0 and \
       isinstance(conf.limitStop, int) and conf.limitStop < conf.limitStart:
        # Fixed wording (was "smaller or equal than") for consistency with the '--first'/'--last' message
        errMsg = "value for option '--start' (limitStart) must be smaller than or equal to value for --stop (limitStop) option"
        raise SqlmapSyntaxException(errMsg)

    if isinstance(conf.firstChar, int) and conf.firstChar > 0 and \
       isinstance(conf.lastChar, int) and conf.lastChar < conf.firstChar:
        errMsg = "value for option '--first' (firstChar) must be smaller than or equal to value for --last (lastChar) option"
        raise SqlmapSyntaxException(errMsg)

    if conf.textOnly and conf.nullConnection:
        errMsg = "switch '--text-only' is incompatible with switch '--null-connection'"
        raise SqlmapSyntaxException(errMsg)

    if conf.direct and conf.url:
        errMsg = "option '-d' is incompatible with option '-u' ('--url')"
        raise SqlmapSyntaxException(errMsg)

    if conf.identifyWaf and conf.skipWaf:
        errMsg = "switch '--identify-waf' is incompatible with switch '--skip-waf'"
        raise SqlmapSyntaxException(errMsg)

    if conf.titles and conf.nullConnection:
        errMsg = "switch '--titles' is incompatible with switch '--null-connection'"
        raise SqlmapSyntaxException(errMsg)

    if conf.dumpTable and conf.search:
        errMsg = "switch '--dump' is incompatible with switch '--search'"
        raise SqlmapSyntaxException(errMsg)

    if conf.data and conf.nullConnection:
        errMsg = "option '--data' is incompatible with switch '--null-connection'"
        raise SqlmapSyntaxException(errMsg)

    if conf.string and conf.nullConnection:
        errMsg = "option '--string' is incompatible with switch '--null-connection'"
        raise SqlmapSyntaxException(errMsg)

    if conf.notString and conf.nullConnection:
        errMsg = "option '--not-string' is incompatible with switch '--null-connection'"
        raise SqlmapSyntaxException(errMsg)

    if conf.noCast and conf.hexConvert:
        errMsg = "switch '--no-cast' is incompatible with switch '--hex'"
        raise SqlmapSyntaxException(errMsg)

    if conf.dumpAll and conf.search:
        errMsg = "switch '--dump-all' is incompatible with switch '--search'"
        raise SqlmapSyntaxException(errMsg)

    if conf.string and conf.notString:
        errMsg = "option '--string' is incompatible with switch '--not-string'"
        raise SqlmapSyntaxException(errMsg)

    if conf.regexp and conf.nullConnection:
        errMsg = "option '--regexp' is incompatible with switch '--null-connection'"
        raise SqlmapSyntaxException(errMsg)

    if conf.regexp:
        try:
            re.compile(conf.regexp)
        except re.error as ex:
            errMsg = "invalid regular expression '%s' ('%s')" % (conf.regexp, getSafeExString(ex))
            raise SqlmapSyntaxException(errMsg)

    if conf.crawlExclude:
        try:
            re.compile(conf.crawlExclude)
        except re.error as ex:
            errMsg = "invalid regular expression '%s' ('%s')" % (conf.crawlExclude, getSafeExString(ex))
            raise SqlmapSyntaxException(errMsg)

    if conf.dumpTable and conf.dumpAll:
        errMsg = "switch '--dump' is incompatible with switch '--dump-all'"
        raise SqlmapSyntaxException(errMsg)

    if conf.predictOutput and (conf.threads > 1 or conf.optimize):
        errMsg = "switch '--predict-output' is incompatible with option '--threads' and switch '-o'"
        raise SqlmapSyntaxException(errMsg)

    if conf.threads > MAX_NUMBER_OF_THREADS and not conf.get("skipThreadCheck"):
        errMsg = "maximum number of used threads is %d avoiding potential connection issues" % MAX_NUMBER_OF_THREADS
        raise SqlmapSyntaxException(errMsg)

    if conf.forms and not any((conf.url, conf.googleDork, conf.bulkFile, conf.sitemapUrl)):
        errMsg = "switch '--forms' requires usage of option '-u' ('--url'), '-g', '-m' or '-x'"
        raise SqlmapSyntaxException(errMsg)

    if conf.crawlExclude and not conf.crawlDepth:
        errMsg = "option '--crawl-exclude' requires usage of switch '--crawl'"
        raise SqlmapSyntaxException(errMsg)

    if conf.safePost and not conf.safeUrl:
        errMsg = "option '--safe-post' requires usage of option '--safe-url'"
        raise SqlmapSyntaxException(errMsg)

    if conf.safeFreq and not any((conf.safeUrl, conf.safeReqFile)):
        errMsg = "option '--safe-freq' requires usage of option '--safe-url' or '--safe-req'"
        raise SqlmapSyntaxException(errMsg)

    if conf.safeReqFile and any((conf.safeUrl, conf.safePost)):
        errMsg = "option '--safe-req' is incompatible with option '--safe-url' and option '--safe-post'"
        raise SqlmapSyntaxException(errMsg)

    if conf.csrfUrl and not conf.csrfToken:
        errMsg = "option '--csrf-url' requires usage of option '--csrf-token'"
        raise SqlmapSyntaxException(errMsg)

    if conf.csrfToken and conf.threads > 1:
        # Fixed: original message referenced '--csrf-url' although the condition checks '--csrf-token'
        errMsg = "option '--csrf-token' is incompatible with option '--threads'"
        raise SqlmapSyntaxException(errMsg)

    if conf.requestFile and conf.url and conf.url != DUMMY_URL:
        errMsg = "option '-r' is incompatible with option '-u' ('--url')"
        raise SqlmapSyntaxException(errMsg)

    if conf.direct and conf.proxy:
        errMsg = "option '-d' is incompatible with option '--proxy'"
        raise SqlmapSyntaxException(errMsg)

    if conf.direct and conf.tor:
        errMsg = "option '-d' is incompatible with switch '--tor'"
        raise SqlmapSyntaxException(errMsg)

    if not conf.tech:
        errMsg = "option '--technique' can't be empty"
        raise SqlmapSyntaxException(errMsg)

    if conf.tor and conf.ignoreProxy:
        errMsg = "switch '--tor' is incompatible with switch '--ignore-proxy'"
        raise SqlmapSyntaxException(errMsg)

    if conf.tor and conf.proxy:
        errMsg = "switch '--tor' is incompatible with option '--proxy'"
        raise SqlmapSyntaxException(errMsg)

    if conf.proxy and conf.proxyFile:
        errMsg = "switch '--proxy' is incompatible with option '--proxy-file'"
        raise SqlmapSyntaxException(errMsg)

    if conf.checkTor and not any((conf.tor, conf.proxy)):
        errMsg = "switch '--check-tor' requires usage of switch '--tor' (or option '--proxy' with HTTP proxy address using Tor)"
        raise SqlmapSyntaxException(errMsg)

    if conf.torPort is not None and not (isinstance(conf.torPort, int) and conf.torPort >= 0 and conf.torPort <= 65535):
        errMsg = "value for option '--tor-port' must be in range 0-65535"
        raise SqlmapSyntaxException(errMsg)

    if conf.torType not in getPublicTypeMembers(PROXY_TYPE, True):
        errMsg = "option '--tor-type' accepts one of following values: %s" % ", ".join(getPublicTypeMembers(PROXY_TYPE, True))
        raise SqlmapSyntaxException(errMsg)

    if conf.dumpFormat not in getPublicTypeMembers(DUMP_FORMAT, True):
        errMsg = "option '--dump-format' accepts one of following values: %s" % ", ".join(getPublicTypeMembers(DUMP_FORMAT, True))
        raise SqlmapSyntaxException(errMsg)

    if conf.skip and conf.testParameter:
        errMsg = "option '--skip' is incompatible with option '-p'"
        raise SqlmapSyntaxException(errMsg)

    if conf.mobile and conf.agent:
        errMsg = "switch '--mobile' is incompatible with option '--user-agent'"
        raise SqlmapSyntaxException(errMsg)

    if conf.proxy and conf.ignoreProxy:
        errMsg = "option '--proxy' is incompatible with switch '--ignore-proxy'"
        raise SqlmapSyntaxException(errMsg)

    if conf.timeSec < 1:
        errMsg = "value for option '--time-sec' must be a positive integer"
        raise SqlmapSyntaxException(errMsg)

    if conf.uChar and not re.match(UNION_CHAR_REGEX, conf.uChar):
        errMsg = "value for option '--union-char' must be an alpha-numeric value (e.g. 1)"
        raise SqlmapSyntaxException(errMsg)

    if isinstance(conf.uCols, basestring):
        if not conf.uCols.isdigit() and ("-" not in conf.uCols or len(conf.uCols.split("-")) != 2):
            # Fixed typo in the original message ("hyphon")
            errMsg = "value for option '--union-cols' must be a range with hyphen "
            errMsg += "(e.g. 1-10) or integer value (e.g. 5)"
            raise SqlmapSyntaxException(errMsg)

    if conf.dbmsCred and ':' not in conf.dbmsCred:
        errMsg = "value for option '--dbms-cred' must be in "
        errMsg += "format <username>:<password> (e.g. \"root:pass\")"
        raise SqlmapSyntaxException(errMsg)

    if conf.charset:
        _ = checkCharEncoding(conf.charset, False)
        if _ is None:
            errMsg = "unknown charset '%s'. Please visit " % conf.charset
            errMsg += "'%s' to get the full list of " % CODECS_LIST_PAGE
            errMsg += "supported charsets"
            raise SqlmapSyntaxException(errMsg)
        else:
            # normalize to the canonical codec name
            conf.charset = _

    if conf.loadCookies:
        if not os.path.exists(conf.loadCookies):
            errMsg = "cookies file '%s' does not exist" % conf.loadCookies
            raise SqlmapFilePathException(errMsg)
|
2012-03-08 14:03:59 +04:00
|
|
|
|
2012-12-06 17:14:19 +04:00
|
|
|
def _resolveCrossReferences():
    # Late-binds functions into sibling modules, presumably to break
    # circular-import dependencies between them and this module - TODO confirm
    lib.core.threads.readInput = readInput
    lib.core.common.getPageTemplate = getPageTemplate
    lib.core.convert.singleTimeWarnMessage = singleTimeWarnMessage
    lib.request.connect.setHTTPHandlers = _setHTTPHandlers
    lib.utils.search.setHTTPHandlers = _setHTTPHandlers
    lib.controller.checks.setVerbosity = setVerbosity
|
2011-06-07 13:50:00 +04:00
|
|
|
|
2013-01-29 19:33:16 +04:00
|
|
|
def initOptions(inputOptions=None, overrideOptions=False):
    """
    Sets configuration and knowledge base attributes and merges the
    provided input options into the global configuration.

    inputOptions: options (e.g. parsed from the command line or an API
                  call) to merge into the configuration; defaults to an
                  empty AttribDict
    overrideOptions: whether values in inputOptions should override ones
                     already present in the configuration
    """

    # NOTE: the original signature used a mutable default argument
    # (inputOptions=AttribDict()) which is created once at definition time
    # and shared across all calls; creating a fresh instance per call
    # prevents accidental state leakage between consecutive invocations
    if inputOptions is None:
        inputOptions = AttribDict()

    _setConfAttributes()
    _setKnowledgeBaseAttributes()
    _mergeOptions(inputOptions, overrideOptions)
|
|
def init():
    """
    Set attributes into both configuration and knowledge base singletons
    based upon command line and configuration file options.
    """

    # NOTE: these calls are order-sensitive (each may depend on state set
    # up by its predecessors) — do not reorder

    # interactive setup, logging verbosity and option sanitization
    _useWizardInterface()
    setVerbosity()
    _saveConfig()
    _setRequestFromFile()
    _cleanupOptions()
    _dirtyPatches()
    _purgeOutput()
    _checkDependencies()
    _createTemporaryDirectory()
    _basicOptionValidation()

    # networking prerequisites (proxies, Tor, DNS)
    _setProxyList()
    _setTorProxySettings()
    _setDNSServer()
    _adjustLoggingFormatter()

    # target enumeration and request/response mangling hooks
    _setMultipleTargets()
    _setTamperingFunctions()
    _setWafFunctions()
    _setTrafficOutputFP()
    _resolveCrossReferences()
    _checkWebSocket()

    parseTargetUrl()
    parseTargetDirect()

    # HTTP machinery is only set up when there is something to connect to
    targetProvided = any((conf.url, conf.logFile, conf.bulkFile, conf.sitemapUrl, conf.requestFile, conf.googleDork, conf.liveTest))

    if targetProvided:
        _setHTTPTimeout()
        _setHTTPExtraHeaders()
        _setHTTPCookies()
        _setHTTPReferer()
        _setHTTPHost()
        _setHTTPUserAgent()
        _setHTTPAuthentication()
        _setHTTPHandlers()
        _setDNSCache()
        _setSocketPreConnect()
        _setSafeVisit()
        _doSearch()
        _setBulkMultipleTargets()
        _setSitemapTargets()
        _checkTor()
        _setCrawler()
        _findPageForms()
        _setDBMS()
        _setTechnique()

    # injection bootstrapping (threads, target environment, payload data)
    _setThreads()
    _setOS()
    _setWriteFile()
    _setMetasploit()
    _setDBMSAuthentication()
    loadBoundaries()
    loadPayloads()
    _setPrefixSuffix()
    update()
    _loadQueries()