#!/usr/bin/env python

"""
Copyright (c) 2006-2023 sqlmap developers (https://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

from __future__ import division

import codecs
import functools
import glob
import inspect
import logging
import os
import random
import re
import socket
import sys
import tempfile
import threading
import time
import traceback

from lib.controller.checks import checkConnection
from lib.core.common import Backend
from lib.core.common import boldifyMessage
from lib.core.common import checkFile
from lib.core.common import dataToStdout
from lib.core.common import decodeStringEscape
from lib.core.common import fetchRandomAgent
from lib.core.common import filterNone
from lib.core.common import findLocalPort
from lib.core.common import findPageForms
from lib.core.common import getConsoleWidth
from lib.core.common import getFileItems
from lib.core.common import getFileType
from lib.core.common import getPublicTypeMembers
from lib.core.common import getSafeExString
from lib.core.common import intersect
from lib.core.common import normalizePath
from lib.core.common import ntToPosixSlashes
from lib.core.common import openFile
from lib.core.common import parseRequestFile
from lib.core.common import parseTargetDirect
from lib.core.common import paths
from lib.core.common import randomStr
from lib.core.common import readCachedFileContent
from lib.core.common import readInput
from lib.core.common import resetCookieJar
from lib.core.common import runningAsAdmin
from lib.core.common import safeExpandUser
from lib.core.common import safeFilepathEncode
from lib.core.common import saveConfig
from lib.core.common import setColor
from lib.core.common import setOptimize
from lib.core.common import setPaths
from lib.core.common import singleTimeWarnMessage
from lib.core.common import urldecode
from lib.core.compat import cmp
from lib.core.compat import round
from lib.core.compat import xrange
from lib.core.convert import getUnicode
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.data import mergedOptions
from lib.core.data import queries
from lib.core.datatype import AttribDict
from lib.core.datatype import InjectionDict
from lib.core.datatype import OrderedSet
from lib.core.defaults import defaults
from lib.core.dicts import DBMS_DICT
from lib.core.dicts import DUMP_REPLACEMENTS
from lib.core.enums import ADJUST_TIME_DELAY
from lib.core.enums import AUTH_TYPE
from lib.core.enums import CUSTOM_LOGGING
from lib.core.enums import DUMP_FORMAT
from lib.core.enums import FORK
from lib.core.enums import HTTP_HEADER
from lib.core.enums import HTTPMETHOD
from lib.core.enums import MKSTEMP_PREFIX
from lib.core.enums import MOBILES
from lib.core.enums import OPTION_TYPE
from lib.core.enums import PAYLOAD
from lib.core.enums import PRIORITY
from lib.core.enums import PROXY_TYPE
from lib.core.enums import REFLECTIVE_COUNTER
from lib.core.enums import WIZARD
from lib.core.exception import SqlmapConnectionException
from lib.core.exception import SqlmapDataException
from lib.core.exception import SqlmapFilePathException
from lib.core.exception import SqlmapGenericException
from lib.core.exception import SqlmapInstallationException
from lib.core.exception import SqlmapMissingDependence
from lib.core.exception import SqlmapMissingMandatoryOptionException
from lib.core.exception import SqlmapMissingPrivileges
from lib.core.exception import SqlmapSilentQuitException
from lib.core.exception import SqlmapSyntaxException
from lib.core.exception import SqlmapSystemException
from lib.core.exception import SqlmapUnsupportedDBMSException
from lib.core.exception import SqlmapUserQuitException
from lib.core.exception import SqlmapValueException
from lib.core.log import FORMATTER
from lib.core.optiondict import optDict
from lib.core.settings import CODECS_LIST_PAGE
from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
from lib.core.settings import DBMS_ALIASES
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
from lib.core.settings import DEFAULT_PAGE_ENCODING
from lib.core.settings import DEFAULT_TOR_HTTP_PORTS
from lib.core.settings import DEFAULT_TOR_SOCKS_PORTS
from lib.core.settings import DEFAULT_USER_AGENT
from lib.core.settings import DUMMY_URL
from lib.core.settings import IGNORE_CODE_WILDCARD
from lib.core.settings import IS_WIN
from lib.core.settings import KB_CHARS_BOUNDARY_CHAR
from lib.core.settings import KB_CHARS_LOW_FREQUENCY_ALPHABET
from lib.core.settings import LOCALHOST
from lib.core.settings import MAX_CONNECT_RETRIES
from lib.core.settings import MAX_NUMBER_OF_THREADS
from lib.core.settings import NULL
from lib.core.settings import PARAMETER_SPLITTING_REGEX
from lib.core.settings import PRECONNECT_CANDIDATE_TIMEOUT
from lib.core.settings import PROXY_ENVIRONMENT_VARIABLES
from lib.core.settings import SOCKET_PRE_CONNECT_QUEUE_SIZE
from lib.core.settings import SQLMAP_ENVIRONMENT_PREFIX
from lib.core.settings import SUPPORTED_DBMS
from lib.core.settings import SUPPORTED_OS
from lib.core.settings import TIME_DELAY_CANDIDATES
from lib.core.settings import UNION_CHAR_REGEX
from lib.core.settings import UNKNOWN_DBMS_VERSION
from lib.core.settings import URI_INJECTABLE_REGEX
from lib.core.threads import getCurrentThreadData
from lib.core.threads import setDaemon
from lib.core.update import update
from lib.parse.configfile import configFileParser
from lib.parse.payloads import loadBoundaries
from lib.parse.payloads import loadPayloads
from lib.request.basic import checkCharEncoding
from lib.request.basicauthhandler import SmartHTTPBasicAuthHandler
from lib.request.chunkedhandler import ChunkedHandler
from lib.request.connect import Connect as Request
from lib.request.dns import DNSServer
from lib.request.httpshandler import HTTPSHandler
from lib.request.pkihandler import HTTPSPKIAuthHandler
from lib.request.rangehandler import HTTPRangeHandler
from lib.request.redirecthandler import SmartRedirectHandler
from lib.utils.crawler import crawl
from lib.utils.deps import checkDependencies
from lib.utils.har import HTTPCollectorFactory
from lib.utils.purge import purge
from lib.utils.search import search
from thirdparty import six
from thirdparty.keepalive import keepalive
from thirdparty.multipart import multipartpost
from thirdparty.six.moves import collections_abc as _collections
from thirdparty.six.moves import http_client as _http_client
from thirdparty.six.moves import http_cookiejar as _http_cookiejar
from thirdparty.six.moves import urllib as _urllib
from thirdparty.socks import socks
from xml.etree.ElementTree import ElementTree

authHandler = _urllib.request.BaseHandler()
chunkedHandler = ChunkedHandler()
httpsHandler = HTTPSHandler()
keepAliveHandler = keepalive.HTTPHandler()
proxyHandler = _urllib.request.ProxyHandler()
redirectHandler = SmartRedirectHandler()
rangeHandler = HTTPRangeHandler()
multipartPostHandler = multipartpost.MultipartPostHandler()

# Reference: https://mail.python.org/pipermail/python-list/2009-November/558615.html
try:
    WindowsError
except NameError:
    WindowsError = None
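# Note: _loadQueries() below exposes the parsed XML attribute-style; callers can
# then use roughly queries[<DBMS name>].<entry>.query (illustrative access
# pattern only; the actual keys come from 'xml/queries.xml')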
def _loadQueries():
    """
    Loads queries from 'xml/queries.xml' file.
    """

    def iterate(node, retVal=None):
        class DictObject(object):
            def __init__(self):
                self.__dict__ = {}

            def __contains__(self, name):
                return name in self.__dict__

        if retVal is None:
            retVal = DictObject()

        for child in node.findall("*"):
            instance = DictObject()
            retVal.__dict__[child.tag] = instance
            if child.attrib:
                instance.__dict__.update(child.attrib)
            else:
                iterate(child, instance)

        return retVal

    tree = ElementTree()
    try:
        tree.parse(paths.QUERIES_XML)
    except Exception as ex:
        errMsg = "something appears to be wrong with "
        errMsg += "the file '%s' ('%s'). Please make " % (paths.QUERIES_XML, getSafeExString(ex))
        errMsg += "sure that you haven't made any changes to it"
        raise SqlmapInstallationException(errMsg)

    for node in tree.findall("*"):
        queries[node.attrib['value']] = iterate(node)
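# Note: requests parsed below are deduplicated by a "parameter-unique" key: the
# regex strips parameter values, so two requests that differ only in values are
# treated as the same target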
def _setMultipleTargets():
    """
    Define a configuration parameter if we are running in multiple target
    mode.
    """

    initialTargetsCount = len(kb.targets)
    seen = set()

    if not conf.logFile:
        return

    debugMsg = "parsing targets list from '%s'" % conf.logFile
    logger.debug(debugMsg)

    if not os.path.exists(conf.logFile):
        errMsg = "the specified list of targets does not exist"
        raise SqlmapFilePathException(errMsg)

    if checkFile(conf.logFile, False):
        for target in parseRequestFile(conf.logFile):
            url, _, data, _, _ = target
            key = re.sub(r"(\w+=)[^%s ]*" % (conf.paramDel or DEFAULT_GET_POST_DELIMITER), r"\g<1>", "%s %s" % (url, data))
            if key not in seen:
                kb.targets.add(target)
                seen.add(key)

    elif os.path.isdir(conf.logFile):
        files = os.listdir(conf.logFile)
        files.sort()

        for reqFile in files:
            if not re.search(r"([\d]+)\-request", reqFile):
                continue

            for target in parseRequestFile(os.path.join(conf.logFile, reqFile)):
                url, _, data, _, _ = target
                key = re.sub(r"(\w+=)[^%s ]*" % (conf.paramDel or DEFAULT_GET_POST_DELIMITER), r"\g<1>", "%s %s" % (url, data))
                if key not in seen:
                    kb.targets.add(target)
                    seen.add(key)

    else:
        errMsg = "the specified list of targets is neither a file "
        errMsg += "nor a directory"
        raise SqlmapFilePathException(errMsg)

    updatedTargetsCount = len(kb.targets)

    if updatedTargetsCount > initialTargetsCount:
        infoMsg = "sqlmap parsed %d " % (updatedTargetsCount - initialTargetsCount)
        infoMsg += "(parameter unique) requests from the "
        infoMsg += "targets list ready to be tested"
        logger.info(infoMsg)
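# Note: the patch below wraps FORMATTER.format() exactly once (the original is
# kept in FORMATTER._format) and prepends a newline when kb.prependFlag is set,
# so log messages do not overwrite in-place inference output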
def _adjustLoggingFormatter():
    """
    Solves the problem of line deletion caused by overlapping logging messages
    and retrieved data info in inference mode
    """

    if hasattr(FORMATTER, '_format'):
        return

    def format(record):
        message = FORMATTER._format(record)
        message = boldifyMessage(message)
        if kb.get("prependFlag"):
            message = "\n%s" % message
            kb.prependFlag = False
        return message

    FORMATTER._format = FORMATTER.format
    FORMATTER.format = format
def _setRequestFromFile():
    """
    Checks if the way to make an HTTP request is through a supplied
    textual file, parses it and saves the information into the knowledge base.
    """

    if conf.requestFile:
        for requestFile in re.split(PARAMETER_SPLITTING_REGEX, conf.requestFile):
            requestFile = safeExpandUser(requestFile)
            url = None
            seen = set()

            if not checkFile(requestFile, False):
                errMsg = "specified HTTP request file '%s' " % requestFile
                errMsg += "does not exist"
                raise SqlmapFilePathException(errMsg)

            infoMsg = "parsing HTTP request from '%s'" % requestFile
            logger.info(infoMsg)

            for target in parseRequestFile(requestFile):
                url = target[0]
                if url not in seen:
                    kb.targets.add(target)
                    if len(kb.targets) > 1:
                        conf.multipleTargets = True
                    seen.add(url)

            if url is None:
                errMsg = "specified file '%s' " % requestFile
                errMsg += "does not contain a usable HTTP request (with parameters)"
                raise SqlmapDataException(errMsg)

    if conf.secondReq:
        conf.secondReq = safeExpandUser(conf.secondReq)

        if not checkFile(conf.secondReq, False):
            errMsg = "specified second-order HTTP request file '%s' " % conf.secondReq
            errMsg += "does not exist"
            raise SqlmapFilePathException(errMsg)

        infoMsg = "parsing second-order HTTP request from '%s'" % conf.secondReq
        logger.info(infoMsg)

        try:
            target = next(parseRequestFile(conf.secondReq, False))
            kb.secondReq = target
        except StopIteration:
            errMsg = "specified second-order HTTP request file '%s' " % conf.secondReq
            errMsg += "does not contain a valid HTTP request"
            raise SqlmapDataException(errMsg)
def _setCrawler():
    if not conf.crawlDepth:
        return

    if not conf.bulkFile:
        if conf.url:
            crawl(conf.url)
        elif conf.requestFile and kb.targets:
            target = next(iter(kb.targets))
            crawl(target[0], target[2], target[3])
def _doSearch():
    """
    This function performs search dorking, parses results
    and saves the testable hosts into the knowledge base.
    """

    if not conf.googleDork:
        return

    kb.data.onlyGETs = None

    def retrieve():
        links = search(conf.googleDork)

        if not links:
            errMsg = "unable to find results for your "
            errMsg += "search dork expression"
            raise SqlmapGenericException(errMsg)

        for link in links:
            link = urldecode(link)
            if re.search(r"(.*?)\?(.+)", link) or conf.forms:
                kb.targets.add((link, conf.method, conf.data, conf.cookie, None))
            elif re.search(URI_INJECTABLE_REGEX, link, re.I):
                if kb.data.onlyGETs is None and conf.data is None and not conf.googleDork:
                    message = "do you want to scan only results containing GET parameters? [Y/n] "
                    kb.data.onlyGETs = readInput(message, default='Y', boolean=True)
                if not kb.data.onlyGETs or conf.googleDork:
                    kb.targets.add((link, conf.method, conf.data, conf.cookie, None))

        return links

    while True:
        links = retrieve()

        if kb.targets:
            infoMsg = "found %d results for your " % len(links)
            infoMsg += "search dork expression"

            if not conf.forms:
                infoMsg += ", "

                if len(links) == len(kb.targets):
                    infoMsg += "all "
                else:
                    infoMsg += "%d " % len(kb.targets)

                infoMsg += "of them are testable targets"

            logger.info(infoMsg)
            break

        else:
            message = "found %d results " % len(links)
            message += "for your search dork expression, but none of them "
            message += "have GET parameters to test for SQL injection. "
            message += "Do you want to skip to the next result page? [Y/n]"

            if not readInput(message, default='Y', boolean=True):
                raise SqlmapSilentQuitException
            else:
                conf.googlePage += 1
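# Note: when targets are piped via STDIN, kb.targets is replaced below with a
# lazy iterator that extracts one URL per input line on demand and buffers any
# explicitly add()-ed targets until the pipe is exhausted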
def _setStdinPipeTargets():
    if conf.url:
        return

    if isinstance(conf.stdinPipe, _collections.Iterable):
        infoMsg = "using 'STDIN' for parsing targets list"
        logger.info(infoMsg)

        class _(object):
            def __init__(self):
                self.__rest = OrderedSet()

            def __iter__(self):
                return self

            def __next__(self):
                return self.next()

            def next(self):
                try:
                    line = next(conf.stdinPipe)
                except (IOError, OSError, TypeError):
                    line = None

                if line:
                    match = re.search(r"\b(https?://[^\s'\"]+|[\w.]+\.\w{2,3}[/\w+]*\?[^\s'\"]+)", line, re.I)
                    if match:
                        return (match.group(0), conf.method, conf.data, conf.cookie, None)
                elif self.__rest:
                    return self.__rest.pop()

                raise StopIteration()

            def add(self, elem):
                self.__rest.add(elem)

        kb.targets = _()
def _setBulkMultipleTargets():
    if not conf.bulkFile:
        return

    conf.bulkFile = safeExpandUser(conf.bulkFile)

    infoMsg = "parsing multiple targets list from '%s'" % conf.bulkFile
    logger.info(infoMsg)

    if not checkFile(conf.bulkFile, False):
        errMsg = "the specified bulk file "
        errMsg += "does not exist"
        raise SqlmapFilePathException(errMsg)

    found = False
    for line in getFileItems(conf.bulkFile):
        if conf.scope and not re.search(conf.scope, line, re.I):
            continue

        if re.match(r"[^ ]+\?(.+)", line, re.I) or kb.customInjectionMark in line or conf.data:
            found = True
            kb.targets.add((line.strip(), conf.method, conf.data, conf.cookie, None))

    if not found and not conf.forms and not conf.crawlDepth:
        warnMsg = "no usable links found (with GET parameters)"
        logger.warning(warnMsg)
def _findPageForms():
    if not conf.forms or conf.crawlDepth:
        return

    if conf.url and not checkConnection():
        return

    found = False
    infoMsg = "searching for forms"
    logger.info(infoMsg)

    if not any((conf.bulkFile, conf.googleDork)):
        page, _, _ = Request.queryPage(content=True, ignoreSecondOrder=True)
        if findPageForms(page, conf.url, True, True):
            found = True
    else:
        if conf.bulkFile:
            targets = getFileItems(conf.bulkFile)
        elif conf.googleDork:
            targets = [_[0] for _ in kb.targets]
            kb.targets.clear()
        else:
            targets = []

        for i in xrange(len(targets)):
            try:
                target = targets[i].strip()

                if not re.search(r"(?i)\Ahttp[s]*://", target):
                    target = "http://%s" % target

                page, _, _ = Request.getPage(url=target.strip(), cookie=conf.cookie, crawling=True, raise404=False)
                if findPageForms(page, target, False, True):
                    found = True

                if conf.verbose in (1, 2):
                    status = '%d/%d links visited (%d%%)' % (i + 1, len(targets), round(100.0 * (i + 1) / len(targets)))
                    dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True)
            except KeyboardInterrupt:
                break
            except Exception as ex:
                errMsg = "problem occurred while searching for forms at '%s' ('%s')" % (target, getSafeExString(ex))
                logger.error(errMsg)

    if not found:
        warnMsg = "no forms found"
        logger.warning(warnMsg)
def _setDBMSAuthentication():
    """
    Check and set the DBMS authentication credentials to run statements as
    another user, not the session user
    """

    if not conf.dbmsCred:
        return

    debugMsg = "setting the DBMS authentication credentials"
    logger.debug(debugMsg)

    match = re.search(r"^(.+?):(.*?)$", conf.dbmsCred)

    if not match:
        errMsg = "DBMS authentication credentials value must be in format "
        errMsg += "username:password"
        raise SqlmapSyntaxException(errMsg)

    conf.dbmsUsername = match.group(1)
    conf.dbmsPassword = match.group(2)
def _setMetasploit():
    if not conf.osPwn and not conf.osSmb and not conf.osBof:
        return

    debugMsg = "setting the takeover out-of-band functionality"
    logger.debug(debugMsg)

    msfEnvPathExists = False

    if IS_WIN:
        try:
            __import__("win32file")
        except ImportError:
            errMsg = "sqlmap requires third-party module 'pywin32' "
            errMsg += "in order to use Metasploit functionalities on "
            errMsg += "Windows. You can download it from "
            errMsg += "'https://github.com/mhammond/pywin32'"
            raise SqlmapMissingDependence(errMsg)

        if not conf.msfPath:
            for candidate in os.environ.get("PATH", "").split(';'):
                if all(_ in candidate for _ in ("metasploit", "bin")):
                    conf.msfPath = os.path.dirname(candidate.rstrip('\\'))
                    break

    if conf.osSmb:
        isAdmin = runningAsAdmin()

        if not isAdmin:
            errMsg = "you need to run sqlmap as an administrator "
            errMsg += "if you want to perform an SMB relay attack because "
            errMsg += "it will need to listen on a user-specified SMB "
            errMsg += "TCP port for incoming connection attempts"
            raise SqlmapMissingPrivileges(errMsg)

    if conf.msfPath:
        for path in (conf.msfPath, os.path.join(conf.msfPath, "bin")):
            if any(os.path.exists(normalizePath(os.path.join(path, "%s%s" % (_, ".bat" if IS_WIN else "")))) for _ in ("msfcli", "msfconsole")):
                msfEnvPathExists = True
                if all(os.path.exists(normalizePath(os.path.join(path, "%s%s" % (_, ".bat" if IS_WIN else "")))) for _ in ("msfvenom",)):
                    kb.oldMsf = False
                elif all(os.path.exists(normalizePath(os.path.join(path, "%s%s" % (_, ".bat" if IS_WIN else "")))) for _ in ("msfencode", "msfpayload")):
                    kb.oldMsf = True
                else:
                    msfEnvPathExists = False

                conf.msfPath = path
                break

        if msfEnvPathExists:
            debugMsg = "provided Metasploit Framework path "
            debugMsg += "'%s' is valid" % conf.msfPath
            logger.debug(debugMsg)
        else:
            warnMsg = "the provided Metasploit Framework path "
            warnMsg += "'%s' is not valid. The cause could " % conf.msfPath
            warnMsg += "be that the path does not exist or that one "
            warnMsg += "or more of the needed Metasploit executables "
            warnMsg += "within msfcli, msfconsole, msfencode and "
            warnMsg += "msfpayload do not exist"
            logger.warning(warnMsg)
    else:
        warnMsg = "you did not provide the local path where Metasploit "
        warnMsg += "Framework is installed"
        logger.warning(warnMsg)

    if not msfEnvPathExists:
        warnMsg = "sqlmap is going to look for Metasploit Framework "
        warnMsg += "installation inside the environment path(s)"
        logger.warning(warnMsg)

        envPaths = os.environ.get("PATH", "").split(";" if IS_WIN else ":")

        for envPath in envPaths:
            envPath = envPath.replace(";", "")

            if any(os.path.exists(normalizePath(os.path.join(envPath, "%s%s" % (_, ".bat" if IS_WIN else "")))) for _ in ("msfcli", "msfconsole")):
                msfEnvPathExists = True
                if all(os.path.exists(normalizePath(os.path.join(envPath, "%s%s" % (_, ".bat" if IS_WIN else "")))) for _ in ("msfvenom",)):
                    kb.oldMsf = False
                elif all(os.path.exists(normalizePath(os.path.join(envPath, "%s%s" % (_, ".bat" if IS_WIN else "")))) for _ in ("msfencode", "msfpayload")):
                    kb.oldMsf = True
                else:
                    msfEnvPathExists = False

                if msfEnvPathExists:
                    infoMsg = "Metasploit Framework has been found "
                    infoMsg += "installed in the '%s' path" % envPath
                    logger.info(infoMsg)

                    conf.msfPath = envPath

                    break

    if not msfEnvPathExists:
        errMsg = "unable to locate Metasploit Framework installation. "
        errMsg += "You can get it at 'https://www.metasploit.com/download/'"
        raise SqlmapFilePathException(errMsg)
def _setWriteFile():
    if not conf.fileWrite:
        return

    debugMsg = "setting the write file functionality"
    logger.debug(debugMsg)

    if not os.path.exists(conf.fileWrite):
        errMsg = "the provided local file '%s' does not exist" % conf.fileWrite
        raise SqlmapFilePathException(errMsg)

    if not conf.fileDest:
        errMsg = "you did not provide the back-end DBMS absolute path "
        errMsg += "where you want to write the local file '%s'" % conf.fileWrite
        raise SqlmapMissingMandatoryOptionException(errMsg)

    conf.fileWriteType = getFileType(conf.fileWrite)
def _setOS():
    """
    Force the back-end DBMS operating system option.
    """

    if not conf.os:
        return

    if conf.os.lower() not in SUPPORTED_OS:
        errMsg = "you provided an unsupported back-end DBMS operating "
        errMsg += "system. The supported DBMS operating systems for OS "
        errMsg += "and file system access are %s. " % ', '.join([o.capitalize() for o in SUPPORTED_OS])
        errMsg += "If you do not know the back-end DBMS underlying OS, "
        errMsg += "do not provide it and sqlmap will fingerprint it for "
        errMsg += "you."
        raise SqlmapUnsupportedDBMSException(errMsg)

    debugMsg = "forcing back-end DBMS operating system to user defined "
    debugMsg += "value '%s'" % conf.os
    logger.debug(debugMsg)

    Backend.setOs(conf.os)
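# Note: --technique takes a string of technique letters (e.g. "BEUSTQ"), each
# mapping to a PAYLOAD.TECHNIQUE member (e.g. B -> boolean-based blind,
# U -> UNION query); the loop below converts it into a list of enum values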
def _setTechnique():
    validTechniques = sorted(getPublicTypeMembers(PAYLOAD.TECHNIQUE), key=lambda x: x[1])
    validLetters = [_[0][0].upper() for _ in validTechniques]

    if conf.technique and isinstance(conf.technique, six.string_types):
        _ = []

        for letter in conf.technique.upper():
            if letter not in validLetters:
                errMsg = "value for --technique must be a string composed "
                errMsg += "of the letters %s. Refer to the " % ", ".join(validLetters)
                errMsg += "user's manual for details"
                raise SqlmapSyntaxException(errMsg)

            for validTech, validInt in validTechniques:
                if letter == validTech[0]:
                    _.append(validInt)
                    break

        conf.technique = _
def _setDBMS():
    """
    Force the back-end DBMS option.
    """

    if not conf.dbms:
        return

    debugMsg = "forcing back-end DBMS to user defined value"
    logger.debug(debugMsg)

    conf.dbms = conf.dbms.lower()
    regex = re.search(r"%s ([\d\.]+)" % ("(%s)" % "|".join(SUPPORTED_DBMS)), conf.dbms, re.I)

    if regex:
        conf.dbms = regex.group(1)
        Backend.setVersion(regex.group(2))

    if conf.dbms not in SUPPORTED_DBMS:
        errMsg = "you provided an unsupported back-end database management "
        errMsg += "system. Supported DBMSes are as follows: %s. " % ', '.join(sorted((_ for _ in (list(DBMS_DICT) + getPublicTypeMembers(FORK, True))), key=str.lower))
        errMsg += "If you do not know the back-end DBMS, do not provide "
        errMsg += "it and sqlmap will fingerprint it for you."
        raise SqlmapUnsupportedDBMSException(errMsg)

    for dbms, aliases in DBMS_ALIASES:
        if conf.dbms in aliases:
            conf.dbms = dbms

            break
def _listTamperingFunctions():
    """
    Lists available tamper functions
    """

    if conf.listTampers:
        infoMsg = "listing available tamper scripts\n"
        logger.info(infoMsg)

        for script in sorted(glob.glob(os.path.join(paths.SQLMAP_TAMPER_PATH, "*.py"))):
            content = openFile(script, "rb").read()
            match = re.search(r'(?s)__priority__.+"""(.+)"""', content)
            if match:
                comment = match.group(1).strip()
                dataToStdout("* %s - %s\n" % (setColor(os.path.basename(script), "yellow"), re.sub(r" *\n *", " ", comment.split("\n\n")[0].strip())))
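# Note: a tamper script is expected to expose tamper(payload, **kwargs) and may
# optionally define __priority__ and dependencies(); scripts run in the order
# supplied unless the user chooses to auto-resolve ordering by priority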
def _setTamperingFunctions():
    """
    Loads tampering functions from given script(s)
    """

    if conf.tamper:
        last_priority = PRIORITY.HIGHEST
        check_priority = True
        resolve_priorities = False
        priorities = []

        for script in re.split(PARAMETER_SPLITTING_REGEX, conf.tamper):
            found = False

            path = safeFilepathEncode(paths.SQLMAP_TAMPER_PATH)
            script = safeFilepathEncode(script.strip())

            try:
                if not script:
                    continue

                elif os.path.exists(os.path.join(path, script if script.endswith(".py") else "%s.py" % script)):
                    script = os.path.join(path, script if script.endswith(".py") else "%s.py" % script)

                elif not os.path.exists(script):
                    errMsg = "tamper script '%s' does not exist" % script
                    raise SqlmapFilePathException(errMsg)

                elif not script.endswith(".py"):
                    errMsg = "tamper script '%s' should have an extension '.py'" % script
                    raise SqlmapSyntaxException(errMsg)
            except UnicodeDecodeError:
                errMsg = "invalid character provided in option '--tamper'"
                raise SqlmapSyntaxException(errMsg)

            dirname, filename = os.path.split(script)
            dirname = os.path.abspath(dirname)

            infoMsg = "loading tamper module '%s'" % filename[:-3]
            logger.info(infoMsg)

            if not os.path.exists(os.path.join(dirname, "__init__.py")):
                errMsg = "make sure that there is an empty file '__init__.py' "
                errMsg += "inside of tamper scripts directory '%s'" % dirname
                raise SqlmapGenericException(errMsg)

            if dirname not in sys.path:
                sys.path.insert(0, dirname)

            try:
                module = __import__(safeFilepathEncode(filename[:-3]))
            except Exception as ex:
                raise SqlmapSyntaxException("cannot import tamper module '%s' (%s)" % (getUnicode(filename[:-3]), getSafeExString(ex)))

            priority = PRIORITY.NORMAL if not hasattr(module, "__priority__") else module.__priority__

            for name, function in inspect.getmembers(module, inspect.isfunction):
                if name == "tamper" and (hasattr(inspect, "signature") and all(_ in inspect.signature(function).parameters for _ in ("payload", "kwargs")) or inspect.getargspec(function).args and inspect.getargspec(function).keywords == "kwargs"):
                    found = True
                    kb.tamperFunctions.append(function)
                    function.__name__ = module.__name__

                    if check_priority and priority > last_priority:
                        message = "it appears that you might have mixed "
                        message += "the order of tamper scripts. "
                        message += "Do you want to auto resolve this? [Y/n/q] "
                        choice = readInput(message, default='Y').upper()

                        if choice == 'N':
                            resolve_priorities = False
                        elif choice == 'Q':
                            raise SqlmapUserQuitException
                        else:
                            resolve_priorities = True

                        check_priority = False

                    priorities.append((priority, function))
                    last_priority = priority

                    break
                elif name == "dependencies":
                    try:
                        function()
                    except Exception as ex:
                        errMsg = "error occurred while checking dependencies "
                        errMsg += "for tamper module '%s' ('%s')" % (getUnicode(filename[:-3]), getSafeExString(ex))
                        raise SqlmapGenericException(errMsg)

            if not found:
                errMsg = "missing function 'tamper(payload, **kwargs)' "
                errMsg += "in tamper script '%s'" % script
                raise SqlmapGenericException(errMsg)

        if kb.tamperFunctions and len(kb.tamperFunctions) > 3:
            warnMsg = "using too many tamper scripts is usually not "
            warnMsg += "a good idea"
            logger.warning(warnMsg)

        if resolve_priorities and priorities:
            priorities.sort(key=functools.cmp_to_key(lambda a, b: cmp(a[0], b[0])), reverse=True)
            kb.tamperFunctions = []

            for _, function in priorities:
                kb.tamperFunctions.append(function)
def _setPreprocessFunctions():
    """
    Loads preprocess function(s) from given script(s)
    """

    if conf.preprocess:
        for script in re.split(PARAMETER_SPLITTING_REGEX, conf.preprocess):
            found = False
            function = None

            script = safeFilepathEncode(script.strip())

            try:
                if not script:
                    continue

                if not os.path.exists(script):
                    errMsg = "preprocess script '%s' does not exist" % script
                    raise SqlmapFilePathException(errMsg)

                elif not script.endswith(".py"):
                    errMsg = "preprocess script '%s' should have an extension '.py'" % script
                    raise SqlmapSyntaxException(errMsg)
            except UnicodeDecodeError:
                errMsg = "invalid character provided in option '--preprocess'"
                raise SqlmapSyntaxException(errMsg)

            dirname, filename = os.path.split(script)
            dirname = os.path.abspath(dirname)

            infoMsg = "loading preprocess module '%s'" % filename[:-3]
            logger.info(infoMsg)

            if not os.path.exists(os.path.join(dirname, "__init__.py")):
                errMsg = "make sure that there is an empty file '__init__.py' "
                errMsg += "inside of preprocess scripts directory '%s'" % dirname
                raise SqlmapGenericException(errMsg)

            if dirname not in sys.path:
                sys.path.insert(0, dirname)

            try:
                module = __import__(safeFilepathEncode(filename[:-3]))
            except Exception as ex:
                raise SqlmapSyntaxException("cannot import preprocess module '%s' (%s)" % (getUnicode(filename[:-3]), getSafeExString(ex)))

            for name, function in inspect.getmembers(module, inspect.isfunction):
                try:
                    if name == "preprocess" and inspect.getargspec(function).args and all(_ in inspect.getargspec(function).args for _ in ("req",)):
                        found = True

                        kb.preprocessFunctions.append(function)
                        function.__name__ = module.__name__

                        break
                except ValueError:  # Note: https://github.com/sqlmapproject/sqlmap/issues/4357
                    pass

            if not found:
                errMsg = "missing function 'preprocess(req)' "
                errMsg += "in preprocess script '%s'" % script
                raise SqlmapGenericException(errMsg)
            else:
                try:
                    function(_urllib.request.Request("http://localhost"))
                except:
                    tbMsg = traceback.format_exc()

                    if conf.debug:
                        dataToStdout(tbMsg)

                    handle, filename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.PREPROCESS, suffix=".py")
                    os.close(handle)

                    openFile(filename, "w+b").write("#!/usr/bin/env\n\ndef preprocess(req):\n    pass\n")
                    openFile(os.path.join(os.path.dirname(filename), "__init__.py"), "w+b").write("pass")

                    errMsg = "function 'preprocess(req)' "
                    errMsg += "in preprocess script '%s' " % script
                    errMsg += "appears to be invalid "
                    errMsg += "(Note: find template script at '%s')" % filename
                    raise SqlmapGenericException(errMsg)
def _setPostprocessFunctions():
    """
    Loads postprocess function(s) from given script(s)
    """

    if conf.postprocess:
        for script in re.split(PARAMETER_SPLITTING_REGEX, conf.postprocess):
            found = False
            function = None

            script = safeFilepathEncode(script.strip())

            try:
                if not script:
                    continue

                if not os.path.exists(script):
                    errMsg = "postprocess script '%s' does not exist" % script
                    raise SqlmapFilePathException(errMsg)

                elif not script.endswith(".py"):
                    errMsg = "postprocess script '%s' should have an extension '.py'" % script
                    raise SqlmapSyntaxException(errMsg)
            except UnicodeDecodeError:
                errMsg = "invalid character provided in option '--postprocess'"
                raise SqlmapSyntaxException(errMsg)

            dirname, filename = os.path.split(script)
            dirname = os.path.abspath(dirname)

            infoMsg = "loading postprocess module '%s'" % filename[:-3]
            logger.info(infoMsg)

            if not os.path.exists(os.path.join(dirname, "__init__.py")):
                errMsg = "make sure that there is an empty file '__init__.py' "
                errMsg += "inside of postprocess scripts directory '%s'" % dirname
                raise SqlmapGenericException(errMsg)

            if dirname not in sys.path:
                sys.path.insert(0, dirname)

            try:
                module = __import__(safeFilepathEncode(filename[:-3]))
            except Exception as ex:
                raise SqlmapSyntaxException("cannot import postprocess module '%s' (%s)" % (getUnicode(filename[:-3]), getSafeExString(ex)))

            for name, function in inspect.getmembers(module, inspect.isfunction):
                if name == "postprocess" and inspect.getargspec(function).args and all(_ in inspect.getargspec(function).args for _ in ("page", "headers", "code")):
                    found = True

                    kb.postprocessFunctions.append(function)
                    function.__name__ = module.__name__

                    break

            if not found:
                errMsg = "missing function 'postprocess(page, headers=None, code=None)' "
                errMsg += "in postprocess script '%s'" % script
                raise SqlmapGenericException(errMsg)
            else:
                try:
                    _, _, _ = function("", {}, None)
                except:
                    handle, filename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.PREPROCESS, suffix=".py")
                    os.close(handle)

                    openFile(filename, "w+b").write("#!/usr/bin/env\n\ndef postprocess(page, headers=None, code=None):\n    return page, headers, code\n")
                    openFile(os.path.join(os.path.dirname(filename), "__init__.py"), "w+b").write("pass")

                    errMsg = "function 'postprocess(page, headers=None, code=None)' "
                    errMsg += "in postprocess script '%s' " % script
                    errMsg += "should return a tuple '(page, headers, code)' "
                    errMsg += "(Note: find template script at '%s')" % filename
                    raise SqlmapGenericException(errMsg)
def _setThreads():
    if not isinstance(conf.threads, int) or conf.threads <= 0:
        conf.threads = 1
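# Note: the patch below memoizes socket.getaddrinfo() results in
# kb.cache.addrinfo (keyed by the positional arguments), so repeated requests
# to the same host skip redundant DNS lookups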
def _setDNSCache():
    """
    Makes a cached version of socket._getaddrinfo to avoid subsequent DNS requests.
    """

    def _getaddrinfo(*args, **kwargs):
        if args in kb.cache.addrinfo:
            return kb.cache.addrinfo[args]
        else:
            kb.cache.addrinfo[args] = socket._getaddrinfo(*args, **kwargs)
            return kb.cache.addrinfo[args]

    if not hasattr(socket, "_getaddrinfo"):
        socket._getaddrinfo = socket.getaddrinfo
        socket.getaddrinfo = _getaddrinfo

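# Illustrative sketch (not part of sqlmap itself): the same memoization idea,
# shown standalone and assuming only the standard library:
#
#   import socket
#
#   _cache = {}
#   _original_getaddrinfo = socket.getaddrinfo
#
#   def cached_getaddrinfo(*args, **kwargs):
#       # resolve once per unique argument tuple, then reuse the answer
#       if args not in _cache:
#           _cache[args] = _original_getaddrinfo(*args, **kwargs)
#       return _cache[args]
#
#   socket.getaddrinfo = cached_getaddrinfo
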
def _setSocketPreConnect():
    """
    Makes a pre-connect version of socket.create_connection
    """

    if conf.disablePrecon:
        return

    def _thread():
        while kb.get("threadContinue") and not conf.get("disablePrecon"):
            try:
                for key in socket._ready:
                    if len(socket._ready[key]) < SOCKET_PRE_CONNECT_QUEUE_SIZE:
                        s = socket.create_connection(*key[0], **dict(key[1]))
                        with kb.locks.socket:
                            socket._ready[key].append((s, time.time()))
            except KeyboardInterrupt:
                break
            except:
                pass
            finally:
                time.sleep(0.01)

    def create_connection(*args, **kwargs):
        retVal = None

        key = (tuple(args), frozenset(kwargs.items()))
        with kb.locks.socket:
            if key not in socket._ready:
                socket._ready[key] = []

            while len(socket._ready[key]) > 0:
                candidate, created = socket._ready[key].pop(0)
                if (time.time() - created) < PRECONNECT_CANDIDATE_TIMEOUT:
                    retVal = candidate
                    break
                else:
                    try:
                        candidate.shutdown(socket.SHUT_RDWR)
                        candidate.close()
                    except socket.error:
                        pass

        if not retVal:
            retVal = socket._create_connection(*args, **kwargs)

        return retVal

    if not hasattr(socket, "_create_connection"):
        socket._ready = {}
        socket._create_connection = socket.create_connection
        socket.create_connection = create_connection

        thread = threading.Thread(target=_thread)
        setDaemon(thread)
        thread.start()

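# Illustrative note (not sqlmap API): pre-connected sockets are pooled per
# unique call signature, i.e. keyed by the positional arguments plus the
# keyword items passed to socket.create_connection(). A minimal sketch of
# such a key, assuming a plain host/port pair and a timeout keyword:
#
#   key = (tuple([("www.example.com", 80)]), frozenset({"timeout": 10}.items()))
#   pool = {key: []}  # list of (socket, creation_time) candidates
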
def _setHTTPHandlers():
    """
    Check and set the HTTP/SOCKS proxy for all HTTP requests.
    """

    with kb.locks.handlers:
        if conf.proxyList:
            conf.proxy = conf.proxyList[0]
            conf.proxyList = conf.proxyList[1:] + conf.proxyList[:1]

            if len(conf.proxyList) > 1:
                infoMsg = "loading proxy '%s' from a supplied proxy list file" % conf.proxy
                logger.info(infoMsg)

        elif not conf.proxy:
            if conf.hostname in ("localhost", "127.0.0.1") or conf.ignoreProxy:
                proxyHandler.proxies = {}

        if conf.proxy:
            debugMsg = "setting the HTTP/SOCKS proxy for all HTTP requests"
            logger.debug(debugMsg)

            try:
                _ = _urllib.parse.urlsplit(conf.proxy)
            except Exception as ex:
                errMsg = "invalid proxy address '%s' ('%s')" % (conf.proxy, getSafeExString(ex))
                raise SqlmapSyntaxException(errMsg)

            hostnamePort = _.netloc.rsplit(":", 1)

            scheme = _.scheme.upper()
            hostname = hostnamePort[0]
            port = None
            username = None
            password = None

            if len(hostnamePort) == 2:
                try:
                    port = int(hostnamePort[1])
                except:
                    pass  # drops into the next check block

            if not all((scheme, hasattr(PROXY_TYPE, scheme), hostname, port)):
                errMsg = "proxy value must be in format '(%s)://address:port'" % "|".join(_[0].lower() for _ in getPublicTypeMembers(PROXY_TYPE))
                raise SqlmapSyntaxException(errMsg)

            if conf.proxyCred:
                _ = re.search(r"\A(.*?):(.*?)\Z", conf.proxyCred)
                if not _:
                    errMsg = "proxy authentication credentials "
                    errMsg += "value must be in format username:password"
                    raise SqlmapSyntaxException(errMsg)
                else:
                    username = _.group(1)
                    password = _.group(2)

            if scheme in (PROXY_TYPE.SOCKS4, PROXY_TYPE.SOCKS5):
                proxyHandler.proxies = {}

                if scheme == PROXY_TYPE.SOCKS4:
                    warnMsg = "SOCKS4 does not support resolving (DNS) names (i.e. causing DNS leakage)"
                    singleTimeWarnMessage(warnMsg)

                socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if scheme == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, hostname, port, username=username, password=password)
                socks.wrapmodule(_http_client)
            else:
                socks.unwrapmodule(_http_client)

                if conf.proxyCred:
                    # Reference: http://stackoverflow.com/questions/34079/how-to-specify-an-authenticated-proxy-for-a-python-http-connection
                    proxyString = "%s@" % conf.proxyCred
                else:
                    proxyString = ""

                proxyString += "%s:%d" % (hostname, port)
                proxyHandler.proxies = {"http": proxyString, "https": proxyString}

            proxyHandler.__init__(proxyHandler.proxies)

        if not proxyHandler.proxies:
            for _ in ("http", "https"):
                if hasattr(proxyHandler, "%s_open" % _):
                    delattr(proxyHandler, "%s_open" % _)

        debugMsg = "creating HTTP requests opener object"
        logger.debug(debugMsg)

        handlers = filterNone([multipartPostHandler, proxyHandler if proxyHandler.proxies else None, authHandler, redirectHandler, rangeHandler, chunkedHandler if conf.chunked else None, httpsHandler])

        if not conf.dropSetCookie:
            if not conf.loadCookies:
                conf.cj = _http_cookiejar.CookieJar()
            else:
                conf.cj = _http_cookiejar.MozillaCookieJar()
                resetCookieJar(conf.cj)

            handlers.append(_urllib.request.HTTPCookieProcessor(conf.cj))

        # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
        if conf.keepAlive:
            warnMsg = "persistent HTTP(s) connections, Keep-Alive, have "
            warnMsg += "been disabled because of their incompatibility "

            if conf.proxy:
                warnMsg += "with HTTP(s) proxy"
                logger.warning(warnMsg)
            elif conf.authType:
                warnMsg += "with authentication methods"
                logger.warning(warnMsg)
            else:
                handlers.append(keepAliveHandler)

        opener = _urllib.request.build_opener(*handlers)
        opener.addheaders = []  # Note: clearing default "User-Agent: Python-urllib/X.Y"
        _urllib.request.install_opener(opener)

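# Illustrative sketch (plain urllib, not sqlmap's wrapped _urllib): the opener
# construction above follows the standard pattern of chaining handlers, e.g.:
#
#   import urllib.request
#
#   proxy_handler = urllib.request.ProxyHandler({"http": "127.0.0.1:8080", "https": "127.0.0.1:8080"})
#   cookie_handler = urllib.request.HTTPCookieProcessor()
#   opener = urllib.request.build_opener(proxy_handler, cookie_handler)
#   opener.addheaders = []  # drop the default "User-Agent: Python-urllib/X.Y"
#   urllib.request.install_opener(opener)
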
def _setSafeVisit():
    """
    Check and set the safe visit options.
    """

    if not any((conf.safeUrl, conf.safeReqFile)):
        return

    if conf.safeReqFile:
        checkFile(conf.safeReqFile)

        raw = readCachedFileContent(conf.safeReqFile)
        match = re.search(r"\A([A-Z]+) ([^ ]+) HTTP/[0-9.]+\Z", raw.split('\n')[0].strip())

        if match:
            kb.safeReq.method = match.group(1)
            kb.safeReq.url = match.group(2)
            kb.safeReq.headers = {}

            for line in raw.split('\n')[1:]:
                line = line.strip()
                if line and ':' in line:
                    key, value = line.split(':', 1)
                    value = value.strip()
                    kb.safeReq.headers[key] = value
                    if key.upper() == HTTP_HEADER.HOST.upper():
                        if not value.startswith("http"):
                            scheme = "http"
                            if value.endswith(":443"):
                                scheme = "https"
                            value = "%s://%s" % (scheme, value)
                        kb.safeReq.url = _urllib.parse.urljoin(value, kb.safeReq.url)
                else:
                    break

            post = None

            if '\r\n\r\n' in raw:
                post = raw[raw.find('\r\n\r\n') + 4:]
            elif '\n\n' in raw:
                post = raw[raw.find('\n\n') + 2:]

            if post and post.strip():
                kb.safeReq.post = post
            else:
                kb.safeReq.post = None
        else:
            errMsg = "invalid format of a safe request file"
            raise SqlmapSyntaxException(errMsg)
    else:
        if not re.search(r"(?i)\Ahttp[s]*://", conf.safeUrl):
            if ":443/" in conf.safeUrl:
                conf.safeUrl = "https://%s" % conf.safeUrl
            else:
                conf.safeUrl = "http://%s" % conf.safeUrl

    if (conf.safeFreq or 0) <= 0:
        errMsg = "please provide a valid value (>0) for safe frequency ('--safe-freq') while using safe visit features"
        raise SqlmapSyntaxException(errMsg)

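# Illustrative sketch (standalone, hypothetical request): the request line of a
# saved raw request file is matched the same way as above, e.g.:
#
#   import re
#
#   raw = "GET /index.php?id=1 HTTP/1.1\nHost: www.example.com\n\n"
#   match = re.search(r"\A([A-Z]+) ([^ ]+) HTTP/[0-9.]+\Z", raw.split('\n')[0].strip())
#   if match:
#       method, url = match.group(1), match.group(2)  # ("GET", "/index.php?id=1")
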
def _setPrefixSuffix():
    if conf.prefix is not None and conf.suffix is not None:
        # Create a custom boundary object for user's supplied prefix
        # and suffix
        boundary = AttribDict()

        boundary.level = 1
        boundary.clause = [0]
        boundary.where = [1, 2, 3]
        boundary.prefix = conf.prefix
        boundary.suffix = conf.suffix

        if " like" in boundary.suffix.lower():
            if "'" in boundary.suffix.lower():
                boundary.ptype = 3
            elif '"' in boundary.suffix.lower():
                boundary.ptype = 5
        elif "'" in boundary.suffix:
            boundary.ptype = 2
        elif '"' in boundary.suffix:
            boundary.ptype = 4
        else:
            boundary.ptype = 1

        # user who provides --prefix/--suffix does not want other boundaries
        # to be tested for
        conf.boundaries = [boundary]

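# Illustrative note (simplified, not the exact payload engine): a custom
# boundary effectively brackets every generated test. For example, with
# --prefix="')" and --suffix="-- -" a test such as "AND 1=1" would be sent
# roughly as:
#
#   payload = "%s %s %s" % ("')", "AND 1=1", "-- -")   # "') AND 1=1 -- -"
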
def _setAuthCred():
    """
    Adds authentication credentials (if any) for current target to the password manager
    (used by connection handler)
    """

    if kb.passwordMgr and all(_ is not None for _ in (conf.scheme, conf.hostname, conf.port, conf.authUsername, conf.authPassword)):
        kb.passwordMgr.add_password(None, "%s://%s:%d" % (conf.scheme, conf.hostname, conf.port), conf.authUsername, conf.authPassword)

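# Illustrative sketch (plain urllib, hypothetical values): the password manager
# usage above corresponds to the standard basic/digest authentication pattern:
#
#   import urllib.request
#
#   mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
#   mgr.add_password(None, "http://www.example.com:80", "user", "secret")
#   handler = urllib.request.HTTPBasicAuthHandler(mgr)
#   opener = urllib.request.build_opener(handler)
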
def _setHTTPAuthentication():
    """
    Check and set the HTTP(s) authentication method (Basic, Digest, Bearer, NTLM or PKI),
    username and password for first three methods, or PEM private key file for
    PKI authentication
    """

    global authHandler

    if not conf.authType and not conf.authCred and not conf.authFile:
        return

    if conf.authFile and not conf.authType:
        conf.authType = AUTH_TYPE.PKI

    elif conf.authType and not conf.authCred and not conf.authFile:
        errMsg = "you specified the HTTP authentication type, but "
        errMsg += "did not provide the credentials"
        raise SqlmapSyntaxException(errMsg)

    elif not conf.authType and conf.authCred:
        errMsg = "you specified the HTTP authentication credentials, "
        errMsg += "but did not provide the type (e.g. --auth-type=\"basic\")"
        raise SqlmapSyntaxException(errMsg)

    elif (conf.authType or "").lower() not in (AUTH_TYPE.BASIC, AUTH_TYPE.DIGEST, AUTH_TYPE.BEARER, AUTH_TYPE.NTLM, AUTH_TYPE.PKI):
        errMsg = "HTTP authentication type value must be "
        errMsg += "Basic, Digest, Bearer, NTLM or PKI"
        raise SqlmapSyntaxException(errMsg)

    if not conf.authFile:
        debugMsg = "setting the HTTP authentication type and credentials"
        logger.debug(debugMsg)

        authType = conf.authType.lower()

        if authType in (AUTH_TYPE.BASIC, AUTH_TYPE.DIGEST):
            regExp = "^(.*?):(.*?)$"
            errMsg = "HTTP %s authentication credentials " % authType
            errMsg += "value must be in format 'username:password'"
        elif authType == AUTH_TYPE.BEARER:
            conf.httpHeaders.append((HTTP_HEADER.AUTHORIZATION, "Bearer %s" % conf.authCred.strip()))
            return
        elif authType == AUTH_TYPE.NTLM:
            regExp = "^(.*\\\\.*):(.*?)$"
            errMsg = "HTTP NTLM authentication credentials value must "
            errMsg += "be in format 'DOMAIN\\username:password'"
        elif authType == AUTH_TYPE.PKI:
            errMsg = "HTTP PKI authentication requires "
            errMsg += "usage of option `--auth-pki`"
            raise SqlmapSyntaxException(errMsg)

        aCredRegExp = re.search(regExp, conf.authCred)

        if not aCredRegExp:
            raise SqlmapSyntaxException(errMsg)

        conf.authUsername = aCredRegExp.group(1)
        conf.authPassword = aCredRegExp.group(2)

        kb.passwordMgr = _urllib.request.HTTPPasswordMgrWithDefaultRealm()

        _setAuthCred()

        if authType == AUTH_TYPE.BASIC:
            authHandler = SmartHTTPBasicAuthHandler(kb.passwordMgr)

        elif authType == AUTH_TYPE.DIGEST:
            authHandler = _urllib.request.HTTPDigestAuthHandler(kb.passwordMgr)

        elif authType == AUTH_TYPE.NTLM:
            try:
                from ntlm import HTTPNtlmAuthHandler
            except ImportError:
                errMsg = "sqlmap requires Python NTLM third-party library "
                errMsg += "in order to authenticate via NTLM. Download from "
                errMsg += "'https://github.com/mullender/python-ntlm'"
                raise SqlmapMissingDependence(errMsg)

            authHandler = HTTPNtlmAuthHandler.HTTPNtlmAuthHandler(kb.passwordMgr)
    else:
        debugMsg = "setting the HTTP(s) authentication PEM private key"
        logger.debug(debugMsg)

        _ = safeExpandUser(conf.authFile)
        checkFile(_)
        authHandler = HTTPSPKIAuthHandler(_)

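# Illustrative command-line usage (hypothetical target and credentials) for the
# credential formats validated above:
#
#   python sqlmap.py -u "http://www.example.com/vuln.php?id=1" --auth-type=basic --auth-cred="user:pass"
#   python sqlmap.py -u "http://www.example.com/vuln.php?id=1" --auth-type=ntlm --auth-cred="DOMAIN\user:pass"
#   python sqlmap.py -u "https://www.example.com/vuln.php?id=1" --auth-file=/path/to/client.pem
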
def _setHTTPExtraHeaders():
    if conf.headers:
        debugMsg = "setting extra HTTP headers"
        logger.debug(debugMsg)

        conf.headers = conf.headers.split("\n") if "\n" in conf.headers else conf.headers.split("\\n")

        for headerValue in conf.headers:
            if not headerValue.strip():
                continue

            if headerValue.count(':') >= 1:
                header, value = (_.lstrip() for _ in headerValue.split(":", 1))

                if header and value:
                    conf.httpHeaders.append((header, value))
            elif headerValue.startswith('@'):
                checkFile(headerValue[1:])
                kb.headersFile = headerValue[1:]
            else:
                errMsg = "invalid header value: %s. Valid header format is 'name:value'" % repr(headerValue).lstrip('u')
                raise SqlmapSyntaxException(errMsg)

    elif not conf.requestFile and len(conf.httpHeaders or []) < 2:
        if conf.encoding:
            conf.httpHeaders.append((HTTP_HEADER.ACCEPT_CHARSET, "%s;q=0.7,*;q=0.1" % conf.encoding))

        # Invalidating any caching mechanism in between
        # Reference: http://stackoverflow.com/a/1383359
        conf.httpHeaders.append((HTTP_HEADER.CACHE_CONTROL, "no-cache"))

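# Illustrative usage (hypothetical values): extra headers are supplied as
# newline-separated "name:value" pairs, e.g.
#
#   python sqlmap.py -u "http://www.example.com/vuln.php?id=1" --headers="X-Forwarded-For: 127.0.0.1\nX-Requested-With: XMLHttpRequest"
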
def _setHTTPUserAgent():
    """
    Set the HTTP User-Agent header.
    Depending on the user options it can be:

        * The default sqlmap string
        * A default value read as user option
        * A random value read from a list of User-Agent headers from a
          file chosen as user option
    """

    debugMsg = "setting the HTTP User-Agent header"
    logger.debug(debugMsg)

    if conf.mobile:
        if conf.randomAgent:
            _ = random.sample([_[1] for _ in getPublicTypeMembers(MOBILES, True)], 1)[0]
            conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, _))
        else:
            message = "which smartphone do you want sqlmap to imitate "
            message += "through HTTP User-Agent header?\n"
            items = sorted(getPublicTypeMembers(MOBILES, True))

            for count in xrange(len(items)):
                item = items[count]
                message += "[%d] %s%s\n" % (count + 1, item[0], " (default)" if item == MOBILES.IPHONE else "")

            test = readInput(message.rstrip('\n'), default=items.index(MOBILES.IPHONE) + 1)

            try:
                item = items[int(test) - 1]
            except:
                item = MOBILES.IPHONE

            conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, item[1]))

    elif conf.agent:
        conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, conf.agent))

    elif not conf.randomAgent:
        _ = True

        for header, _ in conf.httpHeaders:
            if header.upper() == HTTP_HEADER.USER_AGENT.upper():
                _ = False
                break

        if _:
            conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, DEFAULT_USER_AGENT))

    else:
        userAgent = fetchRandomAgent()

        infoMsg = "fetched random HTTP User-Agent header value '%s' from " % userAgent
        infoMsg += "file '%s'" % paths.USER_AGENTS
        logger.info(infoMsg)

        conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, userAgent))

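# Illustrative usage (hypothetical target): a random User-Agent value taken
# from sqlmap's bundled list can be requested with
#
#   python sqlmap.py -u "http://www.example.com/vuln.php?id=1" --random-agent
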
def _setHTTPReferer():
    """
    Set the HTTP Referer
    """

    if conf.referer:
        debugMsg = "setting the HTTP Referer header"
        logger.debug(debugMsg)

        conf.httpHeaders.append((HTTP_HEADER.REFERER, conf.referer))

def _setHTTPHost():
    """
    Set the HTTP Host
    """

    if conf.host:
        debugMsg = "setting the HTTP Host header"
        logger.debug(debugMsg)

        conf.httpHeaders.append((HTTP_HEADER.HOST, conf.host))

def _setHTTPCookies():
    """
    Set the HTTP Cookie header
    """

    if conf.cookie:
        debugMsg = "setting the HTTP Cookie header"
        logger.debug(debugMsg)

        conf.httpHeaders.append((HTTP_HEADER.COOKIE, conf.cookie))

def _setHostname():
    """
    Set value conf.hostname
    """

    if conf.url:
        try:
            conf.hostname = _urllib.parse.urlsplit(conf.url).netloc.split(':')[0]
        except ValueError as ex:
            errMsg = "problem occurred while "
            errMsg += "parsing a URL '%s' ('%s')" % (conf.url, getSafeExString(ex))
            raise SqlmapDataException(errMsg)

def _setHTTPTimeout():
    """
    Set the HTTP timeout
    """

    if conf.timeout:
        debugMsg = "setting the HTTP timeout"
        logger.debug(debugMsg)

        conf.timeout = float(conf.timeout)

        if conf.timeout < 3.0:
            warnMsg = "the minimum HTTP timeout is 3 seconds, sqlmap "
            warnMsg += "is going to reset it"
            logger.warning(warnMsg)

            conf.timeout = 3.0
    else:
        conf.timeout = 30.0

    try:
        socket.setdefaulttimeout(conf.timeout)
    except OverflowError as ex:
        raise SqlmapValueException("invalid value used for option '--timeout' ('%s')" % getSafeExString(ex))

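# Illustrative note (standard library behavior): socket.setdefaulttimeout()
# applies to sockets created afterwards, e.g.
#
#   import socket
#
#   socket.setdefaulttimeout(30.0)
#   s = socket.socket()  # this socket now times out after 30 seconds
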
def _checkDependencies():
    """
    Checks for missing dependencies.
    """

    if conf.dependencies:
        checkDependencies()

def _createHomeDirectories():
    """
    Creates directories inside sqlmap's home directory
    """

    if conf.get("purge"):
        return

    for context in ("output", "history"):
        directory = paths["SQLMAP_%s_PATH" % getUnicode(context).upper()]  # NOTE: https://github.com/sqlmapproject/sqlmap/issues/4363
        try:
            if not os.path.isdir(directory):
                os.makedirs(directory)

            _ = os.path.join(directory, randomStr())
            open(_, "w+b").close()
            os.remove(_)

            if conf.get("outputDir") and context == "output":
                warnMsg = "using '%s' as the %s directory" % (directory, context)
                logger.warning(warnMsg)
        except (OSError, IOError) as ex:
            tempDir = tempfile.mkdtemp(prefix="sqlmap%s" % context)
            warnMsg = "unable to %s %s directory " % ("create" if not os.path.isdir(directory) else "write to the", context)
            warnMsg += "'%s' (%s). " % (directory, getUnicode(ex))
            warnMsg += "Using temporary directory '%s' instead" % getUnicode(tempDir)
            logger.warning(warnMsg)

            paths["SQLMAP_%s_PATH" % context.upper()] = tempDir

def _pympTempLeakPatch(tempDir):  # Cross-referenced function
    raise NotImplementedError

def _createTemporaryDirectory():
    """
    Creates temporary directory for this run.
    """

    if conf.tmpDir:
        try:
            if not os.path.isdir(conf.tmpDir):
                os.makedirs(conf.tmpDir)

            _ = os.path.join(conf.tmpDir, randomStr())

            open(_, "w+b").close()
            os.remove(_)

            tempfile.tempdir = conf.tmpDir

            warnMsg = "using '%s' as the temporary directory" % conf.tmpDir
            logger.warning(warnMsg)
        except (OSError, IOError) as ex:
            errMsg = "there has been a problem while accessing "
            errMsg += "temporary directory location(s) ('%s')" % getSafeExString(ex)
            raise SqlmapSystemException(errMsg)
    else:
        try:
            if not os.path.isdir(tempfile.gettempdir()):
                os.makedirs(tempfile.gettempdir())
        except Exception as ex:
            warnMsg = "there has been a problem while accessing "
            warnMsg += "system's temporary directory location(s) ('%s'). Please " % getSafeExString(ex)
            warnMsg += "make sure that there is enough disk space left. If the problem persists, "
            warnMsg += "try to set environment variable 'TEMP' to a location "
            warnMsg += "writeable by the current user"
            logger.warning(warnMsg)

    if "sqlmap" not in (tempfile.tempdir or "") or conf.tmpDir and tempfile.tempdir == conf.tmpDir:
        try:
            tempfile.tempdir = tempfile.mkdtemp(prefix="sqlmap", suffix=str(os.getpid()))
        except:
            tempfile.tempdir = os.path.join(paths.SQLMAP_HOME_PATH, "tmp", "sqlmap%s%d" % (randomStr(6), os.getpid()))

    kb.tempDir = tempfile.tempdir

    if not os.path.isdir(tempfile.tempdir):
        try:
            os.makedirs(tempfile.tempdir)
        except Exception as ex:
            errMsg = "there has been a problem while setting "
            errMsg += "temporary directory location ('%s')" % getSafeExString(ex)
            raise SqlmapSystemException(errMsg)

    if six.PY3:
        _pympTempLeakPatch(kb.tempDir)

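# Illustrative note (standard library): tempfile.mkdtemp() creates a fresh,
# user-owned directory and returns its path; assigning tempfile.tempdir makes
# later NamedTemporaryFile()/mkstemp() calls default to that directory:
#
#   import tempfile
#
#   tempfile.tempdir = tempfile.mkdtemp(prefix="sqlmap")
#   handle = tempfile.NamedTemporaryFile()  # created under the new directory
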
def _cleanupOptions():
    """
    Cleanup configuration attributes.
    """

    if conf.encoding:
        try:
            codecs.lookup(conf.encoding)
        except LookupError:
            errMsg = "unknown encoding '%s'" % conf.encoding
            raise SqlmapValueException(errMsg)

    debugMsg = "cleaning up configuration parameters"
    logger.debug(debugMsg)

    width = getConsoleWidth()

    if conf.eta:
        conf.progressWidth = width - 26
    else:
        conf.progressWidth = width - 46

    for key, value in conf.items():
        if value and any(key.endswith(_) for _ in ("Path", "File", "Dir")):
            if isinstance(value, str):
                conf[key] = safeExpandUser(value)

    if conf.testParameter:
        conf.testParameter = urldecode(conf.testParameter)
        conf.testParameter = [_.strip() for _ in re.split(PARAMETER_SPLITTING_REGEX, conf.testParameter)]
    else:
        conf.testParameter = []

    if conf.ignoreCode:
        if conf.ignoreCode == IGNORE_CODE_WILDCARD:
            conf.ignoreCode = xrange(0, 1000)
        else:
            try:
                conf.ignoreCode = [int(_) for _ in re.split(PARAMETER_SPLITTING_REGEX, conf.ignoreCode)]
            except ValueError:
                errMsg = "option '--ignore-code' should contain a list of integer values or a wildcard value '%s'" % IGNORE_CODE_WILDCARD
                raise SqlmapSyntaxException(errMsg)
    else:
        conf.ignoreCode = []

    if conf.abortCode:
        try:
            conf.abortCode = [int(_) for _ in re.split(PARAMETER_SPLITTING_REGEX, conf.abortCode)]
        except ValueError:
            errMsg = "option '--abort-code' should contain a list of integer values"
            raise SqlmapSyntaxException(errMsg)
    else:
        conf.abortCode = []

    if conf.paramFilter:
        conf.paramFilter = [_.strip() for _ in re.split(PARAMETER_SPLITTING_REGEX, conf.paramFilter.upper())]
    else:
        conf.paramFilter = []

    if conf.base64Parameter:
        conf.base64Parameter = urldecode(conf.base64Parameter)
        conf.base64Parameter = conf.base64Parameter.strip()
        conf.base64Parameter = re.split(PARAMETER_SPLITTING_REGEX, conf.base64Parameter)
    else:
        conf.base64Parameter = []

    if conf.agent:
        conf.agent = re.sub(r"[\r\n]", "", conf.agent)

    if conf.user:
        conf.user = conf.user.replace(" ", "")

    if conf.rParam:
        if all(_ in conf.rParam for _ in ('=', ',')):
            original = conf.rParam
            conf.rParam = []
            for part in original.split(';'):
                if '=' in part:
                    left, right = part.split('=', 1)
                    conf.rParam.append(left)
                    kb.randomPool[left] = filterNone(_.strip() for _ in right.split(','))
                else:
                    conf.rParam.append(part)
        else:
            conf.rParam = conf.rParam.replace(" ", "")
            conf.rParam = re.split(PARAMETER_SPLITTING_REGEX, conf.rParam)
    else:
        conf.rParam = []

    if conf.paramDel:
        conf.paramDel = decodeStringEscape(conf.paramDel)

    if conf.skip:
        conf.skip = conf.skip.replace(" ", "")
        conf.skip = re.split(PARAMETER_SPLITTING_REGEX, conf.skip)
    else:
        conf.skip = []

    if conf.cookie:
        conf.cookie = re.sub(r"[\r\n]", "", conf.cookie)

    if conf.delay:
        conf.delay = float(conf.delay)

    if conf.url:
        conf.url = conf.url.strip().lstrip('/')
        if not re.search(r"\A\w+://", conf.url):
            conf.url = "http://%s" % conf.url

    if conf.fileRead:
        conf.fileRead = ntToPosixSlashes(normalizePath(conf.fileRead))

    if conf.fileWrite:
        conf.fileWrite = ntToPosixSlashes(normalizePath(conf.fileWrite))

    if conf.fileDest:
        conf.fileDest = ntToPosixSlashes(normalizePath(conf.fileDest))

    if conf.msfPath:
        conf.msfPath = ntToPosixSlashes(normalizePath(conf.msfPath))

    if conf.tmpPath:
        conf.tmpPath = ntToPosixSlashes(normalizePath(conf.tmpPath))

    if any((conf.googleDork, conf.logFile, conf.bulkFile, conf.forms, conf.crawlDepth, conf.stdinPipe)):
        conf.multipleTargets = True

    if conf.optimize:
        setOptimize()

    if conf.os:
        conf.os = conf.os.capitalize()

    if conf.forceDbms:
        conf.dbms = conf.forceDbms

    if conf.dbms:
        kb.dbmsFilter = []
        for _ in conf.dbms.split(','):
            for dbms, aliases in DBMS_ALIASES:
                if _.strip().lower() in aliases:
                    kb.dbmsFilter.append(dbms)
                    conf.dbms = dbms if conf.dbms and ',' not in conf.dbms else None
                    break

    if conf.uValues:
        conf.uCols = "%d-%d" % (1 + conf.uValues.count(','), 1 + conf.uValues.count(','))

    if conf.testFilter:
        conf.testFilter = conf.testFilter.strip('*+')
        conf.testFilter = re.sub(r"([^.])([*+])", r"\g<1>.\g<2>", conf.testFilter)

        try:
            re.compile(conf.testFilter)
        except re.error:
            conf.testFilter = re.escape(conf.testFilter)

    if conf.csrfToken:
        original = conf.csrfToken
        try:
            re.compile(conf.csrfToken)

            if re.escape(conf.csrfToken) != conf.csrfToken:
                message = "provided value for option '--csrf-token' is a regular expression? [y/N] "
                if not readInput(message, default='N', boolean=True):
                    conf.csrfToken = re.escape(conf.csrfToken)
        except re.error:
            conf.csrfToken = re.escape(conf.csrfToken)
        finally:
            class _(six.text_type):
                pass
            conf.csrfToken = _(conf.csrfToken)
            conf.csrfToken._original = original

    if conf.testSkip:
        conf.testSkip = conf.testSkip.strip('*+')
        conf.testSkip = re.sub(r"([^.])([*+])", r"\g<1>.\g<2>", conf.testSkip)

        try:
            re.compile(conf.testSkip)
        except re.error:
            conf.testSkip = re.escape(conf.testSkip)

    if "timeSec" not in kb.explicitSettings:
        if conf.tor:
            conf.timeSec = 2 * conf.timeSec
            kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE

            warnMsg = "increasing default value for "
            warnMsg += "option '--time-sec' to %d because " % conf.timeSec
            warnMsg += "switch '--tor' was provided"
            logger.warning(warnMsg)
    else:
        kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE

    if conf.retries:
        conf.retries = min(conf.retries, MAX_CONNECT_RETRIES)

    if conf.url:
        match = re.search(r"\A(\w+://)?([^/@?]+)@", conf.url)
        if match:
            credentials = match.group(2)
            conf.url = conf.url.replace("%s@" % credentials, "", 1)

            conf.authType = AUTH_TYPE.BASIC
            conf.authCred = credentials if ':' in credentials else "%s:" % credentials

    if conf.code:
        conf.code = int(conf.code)

    if conf.csvDel:
        conf.csvDel = decodeStringEscape(conf.csvDel)

    if conf.torPort and hasattr(conf.torPort, "isdigit") and conf.torPort.isdigit():
        conf.torPort = int(conf.torPort)

    if conf.torType:
        conf.torType = conf.torType.upper()

    if conf.outputDir:
        paths.SQLMAP_OUTPUT_PATH = os.path.realpath(os.path.expanduser(conf.outputDir))
        setPaths(paths.SQLMAP_ROOT_PATH)

    if conf.string:
        conf.string = decodeStringEscape(conf.string)

    if conf.getAll:
        for _ in WIZARD.ALL:
            conf.__setitem__(_, True)

    if conf.noCast:
        DUMP_REPLACEMENTS.clear()

    if conf.dumpFormat:
        conf.dumpFormat = conf.dumpFormat.upper()

    if conf.torType:
        conf.torType = conf.torType.upper()

    if conf.col:
        conf.col = re.sub(r"\s*,\s*", ',', conf.col)

    if conf.exclude:
        regex = False
        original = conf.exclude

        if any(_ in conf.exclude for _ in ('+', '*')):
            try:
                re.compile(conf.exclude)
            except re.error:
                pass
            else:
                regex = True

        if not regex:
            conf.exclude = re.sub(r"\s*,\s*", ',', conf.exclude)
            conf.exclude = r"\A%s\Z" % '|'.join(re.escape(_) for _ in conf.exclude.split(','))
        else:
            conf.exclude = re.sub(r"(\w+)\$", r"\g<1>\$", conf.exclude)

        class _(six.text_type):
            pass

        conf.exclude = _(conf.exclude)
        conf.exclude._original = original

    if conf.binaryFields:
        conf.binaryFields = conf.binaryFields.replace(" ", "")
        conf.binaryFields = re.split(PARAMETER_SPLITTING_REGEX, conf.binaryFields)

    envProxy = max(os.environ.get(_, "") for _ in PROXY_ENVIRONMENT_VARIABLES)
    if re.search(r"\A(https?|socks[45])://.+:\d+\Z", envProxy) and conf.proxy is None:
        debugMsg = "using environment proxy '%s'" % envProxy
        logger.debug(debugMsg)

        conf.proxy = envProxy

    if any((conf.proxy, conf.proxyFile, conf.tor)):
        conf.disablePrecon = True

    if conf.dummy:
        conf.batch = True

    threadData = getCurrentThreadData()
    threadData.reset()

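# Illustrative note (hypothetical values): several of the options cleaned up
# above are comma-separated lists split with PARAMETER_SPLITTING_REGEX, e.g.
# --skip="id,cat" becomes ["id", "cat"] and --ignore-code="401,403" becomes
# [401, 403].
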
def _cleanupEnvironment():
    """
    Cleanup environment (e.g. from leftovers after --shell).
    """

    if issubclass(_http_client.socket.socket, socks.socksocket):
        socks.unwrapmodule(_http_client)

    if hasattr(socket, "_ready"):
        socket._ready.clear()

def _purge():
    """
    Safely removes (purges) sqlmap data directory.
    """

    if conf.purge:
        purge(paths.SQLMAP_HOME_PATH)

def _setConfAttributes():
    """
    This function sets some needed attributes into the configuration
    singleton.
    """

    debugMsg = "initializing the configuration"
    logger.debug(debugMsg)

    conf.authUsername = None
    conf.authPassword = None
    conf.boundaries = []
    conf.cj = None
    conf.dbmsConnector = None
    conf.dbmsHandler = None
    conf.dnsServer = None
    conf.dumpPath = None
    conf.hashDB = None
    conf.hashDBFile = None
    conf.httpCollector = None
    conf.httpHeaders = []
    conf.hostname = None
    conf.ipv6 = False
    conf.multipleTargets = False
    conf.outputPath = None
    conf.paramDict = {}
    conf.parameters = {}
    conf.path = None
    conf.port = None
    conf.proxyList = None
    conf.resultsFP = None
    conf.scheme = None
    conf.tests = []
    conf.trafficFP = None
    conf.HARCollectorFactory = None
    conf.fileWriteType = None

def _setKnowledgeBaseAttributes(flushAll=True):
|
2008-10-15 19:38:22 +04:00
|
|
|
"""
|
|
|
|
This function set some needed attributes into the knowledge base
|
|
|
|
singleton.
|
|
|
|
"""
|
|
|
|
|
|
|
|
debugMsg = "initializing the knowledge base"
|
|
|
|
logger.debug(debugMsg)
|
|
|
|
|
2011-04-30 17:20:05 +04:00
|
|
|
kb.absFilePaths = set()
|
2012-10-09 17:19:47 +04:00
|
|
|
kb.adjustTimeDelay = None
|
2012-12-11 17:44:43 +04:00
|
|
|
kb.alerted = False
|
2018-09-22 00:06:45 +03:00
|
|
|
kb.aliasName = randomStr()
|
2011-05-28 02:42:23 +04:00
|
|
|
kb.alwaysRefresh = None
|
2011-04-30 17:20:05 +04:00
|
|
|
kb.arch = None
|
|
|
|
kb.authHeader = None
|
2011-07-08 10:02:31 +04:00
|
|
|
kb.bannerFp = AttribDict()
|
2020-08-10 23:26:03 +03:00
|
|
|
kb.base64Originals = {}
|
2013-12-27 01:27:04 +04:00
|
|
|
kb.binaryField = False
|
2017-06-07 13:55:14 +03:00
|
|
|
kb.browserVerification = None
|
2010-05-31 12:13:08 +04:00
|
|
|
|
2013-01-10 16:18:44 +04:00
|
|
|
kb.brute = AttribDict({"tables": [], "columns": []})
|
2011-04-30 17:20:05 +04:00
|
|
|
kb.bruteMode = False
|
2010-12-27 17:17:20 +03:00
|
|
|
|
2011-07-08 10:02:31 +04:00
|
|
|
kb.cache = AttribDict()
|
2016-09-09 12:06:38 +03:00
|
|
|
kb.cache.addrinfo = {}
|
2011-04-30 17:20:05 +04:00
|
|
|
kb.cache.content = {}
|
2021-03-04 01:28:27 +03:00
|
|
|
kb.cache.comparison = {}
|
2016-09-09 12:06:38 +03:00
|
|
|
kb.cache.encoding = {}
|
2017-12-29 15:04:52 +03:00
|
|
|
kb.cache.alphaBoundaries = None
|
2020-04-27 15:23:47 +03:00
|
|
|
kb.cache.hashRegex = None
|
2016-09-20 11:03:00 +03:00
|
|
|
kb.cache.intBoundaries = None
|
2016-09-09 12:06:38 +03:00
|
|
|
kb.cache.parsedDbms = {}
|
2011-04-30 17:20:05 +04:00
|
|
|
kb.cache.regex = {}
|
|
|
|
kb.cache.stdev = {}
|
2010-05-31 12:13:08 +04:00
|
|
|
|
2016-06-01 16:48:04 +03:00
|
|
|
kb.captchaDetected = None
|
|
|
|
|
2012-07-06 19:34:40 +04:00
|
|
|
kb.chars = AttribDict()
|
|
|
|
kb.chars.delimiter = randomStr(length=6, lowercase=True)
|
2014-11-05 12:56:30 +03:00
|
|
|
kb.chars.start = "%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, randomStr(length=3, alphabet=KB_CHARS_LOW_FREQUENCY_ALPHABET), KB_CHARS_BOUNDARY_CHAR)
|
|
|
|
kb.chars.stop = "%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, randomStr(length=3, alphabet=KB_CHARS_LOW_FREQUENCY_ALPHABET), KB_CHARS_BOUNDARY_CHAR)
|
2013-06-11 00:14:45 +04:00
|
|
|
kb.chars.at, kb.chars.space, kb.chars.dollar, kb.chars.hash_ = ("%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, _, KB_CHARS_BOUNDARY_CHAR) for _ in randomStr(length=4, lowercase=True))
|
2012-07-06 19:34:40 +04:00
|
|
|
|
2021-01-12 15:21:51 +03:00
|
|
|
kb.choices = AttribDict(keycheck=False)
|
2019-05-21 15:18:14 +03:00
|
|
|
kb.codePage = None
|
2011-04-30 17:20:05 +04:00
|
|
|
kb.commonOutputs = None
|
2016-10-02 12:13:40 +03:00
|
|
|
kb.connErrorCounter = 0
|
2019-03-22 15:49:52 +03:00
|
|
|
kb.copyExecTest = None
|
2011-12-21 15:50:49 +04:00
|
|
|
kb.counters = {}
|
2017-07-20 03:41:47 +03:00
|
|
|
kb.customInjectionMark = CUSTOM_INJECTION_MARK_CHAR
|
2011-07-08 10:02:31 +04:00
|
|
|
kb.data = AttribDict()
|
2011-05-11 00:44:36 +04:00
|
|
|
kb.dataOutputFlag = False
|
2009-04-22 15:48:07 +04:00
|
|
|
|
2011-01-20 02:06:15 +03:00
|
|
|
# Active back-end DBMS fingerprint
|
2011-04-30 17:20:05 +04:00
|
|
|
kb.dbms = None
|
2017-08-28 14:02:08 +03:00
|
|
|
kb.dbmsFilter = []
|
2013-01-10 14:54:07 +04:00
|
|
|
kb.dbmsVersion = [UNKNOWN_DBMS_VERSION]
|
2011-04-30 17:20:05 +04:00
|
|
|
|
|
|
|
kb.delayCandidates = TIME_DELAY_CANDIDATES * [0]
|
|
|
|
kb.dep = None
|
2019-11-18 13:40:07 +03:00
|
|
|
kb.disableHtmlDecoding = False
|
2022-09-13 23:59:34 +03:00
|
|
|
kb.disableShiftTable = False
|
2012-04-02 18:05:30 +04:00
|
|
|
kb.dnsMode = False
|
2012-04-03 13:18:30 +04:00
|
|
|
kb.dnsTest = None
|
2011-04-30 17:20:05 +04:00
|
|
|
kb.docRoot = None
|
2017-05-26 15:14:35 +03:00
|
|
|
kb.droppingRequests = False
|
2016-07-15 00:18:28 +03:00
|
|
|
kb.dumpColumns = None
|
2012-07-12 16:31:28 +04:00
|
|
|
kb.dumpTable = None
|
2016-03-23 12:33:32 +03:00
|
|
|
kb.dumpKeyboardInterrupt = False
|
2011-04-30 17:20:05 +04:00
|
|
|
kb.dynamicMarkings = []
|
2012-08-20 13:40:49 +04:00
|
|
|
kb.dynamicParameter = False
|
2011-04-30 17:20:05 +04:00
|
|
|
kb.endDetection = False
|
2011-06-16 15:42:13 +04:00
|
|
|
kb.explicitSettings = set()
|
2013-02-01 20:24:04 +04:00
|
|
|
kb.extendTests = None
|
2015-08-26 16:26:16 +03:00
|
|
|
kb.errorChunkLength = None
|
2011-04-30 17:20:05 +04:00
|
|
|
kb.errorIsNone = True
|
2016-05-06 14:06:59 +03:00
|
|
|
kb.falsePositives = []
|
2012-07-06 16:24:44 +04:00
|
|
|
kb.fileReadMode = False
|
2020-03-26 16:58:58 +03:00
|
|
|
kb.fingerprinted = False
|
2014-07-03 00:27:51 +04:00
|
|
|
kb.followSitemapRecursion = None
|
2011-09-26 01:10:45 +04:00
|
|
|
kb.forcedDbms = None
|
2013-07-31 23:15:03 +04:00
|
|
|
kb.forcePartialUnion = False
|
2019-06-01 17:33:27 +03:00
|
|
|
kb.forceThreads = None
|
2015-07-26 18:02:46 +03:00
|
|
|
kb.forceWhere = None
|
2020-01-21 01:11:37 +03:00
|
|
|
kb.forkNote = None
|
2015-07-10 02:19:46 +03:00
|
|
|
kb.futileUnion = None
|
2020-03-02 14:43:12 +03:00
|
|
|
kb.fuzzUnionTest = None
|
2018-03-31 12:02:48 +03:00
|
|
|
kb.heavilyDynamic = False
|
2019-08-02 21:29:52 +03:00
|
|
|
kb.headersFile = None
|
2011-04-30 17:20:05 +04:00
|
|
|
kb.headersFp = {}
|
2013-01-25 15:34:57 +04:00
|
|
|
kb.heuristicDbms = None
|
2017-06-05 17:48:14 +03:00
|
|
|
kb.heuristicExtendedDbms = None
|
2013-07-08 13:48:33 +04:00
|
|
|
kb.heuristicMode = False
|
2016-05-30 17:46:23 +03:00
|
|
|
kb.heuristicPage = False
|
2011-06-15 21:37:28 +04:00
|
|
|
kb.heuristicTest = None
|
2019-06-04 13:15:39 +03:00
|
|
|
kb.hintValue = ""
|
2011-04-30 17:20:05 +04:00
|
|
|
kb.htmlFp = []
|
2011-11-21 20:41:02 +04:00
|
|
|
kb.httpErrorCodes = {}
|
|
|
|
kb.inferenceMode = False
|
2012-08-22 18:06:09 +04:00
|
|
|
kb.ignoreCasted = None
|
2011-12-05 13:25:56 +04:00
|
|
|
kb.ignoreNotFound = False
|
2011-05-22 12:24:13 +04:00
|
|
|
kb.ignoreTimeout = False
|
2019-05-24 14:54:10 +03:00
|
|
|
kb.identifiedWafs = set()
|
2011-07-08 10:02:31 +04:00
|
|
|
kb.injection = InjectionDict()
|
2011-04-30 17:20:05 +04:00
|
|
|
kb.injections = []
|
2020-10-27 16:06:56 +03:00
|
|
|
kb.jsonAggMode = False
|
2013-05-18 23:30:21 +04:00
|
|
|
kb.laggingChecked = False
|
2011-08-05 14:55:21 +04:00
|
|
|
kb.lastParserStatus = None
|
|
|
|
|
2011-07-08 10:02:31 +04:00
|
|
|
kb.locks = AttribDict()
|
2022-12-06 13:55:03 +03:00
|
|
|
for _ in ("cache", "connError", "count", "handlers", "hint", "identYwaf", "index", "io", "limit", "liveCookies", "log", "socket", "redirect", "request", "value"):
|
2011-12-28 20:27:17 +04:00
|
|
|
kb.locks[_] = threading.Lock()
|
2011-04-30 17:20:05 +04:00
|
|
|
|
|
|
|
kb.matchRatio = None
|
2012-04-12 13:44:54 +04:00
|
|
|
kb.maxConnectionsFlag = False
|
2012-01-11 18:28:08 +04:00
|
|
|
kb.mergeCookies = None
|
2021-09-29 17:13:13 +03:00
|
|
|
kb.multiThreadMode = False
|
2019-08-06 03:54:18 +03:00
|
|
|
kb.multipleCtrlC = False
|
    kb.negativeLogic = False
    kb.nchar = True
    kb.nullConnection = None
    kb.oldMsf = None
    kb.orderByColumns = None
    kb.originalCode = None
    kb.originalPage = None
    kb.originalPageTime = None
    kb.originalTimeDelay = None
    kb.originalUrls = dict()

    # Back-end DBMS underlying operating system fingerprint via banner (-b)
    # parsing
    kb.os = None
    kb.osVersion = None
    kb.osSP = None

    kb.pageCompress = True
    kb.pageTemplate = None
    kb.pageTemplates = dict()
    kb.pageEncoding = DEFAULT_PAGE_ENCODING
    kb.pageStable = None
    kb.partRun = None
    kb.permissionFlag = False
    kb.place = None
    kb.postHint = None
    kb.postSpaceToPlus = False
    kb.postUrlEncode = True
    kb.prependFlag = False
    kb.processResponseCounter = 0
    kb.previousMethod = None
    kb.processNonCustom = None
    kb.processUserMarks = None
    kb.proxyAuthHeader = None
    kb.queryCounter = 0
    kb.randomPool = {}
    kb.reflectiveMechanism = True
    kb.reflectiveCounters = {REFLECTIVE_COUNTER.MISS: 0, REFLECTIVE_COUNTER.HIT: 0}
    kb.requestCounter = 0
    kb.resendPostOnRedirect = None
    kb.resolutionDbms = None
    kb.responseTimes = {}
    kb.responseTimeMode = None
    kb.responseTimePayload = None
    kb.resumeValues = True
    kb.safeCharEncode = False
    kb.safeReq = AttribDict()
    kb.secondReq = None
    kb.serverHeader = None
    kb.singleLogFlags = set()
    kb.skipSeqMatcher = False
    kb.smokeMode = False
    kb.reduceTests = None
    kb.sslSuccess = False
    kb.startTime = time.time()
    kb.stickyDBMS = False
    kb.suppressResumeInfo = False
    kb.tableFrom = None
    kb.technique = None
    kb.tempDir = None
    kb.testMode = False
    kb.testOnlyCustom = False
    kb.testQueryCount = 0
    kb.testType = None
    kb.threadContinue = True
    kb.threadException = False
    kb.uChar = NULL
    kb.udfFail = False
    kb.unionDuplicates = False
    kb.unionTemplate = None
    kb.webSocketRecvCount = None
    kb.wizardMode = False
    kb.xpCmdshellAvailable = False

    if flushAll:
        kb.checkSitemap = None
        kb.headerPaths = {}
        kb.keywords = set(getFileItems(paths.SQL_KEYWORDS))
        kb.lastCtrlCTime = None
        kb.normalizeCrawlingChoice = None
        kb.passwordMgr = None
        kb.postprocessFunctions = []
        kb.preprocessFunctions = []
        kb.skipVulnHost = None
        kb.storeCrawlingChoice = None
        kb.tamperFunctions = []
        kb.targets = OrderedSet()
        kb.testedParams = set()
        kb.userAgents = None
        kb.vainRun = True
        kb.vulnHosts = set()
        kb.wafFunctions = []
        kb.wordlists = None

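# Editor's note (illustrative, not part of the original module): the attributes above are
# re-initialized on every knowledge base (re)setup, while the block guarded by 'flushAll'
# appears to hold cross-target state (e.g. kb.targets, kb.vulnHosts) that is only cleared
# when a full flush is requested.
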
def _useWizardInterface():
    """
    Presents simple wizard interface for beginner users
    """

    if not conf.wizard:
        return

    logger.info("starting wizard interface")

    while not conf.url:
        message = "Please enter full target URL (-u): "
        conf.url = readInput(message, default=None, checkBatch=False)

    message = "%s data (--data) [Enter for None]: " % ((conf.method if conf.method != HTTPMETHOD.GET else None) or HTTPMETHOD.POST)
    conf.data = readInput(message, default=None)

    if not (any('=' in _ for _ in (conf.url, conf.data)) or '*' in conf.url):
        warnMsg = "no GET and/or %s parameter(s) found for testing " % ((conf.method if conf.method != HTTPMETHOD.GET else None) or HTTPMETHOD.POST)
        warnMsg += "(e.g. GET parameter 'id' in 'http://www.site.com/vuln.php?id=1'). "
        if not conf.crawlDepth and not conf.forms:
            warnMsg += "Will search for forms"
            conf.forms = True
        logger.warning(warnMsg)

    choice = None

    while choice is None or choice not in ("", "1", "2", "3"):
        message = "Injection difficulty (--level/--risk). Please choose:\n"
        message += "[1] Normal (default)\n[2] Medium\n[3] Hard"
        choice = readInput(message, default='1')

        if choice == '2':
            conf.risk = 2
            conf.level = 3
        elif choice == '3':
            conf.risk = 3
            conf.level = 5
        else:
            conf.risk = 1
            conf.level = 1

    if not conf.getAll:
        choice = None

        while choice is None or choice not in ("", "1", "2", "3"):
            message = "Enumeration (--banner/--current-user/etc). Please choose:\n"
            message += "[1] Basic (default)\n[2] Intermediate\n[3] All"
            choice = readInput(message, default='1')

            if choice == '2':
                options = WIZARD.INTERMEDIATE
            elif choice == '3':
                options = WIZARD.ALL
            else:
                options = WIZARD.BASIC

            for _ in options:
                conf.__setitem__(_, True)

    logger.debug("muting sqlmap.. it will do the magic for you")
    conf.verbose = 0

    conf.batch = True
    conf.threads = 4

    dataToStdout("\nsqlmap is running, please wait..\n\n")

    kb.wizardMode = True

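# Editor's note (illustrative): the wizard's difficulty choices map onto '--risk'/'--level'
# pairs ("1" Normal -> risk 1/level 1, "2" Medium -> risk 2/level 3, "3" Hard -> risk 3/level 5),
# while the enumeration choices expand to the WIZARD.BASIC/INTERMEDIATE/ALL option sets.
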
def _saveConfig():
    """
    Saves the command line options to a sqlmap configuration INI file format
    """

    if not conf.saveConfig:
        return

    debugMsg = "saving command line options to a sqlmap configuration INI file"
    logger.debug(debugMsg)

    saveConfig(conf, conf.saveConfig)

    infoMsg = "saved command line options to the configuration file '%s'" % conf.saveConfig
    logger.info(infoMsg)

def setVerbosity():
    """
    This function sets the verbosity of sqlmap output messages.
    """

    if conf.verbose is None:
        conf.verbose = 1

    conf.verbose = int(conf.verbose)

    if conf.verbose == 0:
        logger.setLevel(logging.ERROR)
    elif conf.verbose == 1:
        logger.setLevel(logging.INFO)
    elif conf.verbose > 2 and conf.eta:
        conf.verbose = 2
        logger.setLevel(logging.DEBUG)
    elif conf.verbose == 2:
        logger.setLevel(logging.DEBUG)
    elif conf.verbose == 3:
        logger.setLevel(CUSTOM_LOGGING.PAYLOAD)
    elif conf.verbose == 4:
        logger.setLevel(CUSTOM_LOGGING.TRAFFIC_OUT)
    elif conf.verbose >= 5:
        logger.setLevel(CUSTOM_LOGGING.TRAFFIC_IN)

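# Editor's note (illustrative): '-v' values map onto logger thresholds as 0 -> ERROR,
# 1 -> INFO (default), 2 -> DEBUG, 3 -> PAYLOAD, 4 -> TRAFFIC_OUT and >= 5 -> TRAFFIC_IN,
# with anything above 2 capped at DEBUG when '--eta' is used.
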
def _normalizeOptions(inputOptions):
    """
    Sets proper option types
    """

    types_ = {}
    for group in optDict.keys():
        types_.update(optDict[group])

    for key in inputOptions:
        if key in types_:
            value = inputOptions[key]
            if value is None:
                continue

            type_ = types_[key]
            if type_ and isinstance(type_, tuple):
                type_ = type_[0]

            if type_ == OPTION_TYPE.BOOLEAN:
                try:
                    value = bool(value)
                except (TypeError, ValueError):
                    value = False
            elif type_ == OPTION_TYPE.INTEGER:
                try:
                    value = int(value)
                except (TypeError, ValueError):
                    value = 0
            elif type_ == OPTION_TYPE.FLOAT:
                try:
                    value = float(value)
                except (TypeError, ValueError):
                    value = 0.0

            inputOptions[key] = value

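# Editor's note (illustrative sketch, assuming 'level' is registered as OPTION_TYPE.INTEGER and
# 'batch' as OPTION_TYPE.BOOLEAN in optDict): an input such as {"level": "3", "batch": 1} would
# be coerced in place to {"level": 3, "batch": True}; unknown keys and None values are left as-is.
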
def _mergeOptions(inputOptions, overrideOptions):
    """
    Merge command line options with configuration file and default options.

    @param inputOptions: optparse object with command line options.
    @type inputOptions: C{instance}
    """

    if inputOptions.configFile:
        configFileParser(inputOptions.configFile)

    if hasattr(inputOptions, "items"):
        inputOptionsItems = inputOptions.items()
    else:
        inputOptionsItems = inputOptions.__dict__.items()

    for key, value in inputOptionsItems:
        if key not in conf or value not in (None, False) or overrideOptions:
            conf[key] = value

    if not conf.api:
        for key, value in conf.items():
            if value is not None:
                kb.explicitSettings.add(key)

    for key, value in defaults.items():
        if hasattr(conf, key) and conf[key] is None:
            conf[key] = value

            if conf.unstable:
                if key in ("timeSec", "retries", "timeout"):
                    conf[key] *= 2

    if conf.unstable:
        conf.forcePartial = True

    lut = {}
    for group in optDict.keys():
        lut.update((_.upper(), _) for _ in optDict[group])

    envOptions = {}
    for key, value in os.environ.items():
        if key.upper().startswith(SQLMAP_ENVIRONMENT_PREFIX):
            _ = key[len(SQLMAP_ENVIRONMENT_PREFIX):].upper()
            if _ in lut:
                envOptions[lut[_]] = value

    if envOptions:
        _normalizeOptions(envOptions)
        for key, value in envOptions.items():
            conf[key] = value

    mergedOptions.update(conf)

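# Editor's note (illustrative): option precedence here is roughly command line over configuration
# file, with defaults filling in whatever is still None; environment variables carrying the
# SQLMAP_ENVIRONMENT_PREFIX (e.g. a hypothetical "SQLMAP_THREADS=4") are matched case-insensitively
# against known option names, normalized and applied last, overriding previously merged values.
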
def _setTrafficOutputFP():
    if conf.trafficFile:
        infoMsg = "setting file for logging HTTP traffic"
        logger.info(infoMsg)

        conf.trafficFP = openFile(conf.trafficFile, "w+")

def _setupHTTPCollector():
    if not conf.harFile:
        return

    conf.httpCollector = HTTPCollectorFactory(conf.harFile).create()

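# Editor's note (illustrative): when '--har' is used, the collector built by HTTPCollectorFactory
# is attached to conf so that request/response traffic can later be written out in HAR format.
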
def _setDNSServer():
    if not conf.dnsDomain:
        return

    infoMsg = "setting up DNS server instance"
    logger.info(infoMsg)

    isAdmin = runningAsAdmin()

    if isAdmin:
        try:
            conf.dnsServer = DNSServer()
            conf.dnsServer.run()
        except socket.error as ex:
            errMsg = "there was an error while setting up "
            errMsg += "DNS server instance ('%s')" % getSafeExString(ex)
            raise SqlmapGenericException(errMsg)
    else:
        errMsg = "you need to run sqlmap as an administrator "
        errMsg += "if you want to perform a DNS data exfiltration attack "
        errMsg += "as it will need to listen on privileged UDP port 53 "
        errMsg += "for incoming address resolution attempts"
        raise SqlmapMissingPrivileges(errMsg)

def _setProxyList():
    if not conf.proxyFile:
        return

    conf.proxyList = []
    for match in re.finditer(r"(?i)((http[^:]*|socks[^:]*)://)?([\w\-.]+):(\d+)", readCachedFileContent(conf.proxyFile)):
        _, type_, address, port = match.groups()
        conf.proxyList.append("%s://%s:%s" % (type_ or "http", address, port))

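# Editor's note (illustrative): each line of the '--proxy-file' file matching the pattern above is
# normalized to "<scheme>://<address>:<port>" (e.g. a hypothetical "socks5://127.0.0.1:9050");
# entries without an explicit scheme default to "http".
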
def _setTorProxySettings():
    if not conf.tor:
        return

    if conf.torType == PROXY_TYPE.HTTP:
        _setTorHttpProxySettings()
    else:
        _setTorSocksProxySettings()

def _setTorHttpProxySettings():
    infoMsg = "setting Tor HTTP proxy settings"
    logger.info(infoMsg)

    port = findLocalPort(DEFAULT_TOR_HTTP_PORTS if not conf.torPort else (conf.torPort,))

    if port:
        conf.proxy = "http://%s:%d" % (LOCALHOST, port)
    else:
        errMsg = "can't establish connection with the Tor HTTP proxy. "
        errMsg += "Please make sure that you have the Tor (bundle) installed and set up "
        errMsg += "so you can successfully use switch '--tor'"
        raise SqlmapConnectionException(errMsg)

    if not conf.checkTor:
        warnMsg = "use switch '--check-tor' at "
        warnMsg += "your own convenience when accessing "
        warnMsg += "the Tor anonymizing network because of "
        warnMsg += "known issues with default settings of various 'bundles' "
        warnMsg += "(e.g. Vidalia)"
        logger.warning(warnMsg)

def _setTorSocksProxySettings():
    infoMsg = "setting Tor SOCKS proxy settings"
    logger.info(infoMsg)

    port = findLocalPort(DEFAULT_TOR_SOCKS_PORTS if not conf.torPort else (conf.torPort,))

    if not port:
        errMsg = "can't establish connection with the Tor SOCKS proxy. "
        errMsg += "Please make sure that you have the Tor service installed and set up "
        errMsg += "so you can successfully use switch '--tor'"
        raise SqlmapConnectionException(errMsg)

    # SOCKS5 to prevent DNS leaks (http://en.wikipedia.org/wiki/Tor_%28anonymity_network%29)
    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if conf.torType == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, LOCALHOST, port)
    socks.wrapmodule(_http_client)

def _setHttpChunked():
    if conf.chunked and conf.data:
        if hasattr(_http_client.HTTPConnection, "_set_content_length"):
            _http_client.HTTPConnection._set_content_length = lambda self, *args, **kwargs: None
        else:
            def putheader(self, header, *values):
                if header != HTTP_HEADER.CONTENT_LENGTH:
                    self._putheader(header, *values)

            if not hasattr(_http_client.HTTPConnection, "_putheader"):
                _http_client.HTTPConnection._putheader = _http_client.HTTPConnection.putheader

            _http_client.HTTPConnection.putheader = putheader

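# Editor's note (illustrative): with '--chunked' a Content-Length header must not be sent, so the
# monkey patching above either disables http.client's internal _set_content_length() or wraps
# putheader() to silently drop that single header while passing all others through unchanged.
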
def _checkWebSocket():
    if conf.url and (conf.url.startswith("ws:/") or conf.url.startswith("wss:/")):
        try:
            from websocket import ABNF
        except ImportError:
            errMsg = "sqlmap requires third-party module 'websocket-client' "
            errMsg += "in order to use WebSocket functionality"
            raise SqlmapMissingDependence(errMsg)

def _checkTor():
    if not conf.checkTor:
        return

    infoMsg = "checking Tor connection"
    logger.info(infoMsg)

    try:
        page, _, _ = Request.getPage(url="https://check.torproject.org/", raise404=False)
    except SqlmapConnectionException:
        page = None

    if not page or "Congratulations" not in page:
        errMsg = "it appears that Tor is not properly set up. Please try using options '--tor-type' and/or '--tor-port'"
        raise SqlmapConnectionException(errMsg)
    else:
        infoMsg = "Tor is properly being used"
        logger.info(infoMsg)

def _basicOptionValidation():
    if conf.limitStart is not None and not (isinstance(conf.limitStart, int) and conf.limitStart > 0):
        errMsg = "value for option '--start' (limitStart) must be an integer value greater than zero (>0)"
        raise SqlmapSyntaxException(errMsg)

    if conf.limitStop is not None and not (isinstance(conf.limitStop, int) and conf.limitStop > 0):
        errMsg = "value for option '--stop' (limitStop) must be an integer value greater than zero (>0)"
        raise SqlmapSyntaxException(errMsg)

    if conf.level is not None and not (isinstance(conf.level, int) and conf.level >= 1 and conf.level <= 5):
        errMsg = "value for option '--level' must be an integer value from range [1, 5]"
        raise SqlmapSyntaxException(errMsg)

    if conf.risk is not None and not (isinstance(conf.risk, int) and conf.risk >= 1 and conf.risk <= 3):
        errMsg = "value for option '--risk' must be an integer value from range [1, 3]"
        raise SqlmapSyntaxException(errMsg)

    if isinstance(conf.limitStart, int) and conf.limitStart > 0 and \
       isinstance(conf.limitStop, int) and conf.limitStop < conf.limitStart:
        warnMsg = "usage of a value for option '--start' (limitStart) bigger than the value for option '--stop' (limitStop) is considered unstable"
        logger.warning(warnMsg)

    if isinstance(conf.firstChar, int) and conf.firstChar > 0 and \
       isinstance(conf.lastChar, int) and conf.lastChar < conf.firstChar:
        errMsg = "value for option '--first' (firstChar) must be smaller than or equal to the value for option '--last' (lastChar)"
        raise SqlmapSyntaxException(errMsg)

    if conf.proxyFile and not any((conf.randomAgent, conf.mobile, conf.agent, conf.requestFile)):
        warnMsg = "usage of switch '--random-agent' is strongly recommended when "
        warnMsg += "using option '--proxy-file'"
        logger.warning(warnMsg)

    if conf.textOnly and conf.nullConnection:
        errMsg = "switch '--text-only' is incompatible with switch '--null-connection'"
        raise SqlmapSyntaxException(errMsg)

    if conf.uValues and conf.uChar:
        errMsg = "option '--union-values' is incompatible with option '--union-char'"
        raise SqlmapSyntaxException(errMsg)

    if conf.base64Parameter and conf.tamper:
        errMsg = "option '--base64' is incompatible with option '--tamper'"
        raise SqlmapSyntaxException(errMsg)

    if conf.eta and conf.verbose > defaults.verbose:
        errMsg = "switch '--eta' is incompatible with option '-v'"
        raise SqlmapSyntaxException(errMsg)

    if conf.secondUrl and conf.secondReq:
        errMsg = "option '--second-url' is incompatible with option '--second-req'"
        raise SqlmapSyntaxException(errMsg)

    if conf.direct and conf.url:
        errMsg = "option '-d' is incompatible with option '-u' ('--url')"
        raise SqlmapSyntaxException(errMsg)

    if conf.direct and conf.dbms:
        errMsg = "option '-d' is incompatible with option '--dbms'"
        raise SqlmapSyntaxException(errMsg)

    if conf.titles and conf.nullConnection:
        errMsg = "switch '--titles' is incompatible with switch '--null-connection'"
        raise SqlmapSyntaxException(errMsg)

    if conf.dumpTable and conf.search:
        errMsg = "switch '--dump' is incompatible with switch '--search'"
        raise SqlmapSyntaxException(errMsg)

    if conf.chunked and not any((conf.data, conf.requestFile, conf.forms)):
        errMsg = "switch '--chunked' requires usage of (POST) options/switches '--data', '-r' or '--forms'"
        raise SqlmapSyntaxException(errMsg)

    if conf.api and not conf.configFile:
        errMsg = "switch '--api' requires usage of option '-c'"
        raise SqlmapSyntaxException(errMsg)

    if conf.data and conf.nullConnection:
        errMsg = "option '--data' is incompatible with switch '--null-connection'"
        raise SqlmapSyntaxException(errMsg)

    if conf.string and conf.nullConnection:
        errMsg = "option '--string' is incompatible with switch '--null-connection'"
        raise SqlmapSyntaxException(errMsg)

    if conf.notString and conf.nullConnection:
        errMsg = "option '--not-string' is incompatible with switch '--null-connection'"
        raise SqlmapSyntaxException(errMsg)

    if conf.tor and conf.osPwn:
        errMsg = "option '--tor' is incompatible with switch '--os-pwn'"
        raise SqlmapSyntaxException(errMsg)

    if conf.noCast and conf.hexConvert:
        errMsg = "switch '--no-cast' is incompatible with switch '--hex'"
        raise SqlmapSyntaxException(errMsg)

    if conf.crawlDepth:
        try:
            xrange(conf.crawlDepth)
        except OverflowError as ex:
            errMsg = "invalid value used for option '--crawl' ('%s')" % getSafeExString(ex)
            raise SqlmapSyntaxException(errMsg)

    if conf.dumpAll and conf.search:
        errMsg = "switch '--dump-all' is incompatible with switch '--search'"
        raise SqlmapSyntaxException(errMsg)

    if conf.string and conf.notString:
        errMsg = "option '--string' is incompatible with switch '--not-string'"
        raise SqlmapSyntaxException(errMsg)

    if conf.regexp and conf.nullConnection:
        errMsg = "option '--regexp' is incompatible with switch '--null-connection'"
        raise SqlmapSyntaxException(errMsg)

    if conf.regexp:
        try:
            re.compile(conf.regexp)
        except Exception as ex:
            errMsg = "invalid regular expression '%s' ('%s')" % (conf.regexp, getSafeExString(ex))
            raise SqlmapSyntaxException(errMsg)

    if conf.paramExclude:
        if re.search(r"\A\w+,", conf.paramExclude):
            conf.paramExclude = r"\A(%s)\Z" % ('|'.join(re.escape(_).strip() for _ in conf.paramExclude.split(',')))

        try:
            re.compile(conf.paramExclude)
        except Exception as ex:
            errMsg = "invalid regular expression '%s' ('%s')" % (conf.paramExclude, getSafeExString(ex))
            raise SqlmapSyntaxException(errMsg)
    if conf.retryOn:
        try:
            re.compile(conf.retryOn)
        except Exception as ex:
            errMsg = "invalid regular expression '%s' ('%s')" % (conf.retryOn, getSafeExString(ex))
            raise SqlmapSyntaxException(errMsg)

        if conf.retries == defaults.retries:
            conf.retries = 5 * conf.retries

            warnMsg = "increasing default value for "
            warnMsg += "option '--retries' to %d because " % conf.retries
            warnMsg += "option '--retry-on' was provided"
            logger.warning(warnMsg)

    if conf.cookieDel and len(conf.cookieDel) != 1:
        errMsg = "option '--cookie-del' should contain a single character (e.g. ';')"
        raise SqlmapSyntaxException(errMsg)

    if conf.crawlExclude:
        try:
            re.compile(conf.crawlExclude)
        except Exception as ex:
            errMsg = "invalid regular expression '%s' ('%s')" % (conf.crawlExclude, getSafeExString(ex))
            raise SqlmapSyntaxException(errMsg)

    if conf.scope:
        try:
            re.compile(conf.scope)
        except Exception as ex:
            errMsg = "invalid regular expression '%s' ('%s')" % (conf.scope, getSafeExString(ex))
            raise SqlmapSyntaxException(errMsg)

    if conf.dumpTable and conf.dumpAll:
        errMsg = "switch '--dump' is incompatible with switch '--dump-all'"
        raise SqlmapSyntaxException(errMsg)

    if conf.predictOutput and (conf.threads > 1 or conf.optimize):
        errMsg = "switch '--predict-output' is incompatible with option '--threads' and switch '-o'"
        raise SqlmapSyntaxException(errMsg)

    if conf.threads > MAX_NUMBER_OF_THREADS and not conf.get("skipThreadCheck"):
        errMsg = "maximum number of used threads is %d (to avoid potential connection issues)" % MAX_NUMBER_OF_THREADS
        raise SqlmapSyntaxException(errMsg)

    if conf.forms and not any((conf.url, conf.googleDork, conf.bulkFile)):
        errMsg = "switch '--forms' requires usage of option '-u' ('--url'), '-g' or '-m'"
        raise SqlmapSyntaxException(errMsg)

    if conf.crawlExclude and not conf.crawlDepth:
        errMsg = "option '--crawl-exclude' requires usage of switch '--crawl'"
        raise SqlmapSyntaxException(errMsg)

    if conf.safePost and not conf.safeUrl:
        errMsg = "option '--safe-post' requires usage of option '--safe-url'"
        raise SqlmapSyntaxException(errMsg)

    if conf.safeFreq and not any((conf.safeUrl, conf.safeReqFile)):
        errMsg = "option '--safe-freq' requires usage of option '--safe-url' or '--safe-req'"
        raise SqlmapSyntaxException(errMsg)

    if conf.safeReqFile and any((conf.safeUrl, conf.safePost)):
        errMsg = "option '--safe-req' is incompatible with option '--safe-url' and option '--safe-post'"
        raise SqlmapSyntaxException(errMsg)

    if conf.csrfUrl and not conf.csrfToken:
        errMsg = "option '--csrf-url' requires usage of option '--csrf-token'"
        raise SqlmapSyntaxException(errMsg)

    if conf.csrfMethod and not conf.csrfToken:
        errMsg = "option '--csrf-method' requires usage of option '--csrf-token'"
        raise SqlmapSyntaxException(errMsg)

    if conf.csrfData and not conf.csrfToken:
        errMsg = "option '--csrf-data' requires usage of option '--csrf-token'"
        raise SqlmapSyntaxException(errMsg)

    if conf.csrfToken and conf.threads > 1:
        errMsg = "option '--csrf-token' is incompatible with option '--threads'"
        raise SqlmapSyntaxException(errMsg)

    if conf.requestFile and conf.url and conf.url != DUMMY_URL:
        errMsg = "option '-r' is incompatible with option '-u' ('--url')"
        raise SqlmapSyntaxException(errMsg)

    if conf.direct and conf.proxy:
        errMsg = "option '-d' is incompatible with option '--proxy'"
        raise SqlmapSyntaxException(errMsg)

    if conf.direct and conf.tor:
        errMsg = "option '-d' is incompatible with switch '--tor'"
        raise SqlmapSyntaxException(errMsg)
    if not conf.technique:
        errMsg = "option '--technique' can't be empty"
        raise SqlmapSyntaxException(errMsg)

    if conf.tor and conf.ignoreProxy:
        errMsg = "switch '--tor' is incompatible with switch '--ignore-proxy'"
        raise SqlmapSyntaxException(errMsg)

    if conf.tor and conf.proxy:
        errMsg = "switch '--tor' is incompatible with option '--proxy'"
        raise SqlmapSyntaxException(errMsg)

    if conf.proxy and conf.proxyFile:
        errMsg = "switch '--proxy' is incompatible with option '--proxy-file'"
        raise SqlmapSyntaxException(errMsg)

    if conf.proxyFreq and not conf.proxyFile:
        errMsg = "option '--proxy-freq' requires usage of option '--proxy-file'"
        raise SqlmapSyntaxException(errMsg)

    if conf.checkTor and not any((conf.tor, conf.proxy)):
        errMsg = "switch '--check-tor' requires usage of switch '--tor' (or option '--proxy' with HTTP proxy address of Tor service)"
        raise SqlmapSyntaxException(errMsg)

    if conf.torPort is not None and not (isinstance(conf.torPort, int) and conf.torPort >= 0 and conf.torPort <= 65535):
        errMsg = "value for option '--tor-port' must be in range [0, 65535]"
        raise SqlmapSyntaxException(errMsg)

    if conf.torType not in getPublicTypeMembers(PROXY_TYPE, True):
        errMsg = "option '--tor-type' accepts one of following values: %s" % ", ".join(getPublicTypeMembers(PROXY_TYPE, True))
        raise SqlmapSyntaxException(errMsg)

    if conf.dumpFormat not in getPublicTypeMembers(DUMP_FORMAT, True):
        errMsg = "option '--dump-format' accepts one of following values: %s" % ", ".join(getPublicTypeMembers(DUMP_FORMAT, True))
        raise SqlmapSyntaxException(errMsg)

    if conf.uValues and (not re.search(r"\A['\w\s.,()%s-]+\Z" % CUSTOM_INJECTION_MARK_CHAR, conf.uValues) or conf.uValues.count(CUSTOM_INJECTION_MARK_CHAR) != 1):
        errMsg = "option '--union-values' must contain valid UNION column values, along with the injection position "
        errMsg += "(e.g. 'NULL,1,%s,NULL')" % CUSTOM_INJECTION_MARK_CHAR
        raise SqlmapSyntaxException(errMsg)

    if conf.skip and conf.testParameter:
        if intersect(conf.skip, conf.testParameter):
            errMsg = "option '--skip' is incompatible with option '-p'"
            raise SqlmapSyntaxException(errMsg)

    if conf.rParam and conf.testParameter:
        if intersect(conf.rParam, conf.testParameter):
            errMsg = "option '--randomize' is incompatible with option '-p'"
            raise SqlmapSyntaxException(errMsg)

    if conf.mobile and conf.agent:
        errMsg = "switch '--mobile' is incompatible with option '--user-agent'"
        raise SqlmapSyntaxException(errMsg)

    if conf.proxy and conf.ignoreProxy:
        errMsg = "option '--proxy' is incompatible with switch '--ignore-proxy'"
        raise SqlmapSyntaxException(errMsg)

    if conf.alert and conf.alert.startswith('-'):
        errMsg = "value for option '--alert' must be valid operating system command(s)"
        raise SqlmapSyntaxException(errMsg)

    if conf.timeSec < 1:
        errMsg = "value for option '--time-sec' must be a positive integer"
        raise SqlmapSyntaxException(errMsg)

    if conf.hashFile and any((conf.direct, conf.url, conf.logFile, conf.bulkFile, conf.googleDork, conf.configFile, conf.requestFile, conf.updateAll, conf.smokeTest, conf.wizard, conf.dependencies, conf.purge, conf.listTampers)):
        errMsg = "option '--crack' should be used as a standalone"
        raise SqlmapSyntaxException(errMsg)

    if isinstance(conf.uCols, six.string_types):
        if not conf.uCols.isdigit() and ("-" not in conf.uCols or len(conf.uCols.split("-")) != 2):
            errMsg = "value for option '--union-cols' must be a range with hyphen "
            errMsg += "(e.g. 1-10) or integer value (e.g. 5)"
            raise SqlmapSyntaxException(errMsg)

    if conf.dbmsCred and ':' not in conf.dbmsCred:
        errMsg = "value for option '--dbms-cred' must be in "
        errMsg += "format <username>:<password> (e.g. \"root:pass\")"
        raise SqlmapSyntaxException(errMsg)

    if conf.encoding:
        _ = checkCharEncoding(conf.encoding, False)
        if _ is None:
            errMsg = "unknown encoding '%s'. Please visit " % conf.encoding
            errMsg += "'%s' to get the full list of " % CODECS_LIST_PAGE
            errMsg += "supported encodings"
            raise SqlmapSyntaxException(errMsg)
        else:
            conf.encoding = _

    if conf.fileWrite and not os.path.isfile(conf.fileWrite):
        errMsg = "file '%s' does not exist" % os.path.abspath(conf.fileWrite)
        raise SqlmapFilePathException(errMsg)

    if conf.loadCookies and not os.path.exists(conf.loadCookies):
        errMsg = "cookies file '%s' does not exist" % os.path.abspath(conf.loadCookies)
        raise SqlmapFilePathException(errMsg)

def initOptions(inputOptions=AttribDict(), overrideOptions=False):
    _setConfAttributes()
    _setKnowledgeBaseAttributes()
    _mergeOptions(inputOptions, overrideOptions)

def init():
    """
    Set attributes into both configuration and knowledge base singletons
    based upon command line and configuration file options.
    """

    _useWizardInterface()
    setVerbosity()
    _saveConfig()
    _setRequestFromFile()
    _cleanupOptions()
    _cleanupEnvironment()
    _purge()
    _checkDependencies()
    _createHomeDirectories()
    _createTemporaryDirectory()
    _basicOptionValidation()
    _setProxyList()
    _setTorProxySettings()
    _setDNSServer()
    _adjustLoggingFormatter()
    _setMultipleTargets()
    _listTamperingFunctions()
    _setTamperingFunctions()
    _setPreprocessFunctions()
    _setPostprocessFunctions()
    _setTrafficOutputFP()
    _setupHTTPCollector()
    _setHttpChunked()
    _checkWebSocket()

    parseTargetDirect()

    if any((conf.url, conf.logFile, conf.bulkFile, conf.requestFile, conf.googleDork, conf.stdinPipe)):
        _setHostname()
        _setHTTPTimeout()
        _setHTTPExtraHeaders()
        _setHTTPCookies()
        _setHTTPReferer()
        _setHTTPHost()
        _setHTTPUserAgent()
        _setHTTPAuthentication()
        _setHTTPHandlers()
        _setDNSCache()
        _setSocketPreConnect()
        _setSafeVisit()
        _doSearch()
        _setStdinPipeTargets()
        _setBulkMultipleTargets()
        _checkTor()
        _setCrawler()
        _findPageForms()
        _setDBMS()
        _setTechnique()

    _setThreads()
    _setOS()
    _setWriteFile()
    _setMetasploit()
    _setDBMSAuthentication()
    loadBoundaries()
    loadPayloads()
    _setPrefixSuffix()
    update()
    _loadQueries()