#!/usr/bin/env python

"""
Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

import codecs
import functools
import os
import re
import tempfile
import time
import urlparse

from lib.core.common import Backend
from lib.core.common import getUnicode
from lib.core.common import hashDBRetrieve
from lib.core.common import intersect
from lib.core.common import normalizeUnicode
from lib.core.common import openFile
from lib.core.common import paramToDict
from lib.core.common import readInput
from lib.core.common import resetCookieJar
from lib.core.common import urldecode
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.data import mergedOptions
from lib.core.data import paths
from lib.core.datatype import InjectionDict
from lib.core.dicts import DBMS_DICT
from lib.core.dump import dumper
from lib.core.enums import HASHDB_KEYS
from lib.core.enums import HTTP_HEADER
from lib.core.enums import HTTPMETHOD
from lib.core.enums import PLACE
from lib.core.enums import POST_HINT
from lib.core.exception import SqlmapFilePathException
from lib.core.exception import SqlmapGenericException
from lib.core.exception import SqlmapMissingPrivileges
from lib.core.exception import SqlmapSyntaxException
from lib.core.exception import SqlmapUserQuitException
from lib.core.option import _setDBMS
from lib.core.option import _setKnowledgeBaseAttributes
from lib.core.option import _setAuthCred
from lib.core.settings import ASTERISK_MARKER
from lib.core.settings import CSRF_TOKEN_PARAMETER_INFIXES
from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
from lib.core.settings import HOST_ALIASES
from lib.core.settings import ARRAY_LIKE_RECOGNITION_REGEX
from lib.core.settings import JSON_RECOGNITION_REGEX
from lib.core.settings import JSON_LIKE_RECOGNITION_REGEX
from lib.core.settings import MULTIPART_RECOGNITION_REGEX
from lib.core.settings import PROBLEMATIC_CUSTOM_INJECTION_PATTERNS
from lib.core.settings import REFERER_ALIASES
from lib.core.settings import RESTORE_MERGED_OPTIONS
from lib.core.settings import RESULTS_FILE_FORMAT
from lib.core.settings import SUPPORTED_DBMS
from lib.core.settings import UNENCODED_ORIGINAL_VALUE
from lib.core.settings import UNICODE_ENCODING
from lib.core.settings import UNKNOWN_DBMS_VERSION
from lib.core.settings import URI_INJECTABLE_REGEX
from lib.core.settings import USER_AGENT_ALIASES
from lib.core.settings import XML_RECOGNITION_REGEX
from lib.utils.hashdb import HashDB
from lib.core.xmldump import dumper as xmldumper
from thirdparty.odict.odict import OrderedDict

def _setRequestParams():
    """
    Check and set the testable parameters and perform checks on the 'data'
    option for the HTTP POST method.
    """

    if conf.direct:
        conf.parameters[None] = "direct connection"
        return

    testableParameters = False

    # Perform checks on GET parameters
    if conf.parameters.get(PLACE.GET):
        parameters = conf.parameters[PLACE.GET]
        paramDict = paramToDict(PLACE.GET, parameters)

        if paramDict:
            conf.paramDict[PLACE.GET] = paramDict
            testableParameters = True

    # Perform checks on POST parameters
    if conf.method == HTTPMETHOD.POST and conf.data is None:
        errMsg = "HTTP POST method depends on HTTP data value to be posted"
        raise SqlmapSyntaxException(errMsg)

    if conf.data is not None:
        conf.method = HTTPMETHOD.POST if not conf.method or conf.method == HTTPMETHOD.GET else conf.method
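
        # Substitution helper used via functools.partial() in the re.sub() calls
        # below: it manually expands "\g<N>" references in the given replacement
        # and applies it only when the matched parameter name is not filtered out
        # by the -p/--test-parameter option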
        def process(match, repl):
            retVal = match.group(0)

            if not (conf.testParameter and match.group("name") not in conf.testParameter):
                retVal = repl

                while True:
                    _ = re.search(r"\\g<([^>]+)>", retVal)

                    if _:
                        retVal = retVal.replace(_.group(0), match.group(int(_.group(1)) if _.group(1).isdigit() else _.group(1)))
                    else:
                        break

            return retVal

        if kb.processUserMarks is None and CUSTOM_INJECTION_MARK_CHAR in conf.data:
            message = "custom injection marking character ('%s') found in option " % CUSTOM_INJECTION_MARK_CHAR
            message += "'--data'. Do you want to process it? [Y/n/q] "
            test = readInput(message, default="Y")

            if test and test[0] in ("q", "Q"):
                raise SqlmapUserQuitException
            else:
                kb.processUserMarks = not test or test[0] not in ("n", "N")

                if kb.processUserMarks and "=%s" % CUSTOM_INJECTION_MARK_CHAR in conf.data:
                    warnMsg = "it seems that you've provided empty parameter value(s) "
                    warnMsg += "for testing. Please use only valid parameter values "
                    warnMsg += "so that sqlmap is able to run properly"
                    logger.warn(warnMsg)
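
        # If user-placed injection marks are not being processed, try to recognize
        # structured POST bodies (JSON, JSON-like, array-like, XML/SOAP, multipart)
        # and, upon user confirmation, place injection marks automatically after each
        # candidate value. For example, with the default mark character ('*'),
        # '{"id": 1, "name": "acme"}' becomes '{"id": 1*, "name": "acme*"}'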
        if not (kb.processUserMarks and CUSTOM_INJECTION_MARK_CHAR in conf.data):
            if re.search(JSON_RECOGNITION_REGEX, conf.data):
                message = "JSON data found in %s data. " % conf.method
                message += "Do you want to process it? [Y/n/q] "
                test = readInput(message, default="Y")

                if test and test[0] in ("q", "Q"):
                    raise SqlmapUserQuitException
                elif test[0] not in ("n", "N"):
                    conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
                    conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*"[^"]+)"', functools.partial(process, repl=r'\g<1>%s"' % CUSTOM_INJECTION_MARK_CHAR), conf.data)
                    conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*)(-?\d[\d\.]*\b)', functools.partial(process, repl=r'\g<0>%s' % CUSTOM_INJECTION_MARK_CHAR), conf.data)

                    match = re.search(r'(?P<name>[^"]+)"\s*:\s*\[([^\]]+)\]', conf.data)
                    if match and not (conf.testParameter and match.group("name") not in conf.testParameter):
                        _ = match.group(2)
                        _ = re.sub(r'("[^"]+)"', '\g<1>%s"' % CUSTOM_INJECTION_MARK_CHAR, _)
                        _ = re.sub(r'(\A|,|\s+)(-?\d[\d\.]*\b)', '\g<0>%s' % CUSTOM_INJECTION_MARK_CHAR, _)
                        conf.data = conf.data.replace(match.group(0), match.group(0).replace(match.group(2), _))

                    kb.postHint = POST_HINT.JSON

            elif re.search(JSON_LIKE_RECOGNITION_REGEX, conf.data):
                message = "JSON-like data found in %s data. " % conf.method
                message += "Do you want to process it? [Y/n/q] "
                test = readInput(message, default="Y")

                if test and test[0] in ("q", "Q"):
                    raise SqlmapUserQuitException
                elif test[0] not in ("n", "N"):
                    conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
                    conf.data = re.sub(r"('(?P<name>[^']+)'\s*:\s*'[^']+)'", functools.partial(process, repl=r"\g<1>%s'" % CUSTOM_INJECTION_MARK_CHAR), conf.data)
                    conf.data = re.sub(r"('(?P<name>[^']+)'\s*:\s*)(-?\d[\d\.]*\b)", functools.partial(process, repl=r"\g<0>%s" % CUSTOM_INJECTION_MARK_CHAR), conf.data)

                    kb.postHint = POST_HINT.JSON_LIKE

            elif re.search(ARRAY_LIKE_RECOGNITION_REGEX, conf.data):
                message = "Array-like data found in %s data. " % conf.method
                message += "Do you want to process it? [Y/n/q] "
                test = readInput(message, default="Y")

                if test and test[0] in ("q", "Q"):
                    raise SqlmapUserQuitException
                elif test[0] not in ("n", "N"):
                    conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
                    conf.data = re.sub(r"(=[^%s]+)" % DEFAULT_GET_POST_DELIMITER, r"\g<1>%s" % CUSTOM_INJECTION_MARK_CHAR, conf.data)

                    kb.postHint = POST_HINT.ARRAY_LIKE

            elif re.search(XML_RECOGNITION_REGEX, conf.data):
                message = "SOAP/XML data found in %s data. " % conf.method
                message += "Do you want to process it? [Y/n/q] "
                test = readInput(message, default="Y")

                if test and test[0] in ("q", "Q"):
                    raise SqlmapUserQuitException
                elif test[0] not in ("n", "N"):
                    conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
                    conf.data = re.sub(r"(<(?P<name>[^>]+)( [^<]*)?>)([^<]+)(</\2)", functools.partial(process, repl=r"\g<1>\g<4>%s\g<5>" % CUSTOM_INJECTION_MARK_CHAR), conf.data)

                    kb.postHint = POST_HINT.SOAP if "soap" in conf.data.lower() else POST_HINT.XML

            elif re.search(MULTIPART_RECOGNITION_REGEX, conf.data):
                message = "Multipart-like data found in %s data. " % conf.method
                message += "Do you want to process it? [Y/n/q] "
                test = readInput(message, default="Y")

                if test and test[0] in ("q", "Q"):
                    raise SqlmapUserQuitException
                elif test[0] not in ("n", "N"):
                    conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
                    conf.data = re.sub(r"(?si)((Content-Disposition[^\n]+?name\s*=\s*[\"'](?P<name>[^\n]+?)[\"']).+?)(((\r)?\n)+--)", functools.partial(process, repl=r"\g<1>%s\g<4>" % CUSTOM_INJECTION_MARK_CHAR), conf.data)

                    kb.postHint = POST_HINT.MULTIPART

        if not kb.postHint:
            if CUSTOM_INJECTION_MARK_CHAR in conf.data:  # later processed
                pass
            else:
                place = PLACE.POST

                conf.parameters[place] = conf.data
                paramDict = paramToDict(place, conf.data)

                if paramDict:
                    conf.paramDict[place] = paramDict
                    testableParameters = True
        else:
            if CUSTOM_INJECTION_MARK_CHAR not in conf.data:  # in case no usable parameter values have been found
                conf.parameters[PLACE.POST] = conf.data

    kb.processUserMarks = True if (kb.postHint and CUSTOM_INJECTION_MARK_CHAR in conf.data) else kb.processUserMarks
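
    # When neither GET nor POST parameters are available, offer testing of the
    # URI path itself by appending the custom injection mark to the target URL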
    if re.search(URI_INJECTABLE_REGEX, conf.url, re.I) and not any(place in conf.parameters for place in (PLACE.GET, PLACE.POST)) and not kb.postHint and not CUSTOM_INJECTION_MARK_CHAR in (conf.data or ""):
        warnMsg = "you've provided a target URL without any GET "
        warnMsg += "parameters (e.g. www.site.com/article.php?id=1) "
        warnMsg += "and without any POST parameters "
        warnMsg += "through the --data option"
        logger.warn(warnMsg)

        message = "do you want to try URI injections "
        message += "in the target URL itself? [Y/n/q] "
        test = readInput(message, default="Y")

        if not test or test[0] not in ("n", "N"):
            conf.url = "%s%s" % (conf.url, CUSTOM_INJECTION_MARK_CHAR)
            kb.processUserMarks = True
        elif test[0] in ("q", "Q"):
            raise SqlmapUserQuitException
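
    # Walk through the places that support custom injection marks (URI, raw POST
    # body, HTTP headers) and either fall back to regular parameter parsing (when
    # marks are not processed) or register each marked spot as a testable entry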
    for place, value in ((PLACE.URI, conf.url), (PLACE.CUSTOM_POST, conf.data), (PLACE.CUSTOM_HEADER, str(conf.httpHeaders))):
        _ = re.sub(PROBLEMATIC_CUSTOM_INJECTION_PATTERNS, "", value or "") if place == PLACE.CUSTOM_HEADER else value or ""

        if CUSTOM_INJECTION_MARK_CHAR in _:
            if kb.processUserMarks is None:
                lut = {PLACE.URI: '-u', PLACE.CUSTOM_POST: '--data', PLACE.CUSTOM_HEADER: '--headers/--user-agent/--referer/--cookie'}
                message = "custom injection marking character ('%s') found in option " % CUSTOM_INJECTION_MARK_CHAR
                message += "'%s'. Do you want to process it? [Y/n/q] " % lut[place]
                test = readInput(message, default="Y")

                if test and test[0] in ("q", "Q"):
                    raise SqlmapUserQuitException
                else:
                    kb.processUserMarks = not test or test[0] not in ("n", "N")

                    if kb.processUserMarks and "=%s" % CUSTOM_INJECTION_MARK_CHAR in _:
                        warnMsg = "it seems that you've provided empty parameter value(s) "
                        warnMsg += "for testing. Please use only valid parameter values "
                        warnMsg += "so that sqlmap is able to run properly"
                        logger.warn(warnMsg)

            if not kb.processUserMarks:
                if place == PLACE.URI:
                    query = urlparse.urlsplit(value).query

                    if query:
                        parameters = conf.parameters[PLACE.GET] = query
                        paramDict = paramToDict(PLACE.GET, parameters)

                        if paramDict:
                            conf.url = conf.url.split('?')[0]
                            conf.paramDict[PLACE.GET] = paramDict
                            testableParameters = True
                elif place == PLACE.CUSTOM_POST:
                    conf.parameters[PLACE.POST] = conf.data
                    paramDict = paramToDict(PLACE.POST, conf.data)

                    if paramDict:
                        conf.paramDict[PLACE.POST] = paramDict
                        testableParameters = True

            else:
                conf.parameters[place] = value
                conf.paramDict[place] = OrderedDict()
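
                # Each occurrence of the injection mark becomes its own testable
                # entry in conf.paramDict, keyed per occurrence (e.g. "User-Agent #1*"
                # for headers, "#1*" or "JSON #1*" otherwise, assuming '*' as the
                # default mark character)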
                if place == PLACE.CUSTOM_HEADER:
                    for index in xrange(len(conf.httpHeaders)):
                        header, value = conf.httpHeaders[index]

                        if CUSTOM_INJECTION_MARK_CHAR in re.sub(PROBLEMATIC_CUSTOM_INJECTION_PATTERNS, "", value):
                            parts = value.split(CUSTOM_INJECTION_MARK_CHAR)

                            for i in xrange(len(parts) - 1):
                                conf.paramDict[place]["%s #%d%s" % (header, i + 1, CUSTOM_INJECTION_MARK_CHAR)] = "%s,%s" % (header, "".join("%s%s" % (parts[j], CUSTOM_INJECTION_MARK_CHAR if i == j else "") for j in xrange(len(parts))))

                            conf.httpHeaders[index] = (header, value.replace(CUSTOM_INJECTION_MARK_CHAR, ""))
                else:
                    parts = value.split(CUSTOM_INJECTION_MARK_CHAR)

                    for i in xrange(len(parts) - 1):
                        conf.paramDict[place]["%s#%d%s" % (("%s " % kb.postHint) if kb.postHint else "", i + 1, CUSTOM_INJECTION_MARK_CHAR)] = "".join("%s%s" % (parts[j], CUSTOM_INJECTION_MARK_CHAR if i == j else "") for j in xrange(len(parts)))

                if place == PLACE.URI and PLACE.GET in conf.paramDict:
                    del conf.paramDict[PLACE.GET]
                elif place == PLACE.CUSTOM_POST and PLACE.POST in conf.paramDict:
                    del conf.paramDict[PLACE.POST]

                testableParameters = True

    if kb.processUserMarks:
        for item in ("url", "data", "agent", "referer", "cookie"):
            if conf.get(item):
                conf[item] = conf[item].replace(CUSTOM_INJECTION_MARK_CHAR, "")

    # Perform checks on Cookie parameters
    if conf.cookie:
        conf.parameters[PLACE.COOKIE] = conf.cookie
        paramDict = paramToDict(PLACE.COOKIE, conf.cookie)

        if paramDict:
            conf.paramDict[PLACE.COOKIE] = paramDict
            testableParameters = True

    # Perform checks on header values
    if conf.httpHeaders:
        for httpHeader, headerValue in conf.httpHeaders:
            # URL encoding of the header values should be avoided
            # Reference: http://stackoverflow.com/questions/5085904/is-ok-to-urlencode-the-value-in-headerlocation-value

            httpHeader = httpHeader.title()

            if httpHeader == HTTP_HEADER.USER_AGENT:
                conf.parameters[PLACE.USER_AGENT] = urldecode(headerValue)

                condition = any((not conf.testParameter, intersect(conf.testParameter, USER_AGENT_ALIASES)))

                if condition:
                    conf.paramDict[PLACE.USER_AGENT] = {PLACE.USER_AGENT: headerValue}
                    testableParameters = True

            elif httpHeader == HTTP_HEADER.REFERER:
                conf.parameters[PLACE.REFERER] = urldecode(headerValue)

                condition = any((not conf.testParameter, intersect(conf.testParameter, REFERER_ALIASES)))

                if condition:
                    conf.paramDict[PLACE.REFERER] = {PLACE.REFERER: headerValue}
                    testableParameters = True

            elif httpHeader == HTTP_HEADER.HOST:
                conf.parameters[PLACE.HOST] = urldecode(headerValue)

                condition = any((not conf.testParameter, intersect(conf.testParameter, HOST_ALIASES)))

                if condition:
                    conf.paramDict[PLACE.HOST] = {PLACE.HOST: headerValue}
                    testableParameters = True

    if not conf.parameters:
        errMsg = "you did not provide any GET, POST or Cookie "
        errMsg += "parameter, nor a User-Agent, Referer or Host header value"
        raise SqlmapGenericException(errMsg)

    elif not testableParameters:
        errMsg = "none of the testable parameters you provided are present "
        errMsg += "within the given request data"
        raise SqlmapGenericException(errMsg)
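
    # Validate a user-supplied anti-CSRF token parameter (--csrf-token), or try to
    # detect a likely one among the provided parameters and offer to keep it updated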
    if conf.csrfToken:
        if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))) and not conf.csrfToken in set(_[0].lower() for _ in conf.httpHeaders) and not conf.csrfToken in conf.paramDict.get(PLACE.COOKIE, {}):
            errMsg = "anti-CSRF token parameter '%s' not " % conf.csrfToken
            errMsg += "found in provided GET, POST, Cookie or header values"
            raise SqlmapGenericException(errMsg)
    else:
        for place in (PLACE.GET, PLACE.POST, PLACE.COOKIE):
            for parameter in conf.paramDict.get(place, {}):
                if any(parameter.lower().count(_) for _ in CSRF_TOKEN_PARAMETER_INFIXES):
                    message = "%s parameter '%s' appears to hold anti-CSRF token. " % (place, parameter)
                    message += "Do you want sqlmap to automatically update it in further requests? [y/N] "
                    test = readInput(message, default="N")

                    if test and test[0] in ("y", "Y"):
                        conf.csrfToken = parameter

                    break

def _setHashDB():
    """
    Check and set the HashDB SQLite file for query resume functionality.
    """

    if not conf.hashDBFile:
        conf.hashDBFile = conf.sessionFile or os.path.join(conf.outputPath, "session.sqlite")

    if os.path.exists(conf.hashDBFile):
        if conf.flushSession:
            try:
                os.remove(conf.hashDBFile)
                logger.info("flushing session file")
            except OSError, msg:
                errMsg = "unable to flush the session file (%s)" % msg
                raise SqlmapFilePathException(errMsg)

    conf.hashDB = HashDB(conf.hashDBFile)

def _resumeHashDBValues():
    """
    Resume stored data values from HashDB
    """

    kb.absFilePaths = hashDBRetrieve(HASHDB_KEYS.KB_ABS_FILE_PATHS, True) or kb.absFilePaths
    kb.chars = hashDBRetrieve(HASHDB_KEYS.KB_CHARS, True) or kb.chars
    kb.dynamicMarkings = hashDBRetrieve(HASHDB_KEYS.KB_DYNAMIC_MARKINGS, True) or kb.dynamicMarkings
    kb.brute.tables = hashDBRetrieve(HASHDB_KEYS.KB_BRUTE_TABLES, True) or kb.brute.tables
    kb.brute.columns = hashDBRetrieve(HASHDB_KEYS.KB_BRUTE_COLUMNS, True) or kb.brute.columns
    kb.xpCmdshellAvailable = hashDBRetrieve(HASHDB_KEYS.KB_XP_CMDSHELL_AVAILABLE) or kb.xpCmdshellAvailable

    conf.tmpPath = conf.tmpPath or hashDBRetrieve(HASHDB_KEYS.CONF_TMP_PATH)
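
    # Resume previously found injection points, but only those that still match the
    # current request's parameters and, if --technique was given, the chosen techniques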
    for injection in hashDBRetrieve(HASHDB_KEYS.KB_INJECTIONS, True) or []:
        if isinstance(injection, InjectionDict) and injection.place in conf.paramDict and \
            injection.parameter in conf.paramDict[injection.place]:

            if not conf.tech or intersect(conf.tech, injection.data.keys()):
                if intersect(conf.tech, injection.data.keys()):
                    injection.data = dict(filter(lambda (key, item): key in conf.tech, injection.data.items()))

                if injection not in kb.injections:
                    kb.injections.append(injection)

    _resumeDBMS()
    _resumeOS()

def _resumeDBMS():
    """
    Resume stored DBMS information from HashDB
    """

    value = hashDBRetrieve(HASHDB_KEYS.DBMS)

    if not value:
        return
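
    # Stored value is either a plain DBMS name or a name followed by a version
    # (e.g. "mysql 5.0"), hence the optional "<name> <version>" parsing below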
    dbms = value.lower()
    dbmsVersion = [UNKNOWN_DBMS_VERSION]
    _ = "(%s)" % ("|".join([alias for alias in SUPPORTED_DBMS]))
    _ = re.search("%s ([\d\.]+)" % _, dbms, re.I)

    if _:
        dbms = _.group(1).lower()
        dbmsVersion = [_.group(2)]

    if conf.dbms:
        check = True

        for aliases, _, _, _ in DBMS_DICT.values():
            if conf.dbms.lower() in aliases and dbms not in aliases:
                check = False
                break

        if not check:
            message = "you provided '%s' as a back-end DBMS, " % conf.dbms
            message += "but information from a past scan of the target URL "
            message += "indicates that the back-end DBMS is '%s'. " % dbms
            message += "Do you really want to force the back-end "
            message += "DBMS value? [y/N] "
            test = readInput(message, default="N")

            if not test or test[0] in ("n", "N"):
                conf.dbms = None
                Backend.setDbms(dbms)
                Backend.setVersionList(dbmsVersion)
    else:
        infoMsg = "resuming back-end DBMS '%s' " % dbms
        logger.info(infoMsg)

        Backend.setDbms(dbms)
        Backend.setVersionList(dbmsVersion)

def _resumeOS():
    """
    Resume stored OS information from HashDB
    """

    value = hashDBRetrieve(HASHDB_KEYS.OS)

    if not value:
        return

    os = value

    if os and os != 'None':
        infoMsg = "resuming back-end DBMS operating system '%s' " % os
        logger.info(infoMsg)

        if conf.os and conf.os.lower() != os.lower():
            message = "you provided '%s' as back-end DBMS operating " % conf.os
            message += "system, but information from a past scan of the "
            message += "target URL indicates that the back-end DBMS "
            message += "operating system is %s. " % os
            message += "Do you really want to force the back-end DBMS "
            message += "OS value? [y/N] "
            test = readInput(message, default="N")

            if not test or test[0] in ("n", "N"):
                conf.os = os
        else:
            conf.os = os

        Backend.setOs(conf.os)

def _setResultsFile():
    """
    Create the results file for storing results when running in
    multiple-target mode.
    """

    if not conf.multipleTargets:
        return

    if not conf.resultsFP:
        conf.resultsFilename = os.path.join(paths.SQLMAP_OUTPUT_PATH, time.strftime(RESULTS_FILE_FORMAT).lower())
        conf.resultsFP = openFile(conf.resultsFilename, "w+", UNICODE_ENCODING, buffering=0)
        conf.resultsFP.writelines("Target URL,Place,Parameter,Techniques%s" % os.linesep)

        logger.info("using '%s' as the CSV results file in multiple targets mode" % conf.resultsFilename)

def _createFilesDir():
    """
    Create the file directory.
    """

    if not conf.rFile:
        return

    conf.filePath = paths.SQLMAP_FILES_PATH % conf.hostname

    if not os.path.isdir(conf.filePath):
        try:
            os.makedirs(conf.filePath, 0755)
        except OSError, ex:
            tempDir = tempfile.mkdtemp(prefix="sqlmapfiles")
            warnMsg = "unable to create files directory "
            warnMsg += "'%s' (%s). " % (conf.filePath, getUnicode(ex))
            warnMsg += "Using temporary directory '%s' instead" % tempDir
            logger.warn(warnMsg)

            conf.filePath = tempDir

def _createDumpDir():
    """
    Create the dump directory.
    """

    if not conf.dumpTable and not conf.dumpAll and not conf.search:
        return

    conf.dumpPath = paths.SQLMAP_DUMP_PATH % conf.hostname

    if not os.path.isdir(conf.dumpPath):
        try:
            os.makedirs(conf.dumpPath, 0755)
        except OSError, ex:
            tempDir = tempfile.mkdtemp(prefix="sqlmapdump")
            warnMsg = "unable to create dump directory "
            warnMsg += "'%s' (%s). " % (conf.dumpPath, getUnicode(ex))
            warnMsg += "Using temporary directory '%s' instead" % tempDir
            logger.warn(warnMsg)

            conf.dumpPath = tempDir

def _configureDumper():
    if hasattr(conf, 'xmlFile') and conf.xmlFile:
        conf.dumper = xmldumper
    else:
        conf.dumper = dumper

    conf.dumper.setOutputFile()

def _createTargetDirs():
    """
    Create the output directory.
    """

    if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH):
        try:
            if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH):
                os.makedirs(paths.SQLMAP_OUTPUT_PATH, 0755)

            warnMsg = "using '%s' as the output directory" % paths.SQLMAP_OUTPUT_PATH
            logger.warn(warnMsg)
        except (OSError, IOError), ex:
            try:
                tempDir = tempfile.mkdtemp(prefix="sqlmapoutput")
            except IOError, _:
                errMsg = "unable to write to the temporary directory ('%s'). " % _
                errMsg += "Please make sure that your disk is not full and "
                errMsg += "that you have sufficient write permissions to "
                errMsg += "create temporary files and/or directories"
                raise SqlmapGenericException(errMsg)

            warnMsg = "unable to create regular output directory "
            warnMsg += "'%s' (%s). " % (paths.SQLMAP_OUTPUT_PATH, getUnicode(ex))
            warnMsg += "Using temporary directory '%s' instead" % tempDir
            logger.warn(warnMsg)

            paths.SQLMAP_OUTPUT_PATH = tempDir

    conf.outputPath = os.path.join(getUnicode(paths.SQLMAP_OUTPUT_PATH), normalizeUnicode(getUnicode(conf.hostname)))

    if not os.path.isdir(conf.outputPath):
        try:
            os.makedirs(conf.outputPath, 0755)
        except (OSError, IOError), ex:
            try:
                tempDir = tempfile.mkdtemp(prefix="sqlmapoutput")
            except IOError, _:
                errMsg = "unable to write to the temporary directory ('%s'). " % _
                errMsg += "Please make sure that your disk is not full and "
                errMsg += "that you have sufficient write permissions to "
                errMsg += "create temporary files and/or directories"
                raise SqlmapGenericException(errMsg)

            warnMsg = "unable to create output directory "
            warnMsg += "'%s' (%s). " % (conf.outputPath, getUnicode(ex))
            warnMsg += "Using temporary directory '%s' instead" % tempDir
            logger.warn(warnMsg)

            conf.outputPath = tempDir

    try:
        with codecs.open(os.path.join(conf.outputPath, "target.txt"), "w+", UNICODE_ENCODING) as f:
            f.write(kb.originalUrls.get(conf.url) or conf.url or conf.hostname)
            f.write(" (%s)" % (HTTPMETHOD.POST if conf.data else HTTPMETHOD.GET))

            if conf.data:
                f.write("\n\n%s" % getUnicode(conf.data))
    except IOError, ex:
        if "denied" in getUnicode(ex):
            errMsg = "you don't have enough permissions "
        else:
            errMsg = "something went wrong while trying "

        errMsg += "to write to the output directory '%s' (%s)" % (paths.SQLMAP_OUTPUT_PATH, ex)

        raise SqlmapMissingPrivileges(errMsg)

    _createDumpDir()
    _createFilesDir()
    _configureDumper()

def _restoreMergedOptions():
    """
    Restore merged options (command line, configuration file and default values)
    that could have been changed during the testing of the previous target.
    """

    for option in RESTORE_MERGED_OPTIONS:
        conf[option] = mergedOptions[option]

def initTargetEnv():
    """
    Initialize target environment.
    """

    if conf.multipleTargets:
        if conf.hashDB:
            conf.hashDB.close()

        if conf.cj:
            resetCookieJar(conf.cj)

        conf.paramDict = {}
        conf.parameters = {}
        conf.hashDBFile = None

        _setKnowledgeBaseAttributes(False)
        _restoreMergedOptions()
        _setDBMS()
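
    # If the POST body is urlencoded (judging by the Content-Type header), work on
    # the decoded value while keeping the original one attached to it (through the
    # UNENCODED_ORIGINAL_VALUE attribute of a unicode subclass, so setattr() works)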
    if conf.data:
        class _(unicode):
            pass

        for key, value in conf.httpHeaders:
            if key.upper() == HTTP_HEADER.CONTENT_TYPE.upper():
                kb.postUrlEncode = "urlencoded" in value
                break

        if kb.postUrlEncode:
            original = conf.data
            conf.data = _(urldecode(conf.data))
            setattr(conf.data, UNENCODED_ORIGINAL_VALUE, original)
            kb.postSpaceToPlus = '+' in original
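
# Prepares the environment for the current target: output directories, request
# parameter parsing, HashDB session handling, resumed values and authentication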
def setupTargetEnv():
    _createTargetDirs()
    _setRequestParams()
    _setHashDB()
    _resumeHashDBValues()
    _setResultsFile()
    _setAuthCred()