mirror of https://github.com/sqlmapproject/sqlmap.git
synced 2025-01-25 00:34:28 +03:00
First commit regarding #739

This commit is contained in:
parent 53578bcb7c
commit 961d2b24d1

lib/core/common.py

@@ -5,6 +5,7 @@ Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
+import binascii
 import codecs
 import contextlib
 import cookielib
@@ -101,7 +102,10 @@ from lib.core.settings import BOUNDED_INJECTION_MARKER
 from lib.core.settings import BRUTE_DOC_ROOT_PREFIXES
 from lib.core.settings import BRUTE_DOC_ROOT_SUFFIXES
 from lib.core.settings import BRUTE_DOC_ROOT_TARGET_MARK
+from lib.core.settings import BURP_REQUEST_REGEX
+from lib.core.settings import BURP_XML_HISTORY_REGEX
 from lib.core.settings import DBMS_DIRECTORY_DICT
+from lib.core.settings import CRAWL_EXCLUDE_EXTENSIONS
 from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
 from lib.core.settings import DEFAULT_COOKIE_DELIMITER
 from lib.core.settings import DEFAULT_GET_POST_DELIMITER
@@ -139,6 +143,7 @@ from lib.core.settings import PARTIAL_VALUE_MARKER
 from lib.core.settings import PAYLOAD_DELIMITER
 from lib.core.settings import PLATFORM
 from lib.core.settings import PRINTABLE_CHAR_REGEX
+from lib.core.settings import PROBLEMATIC_CUSTOM_INJECTION_PATTERNS
 from lib.core.settings import PUSH_VALUE_EXCEPTION_RETRY_COUNT
 from lib.core.settings import PYVERSION
 from lib.core.settings import REFERER_ALIASES
@@ -161,6 +166,7 @@ from lib.core.settings import URLENCODE_CHAR_LIMIT
 from lib.core.settings import URLENCODE_FAILSAFE_CHARS
 from lib.core.settings import USER_AGENT_ALIASES
 from lib.core.settings import VERSION_STRING
+from lib.core.settings import WEBSCARAB_SPLITTER
 from lib.core.threads import getCurrentThreadData
 from lib.utils.sqlalchemy import _sqlalchemy
 from thirdparty.clientform.clientform import ParseResponse
@@ -4468,6 +4474,195 @@ def pollProcess(process, suppress_errors=False):
 
             break
 
+def parseRequestFile(reqFile):
+    """
+    Parses WebScarab and Burp logs and adds results to the target URL list
+    """
+
+    def _parseWebScarabLog(content):
+        """
+        Parses WebScarab logs (POST method not supported)
+        """
+
+        reqResList = content.split(WEBSCARAB_SPLITTER)
+
+        for request in reqResList:
+            url = extractRegexResult(r"URL: (?P<result>.+?)\n", request, re.I)
+            method = extractRegexResult(r"METHOD: (?P<result>.+?)\n", request, re.I)
+            cookie = extractRegexResult(r"COOKIE: (?P<result>.+?)\n", request, re.I)
+
+            if not method or not url:
+                logger.debug("not a valid WebScarab log data")
+                continue
+
+            if method.upper() == HTTPMETHOD.POST:
+                warnMsg = "POST requests from WebScarab logs aren't supported "
+                warnMsg += "as their body content is stored in separate files. "
+                warnMsg += "Nevertheless you can use -r to load them individually."
+                logger.warning(warnMsg)
+                continue
+
+            if not(conf.scope and not re.search(conf.scope, url, re.I)):
+                yield (url, method, None, cookie, None)
+
+    def _parseBurpLog(content):
+        """
+        Parses Burp logs
+        """
+
+        if not re.search(BURP_REQUEST_REGEX, content, re.I | re.S):
+            if re.search(BURP_XML_HISTORY_REGEX, content, re.I | re.S):
+                reqResList = []
+                for match in re.finditer(BURP_XML_HISTORY_REGEX, content, re.I | re.S):
+                    port, request = match.groups()
+                    try:
+                        request = request.decode("base64")
+                    except binascii.Error:
+                        continue
+                    _ = re.search(r"%s:.+" % re.escape(HTTP_HEADER.HOST), request)
+                    if _:
+                        host = _.group(0).strip()
+                        if not re.search(r":\d+\Z", host):
+                            request = request.replace(host, "%s:%d" % (host, int(port)))
+                    reqResList.append(request)
+            else:
+                reqResList = [content]
+        else:
+            reqResList = re.finditer(BURP_REQUEST_REGEX, content, re.I | re.S)
+
+        for match in reqResList:
+            request = match if isinstance(match, basestring) else match.group(0)
+            request = re.sub(r"\A[^\w]+", "", request)
+
+            schemePort = re.search(r"(http[\w]*)\:\/\/.*?\:([\d]+).+?={10,}", request, re.I | re.S)
+
+            if schemePort:
+                scheme = schemePort.group(1)
+                port = schemePort.group(2)
+                request = re.sub(r"\n=+\Z", "", request.split(schemePort.group(0))[-1].lstrip())
+            else:
+                scheme, port = None, None
+
+            if not re.search(r"^[\n]*(%s).*?\sHTTP\/" % "|".join(getPublicTypeMembers(HTTPMETHOD, True)), request, re.I | re.M):
+                continue
+
+            if re.search(r"^[\n]*%s.*?\.(%s)\sHTTP\/" % (HTTPMETHOD.GET, "|".join(CRAWL_EXCLUDE_EXTENSIONS)), request, re.I | re.M):
+                continue
+
+            getPostReq = False
+            url = None
+            host = None
+            method = None
+            data = None
+            cookie = None
+            params = False
+            newline = None
+            lines = request.split('\n')
+            headers = []
+
+            for index in xrange(len(lines)):
+                line = lines[index]
+
+                if not line.strip() and index == len(lines) - 1:
+                    break
+
+                newline = "\r\n" if line.endswith('\r') else '\n'
+                line = line.strip('\r')
+                match = re.search(r"\A(%s) (.+) HTTP/[\d.]+\Z" % "|".join(getPublicTypeMembers(HTTPMETHOD, True)), line) if not method else None
+
+                if len(line.strip()) == 0 and method and method != HTTPMETHOD.GET and data is None:
+                    data = ""
+                    params = True
+
+                elif match:
+                    method = match.group(1)
+                    url = match.group(2)
+
+                    if any(_ in line for _ in ('?', '=', kb.customInjectionMark)):
+                        params = True
+
+                    getPostReq = True
+
+                # POST parameters
+                elif data is not None and params:
+                    data += "%s%s" % (line, newline)
+
+                # GET parameters
+                elif "?" in line and "=" in line and ": " not in line:
+                    params = True
+
+                # Headers
+                elif re.search(r"\A\S+:", line):
+                    key, value = line.split(":", 1)
+                    value = value.strip().replace("\r", "").replace("\n", "")
+
+                    # Cookie and Host headers
+                    if key.upper() == HTTP_HEADER.COOKIE.upper():
+                        cookie = value
+                    elif key.upper() == HTTP_HEADER.HOST.upper():
+                        if '://' in value:
+                            scheme, value = value.split('://')[:2]
+                        splitValue = value.split(":")
+                        host = splitValue[0]
+
+                        if len(splitValue) > 1:
+                            port = filterStringValue(splitValue[1], "[0-9]")
+
+                    # Avoid to add a static content length header to
+                    # headers and consider the following lines as
+                    # POSTed data
+                    if key.upper() == HTTP_HEADER.CONTENT_LENGTH.upper():
+                        params = True
+
+                    # Avoid proxy and connection type related headers
+                    elif key not in (HTTP_HEADER.PROXY_CONNECTION, HTTP_HEADER.CONNECTION):
+                        headers.append((getUnicode(key), getUnicode(value)))
+
+                    if kb.customInjectionMark in re.sub(PROBLEMATIC_CUSTOM_INJECTION_PATTERNS, "", value or ""):
+                        params = True
+
+            data = data.rstrip("\r\n") if data else data
+
+            if getPostReq and (params or cookie):
+                if not port and isinstance(scheme, basestring) and scheme.lower() == "https":
+                    port = "443"
+                elif not scheme and port == "443":
+                    scheme = "https"
+
+                if conf.forceSSL:
+                    scheme = "https"
+                    port = port or "443"
+
+                if not host:
+                    errMsg = "invalid format of a request file"
+                    raise SqlmapSyntaxException(errMsg)
+
+                if not url.startswith("http"):
+                    url = "%s://%s:%s%s" % (scheme or "http", host, port or "80", url)
+                    scheme = None
+                    port = None
+
+                if not(conf.scope and not re.search(conf.scope, url, re.I)):
+                    yield (url, conf.method or method, data, cookie, tuple(headers))
+
+    checkFile(reqFile)
+    try:
+        with openFile(reqFile, "rb") as f:
+            content = f.read()
+    except (IOError, OSError, MemoryError), ex:
+        errMsg = "something went wrong while trying "
+        errMsg += "to read the content of file '%s' ('%s')" % (reqFile, getSafeExString(ex))
+        raise SqlmapSystemException(errMsg)
+
+    if conf.scope:
+        logger.info("using regular expression '%s' for filtering targets" % conf.scope)
+
+    for target in _parseBurpLog(content):
+        yield target
+
+    for target in _parseWebScarabLog(content):
+        yield target
+
 def getSafeExString(ex, encoding=None):
     """
     Safe way how to get the proper exception represtation as a string
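
Note: parseRequestFile() is a generator, so unlike the removed _feedTargetsDict() it no longer touches kb.targets itself. A minimal consumption sketch, assuming an initialized sqlmap session and mirroring the updated call sites in lib/core/option.py further down in this commit:

    from lib.core.common import parseRequestFile
    from lib.core.data import conf, kb

    seen = set()

    # each yielded target is a (url, method, data, cookie, headers) tuple;
    # de-duplicating by URL is now the caller's job
    for target in parseRequestFile(conf.logFile):
        url = target[0]
        if url not in seen:
            kb.targets.add(target)
            seen.add(url)
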
lib/core/option.py

@@ -5,7 +5,6 @@ Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
-import binascii
 import cookielib
 import glob
 import inspect
@@ -47,6 +46,7 @@ from lib.core.common import getUnicode
 from lib.core.common import normalizePath
 from lib.core.common import ntToPosixSlashes
 from lib.core.common import openFile
+from lib.core.common import parseRequestFile
 from lib.core.common import parseTargetDirect
 from lib.core.common import parseTargetUrl
 from lib.core.common import paths
@@ -99,10 +99,7 @@ from lib.core.exception import SqlmapUnsupportedDBMSException
 from lib.core.exception import SqlmapUserQuitException
 from lib.core.log import FORMATTER
 from lib.core.optiondict import optDict
-from lib.core.settings import BURP_REQUEST_REGEX
-from lib.core.settings import BURP_XML_HISTORY_REGEX
 from lib.core.settings import CODECS_LIST_PAGE
-from lib.core.settings import CRAWL_EXCLUDE_EXTENSIONS
 from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
 from lib.core.settings import DBMS_ALIASES
 from lib.core.settings import DEFAULT_PAGE_ENCODING
@@ -119,7 +116,6 @@ from lib.core.settings import MAX_NUMBER_OF_THREADS
 from lib.core.settings import NULL
 from lib.core.settings import PARAMETER_SPLITTING_REGEX
 from lib.core.settings import PRECONNECT_CANDIDATE_TIMEOUT
-from lib.core.settings import PROBLEMATIC_CUSTOM_INJECTION_PATTERNS
 from lib.core.settings import SITE
 from lib.core.settings import SOCKET_PRE_CONNECT_QUEUE_SIZE
 from lib.core.settings import SQLMAP_ENVIRONMENT_PREFIX
@@ -131,7 +127,6 @@ from lib.core.settings import UNION_CHAR_REGEX
 from lib.core.settings import UNKNOWN_DBMS_VERSION
 from lib.core.settings import URI_INJECTABLE_REGEX
 from lib.core.settings import VERSION_STRING
-from lib.core.settings import WEBSCARAB_SPLITTER
 from lib.core.threads import getCurrentThreadData
 from lib.core.threads import setDaemon
 from lib.core.update import update
@@ -173,201 +168,6 @@ try:
 except NameError:
     WindowsError = None
 
-def _feedTargetsDict(reqFile, addedTargetUrls):
-    """
-    Parses web scarab and burp logs and adds results to the target URL list
-    """
-
-    def _parseWebScarabLog(content):
-        """
-        Parses web scarab logs (POST method not supported)
-        """
-
-        reqResList = content.split(WEBSCARAB_SPLITTER)
-
-        for request in reqResList:
-            url = extractRegexResult(r"URL: (?P<result>.+?)\n", request, re.I)
-            method = extractRegexResult(r"METHOD: (?P<result>.+?)\n", request, re.I)
-            cookie = extractRegexResult(r"COOKIE: (?P<result>.+?)\n", request, re.I)
-
-            if not method or not url:
-                logger.debug("not a valid WebScarab log data")
-                continue
-
-            if method.upper() == HTTPMETHOD.POST:
-                warnMsg = "POST requests from WebScarab logs aren't supported "
-                warnMsg += "as their body content is stored in separate files. "
-                warnMsg += "Nevertheless you can use -r to load them individually."
-                logger.warning(warnMsg)
-                continue
-
-            if not(conf.scope and not re.search(conf.scope, url, re.I)):
-                if not kb.targets or url not in addedTargetUrls:
-                    kb.targets.add((url, method, None, cookie, None))
-                    addedTargetUrls.add(url)
-
-    def _parseBurpLog(content):
-        """
-        Parses burp logs
-        """
-
-        if not re.search(BURP_REQUEST_REGEX, content, re.I | re.S):
-            if re.search(BURP_XML_HISTORY_REGEX, content, re.I | re.S):
-                reqResList = []
-                for match in re.finditer(BURP_XML_HISTORY_REGEX, content, re.I | re.S):
-                    port, request = match.groups()
-                    try:
-                        request = request.decode("base64")
-                    except binascii.Error:
-                        continue
-                    _ = re.search(r"%s:.+" % re.escape(HTTP_HEADER.HOST), request)
-                    if _:
-                        host = _.group(0).strip()
-                        if not re.search(r":\d+\Z", host):
-                            request = request.replace(host, "%s:%d" % (host, int(port)))
-                    reqResList.append(request)
-            else:
-                reqResList = [content]
-        else:
-            reqResList = re.finditer(BURP_REQUEST_REGEX, content, re.I | re.S)
-
-        for match in reqResList:
-            request = match if isinstance(match, basestring) else match.group(0)
-            request = re.sub(r"\A[^\w]+", "", request)
-
-            schemePort = re.search(r"(http[\w]*)\:\/\/.*?\:([\d]+).+?={10,}", request, re.I | re.S)
-
-            if schemePort:
-                scheme = schemePort.group(1)
-                port = schemePort.group(2)
-                request = re.sub(r"\n=+\Z", "", request.split(schemePort.group(0))[-1].lstrip())
-            else:
-                scheme, port = None, None
-
-            if not re.search(r"^[\n]*(%s).*?\sHTTP\/" % "|".join(getPublicTypeMembers(HTTPMETHOD, True)), request, re.I | re.M):
-                continue
-
-            if re.search(r"^[\n]*%s.*?\.(%s)\sHTTP\/" % (HTTPMETHOD.GET, "|".join(CRAWL_EXCLUDE_EXTENSIONS)), request, re.I | re.M):
-                continue
-
-            getPostReq = False
-            url = None
-            host = None
-            method = None
-            data = None
-            cookie = None
-            params = False
-            newline = None
-            lines = request.split('\n')
-            headers = []
-
-            for index in xrange(len(lines)):
-                line = lines[index]
-
-                if not line.strip() and index == len(lines) - 1:
-                    break
-
-                newline = "\r\n" if line.endswith('\r') else '\n'
-                line = line.strip('\r')
-                match = re.search(r"\A(%s) (.+) HTTP/[\d.]+\Z" % "|".join(getPublicTypeMembers(HTTPMETHOD, True)), line) if not method else None
-
-                if len(line.strip()) == 0 and method and method != HTTPMETHOD.GET and data is None:
-                    data = ""
-                    params = True
-
-                elif match:
-                    method = match.group(1)
-                    url = match.group(2)
-
-                    if any(_ in line for _ in ('?', '=', kb.customInjectionMark)):
-                        params = True
-
-                    getPostReq = True
-
-                # POST parameters
-                elif data is not None and params:
-                    data += "%s%s" % (line, newline)
-
-                # GET parameters
-                elif "?" in line and "=" in line and ": " not in line:
-                    params = True
-
-                # Headers
-                elif re.search(r"\A\S+:", line):
-                    key, value = line.split(":", 1)
-                    value = value.strip().replace("\r", "").replace("\n", "")
-
-                    # Cookie and Host headers
-                    if key.upper() == HTTP_HEADER.COOKIE.upper():
-                        cookie = value
-                    elif key.upper() == HTTP_HEADER.HOST.upper():
-                        if '://' in value:
-                            scheme, value = value.split('://')[:2]
-                        splitValue = value.split(":")
-                        host = splitValue[0]
-
-                        if len(splitValue) > 1:
-                            port = filterStringValue(splitValue[1], "[0-9]")
-
-                    # Avoid to add a static content length header to
-                    # headers and consider the following lines as
-                    # POSTed data
-                    if key.upper() == HTTP_HEADER.CONTENT_LENGTH.upper():
-                        params = True
-
-                    # Avoid proxy and connection type related headers
-                    elif key not in (HTTP_HEADER.PROXY_CONNECTION, HTTP_HEADER.CONNECTION):
-                        headers.append((getUnicode(key), getUnicode(value)))
-
-                    if kb.customInjectionMark in re.sub(PROBLEMATIC_CUSTOM_INJECTION_PATTERNS, "", value or ""):
-                        params = True
-
-            data = data.rstrip("\r\n") if data else data
-
-            if getPostReq and (params or cookie):
-                if not port and isinstance(scheme, basestring) and scheme.lower() == "https":
-                    port = "443"
-                elif not scheme and port == "443":
-                    scheme = "https"
-
-                if conf.forceSSL:
-                    scheme = "https"
-                    port = port or "443"
-
-                if not host:
-                    errMsg = "invalid format of a request file"
-                    raise SqlmapSyntaxException(errMsg)
-
-                if not url.startswith("http"):
-                    url = "%s://%s:%s%s" % (scheme or "http", host, port or "80", url)
-                    scheme = None
-                    port = None
-
-                if not(conf.scope and not re.search(conf.scope, url, re.I)):
-                    if not kb.targets or url not in addedTargetUrls:
-                        kb.targets.add((url, conf.method or method, data, cookie, tuple(headers)))
-                        addedTargetUrls.add(url)
-
-    checkFile(reqFile)
-    try:
-        with openFile(reqFile, "rb") as f:
-            content = f.read()
-    except (IOError, OSError, MemoryError), ex:
-        errMsg = "something went wrong while trying "
-        errMsg += "to read the content of file '%s' ('%s')" % (reqFile, getSafeExString(ex))
-        raise SqlmapSystemException(errMsg)
-
-    if conf.scope:
-        logger.info("using regular expression '%s' for filtering targets" % conf.scope)
-
-    _parseBurpLog(content)
-    _parseWebScarabLog(content)
-
-    if not addedTargetUrls:
-        errMsg = "unable to find usable request(s) "
-        errMsg += "in provided file ('%s')" % reqFile
-        raise SqlmapGenericException(errMsg)
-
 def _loadQueries():
     """
     Loads queries from 'xml/queries.xml' file.
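
One behavioral difference worth noting: the removed helper raised SqlmapGenericException when no usable request was found, while the new generator leaves that decision to its callers (the updated call sites below do not re-add it). A hypothetical caller-side equivalent, shown only as a sketch assuming an initialized sqlmap session:

    from lib.core.common import parseRequestFile
    from lib.core.data import conf
    from lib.core.exception import SqlmapGenericException

    targets = list(parseRequestFile(conf.requestFile))
    if not targets:
        errMsg = "unable to find usable request(s) "
        errMsg += "in provided file ('%s')" % conf.requestFile
        raise SqlmapGenericException(errMsg)
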
|
@@ -413,7 +213,7 @@ def _setMultipleTargets():
     """
 
     initialTargetsCount = len(kb.targets)
-    addedTargetUrls = set()
+    seen = set()
 
     if not conf.logFile:
         return
@@ -426,7 +226,11 @@ def _setMultipleTargets():
         raise SqlmapFilePathException(errMsg)
 
     if os.path.isfile(conf.logFile):
-        _feedTargetsDict(conf.logFile, addedTargetUrls)
+        for target in parseRequestFile(conf.logFile):
+            url = target[0]
+            if url not in seen:
+                kb.targets.add(target)
+                seen.add(url)
 
     elif os.path.isdir(conf.logFile):
         files = os.listdir(conf.logFile)
@@ -436,7 +240,11 @@ def _setMultipleTargets():
             if not re.search(r"([\d]+)\-request", reqFile):
                 continue
 
-            _feedTargetsDict(os.path.join(conf.logFile, reqFile), addedTargetUrls)
+            for target in parseRequestFile(os.path.join(conf.logFile, reqFile)):
+                url = target[0]
+                if url not in seen:
+                    kb.targets.add(target)
+                    seen.add(url)
 
     else:
         errMsg = "the specified list of targets is not a file "
@@ -480,7 +288,7 @@ def _setRequestFromFile():
     if not conf.requestFile:
         return
 
-    addedTargetUrls = set()
+    seen = set()
 
     conf.requestFile = safeExpandUser(conf.requestFile)
 
@@ -492,7 +300,11 @@ def _setRequestFromFile():
     infoMsg = "parsing HTTP request from '%s'" % conf.requestFile
     logger.info(infoMsg)
 
-    _feedTargetsDict(conf.requestFile, addedTargetUrls)
+    for target in parseRequestFile(conf.requestFile):
+        url = target[0]
+        if url not in seen:
+            kb.targets.add(target)
+            seen.add(url)
 
 def _setCrawler():
     if not conf.crawlDepth:
lib/core/settings.py

@@ -19,7 +19,7 @@ from lib.core.enums import DBMS_DIRECTORY_NAME
 from lib.core.enums import OS
 
 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.2.6.21"
+VERSION = "1.2.6.22"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
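
Evaluating the unchanged expressions with the bumped value confirms the resulting strings (a standalone sketch, not part of the commit):

    VERSION = "1.2.6.22"
    TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
    VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)

    print(TYPE)            # dev
    print(VERSION_STRING)  # sqlmap/1.2.6.22#dev
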
txt/checksum.md5

@@ -28,7 +28,7 @@ c7443613a0a2505b1faec931cee2a6ef lib/controller/handler.py
 1e5532ede194ac9c083891c2f02bca93 lib/controller/__init__.py
 0adf547455a76dc71e6a599e52da1ed9 lib/core/agent.py
 fd8f239e259afaf5f24bcf34a0ad187f lib/core/bigarray.py
-5fb7d0339b64859c6d2bac3341ce496c lib/core/common.py
+e57bda5d5e3011b4e2d8e0a21f546e77 lib/core/common.py
 0d082da16c388b3445e656e0760fb582 lib/core/convert.py
 9f87391b6a3395f7f50830b391264f27 lib/core/data.py
 72016ea5c994a711a262fd64572a0fcd lib/core/datatype.py
@@ -41,14 +41,14 @@ cada93357a7321655927fc9625b3bfec lib/core/exception.py
 1e5532ede194ac9c083891c2f02bca93 lib/core/__init__.py
 458a194764805cd8312c14ecd4be4d1e lib/core/log.py
 c9a56e58984420a5abb7a3f7aadc196d lib/core/optiondict.py
-332d4bb299a0edbc42100e3cfbead285 lib/core/option.py
+90a061be1a0658c2bdb197511b61d6fa lib/core/option.py
 c8c386d644d57c659d74542f5f57f632 lib/core/patch.py
 7cfd04e583cca782b843f6f6d973981a lib/core/profiling.py
 6f654e1715571eff68a0f8af3d62dcf8 lib/core/readlineng.py
 0c3eef46bdbf87e29a3f95f90240d192 lib/core/replication.py
 a7db43859b61569b601b97f187dd31c5 lib/core/revision.py
 fcb74fcc9577523524659ec49e2e964b lib/core/session.py
-070499b0c66d5dd7c95dd75ba7d5abe6 lib/core/settings.py
+dd2c8c71982173660be8b77f2af0fc3e lib/core/settings.py
 0dfc2ed40adf72e302291f6ecd4406f6 lib/core/shell.py
 a7edc9250d13af36ac0108f259859c19 lib/core/subprocessng.py
 36bd2dc292c0e10e39bd9c43b77fe1bc lib/core/target.py
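
The checksum changes are mechanical: txt/checksum.md5 stores an MD5 digest per tracked file, so every edited module gets a new entry. A minimal verification sketch (standalone, not sqlmap code; path handling is simplified):

    import hashlib

    def md5_of(path):
        # hash in chunks so large files need not fit in memory
        digest = hashlib.md5()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(8192), b""):
                digest.update(chunk)
        return digest.hexdigest()

    with open("txt/checksum.md5") as f:
        for line in f:
            expected, _, filepath = line.strip().partition(" ")
            if filepath and md5_of(filepath.strip()) != expected:
                print("checksum mismatch: %s" % filepath.strip())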