mirror of
https://github.com/sqlmapproject/sqlmap.git
synced 2025-08-01 19:00:10 +03:00
Merge remote-tracking branch 'sqlmapproject/master'
This commit is contained in:
commit
a32ca93b52
|
@ -19,15 +19,18 @@ from optparse import OptionParser
|
|||
# Regex used for recognition of hex encoded characters
HEX_ENCODED_CHAR_REGEX = r"(?P<result>\\x[0-9A-Fa-f]{2})"

# Regex used for recognition of representation for hex encoded invalid unicode characters
INVALID_UNICODE_CHAR_REGEX = r"(?P<result>\\\?[0-9A-Fa-f]{2})"

# Raw chars that will be safe encoded to their slash (\) representations (e.g. newline to \n)
SAFE_ENCODE_SLASH_REPLACEMENTS = "\t\n\r\x0b\x0c"

# Characters that don't need to be safe encoded (printables, minus the
# backslash itself and the chars that get explicit slash replacements)
SAFE_CHARS = "".join(_ for _ in string.printable.replace('\\', '') if _ not in SAFE_ENCODE_SLASH_REPLACEMENTS)

# Prefix used for hex encoded values
HEX_ENCODED_PREFIX = r"\x"

# String used for temporary marking of hex encoded prefixes (to prevent double encoding)
HEX_ENCODED_PREFIX_MARKER = "__HEX_ENCODED_PREFIX__"

# String used for temporary marking of slash characters
SLASH_MARKER = "__SLASH__"
|
||||
|
||||
|
@ -45,6 +48,7 @@ def safecharencode(value):
|
|||
|
||||
if isinstance(value, basestring):
|
||||
if any(_ not in SAFE_CHARS for _ in value):
|
||||
retVal = retVal.replace(HEX_ENCODED_PREFIX, HEX_ENCODED_PREFIX_MARKER)
|
||||
retVal = retVal.replace('\\', SLASH_MARKER)
|
||||
|
||||
for char in SAFE_ENCODE_SLASH_REPLACEMENTS:
|
||||
|
@ -53,6 +57,7 @@ def safecharencode(value):
|
|||
retVal = reduce(lambda x, y: x + (y if (y in string.printable or isinstance(value, unicode) and ord(y) >= 160) else '\\x%02x' % ord(y)), retVal, (unicode if isinstance(value, unicode) else str)())
|
||||
|
||||
retVal = retVal.replace(SLASH_MARKER, "\\\\")
|
||||
retVal = retVal.replace(HEX_ENCODED_PREFIX_MARKER, HEX_ENCODED_PREFIX)
|
||||
elif isinstance(value, list):
|
||||
for i in xrange(len(value)):
|
||||
retVal[i] = safecharencode(value[i])
|
||||
|
@ -83,12 +88,6 @@ def safechardecode(value, binary=False):
|
|||
if binary:
|
||||
if isinstance(retVal, unicode):
|
||||
retVal = retVal.encode("utf8")
|
||||
while True:
|
||||
match = re.search(INVALID_UNICODE_CHAR_REGEX, retVal)
|
||||
if match:
|
||||
retVal = retVal.replace(match.group("result"), chr(ord(binascii.unhexlify(match.group("result").lstrip("\\?")))))
|
||||
else:
|
||||
break
|
||||
|
||||
elif isinstance(value, (list, tuple)):
|
||||
for i in xrange(len(value)):
|
||||
|
|
|
@ -1053,7 +1053,7 @@ class Agent(object):
|
|||
"""
|
||||
|
||||
_ = re.escape(PAYLOAD_DELIMITER)
|
||||
return re.sub("(?s)(%s.*?%s)" % (_, _), ("%s%s%s" % (PAYLOAD_DELIMITER, payload, PAYLOAD_DELIMITER)).replace("\\", r"\\"), value) if value else value
|
||||
return re.sub("(?s)(%s.*?%s)" % (_, _), ("%s%s%s" % (PAYLOAD_DELIMITER, getUnicode(payload), PAYLOAD_DELIMITER)).replace("\\", r"\\"), value) if value else value
|
||||
|
||||
def runAsDBMSUser(self, query):
|
||||
if conf.dbmsCred and "Ad Hoc Distributed Queries" not in query:
|
||||
|
|
|
@ -2946,7 +2946,14 @@ def decodeIntToUnicode(value):
|
|||
_ = "%x" % value
|
||||
if len(_) % 2 == 1:
|
||||
_ = "0%s" % _
|
||||
retVal = getUnicode(hexdecode(_), encoding="UTF-16" if Backend.isDbms(DBMS.MSSQL) else None)
|
||||
raw = hexdecode(_)
|
||||
|
||||
if Backend.isDbms(DBMS.MSSQL):
|
||||
retVal = getUnicode(raw, "UTF-16-BE")
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.ORACLE):
|
||||
retVal = unichr(value)
|
||||
else:
|
||||
retVal = getUnicode(raw, conf.charset)
|
||||
else:
|
||||
retVal = getUnicode(chr(value))
|
||||
except:
|
||||
|
|
|
@ -81,7 +81,7 @@ class HTTPMETHOD:
|
|||
POST = "POST"
|
||||
HEAD = "HEAD"
|
||||
PUT = "PUT"
|
||||
DELETE = "DETELE"
|
||||
DELETE = "DELETE"
|
||||
TRACE = "TRACE"
|
||||
OPTIONS = "OPTIONS"
|
||||
CONNECT = "CONNECT"
|
||||
|
|
|
@ -122,6 +122,7 @@ from lib.core.settings import NULL
|
|||
from lib.core.settings import PARAMETER_SPLITTING_REGEX
|
||||
from lib.core.settings import PROBLEMATIC_CUSTOM_INJECTION_PATTERNS
|
||||
from lib.core.settings import SITE
|
||||
from lib.core.settings import SOCKET_PRE_CONNECT_QUEUE_SIZE
|
||||
from lib.core.settings import SQLMAP_ENVIRONMENT_PREFIX
|
||||
from lib.core.settings import SUPPORTED_DBMS
|
||||
from lib.core.settings import SUPPORTED_OS
|
||||
|
@ -1014,10 +1015,50 @@ def _setDNSCache():
|
|||
kb.cache[args] = socket._getaddrinfo(*args, **kwargs)
|
||||
return kb.cache[args]
|
||||
|
||||
if not hasattr(socket, '_getaddrinfo'):
|
||||
if not hasattr(socket, "_getaddrinfo"):
|
||||
socket._getaddrinfo = socket.getaddrinfo
|
||||
socket.getaddrinfo = _getaddrinfo
|
||||
|
||||
def _setSocketPreConnect():
    """
    Makes a pre-connect version of socket.connect
    """

    def _worker():
        # Background thread: keep up to SOCKET_PRE_CONNECT_QUEUE_SIZE
        # already-connected sockets ready per (family, type, proto, address) key
        while kb.threadContinue:
            try:
                for key in socket._ready:
                    if len(socket._ready[key]) < SOCKET_PRE_CONNECT_QUEUE_SIZE:
                        family, type, proto, address = key
                        sock = socket.socket(family, type, proto)
                        sock._connect(address)
                        with kb.locks.socket:
                            socket._ready[key].append(sock._sock)
            except socket.error:
                # best-effort pre-connect; failures are retried on next pass
                pass
            finally:
                time.sleep(0.01)

    def connect(self, address):
        # Replacement for socket.socket.connect: reuse a pre-connected
        # socket from the pool when available, otherwise connect directly
        reused = False
        key = (self.family, self.type, self.proto, address)
        with kb.locks.socket:
            if key not in socket._ready:
                socket._ready[key] = []
            if socket._ready[key]:
                self._sock = socket._ready[key].pop(0)
                reused = True
        if not reused:
            self._connect(address)

    # Monkey-patch only once (original connect preserved as _connect)
    if not hasattr(socket.socket, "_connect"):
        socket._ready = {}
        socket.socket._connect = socket.socket.connect
        socket.socket.connect = connect

        thread = threading.Thread(target=_worker)
        thread.start()
|
||||
|
||||
def _setHTTPHandlers():
|
||||
"""
|
||||
Check and set the HTTP/SOCKS proxy for all HTTP requests.
|
||||
|
@ -1803,7 +1844,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
|
|||
kb.lastParserStatus = None
|
||||
|
||||
kb.locks = AttribDict()
|
||||
for _ in ("cache", "count", "index", "io", "limit", "log", "redirect", "request", "value"):
|
||||
for _ in ("cache", "count", "index", "io", "limit", "log", "socket", "redirect", "request", "value"):
|
||||
kb.locks[_] = threading.Lock()
|
||||
|
||||
kb.matchRatio = None
|
||||
|
@ -2517,6 +2558,7 @@ def init():
|
|||
_setHTTPAuthentication()
|
||||
_setHTTPHandlers()
|
||||
_setDNSCache()
|
||||
_setSocketPreConnect()
|
||||
_setSafeVisit()
|
||||
_doSearch()
|
||||
_setBulkMultipleTargets()
|
||||
|
|
|
@ -20,7 +20,7 @@ from lib.core.revision import getRevisionNumber
|
|||
# sqlmap version and site
|
||||
VERSION = "1.0-dev"
|
||||
REVISION = getRevisionNumber()
|
||||
VERSION_STRING = "sqlmap/%s%s" % (VERSION, "-%s" % REVISION if REVISION else "-nongit-%s" % time.strftime("%Y%m%d", time.gmtime(os.path.getctime(__file__))))
|
||||
VERSION_STRING = "sqlmap/%s%s" % (VERSION, "-%s" % REVISION if REVISION else "-nongit-%s%04x" % (time.strftime("%Y%m%d", time.gmtime(os.path.getmtime(__file__))), os.path.getsize(os.path.join(os.path.dirname(__file__), "common.py")) & 0xffff))
|
||||
DESCRIPTION = "automatic SQL injection and database takeover tool"
|
||||
SITE = "http://sqlmap.org"
|
||||
ISSUES_PAGE = "https://github.com/sqlmapproject/sqlmap/issues/new"
|
||||
|
@ -466,6 +466,9 @@ ROTATING_CHARS = ('\\', '|', '|', '/', '-')
|
|||
# Approximate chunk length (in bytes) used by BigArray objects (only last chunk and cached one are held in memory)
|
||||
BIGARRAY_CHUNK_SIZE = 1024 * 1024
|
||||
|
||||
# Maximum number of socket pre-connects
|
||||
SOCKET_PRE_CONNECT_QUEUE_SIZE = 3
|
||||
|
||||
# Only console display last n table rows
|
||||
TRIM_STDOUT_DUMP_SIZE = 256
|
||||
|
||||
|
@ -587,7 +590,7 @@ EVENTVALIDATION_REGEX = r'(?i)(?P<name>__EVENTVALIDATION[^"]*)[^>]+value="(?P<re
|
|||
LIMITED_ROWS_TEST_NUMBER = 15
|
||||
|
||||
# Format used for representing invalid unicode characters
|
||||
INVALID_UNICODE_CHAR_FORMAT = r"\?%02x"
|
||||
INVALID_UNICODE_CHAR_FORMAT = r"\x%02x"
|
||||
|
||||
# Regular expression for XML POST data
|
||||
XML_RECOGNITION_REGEX = r"(?s)\A\s*<[^>]+>(.+>)?\s*\Z"
|
||||
|
|
|
@ -7,7 +7,6 @@ See the file 'doc/COPYING' for copying permission
|
|||
|
||||
import atexit
|
||||
import os
|
||||
import rlcompleter
|
||||
|
||||
from lib.core import readlineng as readline
|
||||
from lib.core.data import logger
|
||||
|
@ -16,6 +15,29 @@ from lib.core.enums import AUTOCOMPLETE_TYPE
|
|||
from lib.core.enums import OS
|
||||
from lib.core.settings import MAX_HISTORY_LENGTH
|
||||
|
||||
try:
|
||||
import rlcompleter
|
||||
|
||||
class CompleterNG(rlcompleter.Completer):
    def global_matches(self, text):
        """
        Compute matches when text is a simple name.

        Return a list of all names currently defined in self.namespace
        that match (only the provided namespace is consulted, unlike the
        base rlcompleter.Completer implementation).
        """

        return [word for word in self.namespace if word.startswith(text)]
|
||||
except:
|
||||
readline._readline = None
|
||||
|
||||
def readlineAvailable():
|
||||
"""
|
||||
Check if the readline is available. By default
|
||||
|
@ -74,24 +96,6 @@ def loadHistory(completion=None):
|
|||
warnMsg = "there was a problem loading the history file '%s' (%s)" % (historyPath, msg)
|
||||
logger.warn(warnMsg)
|
||||
|
||||
class CompleterNG(rlcompleter.Completer):
|
||||
def global_matches(self, text):
|
||||
"""
|
||||
Compute matches when text is a simple name.
|
||||
Return a list of all names currently defined in self.namespace
|
||||
that match.
|
||||
"""
|
||||
|
||||
matches = []
|
||||
n = len(text)
|
||||
|
||||
for ns in (self.namespace,):
|
||||
for word in ns:
|
||||
if word[:n] == text:
|
||||
matches.append(word)
|
||||
|
||||
return matches
|
||||
|
||||
def autoCompletion(completion=None, os=None, commands=None):
|
||||
if not readlineAvailable():
|
||||
return
|
||||
|
|
|
@ -318,39 +318,46 @@ def _setRequestParams():
|
|||
|
||||
# Perform checks on header values
|
||||
if conf.httpHeaders:
|
||||
for httpHeader, headerValue in conf.httpHeaders:
|
||||
for httpHeader, headerValue in list(conf.httpHeaders):
|
||||
# Url encoding of the header values should be avoided
|
||||
# Reference: http://stackoverflow.com/questions/5085904/is-ok-to-urlencode-the-value-in-headerlocation-value
|
||||
|
||||
httpHeader = httpHeader.title()
|
||||
|
||||
if httpHeader == HTTP_HEADER.USER_AGENT:
|
||||
if httpHeader.title() == HTTP_HEADER.USER_AGENT:
|
||||
conf.parameters[PLACE.USER_AGENT] = urldecode(headerValue)
|
||||
|
||||
condition = any((not conf.testParameter, intersect(conf.testParameter, USER_AGENT_ALIASES)))
|
||||
condition = any((not conf.testParameter, intersect(conf.testParameter, USER_AGENT_ALIASES, True)))
|
||||
|
||||
if condition:
|
||||
conf.paramDict[PLACE.USER_AGENT] = {PLACE.USER_AGENT: headerValue}
|
||||
testableParameters = True
|
||||
|
||||
elif httpHeader == HTTP_HEADER.REFERER:
|
||||
elif httpHeader.title() == HTTP_HEADER.REFERER:
|
||||
conf.parameters[PLACE.REFERER] = urldecode(headerValue)
|
||||
|
||||
condition = any((not conf.testParameter, intersect(conf.testParameter, REFERER_ALIASES)))
|
||||
condition = any((not conf.testParameter, intersect(conf.testParameter, REFERER_ALIASES, True)))
|
||||
|
||||
if condition:
|
||||
conf.paramDict[PLACE.REFERER] = {PLACE.REFERER: headerValue}
|
||||
testableParameters = True
|
||||
|
||||
elif httpHeader == HTTP_HEADER.HOST:
|
||||
elif httpHeader.title() == HTTP_HEADER.HOST:
|
||||
conf.parameters[PLACE.HOST] = urldecode(headerValue)
|
||||
|
||||
condition = any((not conf.testParameter, intersect(conf.testParameter, HOST_ALIASES)))
|
||||
condition = any((not conf.testParameter, intersect(conf.testParameter, HOST_ALIASES, True)))
|
||||
|
||||
if condition:
|
||||
conf.paramDict[PLACE.HOST] = {PLACE.HOST: headerValue}
|
||||
testableParameters = True
|
||||
|
||||
else:
|
||||
condition = intersect(conf.testParameter, [httpHeader], True)
|
||||
|
||||
if condition:
|
||||
conf.parameters[PLACE.CUSTOM_HEADER] = str(conf.httpHeaders)
|
||||
conf.paramDict[PLACE.CUSTOM_HEADER] = {httpHeader: "%s,%s%s" % (httpHeader, headerValue, CUSTOM_INJECTION_MARK_CHAR)}
|
||||
conf.httpHeaders = [(header, value.replace(CUSTOM_INJECTION_MARK_CHAR, "")) for header, value in conf.httpHeaders]
|
||||
testableParameters = True
|
||||
|
||||
if not conf.parameters:
|
||||
errMsg = "you did not provide any GET, POST and Cookie "
|
||||
errMsg += "parameter, neither an User-Agent, Referer or Host header value"
|
||||
|
|
|
@ -476,7 +476,7 @@ class Connect(object):
|
|||
status = getUnicode(conn.msg)
|
||||
|
||||
if extractRegexResult(META_REFRESH_REGEX, page) and not refreshing:
|
||||
url = extractRegexResult(META_REFRESH_REGEX, page)
|
||||
refresh = extractRegexResult(META_REFRESH_REGEX, page)
|
||||
|
||||
debugMsg = "got HTML meta refresh header"
|
||||
logger.debug(debugMsg)
|
||||
|
@ -491,13 +491,14 @@ class Connect(object):
|
|||
kb.alwaysRefresh = choice not in ("n", "N")
|
||||
|
||||
if kb.alwaysRefresh:
|
||||
if url.lower().startswith('http://'):
|
||||
kwargs['url'] = url
|
||||
if re.search(r"\Ahttps?://", refresh, re.I):
|
||||
url = refresh
|
||||
else:
|
||||
kwargs['url'] = conf.url[:conf.url.rfind('/') + 1] + url
|
||||
url = urlparse.urljoin(url, refresh)
|
||||
|
||||
threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
|
||||
kwargs['refreshing'] = True
|
||||
kwargs['url'] = url
|
||||
kwargs['get'] = None
|
||||
kwargs['post'] = None
|
||||
|
||||
|
@ -659,7 +660,7 @@ class Connect(object):
|
|||
if conn and getattr(conn, "redurl", None):
|
||||
_ = urlparse.urlsplit(conn.redurl)
|
||||
_ = ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else ""))
|
||||
requestMsg = re.sub("(\n[A-Z]+ ).+?( HTTP/\d)", "\g<1>%s\g<2>" % re.escape(getUnicode(_)), requestMsg, 1)
|
||||
requestMsg = re.sub("(\n[A-Z]+ ).+?( HTTP/\d)", "\g<1>%s\g<2>" % getUnicode(_).replace("\\", "\\\\"), requestMsg, 1)
|
||||
|
||||
if kb.resendPostOnRedirect is False:
|
||||
requestMsg = re.sub("(\[#\d+\]:\n)POST ", "\g<1>GET ", requestMsg)
|
||||
|
|
|
@ -38,9 +38,9 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
|
|||
|
||||
if headers:
|
||||
if "location" in headers:
|
||||
retVal = headers.getheaders("location")[0].split("?")[0]
|
||||
retVal = headers.getheaders("location")[0]
|
||||
elif "uri" in headers:
|
||||
retVal = headers.getheaders("uri")[0].split("?")[0]
|
||||
retVal = headers.getheaders("uri")[0]
|
||||
|
||||
return retVal
|
||||
|
||||
|
|
|
@ -8,6 +8,7 @@ See the file 'doc/COPYING' for copying permission
|
|||
import threading
|
||||
import time
|
||||
|
||||
from extra.safe2bin.safe2bin import safechardecode
|
||||
from extra.safe2bin.safe2bin import safecharencode
|
||||
from lib.core.agent import agent
|
||||
from lib.core.common import Backend
|
||||
|
@ -18,6 +19,7 @@ from lib.core.common import decodeIntToUnicode
|
|||
from lib.core.common import filterControlChars
|
||||
from lib.core.common import getCharset
|
||||
from lib.core.common import getCounter
|
||||
from lib.core.common import getUnicode
|
||||
from lib.core.common import goGoodSamaritan
|
||||
from lib.core.common import getPartRun
|
||||
from lib.core.common import hashDBRetrieve
|
||||
|
@ -589,6 +591,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
|
|||
raise KeyboardInterrupt
|
||||
|
||||
_ = finalValue or partialValue
|
||||
|
||||
return getCounter(kb.technique), safecharencode(_) if kb.safeCharEncode else _
|
||||
|
||||
def queryOutputLength(expression, payload):
|
||||
|
|
|
@ -37,6 +37,7 @@ from lib.core.dicts import FROM_DUMMY_TABLE
|
|||
from lib.core.enums import DBMS
|
||||
from lib.core.enums import HASHDB_KEYS
|
||||
from lib.core.enums import HTTP_HEADER
|
||||
from lib.core.exception import SqlmapDataException
|
||||
from lib.core.settings import CHECK_ZERO_COLUMNS_THRESHOLD
|
||||
from lib.core.settings import MIN_ERROR_CHUNK_LENGTH
|
||||
from lib.core.settings import MAX_ERROR_CHUNK_LENGTH
|
||||
|
@ -345,7 +346,14 @@ def errorUse(expression, dump=False):
|
|||
numThreads = min(conf.threads, (stopLimit - startLimit))
|
||||
|
||||
threadData = getCurrentThreadData()
|
||||
threadData.shared.limits = iter(xrange(startLimit, stopLimit))
|
||||
|
||||
try:
|
||||
threadData.shared.limits = iter(xrange(startLimit, stopLimit))
|
||||
except OverflowError:
|
||||
errMsg = "boundary limits (%d,%d) are too large. Please rerun " % (startLimit, stopLimit)
|
||||
errMsg += "with switch '--fresh-queries'"
|
||||
raise SqlmapDataException(errMsg)
|
||||
|
||||
threadData.shared.value = BigArray()
|
||||
threadData.shared.buffered = []
|
||||
threadData.shared.counter = 0
|
||||
|
|
|
@ -120,8 +120,10 @@ def _findUnionCharCount(comment, place, parameter, value, prefix, suffix, where=
|
|||
break
|
||||
|
||||
if not retVal:
|
||||
ratios.pop(ratios.index(min_))
|
||||
ratios.pop(ratios.index(max_))
|
||||
if min_ in ratios:
|
||||
ratios.pop(ratios.index(min_))
|
||||
if max_ in ratios:
|
||||
ratios.pop(ratios.index(max_))
|
||||
|
||||
minItem, maxItem = None, None
|
||||
|
||||
|
|
|
@ -43,6 +43,7 @@ from lib.core.data import queries
|
|||
from lib.core.dicts import FROM_DUMMY_TABLE
|
||||
from lib.core.enums import DBMS
|
||||
from lib.core.enums import PAYLOAD
|
||||
from lib.core.exception import SqlmapDataException
|
||||
from lib.core.exception import SqlmapSyntaxException
|
||||
from lib.core.settings import MAX_BUFFERED_PARTIAL_UNION_LENGTH
|
||||
from lib.core.settings import SQL_SCALAR_REGEX
|
||||
|
@ -231,7 +232,14 @@ def unionUse(expression, unpack=True, dump=False):
|
|||
return value
|
||||
|
||||
threadData = getCurrentThreadData()
|
||||
threadData.shared.limits = iter(xrange(startLimit, stopLimit))
|
||||
|
||||
try:
|
||||
threadData.shared.limits = iter(xrange(startLimit, stopLimit))
|
||||
except OverflowError:
|
||||
errMsg = "boundary limits (%d,%d) are too large. Please rerun " % (startLimit, stopLimit)
|
||||
errMsg += "with switch '--fresh-queries'"
|
||||
raise SqlmapDataException(errMsg)
|
||||
|
||||
numThreads = min(conf.threads, (stopLimit - startLimit))
|
||||
threadData.shared.value = BigArray()
|
||||
threadData.shared.buffered = []
|
||||
|
|
|
@ -11,6 +11,7 @@ import shutil
|
|||
import stat
|
||||
import string
|
||||
|
||||
from lib.core.common import getSafeExString
|
||||
from lib.core.data import logger
|
||||
|
||||
def purge(directory):
|
||||
|
@ -79,4 +80,4 @@ def purge(directory):
|
|||
try:
|
||||
shutil.rmtree(directory)
|
||||
except OSError, ex:
|
||||
logger.error("problem occurred while removing directory '%s' ('%s')" % (directory, unicode(ex)))
|
||||
logger.error("problem occurred while removing directory '%s' ('%s')" % (directory, getSafeExString(ex)))
|
||||
|
|
|
@ -60,7 +60,7 @@ class Takeover(GenericTakeover):
|
|||
else:
|
||||
self.__plugindir = "%s/lib/mysql/plugin" % self.__basedir
|
||||
|
||||
self.__plugindir = ntToPosixSlashes(normalizePath(self.__plugindir))
|
||||
self.__plugindir = ntToPosixSlashes(normalizePath(self.__plugindir))
|
||||
|
||||
self.udfRemoteFile = "%s/%s.%s" % (self.__plugindir, self.udfSharedLibName, self.udfSharedLibExt)
|
||||
|
||||
|
|
|
@ -17,6 +17,7 @@ from lib.core.data import conf
|
|||
from lib.core.data import logger
|
||||
from lib.core.dicts import SQL_STATEMENTS
|
||||
from lib.core.enums import AUTOCOMPLETE_TYPE
|
||||
from lib.core.exception import SqlmapNoneDataException
|
||||
from lib.core.settings import NULL
|
||||
from lib.core.settings import PARAMETER_SPLITTING_REGEX
|
||||
from lib.core.shell import autoCompletion
|
||||
|
@ -35,38 +36,42 @@ class Custom:
|
|||
sqlType = None
|
||||
query = query.rstrip(';')
|
||||
|
||||
for sqlTitle, sqlStatements in SQL_STATEMENTS.items():
|
||||
for sqlStatement in sqlStatements:
|
||||
if query.lower().startswith(sqlStatement):
|
||||
sqlType = sqlTitle
|
||||
break
|
||||
try:
|
||||
for sqlTitle, sqlStatements in SQL_STATEMENTS.items():
|
||||
for sqlStatement in sqlStatements:
|
||||
if query.lower().startswith(sqlStatement):
|
||||
sqlType = sqlTitle
|
||||
break
|
||||
|
||||
if not any(_ in query.upper() for _ in ("OPENROWSET", "INTO")) and (not sqlType or "SELECT" in sqlType):
|
||||
infoMsg = "fetching %s query output: '%s'" % (sqlType if sqlType is not None else "SQL", query)
|
||||
logger.info(infoMsg)
|
||||
if not any(_ in query.upper() for _ in ("OPENROWSET", "INTO")) and (not sqlType or "SELECT" in sqlType):
|
||||
infoMsg = "fetching %s query output: '%s'" % (sqlType if sqlType is not None else "SQL", query)
|
||||
logger.info(infoMsg)
|
||||
|
||||
output = inject.getValue(query, fromUser=True)
|
||||
output = inject.getValue(query, fromUser=True)
|
||||
|
||||
return output
|
||||
elif not isStackingAvailable() and not conf.direct:
|
||||
warnMsg = "execution of custom SQL queries is only "
|
||||
warnMsg += "available when stacked queries are supported"
|
||||
logger.warn(warnMsg)
|
||||
return output
|
||||
elif not isStackingAvailable() and not conf.direct:
|
||||
warnMsg = "execution of custom SQL queries is only "
|
||||
warnMsg += "available when stacked queries are supported"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
return None
|
||||
else:
|
||||
if sqlType:
|
||||
debugMsg = "executing %s query: '%s'" % (sqlType if sqlType is not None else "SQL", query)
|
||||
return None
|
||||
else:
|
||||
debugMsg = "executing unknown SQL type query: '%s'" % query
|
||||
logger.debug(debugMsg)
|
||||
if sqlType:
|
||||
debugMsg = "executing %s query: '%s'" % (sqlType if sqlType is not None else "SQL", query)
|
||||
else:
|
||||
debugMsg = "executing unknown SQL type query: '%s'" % query
|
||||
logger.debug(debugMsg)
|
||||
|
||||
inject.goStacked(query)
|
||||
inject.goStacked(query)
|
||||
|
||||
debugMsg = "done"
|
||||
logger.debug(debugMsg)
|
||||
debugMsg = "done"
|
||||
logger.debug(debugMsg)
|
||||
|
||||
output = NULL
|
||||
output = NULL
|
||||
|
||||
except SqlmapNoneDataException, ex:
|
||||
logger.warn(ex)
|
||||
|
||||
return output
|
||||
|
||||
|
|
|
@ -31,6 +31,7 @@ class Enumeration(Custom, Databases, Entries, Search, Users):
|
|||
kb.data.banner = None
|
||||
kb.data.hostname = ""
|
||||
kb.data.processChar = None
|
||||
kb.data.characterSet = None
|
||||
|
||||
Custom.__init__(self)
|
||||
Databases.__init__(self)
|
||||
|
|
|
@ -52,7 +52,12 @@ class Filesystem:
|
|||
|
||||
lengthQuery = "SELECT DATALENGTH(%s) FROM %s" % (self.tblField, self.fileTblName)
|
||||
|
||||
localFileSize = os.path.getsize(localFile)
|
||||
try:
|
||||
localFileSize = os.path.getsize(localFile)
|
||||
except OSError:
|
||||
warnMsg = "file '%s' is missing" % localFile
|
||||
logger.warn(warnMsg)
|
||||
localFileSize = 0
|
||||
|
||||
if fileRead and Backend.isDbms(DBMS.PGSQL):
|
||||
logger.info("length of read file '%s' cannot be checked on PostgreSQL" % remoteFile)
|
||||
|
|
|
@ -142,11 +142,18 @@ def main():
|
|||
errMsg = unhandledExceptionMessage()
|
||||
excMsg = traceback.format_exc()
|
||||
|
||||
if "No space left" in excMsg:
|
||||
if any(_ in excMsg for _ in ("No space left", "Disk quota exceeded")):
|
||||
errMsg = "no space left on output device"
|
||||
logger.error(errMsg)
|
||||
raise SystemExit
|
||||
|
||||
elif "bad marshal data (unknown type code)" in excMsg:
|
||||
match = re.search(r"\s*(.+)\s+ValueError", excMsg)
|
||||
errMsg = "one of your .pyc files are corrupted%s" % (" ('%s')" % match.group(1) if match else "")
|
||||
errMsg += ". Please delete .pyc files on your system to fix the problem"
|
||||
logger.error(errMsg)
|
||||
raise SystemExit
|
||||
|
||||
for match in re.finditer(r'File "(.+?)", line', excMsg):
|
||||
file_ = match.group(1)
|
||||
file_ = os.path.relpath(file_, os.path.dirname(__file__))
|
||||
|
|
24
waf/Newdefend.py
Normal file
24
waf/Newdefend.py
Normal file
|
@ -0,0 +1,24 @@
|
|||
#!/usr/bin/env python

"""
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

import re

from lib.core.enums import HTTP_HEADER
from lib.core.settings import WAF_ATTACK_VECTORS

__product__ = "Newdefend Web Application Firewall (Newdefend)"

def detect(get_page):
    # Probe with each attack vector until the Server header betrays
    # the Newdefend WAF (case-insensitive substring match)
    retval = False

    for vector in WAF_ATTACK_VECTORS:
        page, headers, code = get_page(get=vector)
        if re.search(r"newdefend", headers.get(HTTP_HEADER.SERVER, ""), re.I):
            retval = True
            break

    return retval
|
|
@ -7,6 +7,7 @@ See the file 'doc/COPYING' for copying permission
|
|||
|
||||
import re
|
||||
|
||||
from lib.core.enums import HTTP_HEADER
|
||||
from lib.core.settings import WAF_ATTACK_VECTORS
|
||||
|
||||
__product__ = "Yunjiasu Web Application Firewall (Baidu)"
|
||||
|
@ -17,6 +18,7 @@ def detect(get_page):
|
|||
for vector in WAF_ATTACK_VECTORS:
|
||||
page, headers, code = get_page(get=vector)
|
||||
retval = re.search(r"fhl", headers.get("X-Server", ""), re.I) is not None
|
||||
retval |= re.search(r"yunjiasu-nginx", headers.get(HTTP_HEADER.SERVER), re.I) is not None
|
||||
if retval:
|
||||
break
|
||||
|
||||
|
|
|
@ -16,7 +16,7 @@ def detect(get_page):
|
|||
|
||||
for vector in WAF_ATTACK_VECTORS:
|
||||
page, headers, code = get_page(get=vector)
|
||||
retval = code == 501 and re.search(r"Reference #[0-9A-Fa-f.]+", page, re.I) is not None
|
||||
retval = code in (400, 501) and re.search(r"Reference #[0-9A-Fa-f.]+", page, re.I) is not None
|
||||
if retval:
|
||||
break
|
||||
|
||||
|
|
Loading…
Reference in New Issue
Block a user