Mirror of https://github.com/sqlmapproject/sqlmap.git (synced 2024-11-22 09:36:35 +03:00)
Minor update
This commit is contained in:
parent c27820dc0e
commit 7356293007
lib/core/settings.py

@@ -17,7 +17,7 @@ from lib.core.enums import DBMS_DIRECTORY_NAME
 from lib.core.enums import OS
 
 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.3.3.62"
+VERSION = "1.3.3.63"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
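As an aside (not part of the commit), the TYPE/VERSION_STRING expressions above classify any version with more than two dots and a non-zero last component as a "dev" build, and strip a trailing ".0" for stable releases. A quick illustration, with a hypothetical version_info() helper that just packages the same logic:

    # Hypothetical helper reproducing the TYPE/VERSION_STRING logic above.
    def version_info(VERSION):
        TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
        shown = '.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION
        return "sqlmap/%s#%s" % (shown, TYPE)

    print(version_info("1.3.3.63"))  # sqlmap/1.3.3.63#dev (non-zero monthly commit -> dev)
    print(version_info("1.3.3.0"))   # sqlmap/1.3.3#stable (trailing .0 stripped -> stable)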
thirdparty/beautifulsoup/beautifulsoup.py (vendored): 36 changed lines
@@ -77,18 +77,17 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE, DAMMIT.
 """
 from __future__ import generators
+from __future__ import print_function
 
 __author__ = "Leonard Richardson (leonardr@segfault.org)"
 __version__ = "3.2.1"
 __copyright__ = "Copyright (c) 2004-2012 Leonard Richardson"
 __license__ = "New-style BSD"
 
-from sgmllib import SGMLParser, SGMLParseError
 import codecs
-import markupbase
 import types
 import re
 import sgmllib
 
 try:
     from htmlentitydefs import name2codepoint
 except ImportError:
@@ -98,6 +97,16 @@ try:
 except NameError:
     from sets import Set as set
 
+try:
+    import sgmllib
+except ImportError:
+    from lib.utils import sgmllib
+
+try:
+    import markupbase
+except ImportError:
+    import _markupbase as markupbase
+
 #These hacks make Beautiful Soup able to parse XML with namespaces
 sgmllib.tagfind = re.compile('[a-zA-Z][-_.:a-zA-Z0-9]*')
 markupbase._declname_match = re.compile(r'[a-zA-Z][-_.:a-zA-Z0-9]*\s*').match
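The guarded imports added here are the usual Python 2/3 compatibility idiom: try the Python 2 module name first, then fall back to the Python 3 name (or, for sgmllib, to sqlmap's bundled copy). A minimal standalone sketch of the same pattern, using the renamed StringIO module as the example:

    # Illustration only (not from the diff): try/except import fallback.
    try:
        from StringIO import StringIO  # Python 2 module name
    except ImportError:
        from io import StringIO       # Python 3 location

    buf = StringIO()
    buf.write(u"works on either interpreter")
    print(buf.getvalue())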
@@ -559,10 +568,11 @@ class Tag(PageElement):
         self.escapeUnrecognizedEntities = parser.escapeUnrecognizedEntities
 
         # Convert any HTML, XML, or numeric entities in the attribute values.
-        convert = lambda (k, val): (k,
+        # Reference: https://github.com/pkrumins/xgoogle/pull/16/commits/3dba1165c436b0d6e5bdbd09e53ca0dbf8a043f8
+        convert = lambda k_val: (k_val[0],
                                    re.sub("&(#\d+|#x[0-9a-fA-F]+|\w+);",
                                           self._convertEntities,
-                                          val))
+                                          k_val[1]))
         self.attrs = map(convert, self.attrs)
 
     def getString(self):
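The lambda rewrite is needed because Python 3 removed tuple parameter unpacking from function and lambda signatures (PEP 3113): lambda (k, val): ... is a syntax error there, while indexing an explicit single argument works on both interpreters. A standalone illustration (not from the diff):

    # Tuple parameter unpacking vs. indexing a single tuple argument.
    pairs = [("a", "x"), ("b", "y")]

    # Python 2 only -- syntax error on Python 3:
    #   convert = lambda (k, v): (k, v.upper())

    # Works on Python 2 and 3:
    convert = lambda kv: (kv[0], kv[1].upper())
    print(list(map(convert, pairs)))  # [('a', 'X'), ('b', 'Y')]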
@@ -1040,7 +1050,7 @@ def buildTagMap(default, *args):
 
 # Now, the parser classes.
 
-class BeautifulStoneSoup(Tag, SGMLParser):
+class BeautifulStoneSoup(Tag, sgmllib.SGMLParser):
 
     """This class contains the basic parser and search code. It defines
     a parser that knows nothing about tag behavior except for the
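Because the from sgmllib import SGMLParser, SGMLParseError line was dropped in the first hunk, every bare reference to those names must now be spelled with the module prefix; the remaining hunks in this file are that same mechanical substitution. A minimal sketch of the pattern (hypothetical MyParser class; sgmllib is standard-library on Python 2 only):

    import sgmllib  # module-level import instead of "from sgmllib import ..."

    class MyParser(sgmllib.SGMLParser):          # base class is module-qualified
        def parse(self, data):
            sgmllib.SGMLParser.feed(self, data)  # explicit base-class calls likewise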
@@ -1141,7 +1151,7 @@ class BeautifulStoneSoup(Tag, SGMLParser):
         self.escapeUnrecognizedEntities = False
 
         self.instanceSelfClosingTags = buildTagMap(None, selfClosingTags)
-        SGMLParser.__init__(self)
+        sgmllib.SGMLParser.__init__(self)
 
         if hasattr(markup, 'read'):        # It's a file-type object.
             markup = markup.read()
@@ -1190,7 +1200,7 @@ class BeautifulStoneSoup(Tag, SGMLParser):
                 del(self.markupMassage)
         self.reset()
 
-        SGMLParser.feed(self, markup)
+        sgmllib.SGMLParser.feed(self, markup)
         # Close out any unfinished strings and close all the open tags.
         self.endData()
         while self.currentTag.name != self.ROOT_TAG_NAME:
@@ -1203,7 +1213,7 @@ class BeautifulStoneSoup(Tag, SGMLParser):
 
         if methodName.startswith('start_') or methodName.startswith('end_') \
                or methodName.startswith('do_'):
-            return SGMLParser.__getattr__(self, methodName)
+            return sgmllib.SGMLParser.__getattr__(self, methodName)
         elif not methodName.startswith('__'):
             return Tag.__getattr__(self, methodName)
         else:
@@ -1218,7 +1228,7 @@ class BeautifulStoneSoup(Tag, SGMLParser):
     def reset(self):
         Tag.__init__(self, self, self.ROOT_TAG_NAME)
         self.hidden = 1
-        SGMLParser.reset(self)
+        sgmllib.SGMLParser.reset(self)
         self.currentData = []
         self.currentTag = None
         self.tagStack = []
@@ -1464,8 +1474,8 @@ class BeautifulStoneSoup(Tag, SGMLParser):
             self._toStringSubclass(data, CData)
         else:
             try:
-                j = SGMLParser.parse_declaration(self, i)
-            except SGMLParseError:
+                j = sgmllib.SGMLParser.parse_declaration(self, i)
+            except sgmllib.SGMLParseError:
                 toHandle = self.rawdata[i:]
                 self.handle_data(toHandle)
                 j = i + len(toHandle)
@@ -2018,4 +2028,4 @@ class UnicodeDammit:
 if __name__ == '__main__':
     import sys
     soup = BeautifulSoup(sys.stdin)
-    print soup.prettify()
+    print(soup.prettify())
thirdparty/keepalive/keepalive.py (vendored): 56 changed lines
@@ -103,7 +103,7 @@ EXTRA ATTRIBUTES AND METHODS
 
 """
 
-# $Id: keepalive.py,v 1.17 2006/12/08 00:14:16 mstenner Exp $
+from __future__ import print_function
 
 import urllib2
 import httplib
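Everything below in this file converts Python 2 print statements into print() calls; the __future__ import added above makes print a function on Python 2 as well, so the converted forms parse and behave the same on both interpreters. A minimal sketch (illustration only, not from the diff):

    from __future__ import print_function

    print("status = %s, reason = %s" % (200, "OK"))  # function-call form
    print()                                          # bare "print" statement becomes print()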
@@ -487,7 +487,7 @@ def error_handler(url):
     urllib2.install_opener(opener)
     pos = {0: 'off', 1: 'on'}
     for i in (0, 1):
-        print "  fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
+        print("  fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i))
         HANDLE_ERRORS = i
         try:
             fo = urllib2.urlopen(url)
@@ -496,13 +496,13 @@ def error_handler(url):
             try: status, reason = fo.status, fo.reason
             except AttributeError: status, reason = None, None
         except IOError as e:
-            print "  EXCEPTION: %s" % e
+            print("  EXCEPTION: %s" % e)
             raise
         else:
-            print "  status = %s, reason = %s" % (status, reason)
+            print("  status = %s, reason = %s" % (status, reason))
     HANDLE_ERRORS = orig
     hosts = keepalive_handler.open_connections()
-    print "open connections:", hosts
+    print("open connections:", hosts)
     keepalive_handler.close_all()
 
 def continuity(url):
@@ -516,7 +516,7 @@ def continuity(url):
     foo = fo.read()
     fo.close()
     m = md5.new(foo)
-    print format % ('normal urllib', m.hexdigest())
+    print(format % ('normal urllib', m.hexdigest()))
 
     # now install the keepalive handler and try again
     opener = urllib2.build_opener(HTTPHandler())
@@ -526,7 +526,7 @@ def continuity(url):
     foo = fo.read()
     fo.close()
     m = md5.new(foo)
-    print format % ('keepalive read', m.hexdigest())
+    print(format % ('keepalive read', m.hexdigest()))
 
     fo = urllib2.urlopen(url)
     foo = ''
@@ -536,25 +536,25 @@ def continuity(url):
         else: break
     fo.close()
     m = md5.new(foo)
-    print format % ('keepalive readline', m.hexdigest())
+    print(format % ('keepalive readline', m.hexdigest()))
 
 def comp(N, url):
-    print '  making %i connections to:\n  %s' % (N, url)
+    print('  making %i connections to:\n  %s' % (N, url))
 
     sys.stdout.write('  first using the normal urllib handlers')
     # first use normal opener
     opener = urllib2.build_opener()
     urllib2.install_opener(opener)
     t1 = fetch(N, url)
-    print '  TIME: %.3f s' % t1
+    print('  TIME: %.3f s' % t1)
 
     sys.stdout.write('  now using the keepalive handler ')
     # now install the keepalive handler and try again
     opener = urllib2.build_opener(HTTPHandler())
     urllib2.install_opener(opener)
     t2 = fetch(N, url)
-    print '  TIME: %.3f s' % t2
-    print '  improvement factor: %.2f' % (t1/t2, )
+    print('  TIME: %.3f s' % t2)
+    print('  improvement factor: %.2f' % (t1/t2, ))
 
 def fetch(N, url, delay=0):
     import time
@@ -572,7 +572,7 @@ def fetch(N, url, delay=0):
     for i in lens[1:]:
         j = j + 1
         if not i == lens[0]:
-            print "WARNING: inconsistent length on read %i: %i" % (j, i)
+            print("WARNING: inconsistent length on read %i: %i" % (j, i))
 
     return diff
 
@@ -580,16 +580,16 @@ def test_timeout(url):
     global DEBUG
     dbbackup = DEBUG
     class FakeLogger:
-        def debug(self, msg, *args): print msg % args
+        def debug(self, msg, *args): print(msg % args)
         info = warning = error = debug
     DEBUG = FakeLogger()
-    print "  fetching the file to establish a connection"
+    print("  fetching the file to establish a connection")
     fo = urllib2.urlopen(url)
     data1 = fo.read()
     fo.close()
 
     i = 20
-    print "  waiting %i seconds for the server to close the connection" % i
+    print("  waiting %i seconds for the server to close the connection" % i)
     while i > 0:
         sys.stdout.write('\r  %2i' % i)
         sys.stdout.flush()
@@ -597,33 +597,33 @@ def test_timeout(url):
         i -= 1
     sys.stderr.write('\r')
 
-    print "  fetching the file a second time"
+    print("  fetching the file a second time")
     fo = urllib2.urlopen(url)
     data2 = fo.read()
     fo.close()
 
     if data1 == data2:
-        print '  data are identical'
+        print('  data are identical')
     else:
-        print '  ERROR: DATA DIFFER'
+        print('  ERROR: DATA DIFFER')
 
     DEBUG = dbbackup
 
 
 def test(url, N=10):
-    print "checking error hander (do this on a non-200)"
+    print("checking error hander (do this on a non-200)")
     try: error_handler(url)
     except IOError as e:
-        print "exiting - exception will prevent further tests"
+        print("exiting - exception will prevent further tests")
         sys.exit()
-    print
-    print "performing continuity test (making sure stuff isn't corrupted)"
+    print()
+    print("performing continuity test (making sure stuff isn't corrupted)")
     continuity(url)
-    print
-    print "performing speed comparison"
+    print()
+    print("performing speed comparison")
     comp(N, url)
-    print
-    print "performing dropped-connection check"
+    print()
+    print("performing dropped-connection check")
     test_timeout(url)
 
 if __name__ == '__main__':
@@ -633,6 +633,6 @@ if __name__ == '__main__':
         N = int(sys.argv[1])
         url = sys.argv[2]
     except:
-        print "%s <integer> <url>" % sys.argv[0]
+        print("%s <integer> <url>" % sys.argv[0])
     else:
         test(url, N)