Mirror of https://github.com/sqlmapproject/sqlmap.git

Commit 6adee3792a (parent 977df7276d): removed all trailing spaces from blank lines
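The change is purely mechanical: every hunk below only strips trailing whitespace from otherwise-blank lines, so all of the code shown is unchanged context. For reference, a cleanup of this kind can be produced with a few lines of Python; the script below is an illustrative sketch, not part of the sqlmap repository:

import sys

def strip_blank_line_whitespace(path):
    with open(path, "r") as infile:
        lines = infile.readlines()
    with open(path, "w") as outfile:
        for line in lines:
            if line.strip() == "":
                outfile.write("\n")     # blank line: drop any trailing spaces or tabs
            else:
                outfile.write(line)     # leave non-blank lines untouched

if __name__ == "__main__":
    for path in sys.argv[1:]:
        strip_blank_line_whitespace(path)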
@@ -1279,7 +1279,7 @@ class Control:
    def get_labels(self):
        """Return all labels (Label instances) for this control.

        If the control was surrounded by a <label> tag, that will be the first
        label; all other labels, connected by 'for' and 'id', are in the order
        that appear in the HTML.

@@ -1622,12 +1622,12 @@ class Item:
    def get_labels(self):
        """Return all labels (Label instances) for this item.

        For items that represent radio buttons or checkboxes, if the item was
        surrounded by a <label> tag, that will be the first label; all other
        labels, connected by 'for' and 'id', are in the order that appear in
        the HTML.

        For items that represent select options, if the option had a label
        attribute, that will be the first label. If the option has contents
        (text within the option tags) and it is not the same as the label

@@ -1936,9 +1936,9 @@ class ListControl(Control):
    def toggle_single(self, by_label=None):
        """Deprecated: toggle the selection of the single item in this control.

        Raises ItemCountError if the control does not contain only one item.

        by_label argument is ignored, and included only for backwards
        compatibility.

@@ -1953,9 +1953,9 @@ class ListControl(Control):
    def set_single(self, selected, by_label=None):
        """Deprecated: set the selection of the single item in this control.

        Raises ItemCountError if the control does not contain only one item.

        by_label argument is ignored, and included only for backwards
        compatibility.

@@ -2067,7 +2067,7 @@ class ListControl(Control):
        # RFC 1866 if the _select_default attribute is set, and Netscape and IE
        # otherwise. RFC 1866 and HTML 4 are always violated insofar as you
        # can deselect all items in a RadioControl.

        for o in self.items:
            # set items' controls to self, now that we've merged
            o.__dict__["_control"] = self
@@ -56,7 +56,7 @@ def main():
    except (OptionError, TypeError), e:
        parser.error(e)

    if not os.path.isfile(args.inputFile):
        print 'ERROR: the provided input file \'%s\' is not a regular file' % args.inputFile
        sys.exit(1)

@@ -58,7 +58,7 @@ def convert(inputFile):
    script += "w\nq\n"

    return script

def main(inputFile, outputFile):
    if not os.path.isfile(inputFile):
        print "ERROR: the provided input file '%s' is not a regular file" % inputFile

@@ -73,7 +73,7 @@ def main(inputFile, outputFile):
        sys.stdout.close()
    else:
        print script

if __name__ == "__main__":
    usage = "%s -i <input file> [-o <output file>]" % sys.argv[0]
    parser = OptionParser(usage=usage, version="0.1")
@@ -79,7 +79,7 @@ def ratio(numerator, denominator):
class UndefinedEvent(Exception):
    """Raised when attempting to get an event which is undefined."""

    def __init__(self, event):
        Exception.__init__(self)
        self.event = event

@@ -111,7 +111,7 @@ class Event(object):
        assert val1 is not None
        assert val2 is not None
        return self._aggregator(val1, val2)

    def format(self, val):
        """Format an event value."""
        assert val is not None

@@ -145,13 +145,13 @@ class Object(object):
    def __contains__(self, event):
        return event in self.events

    def __getitem__(self, event):
        try:
            return self.events[event]
        except KeyError:
            raise UndefinedEvent(event)

    def __setitem__(self, event, value):
        if value is None:
            if event in self.events:

@@ -162,7 +162,7 @@ class Object(object):
class Call(Object):
    """A call between functions.

    There should be at most one call object for every pair of functions.
    """

@@ -186,7 +186,7 @@ class Function(Object):
        self.called = None
        self.weight = None
        self.cycle = None

    def add_call(self, call):
        if call.callee_id in self.calls:
            sys.stderr.write('warning: overwriting call from function %s to %s\n' % (str(self.id), str(call.callee_id)))

@@ -261,7 +261,7 @@ class Profile(Object):
            sys.stderr.write("Cycle:\n")
            for member in cycle.functions:
                sys.stderr.write("\tFunction %s\n" % member.name)

    def _tarjan(self, function, order, stack, orders, lowlinks, visited):
        """Tarjan's strongly connected components algorithm.
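Profile._tarjan, whose signature and docstring appear in the hunk above, applies Tarjan's strongly connected components algorithm so that mutually recursive functions can be collapsed into cycles. A generic, self-contained sketch of that algorithm follows (plain Python for illustration, not gprof2dot's actual implementation):

def tarjan_scc(graph):
    # graph: dict mapping each node to an iterable of successor nodes
    counter = [0]
    index = {}        # discovery order of each visited node
    lowlink = {}      # lowest discovery index reachable from the node's subtree
    stack = []
    on_stack = set()
    sccs = []

    def strongconnect(v):
        index[v] = lowlink[v] = counter[0]
        counter[0] += 1
        stack.append(v)
        on_stack.add(v)
        for w in graph.get(v, ()):
            if w not in index:
                strongconnect(w)
                lowlink[v] = min(lowlink[v], lowlink[w])
            elif w in on_stack:
                lowlink[v] = min(lowlink[v], index[w])
        if lowlink[v] == index[v]:
            # v is the root of a strongly connected component; pop it off the stack
            component = []
            while True:
                w = stack.pop()
                on_stack.discard(w)
                component.append(w)
                if w == v:
                    break
            sccs.append(component)

    for node in graph:
        if node not in index:
            strongconnect(node)
    return sccs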
@@ -365,7 +365,7 @@ class Profile(Object):
                    total += self._integrate_call(call, outevent, inevent)
                function[outevent] = total
            return function[outevent]

    def _integrate_call(self, call, outevent, inevent):
        assert outevent not in call
        assert call.ratio is not None

@@ -387,7 +387,7 @@ class Profile(Object):
                    subtotal += self._integrate_call(call, outevent, inevent)
                total += subtotal
            cycle[outevent] = total

        # Compute the time propagated to callers of this cycle
        callees = {}
        for function in self.functions.itervalues():

@@ -399,7 +399,7 @@ class Profile(Object):
                            callees[callee] += call.ratio
                        except KeyError:
                            callees[callee] = call.ratio

        for member in cycle.functions:
            member[outevent] = outevent.null()

@@ -521,7 +521,7 @@ class Profile(Object):
                call = function.calls[callee_id]
                if callee_id not in self.functions or call.weight is not None and call.weight < edge_thres:
                    del function.calls[callee_id]

    def dump(self):
        for function in self.functions.itervalues():
            sys.stderr.write('Function %s:\n' % (function.name,))

@@ -548,7 +548,7 @@ class Struct:
        if attrs is None:
            attrs = {}
        self.__dict__['_attrs'] = attrs

    def __getattr__(self, name):
        try:
            return self._attrs[name]

@@ -563,7 +563,7 @@ class Struct:
    def __repr__(self):
        return repr(self._attrs)


class ParseError(Exception):
    """Raised when parsing to signal mismatches."""

@@ -586,7 +586,7 @@ class Parser:
    def parse(self):
        raise NotImplementedError


class LineParser(Parser):
    """Base class for parsers that read line-based formats."""

@@ -652,21 +652,21 @@ class XmlTokenizer:
        self.index = 0
        self.final = False
        self.skip_ws = skip_ws

        self.character_pos = 0, 0
        self.character_data = ''

        self.parser = xml.parsers.expat.ParserCreate()
        self.parser.StartElementHandler = self.handle_element_start
        self.parser.EndElementHandler = self.handle_element_end
        self.parser.CharacterDataHandler = self.handle_character_data

    def handle_element_start(self, name, attributes):
        self.finish_character_data()
        line, column = self.pos()
        token = XmlToken(XML_ELEMENT_START, name, attributes, line, column)
        self.tokens.append(token)

    def handle_element_end(self, name):
        self.finish_character_data()
        line, column = self.pos()
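The XmlTokenizer code above follows the usual xml.parsers.expat pattern: create a parser, register start-element, end-element and character-data handlers, and collect tokens as the callbacks fire. A minimal sketch of that pattern, with illustrative names rather than gprof2dot's own classes:

import xml.parsers.expat

def tokenize(document):
    tokens = []
    parser = xml.parsers.expat.ParserCreate()
    # expat invokes these callbacks as it walks the document
    parser.StartElementHandler = lambda name, attrs: tokens.append(("start", name, attrs))
    parser.EndElementHandler = lambda name: tokens.append(("end", name, None))
    parser.CharacterDataHandler = lambda text: tokens.append(("text", text, None))
    parser.Parse(document, True)    # True marks this buffer as the final chunk
    return tokens

print(tokenize("<a x='1'><b>hi</b></a>"))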
@@ -677,7 +677,7 @@ class XmlTokenizer:
        if not self.character_data:
            self.character_pos = self.pos()
        self.character_data += data

    def finish_character_data(self):
        if self.character_data:
            if not self.skip_ws or not self.character_data.isspace():

@@ -685,7 +685,7 @@ class XmlTokenizer:
                token = XmlToken(XML_CHARACTER_DATA, self.character_data, None, line, column)
                self.tokens.append(token)
            self.character_data = ''

    def next(self):
        size = 16*1024
        while self.index >= len(self.tokens) and not self.final:

@@ -730,13 +730,13 @@ class XmlParser(Parser):
        Parser.__init__(self)
        self.tokenizer = XmlTokenizer(fp)
        self.consume()

    def consume(self):
        self.token = self.tokenizer.next()

    def match_element_start(self, name):
        return self.token.type == XML_ELEMENT_START and self.token.name_or_data == name

    def match_element_end(self, name):
        return self.token.type == XML_ELEMENT_END and self.token.name_or_data == name

@@ -750,7 +750,7 @@ class XmlParser(Parser):
        attrs = self.token.attrs
        self.consume()
        return attrs

    def element_end(self, name):
        while self.token.type == XML_CHARACTER_DATA:
            self.consume()

@@ -880,7 +880,7 @@ class GprofParser(Parser):
            line = lines.pop(0)
            if line.startswith('['):
                break

            # read function parent line
            mo = self._cg_parent_re.match(line)
            if not mo:

@@ -901,7 +901,7 @@ class GprofParser(Parser):
        while lines:
            line = lines.pop(0)

            # read function subroutine line
            mo = self._cg_child_re.match(line)
            if not mo:

@@ -911,7 +911,7 @@ class GprofParser(Parser):
            else:
                child = self.translate(mo)
                children.append(child)

        function.parents = parents
        function.children = children

@@ -936,7 +936,7 @@ class GprofParser(Parser):
                continue
            call = self.translate(mo)
            cycle.functions.append(call)

        self.cycles[cycle.cycle] = cycle

    def parse_cg_entry(self, lines):

@@ -965,14 +965,14 @@ class GprofParser(Parser):
            else:
                entry_lines.append(line)
            line = self.readline()

    def parse(self):
        self.parse_cg()
        self.fp.close()

        profile = Profile()
        profile[TIME] = 0.0

        cycles = {}
        for index in self.cycles.iterkeys():
            cycles[index] = Cycle()
@@ -987,11 +987,11 @@ class GprofParser(Parser):
                call = Call(entry.index)
                call[CALLS] = entry.called_self
                function.called += entry.called_self

            # populate the function calls
            for child in entry.children:
                call = Call(child.index)

                assert child.called is not None
                call[CALLS] = child.called

@@ -1034,7 +1034,7 @@ class GprofParser(Parser):
class CallgrindParser(LineParser):
    """Parser for valgrind's callgrind tool.

    See also:
    - http://valgrind.org/docs/manual/cl-format.html
    """

@@ -1171,7 +1171,7 @@ class CallgrindParser(LineParser):
        else:
            callee = self.get_callee()
        callee.called += calls

        try:
            call = function.calls[callee.id]
        except KeyError:

@@ -1319,7 +1319,7 @@ class CallgrindParser(LineParser):
class OprofileParser(LineParser):
    """Parser for oprofile callgraph output.

    See also:
    - http://oprofile.sourceforge.net/doc/opreport.html#opreport-callgraph
    """

@@ -1348,7 +1348,7 @@ class OprofileParser(LineParser):
            self.update_subentries_dict(callers_total, callers)
            function_total.samples += function.samples
            self.update_subentries_dict(callees_total, callees)

    def update_subentries_dict(self, totals, partials):
        for partial in partials.itervalues():
            try:

@@ -1357,7 +1357,7 @@ class OprofileParser(LineParser):
                totals[partial.id] = partial
            else:
                total.samples += partial.samples

    def parse(self):
        # read lookahead
        self.readline()

@@ -1369,7 +1369,7 @@ class OprofileParser(LineParser):
        profile = Profile()

        reverse_call_samples = {}

        # populate the profile
        profile[SAMPLES] = 0
        for _callers, _function, _callees in self.entries.itervalues():

@@ -1392,7 +1392,7 @@ class OprofileParser(LineParser):
                call = Call(_callee.id)
                call[SAMPLES2] = _callee.samples
                function.add_call(call)

        # compute derived data
        profile.validate()
        profile.find_cycles()

@@ -1478,7 +1478,7 @@ class OprofileParser(LineParser):
    def match_primary(self):
        line = self.lookahead()
        return not line[:1].isspace()

    def match_secondary(self):
        line = self.lookahead()
        return line[:1].isspace()

@@ -1546,7 +1546,7 @@ class SysprofParser(XmlParser):
    def build_profile(self, objects, nodes):
        profile = Profile()

        profile[SAMPLES] = 0
        for id, object in objects.iteritems():
            # Ignore fake objects (process names, modules, "Everything", "kernel", etc.)

@@ -1620,7 +1620,7 @@ class SharkParser(LineParser):
        else:
            function_total, callees_total = entry
            function_total.samples += function.samples

    def add_callee(self, function, callee):
        func, callees = self.entries[function.id]
        try:
@@ -1629,7 +1629,7 @@ class SharkParser(LineParser):
            callees[callee.id] = callee
        else:
            entry.samples += callee.samples

    def parse(self):
        self.readline()
        self.readline()

@@ -1667,9 +1667,9 @@ class SharkParser(LineParser):
            # if the callstack has had an entry, it's this functions caller
            if prefix > 0:
                self.add_callee(self.stack[prefix - 1], entry)

            self.add_entry(entry)

        profile = Profile()
        profile[SAMPLES] = 0
        for _function, _callees in self.entries.itervalues():

@@ -1685,7 +1685,7 @@ class SharkParser(LineParser):
                call = Call(_callee.id)
                call[SAMPLES] = _callee.samples
                function.add_call(call)

        # compute derived data
        profile.validate()
        profile.find_cycles()

@@ -1723,7 +1723,7 @@ class XPerfParser(Parser):
                self.parse_header(row)
            for row in it:
                self.parse_row(row)

        # compute derived data
        self.profile.validate()
        self.profile.find_cycles()

@@ -1751,7 +1751,7 @@ class XPerfParser(Parser):
                else:
                    break
            fields[name] = value

        process = fields['Process Name']
        symbol = fields['Module'] + '!' + fields['Function']
        weight = fields['Weight']

@@ -1817,7 +1817,7 @@ class SleepyParser(Parser):
        self.calls = {}

        self.profile = Profile()

    _symbol_re = re.compile(
        r'^(?P<id>\w+)' +
        r'\s+"(?P<module>[^"]*)"' +

@@ -1832,7 +1832,7 @@ class SleepyParser(Parser):
            mo = self._symbol_re.match(line)
            if mo:
                symbol_id, module, procname, sourcefile, sourceline = mo.groups()

                function_id = ':'.join([module, procname])

                try:

@@ -1858,7 +1858,7 @@ class SleepyParser(Parser):
            callee[SAMPLES] += samples
            self.profile[SAMPLES] += samples

            for caller in callstack[1:]:
                try:
                    call = caller.calls[callee.id]

@@ -2030,7 +2030,7 @@ class AQtimeParser(XmlParser):
        profile[TOTAL_TIME] = profile[TIME]
        profile.ratio(TOTAL_TIME_RATIO, TOTAL_TIME)
        return profile

    def build_function(self, fields):
        function = Function(self.build_id(fields), self.build_name(fields))
        function[TIME] = fields['Time']

@@ -2187,10 +2187,10 @@ class Theme:
    def color(self, weight):
        weight = min(max(weight, 0.0), 1.0)

        hmin, smin, lmin = self.mincolor
        hmax, smax, lmax = self.maxcolor

        if self.skew < 0:
            raise ValueError("Skew must be greater than 0")
        elif self.skew == 1.0:

@@ -2488,7 +2488,7 @@ class Main:
            self.theme = self.themes[self.options.theme]
        except KeyError:
            parser.error('invalid colormap \'%s\'' % self.options.theme)

        # set skew on the theme now that it has been picked.
        if self.options.theme_skew:
            self.theme.skew = self.options.theme_skew

@@ -2547,7 +2547,7 @@ class Main:
            parser.error('invalid format \'%s\'' % self.options.format)

        self.profile = parser.parse()

        if self.options.output is None:
            self.output = sys.stdout
        else:
@@ -23,7 +23,7 @@
  keepalive_handler = HTTPHandler()
  opener = urllib2.build_opener(keepalive_handler)
  urllib2.install_opener(opener)

  fo = urllib2.urlopen('http://www.python.org')

To remove the handler, simply re-run build_opener with no arguments, and

@@ -84,7 +84,7 @@ HANDLE_ERRORS = 1
class HTTPHandler(urllib2.HTTPHandler):
    def __init__(self):
        self._connections = {}

    def close_connection(self, host):
        """close connection to <host>
        host is the host:port spec, as in 'www.cnn.com:8080' as passed in.

@@ -105,16 +105,16 @@ class HTTPHandler(urllib2.HTTPHandler):
        for _, conn in self._connections.items():
            conn.close()
        self._connections = {}

    def _remove_connection(self, host, close=0):
        key = self._get_connection_key(host)
        if self._connections.has_key(key):
            if close: self._connections[key].close()
            del self._connections[key]

    def _get_connection_key(self, host):
        return (threading.currentThread(), host)

    def _start_connection(self, h, req):
        h.clearheaders()
        try:

@@ -160,7 +160,7 @@ class HTTPHandler(urllib2.HTTPHandler):
            else:
                try: r = h.getresponse()
                except httplib.ResponseNotReady, e: r = None

            if r is None or r.version == 9:
                # httplib falls back to assuming HTTP 0.9 if it gets a
                # bad header back. This is most likely to happen if

@@ -180,7 +180,7 @@ class HTTPHandler(urllib2.HTTPHandler):
        except socket.error, err:
            if h: h.close()
            raise urllib2.URLError(err)

        # if not a persistent connection, don't try to reuse it
        if r.will_close: self._remove_connection(host)

@@ -225,7 +225,7 @@ class HTTPResponse(httplib.HTTPResponse):
    # although read() never adds to the buffer.
    # Both readline and readlines have been stolen with almost no
    # modification from socket.py


    def __init__(self, sock, debuglevel=0, strict=0, method=None):
        if method: # the httplib in python 2.3 uses the method arg

@@ -244,7 +244,7 @@ class HTTPResponse(httplib.HTTPResponse):
    def close_connection(self):
        self.close()
        self._handler._remove_connection(self._host, close=1)

    def info(self):
        return self.msg

@@ -367,7 +367,7 @@ def error_handler(url):
def continuity(url):
    import md5
    format = '%25s: %s'

    # first fetch the file with the normal http handler
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)

@@ -414,7 +414,7 @@ def comp(N, url):
    t2 = fetch(N, url)
    print ' TIME: %.3f s' % t2
    print ' improvement factor: %.2f' % (t1/t2, )

def fetch(N, url, delay=0):
    lens = []
    starttime = time.time()

@@ -446,7 +446,7 @@ def test(url, N=10):
    print
    print "performing speed comparison"
    comp(N, url)

if __name__ == '__main__':
    import time
    import sys
@@ -42,7 +42,7 @@ def _patch_minidom():
    minidom.Text.writexml = _writexml_text
    minidom.Element.writexml = _writexml_element
    minidom.Node.toprettyxml = _toprettyxml_node

def _collapse(node):
    for child in node.childNodes:
        if child.nodeType == Node.TEXT_NODE and len(child.data.strip()) == 0:

@@ -52,17 +52,17 @@ def _collapse(node):

def _writexml_text(self, writer, indent="", addindent="", newl=""):
    minidom._write_data(writer, "%s"%(self.data.strip()))

def _writexml_element(self, writer, indent="", addindent="", newl=""):
    # indent = current indentation
    # addindent = indentation to add to higher levels
    # newl = newline string
    writer.write(indent+"<" + self.tagName)

    attrs = self._get_attributes()
    a_names = attrs.keys()
    a_names.sort()

    for a_name in a_names:
        writer.write(" %s=\"" % a_name)
        minidom._write_data(writer, attrs[a_name].value)

@@ -80,7 +80,7 @@ def _writexml_element(self, writer, indent="", addindent="", newl=""):
        writer.write("%s</%s>%s" % (indent,self.tagName,newl))
    else:
        writer.write("/>%s"%(newl))

def _toprettyxml_node(self, indent="\t", newl="\n", encoding = None):
    _collapse(self)
    # indent = the indentation string to prepend, per level
@@ -453,7 +453,7 @@ class XDotAttrParser:
        self.parser = parser
        self.buf = self.unescape(buf)
        self.pos = 0

        self.pen = Pen()
        self.shapes = []

@@ -549,7 +549,7 @@ class XDotAttrParser:
                b = b*s
                a = 1.0
                return r, g, b, a

        sys.stderr.write("unknown color '%s'\n" % c)
        return None

@@ -615,7 +615,7 @@ class XDotAttrParser:
                break

        return self.shapes

    def transform(self, x, y):
        return self.parser.transform(x, y)

@@ -677,7 +677,7 @@ class ParseError(Exception):
    def __str__(self):
        return ':'.join([str(part) for part in (self.filename, self.line, self.col, self.msg) if part != None])


class Scanner:
    """Stateless scanner."""

@@ -921,7 +921,7 @@ class DotLexer(Lexer):
            text = text.replace('\\\r\n', '')
            text = text.replace('\\\r', '')
            text = text.replace('\\\n', '')

            text = text.replace('\\r', '\r')
            text = text.replace('\\n', '\n')
            text = text.replace('\\t', '\t')

@@ -1062,7 +1062,7 @@ class XDotParser(DotParser):
    def __init__(self, xdotcode):
        lexer = DotLexer(buf = xdotcode)
        DotParser.__init__(self, lexer)

        self.nodes = []
        self.edges = []
        self.shapes = []

@@ -1091,7 +1091,7 @@ class XDotParser(DotParser):
        self.height = ymax - ymin

        self.top_graph = False

        for attr in ("_draw_", "_ldraw_", "_hdraw_", "_tdraw_", "_hldraw_", "_tldraw_"):
            if attr in attrs:
                parser = XDotAttrParser(self, attrs[attr])

@@ -1122,7 +1122,7 @@ class XDotParser(DotParser):
            pos = attrs['pos']
        except KeyError:
            return

        points = self.parse_edge_pos(pos)
        shapes = []
        for attr in ("_draw_", "_ldraw_", "_hdraw_", "_tdraw_", "_hldraw_", "_tldraw_"):
@@ -21,7 +21,7 @@ class MagicException(Exception): pass
class Magic:
    """
    Magic is a wrapper around the libmagic C library.

    """

    def __init__(self, mime=False, magic_file=None):

@@ -30,12 +30,12 @@ class Magic:

        mime - if True, mimetypes are returned instead of textual descriptions
        magic_file - use a mime database other than the system default

        """
        flags = MAGIC_NONE
        if mime:
            flags |= MAGIC_MIME

        self.cookie = magic_open(flags)

        magic_load(self.cookie, magic_file)

@@ -54,7 +54,7 @@ class Magic:

        if not os.path.exists(filename):
            raise IOError("File does not exist: " + filename)

        return magic_file(self.cookie, filename)

    def __del__(self):
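The Magic class above is a thin wrapper over libmagic's open/load/query cycle, exposed through module-level bindings such as magic_open, magic_load and magic_file. Assuming a matching magic_close binding also exists in the module, the underlying call sequence looks roughly like this (illustrative only, not the module's own code):

cookie = magic_open(MAGIC_MIME)            # allocate a libmagic cookie with the desired flags
magic_load(cookie, None)                   # None loads the system's default magic database
mime_type = magic_file(cookie, "/bin/ls")  # query one file; a MIME string because MAGIC_MIME was set
magic_close(cookie)                        # release the cookie when done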
@@ -47,11 +47,11 @@ class MultipartPostHandler(urllib2.BaseHandler):

    def http_request(self, request):
        data = request.get_data()

        if data is not None and type(data) != str:
            v_files = []
            v_vars = []

            try:
                for(key, value) in data.items():
                    if type(value) == file or hasattr(value, 'file'):

@@ -311,5 +311,5 @@ def start():

    if conf.loggedToOut:
        logger.info("Fetched data logged to text files under '%s'" % conf.outputPath)

    return True
@@ -313,7 +313,7 @@ class Agent:
            fieldsToCastStr = fieldsSelect.groups()[0]
        elif fieldsNoSelect:
            fieldsToCastStr = fieldsNoSelect

        if re.search("\A\w+\(.*\)", fieldsToCastStr, re.I): #function
            fieldsToCastList = [fieldsToCastStr]
        else:

@@ -531,7 +531,7 @@ class Agent:
        if kb.dbms in ( DBMS.MYSQL, DBMS.POSTGRESQL, DBMS.SQLITE ):
            limitStr = queries[kb.dbms].limit.query % (num, 1)
            limitedQuery += " %s" % limitStr

        elif kb.dbms == DBMS.FIREBIRD:
            limitStr = queries[kb.dbms].limit.query % (num+1, num+1)
            limitedQuery += " %s" % limitStr

@@ -1256,7 +1256,7 @@ def readCachedFileContent(filename, mode='rb'):
            xfile.close()

        kb.locks.cacheLock.release()

    return kb.cache.content[filename]

def readXmlFile(xmlFile):
@@ -39,7 +39,7 @@ def hexdecode(string):
        string = string[2:]

    return string.decode("hex")

def hexencode(string):
    return string.encode("hex")

@@ -611,7 +611,7 @@ def __setHTTPProxy():
            errMsg = "Proxy authentication credentials "
            errMsg += "value must be in format username:password"
            raise sqlmapSyntaxException, errMsg

        # Reference: http://stackoverflow.com/questions/34079/how-to-specify-an-authenticated-proxy-for-a-python-http-connection
        __proxyString = "%s@" % conf.pCred

@@ -89,4 +89,3 @@ class Replication:
    def __del__(self):
        self.cursor.close()
        self.connection.close()
@@ -180,13 +180,13 @@ def __setOutputResume():
                continue

            url, _, _, expression, value = line

            if not value:
                continue

            if url[0] == "[":
                url = url[1:]

            value = value.rstrip('\r\n') # Strips both chars independently

            if url not in ( conf.url, conf.hostname ):

@@ -197,9 +197,9 @@ def __setOutputResume():
                kb.resumedQueries[url][expression] = value
                __url_cache.add(url)
                __expression_cache[url] = set(expression)

            resumeConfKb(expression, url, value)

            if expression not in __expression_cache[url]:
                kb.resumedQueries[url][expression] = value
                __expression_cache[url].add(value)

@@ -116,7 +116,7 @@ def decodePage(page, contentEncoding, contentType):
        data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(page))

        page = data.read()

    #http://stackoverflow.com/questions/1020892/python-urllib2-read-to-unicode
    if contentType and (contentType.find('charset=') != -1):
        charset = checkCharEncoding(contentType.split('charset=')[-1])
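decodePage above inflates a gzip-compressed response body entirely in memory by wrapping it in a file-like object. A minimal equivalent of that one step, written in the same Python 2 style as the surrounding code (an illustrative sketch, not sqlmap's function):

import gzip
import StringIO

def gunzip_body(raw):
    # Wrap the compressed bytes in an in-memory file and let GzipFile inflate them
    return gzip.GzipFile(fileobj=StringIO.StringIO(raw)).read()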
@@ -69,7 +69,7 @@ class Abstraction(Web, UDF, xp_cmdshell):
            message = "do you want to retrieve the command standard "
            message += "output? [Y/n/a] "
            getOutput = readInput(message, default="Y")

            if getOutput in ("a", "A"):
                self.alwaysRetrieveCmdOutput = True

@@ -80,7 +80,7 @@ class Registry:
        logger.debug("reading registry key '%s' value '%s'" % (regKey, regValue))

        data = self.evalCmd(self.__batPathRemote)

        if data and not parse:
            pattern = ' '
            index = data.find(pattern)

@@ -60,7 +60,7 @@ class UPX:

        logger.debug("executing local command: %s" % self.__upxCmd)
        process = execute(self.__upxCmd, shell=True, stdout=PIPE, stderr=STDOUT)

        dataToStdout("\r[%s] [INFO] compression in progress " % time.strftime("%X"))
        pollProcess(process)
        upxStdout, upxStderr = process.communicate()

@@ -236,7 +236,7 @@ class Web:

        self.webBackdoorUrl = "%s/%s" % (self.webBaseUrl, backdoorName)
        self.webDirectory = directory

        infoMsg = "the backdoor has probably been successfully "
        infoMsg += "uploaded on '%s', go with your browser " % self.webDirectory
        infoMsg += "to '%s' and enjoy it!" % self.webBackdoorUrl
@@ -22,7 +22,7 @@ class _Getch:
            self.impl = _GetchUnix()

    def __call__(self): return self.impl()


class _GetchUnix:
    def __init__(self):
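The _Getch/_GetchUnix pair above implements the classic recipe for reading a single key press on Unix: switch the terminal into raw mode, read one character, then restore the previous settings. A standalone sketch of that recipe (not sqlmap's exact code):

import sys
import termios
import tty

def getch():
    fd = sys.stdin.fileno()
    old_settings = termios.tcgetattr(fd)    # remember the current terminal modes
    try:
        tty.setraw(fd)                      # raw mode: no echo, no line buffering
        ch = sys.stdin.read(1)              # read exactly one character
    finally:
        termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)  # always restore
    return ch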
@@ -21,7 +21,7 @@ class Enumeration(GenericEnumeration):
        logger.warn(warnMsg)

        return []

    def getPasswordHashes(self):
        warnMsg = "on Firebird it is not possible to enumerate the user password hashes"
        logger.warn(warnMsg)

@@ -138,7 +138,7 @@ class Fingerprint(GenericFingerprint):

        if not conf.extensiveFp:
            return True

        kb.dbmsVersion = [self.__sysTablesCheck()]

        return True

@@ -16,7 +16,7 @@ from plugins.generic.enumeration import Enumeration as GenericEnumeration
class Enumeration(GenericEnumeration):
    def __init__(self):
        GenericEnumeration.__init__(self, DBMS.MAXDB)

        kb.data.processChar = lambda x: x.replace('_', ' ') if x else x

    def getDbs(self):

@@ -251,7 +251,7 @@ class Fingerprint(GenericFingerprint):
            logger.warn(warnMsg)

            return False

    def checkDbmsOs(self, detailed=False):
        if kb.os:
            return

@@ -101,7 +101,7 @@ class Fingerprint(GenericFingerprint):

        if not conf.extensiveFp:
            return True

        for version in range(12, 16):
            randInt = randomInt()
            query = " AND @@VERSION_NUMBER/1000=%d" % version

@@ -34,7 +34,7 @@ class Connector:
        infoMsg = "connection to %s server %s" % (conf.dbms, self.hostname)
        infoMsg += ":%d established" % self.port
        logger.info(infoMsg)

    def closed(self):
        infoMsg = "connection to %s server %s" % (conf.dbms, self.hostname)
        infoMsg += ":%d closed" % self.port

@@ -900,7 +900,7 @@ class Enumeration:
            logger.warn(warnMsg)

            conf.db = self.getCurrentDb()

        firebirdTypes = {
            "261":"BLOB",
            "14":"CHAR",