minor refactoring

This commit is contained in:
Miroslav Stampar 2012-02-16 09:46:41 +00:00
parent bcf9fc6c6f
commit e1f86c97c4
8 changed files with 176 additions and 154 deletions

99
lib/core/bigarray.py Normal file
View File

@@ -0,0 +1,99 @@
#!/usr/bin/env python
"""
$Id$
Copyright (c) 2006-2012 sqlmap developers (http://www.sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import os
import pickle
import tempfile

from lib.core.settings import BIGARRAY_CHUNK_LENGTH
class BigArray(list):
    """
    List-like object used for storing large amounts of data (disk cached)

    Items are collected into in-memory chunks; once a chunk reaches
    BIGARRAY_CHUNK_LENGTH items it is pickled to a temporary file and only
    the filename is kept in memory in its place.
    """

    def __init__(self):
        # Each element of self.chunks is either a list of items (in-memory
        # chunk) or a str filename of a pickled (disk cached) chunk
        self.chunks = [[]]
        # Cache for one unpickled disk chunk: [chunk index, items, dirty flag]
        self.cache = None
        self.length = 0
        # Temporary files created by _dump() - removed in __del__()
        self.filenames = set()

    def append(self, value):
        """Appends a value, flushing the current chunk to disk when full"""
        self.chunks[-1].append(value)
        if len(self.chunks[-1]) >= BIGARRAY_CHUNK_LENGTH:
            filename = self._dump(self.chunks[-1])
            del self.chunks[-1][:]
            self.chunks[-1] = filename
            self.chunks.append([])

    def pop(self):
        """Removes and returns the last stored value"""
        if len(self.chunks[-1]) < 1:
            # Last in-memory chunk is exhausted - drop it and reload the
            # previous (disk cached) chunk back into memory
            self.chunks.pop()
            with open(self.chunks[-1], 'rb') as fp:
                self.chunks[-1] = pickle.load(fp)
        return self.chunks[-1].pop()

    def index(self, value):
        """Returns the index of the first occurrence of the given value"""
        for index in xrange(len(self)):
            if self[index] == value:
                return index
        # Fixed: original code returned (ValueError, msg) instead of raising
        raise ValueError("%s is not in list" % value)

    def _dump(self, value):
        """Pickles the given value into a fresh temporary file (returns its name)"""
        handle, filename = tempfile.mkstemp()
        self.filenames.add(filename)
        os.close(handle)
        with open(filename, 'w+b') as fp:
            pickle.dump(value, fp)
        return filename

    def _checkcache(self, index):
        """Ensures that the disk chunk with the given index is loaded in self.cache"""
        if (self.cache and self.cache[0] != index and self.cache[2]):
            # Cached chunk was modified (dirty) - flush it to disk first
            filename = self._dump(self.cache[1])
            self.chunks[self.cache[0]] = filename
        if not (self.cache and self.cache[0] == index):
            with open(self.chunks[index], 'rb') as fp:
                # Fixed: must be a mutable list - __setitem__ assigns
                # self.cache[2] = True, which fails on a tuple
                self.cache = [index, pickle.load(fp), False]

    def __getitem__(self, y):
        index = y / BIGARRAY_CHUNK_LENGTH
        offset = y % BIGARRAY_CHUNK_LENGTH
        chunk = self.chunks[index]
        if isinstance(chunk, list):
            return chunk[offset]
        else:
            self._checkcache(index)
            return self.cache[1][offset]

    def __setitem__(self, y, value):
        index = y / BIGARRAY_CHUNK_LENGTH
        offset = y % BIGARRAY_CHUNK_LENGTH
        chunk = self.chunks[index]
        if isinstance(chunk, list):
            chunk[offset] = value
        else:
            self._checkcache(index)
            self.cache[1][offset] = value
            self.cache[2] = True  # dirty flag - flushed later by _checkcache()

    def __repr__(self):
        # Only the last (in-memory) chunk is shown; "..." marks hidden chunks
        return "%s%s" % ("..." if len(self.chunks) > 1 else "", self.chunks[-1].__repr__())

    def __iter__(self):
        for i in xrange(len(self)):
            yield self[i]

    def __len__(self):
        # All chunks except the last one hold exactly BIGARRAY_CHUNK_LENGTH items
        return len(self.chunks[-1]) if len(self.chunks) == 1 else (len(self.chunks) - 1) * BIGARRAY_CHUNK_LENGTH + len(self.chunks[-1])

    def __del__(self):
        # Best-effort cleanup of the temporary files
        for filename in self.filenames:
            try:
                os.remove(filename)
            except OSError:
                pass

View File

@ -24,7 +24,6 @@ import socket
import string
import struct
import sys
import tempfile
import time
import types
import urllib
@ -50,6 +49,7 @@ from extra.clientform.clientform import ParseError
from extra.cloak.cloak import decloak
from extra.magic import magic
from extra.odict.odict import OrderedDict
from lib.core.bigarray import BigArray
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
@ -76,7 +76,6 @@ from lib.core.exception import sqlmapMissingDependence
from lib.core.exception import sqlmapSilentQuitException
from lib.core.exception import sqlmapSyntaxException
from lib.core.optiondict import optDict
from lib.core.settings import BIGARRAY_CHUNK_LENGTH
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
from lib.core.settings import DUMMY_USER_INJECTION
@ -156,153 +155,6 @@ class UnicodeRawConfigParser(RawConfigParser):
fp.write("\n")
class Wordlist:
    """
    Iterator for looping over large dictionary (wordlist) files, optionally
    followed by custom values appended at runtime
    """

    def __init__(self, filenames):
        self.filenames = filenames  # dictionary files to process, in order
        self.fp = None              # currently opened file object (if any)
        self.index = 0              # index of the next file to open
        self.iter = None            # iterator over the current source
        self.custom = []            # extra values added via append()
        # NOTE: adjust() may raise StopIteration right here when there is
        # nothing to iterate (no files, no custom values)
        self.adjust()
        self.lock = None            # optional threading lock used by next()

    def __iter__(self):
        return self

    def adjust(self):
        # Switches to the next input source: the next dictionary file or,
        # once all files are exhausted, the custom values (if any).
        # Raises StopIteration when nothing is left to iterate.
        self.closeFP()
        if self.index > len(self.filenames):
            raise StopIteration
        elif self.index == len(self.filenames):
            if self.custom:
                self.iter = iter(self.custom)
            else:
                raise StopIteration
        else:
            current = self.filenames[self.index]
            infoMsg = "loading dictionary from '%s'" % current
            singleTimeLogMessage(infoMsg)
            self.fp = open(current, "r")
            self.iter = iter(self.fp)

        self.index += 1

    def append(self, value):
        # Registers a custom value (iterated after all dictionary files)
        self.custom.append(value)

    def closeFP(self):
        # Closes the currently opened dictionary file (if any)
        if self.fp:
            self.fp.close()
            self.fp = None

    def next(self):
        # Returns the next word (right-stripped), transparently switching
        # to the next source when the current one is exhausted.
        # Thread-safe when self.lock has been set by the caller.
        retVal = None
        if self.lock:
            self.lock.acquire()
        try:
            retVal = self.iter.next().rstrip()
        except StopIteration:
            self.adjust()
            retVal = self.iter.next().rstrip()
        finally:
            if self.lock:
                self.lock.release()
        return retVal

    def rewind(self):
        # Restarts iteration from the first dictionary file
        self.index = 0
        self.adjust()
class BigArray(list):
    """
    List-like object used for storing large amounts of data (disk cached)

    Items are collected into in-memory chunks; once a chunk reaches
    BIGARRAY_CHUNK_LENGTH items it is pickled to a temporary file and only
    the filename is kept in memory in its place.
    """

    def __init__(self):
        # Each element of self.chunks is either a list of items (in-memory
        # chunk) or a str filename of a pickled (disk cached) chunk
        self.chunks = [[]]
        # Cache for one unpickled disk chunk: [chunk index, items, dirty flag]
        self.cache = None
        self.length = 0
        # Temporary files created by _dump() - removed in __del__()
        self.filenames = set()

    def append(self, value):
        """Appends a value, flushing the current chunk to disk when full"""
        self.chunks[-1].append(value)
        if len(self.chunks[-1]) >= BIGARRAY_CHUNK_LENGTH:
            filename = self._dump(self.chunks[-1])
            del self.chunks[-1][:]
            self.chunks[-1] = filename
            self.chunks.append([])

    def pop(self):
        """Removes and returns the last stored value"""
        if len(self.chunks[-1]) < 1:
            # Last in-memory chunk is exhausted - drop it and reload the
            # previous (disk cached) chunk back into memory
            self.chunks.pop()
            with open(self.chunks[-1], 'rb') as fp:
                self.chunks[-1] = pickle.load(fp)
        return self.chunks[-1].pop()

    def index(self, value):
        """Returns the index of the first occurrence of the given value"""
        for index in xrange(len(self)):
            if self[index] == value:
                return index
        # Fixed: original code returned (ValueError, msg) instead of raising
        raise ValueError("%s is not in list" % value)

    def _dump(self, value):
        """Pickles the given value into a fresh temporary file (returns its name)"""
        handle, filename = tempfile.mkstemp()
        self.filenames.add(filename)
        os.close(handle)
        with open(filename, 'w+b') as fp:
            pickle.dump(value, fp)
        return filename

    def _checkcache(self, index):
        """Ensures that the disk chunk with the given index is loaded in self.cache"""
        if (self.cache and self.cache[0] != index and self.cache[2]):
            # Cached chunk was modified (dirty) - flush it to disk first
            filename = self._dump(self.cache[1])
            self.chunks[self.cache[0]] = filename
        if not (self.cache and self.cache[0] == index):
            with open(self.chunks[index], 'rb') as fp:
                # Fixed: must be a mutable list - __setitem__ assigns
                # self.cache[2] = True, which fails on a tuple
                self.cache = [index, pickle.load(fp), False]

    def __getitem__(self, y):
        index = y / BIGARRAY_CHUNK_LENGTH
        offset = y % BIGARRAY_CHUNK_LENGTH
        chunk = self.chunks[index]
        if isinstance(chunk, list):
            return chunk[offset]
        else:
            self._checkcache(index)
            return self.cache[1][offset]

    def __setitem__(self, y, value):
        index = y / BIGARRAY_CHUNK_LENGTH
        offset = y % BIGARRAY_CHUNK_LENGTH
        chunk = self.chunks[index]
        if isinstance(chunk, list):
            chunk[offset] = value
        else:
            self._checkcache(index)
            self.cache[1][offset] = value
            self.cache[2] = True  # dirty flag - flushed later by _checkcache()

    def __repr__(self):
        # Only the last (in-memory) chunk is shown; "..." marks hidden chunks
        return "%s%s" % ("..." if len(self.chunks) > 1 else "", self.chunks[-1].__repr__())

    def __iter__(self):
        for i in xrange(len(self)):
            yield self[i]

    def __len__(self):
        # All chunks except the last one hold exactly BIGARRAY_CHUNK_LENGTH items
        return len(self.chunks[-1]) if len(self.chunks) == 1 else (len(self.chunks) - 1) * BIGARRAY_CHUNK_LENGTH + len(self.chunks[-1])

    def __del__(self):
        # Best-effort cleanup of the temporary files
        for filename in self.filenames:
            try:
                os.remove(filename)
            except OSError:
                pass
class DynamicContentItem:
"""
Represents line in content page with dynamic properties (candidate

71
lib/core/wordlist.py Normal file
View File

@ -0,0 +1,71 @@
#!/usr/bin/env python
"""
$Id$
Copyright (c) 2006-2012 sqlmap developers (http://www.sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from lib.core.common import singleTimeLogMessage
class Wordlist:
    """
    Iterator over one or more large dictionary (wordlist) files, optionally
    followed by custom values added at runtime
    """

    def __init__(self, filenames):
        self.filenames = filenames
        self.fp = None
        self.index = 0
        self.iter = None
        self.custom = []
        self.adjust()
        self.lock = None

    def __iter__(self):
        return self

    def adjust(self):
        """Advances to the next input source (dictionary file or custom values)"""
        self.closeFP()

        remaining = len(self.filenames) - self.index

        if remaining < 0:
            # Past the custom values as well - iteration is over
            raise StopIteration
        if remaining == 0:
            # All files consumed - fall back to custom values (if any)
            if not self.custom:
                raise StopIteration
            self.iter = iter(self.custom)
        else:
            current = self.filenames[self.index]
            infoMsg = "loading dictionary from '%s'" % current
            singleTimeLogMessage(infoMsg)
            self.fp = open(current, "r")
            self.iter = iter(self.fp)

        self.index += 1

    def append(self, value):
        """Registers a custom value (yielded after all dictionary files)"""
        self.custom.append(value)

    def closeFP(self):
        """Closes the currently opened dictionary file (if any)"""
        if self.fp:
            self.fp.close()
            self.fp = None

    def next(self):
        """Returns the next (right-stripped) word, thread-safe when self.lock is set"""
        if self.lock:
            self.lock.acquire()
        try:
            try:
                retVal = self.iter.next().rstrip()
            except StopIteration:
                # Current source exhausted - move on and retry once
                self.adjust()
                retVal = self.iter.next().rstrip()
        finally:
            if self.lock:
                self.lock.release()
        return retVal

    def rewind(self):
        """Starts iterating again from the first dictionary file"""
        self.index = 0
        self.adjust()

View File

@ -11,8 +11,8 @@ import re
import time
from lib.core.agent import agent
from lib.core.bigarray import BigArray
from lib.core.common import Backend
from lib.core.common import BigArray
from lib.core.common import calculateDeltaSeconds
from lib.core.common import cleanQuery
from lib.core.common import dataToSessionFile

View File

@ -12,8 +12,8 @@ import threading
import time
from lib.core.agent import agent
from lib.core.bigarray import BigArray
from lib.core.common import Backend
from lib.core.common import BigArray
from lib.core.common import calculateDeltaSeconds
from lib.core.common import dataToStdout
from lib.core.common import extractRegexResult

View File

@ -12,8 +12,8 @@ import threading
import time
from lib.core.agent import agent
from lib.core.bigarray import BigArray
from lib.core.common import Backend
from lib.core.common import BigArray
from lib.core.common import calculateDeltaSeconds
from lib.core.common import clearConsoleLine
from lib.core.common import dataToStdout

View File

@ -46,7 +46,6 @@ from lib.core.common import paths
from lib.core.common import readInput
from lib.core.common import singleTimeLogMessage
from lib.core.common import singleTimeWarnMessage
from lib.core.common import Wordlist
from lib.core.convert import hexdecode
from lib.core.convert import hexencode
from lib.core.convert import utf8encode
@ -68,6 +67,7 @@ from lib.core.settings import PYVERSION
from lib.core.settings import ML
from lib.core.settings import UNICODE_ENCODING
from lib.core.settings import ROTATING_CHARS
from lib.core.wordlist import Wordlist
def mysql_passwd(password, uppercase=True):
"""

View File

@ -11,9 +11,9 @@ import re
import time
from lib.core.agent import agent
from lib.core.bigarray import BigArray
from lib.core.common import arrayizeValue
from lib.core.common import Backend
from lib.core.common import BigArray
from lib.core.common import clearConsoleLine
from lib.core.common import dataToStdout
from lib.core.common import filterPairValues