diff --git a/_sqlmap.py b/_sqlmap.py
index ae05a126a..e2f249ab3 100755
--- a/_sqlmap.py
+++ b/_sqlmap.py
@@ -34,7 +34,6 @@ from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.data import paths
-from lib.core.dump import dumper
 from lib.core.common import unhandledExceptionMessage
 from lib.core.exception import exceptionsTuple
 from lib.core.exception import sqlmapSilentQuitException
@@ -123,8 +122,6 @@ def main():
     except KeyboardInterrupt:
         pass
 
-    dumper.flush()
-
     # Reference: http://stackoverflow.com/questions/1635080/terminate-a-multi-thread-python-program
     if conf.get("threads", 0) > 1 or conf.get("dnsServer", None):
         os._exit(0)
diff --git a/doc/THANKS b/doc/THANKS
index 7fae471db..88153115b 100644
--- a/doc/THANKS
+++ b/doc/THANKS
@@ -15,6 +15,9 @@ David Alvarez
 Sergio Alves
     for reporting a bug
 
+Thomas Anderson
+    for reporting a bug
+
 Chip Andrews
     for his excellent work maintaining the SQL Server versions database
     at SQLSecurity.com and permission to implement the update feature
diff --git a/lib/core/dump.py b/lib/core/dump.py
index fca238742..5bec4dde2 100644
--- a/lib/core/dump.py
+++ b/lib/core/dump.py
@@ -30,7 +30,6 @@ from lib.core.enums import DBMS
 from lib.core.exception import sqlmapValueException
 from lib.core.replication import Replication
 from lib.core.settings import BLANK
-from lib.core.settings import BUFFERED_LOG_SIZE
 from lib.core.settings import NULL
 from lib.core.settings import TRIM_STDOUT_DUMP_SIZE
 from lib.core.settings import UNICODE_ENCODING
@@ -45,7 +44,6 @@ class Dump:
     def __init__(self):
         self._outputFile = None
         self._outputFP = None
-        self._outputBP = None
         self._lock = threading.Lock()
 
     def _write(self, data, n=True, console=True):
@@ -56,41 +54,27 @@ class Dump:
         if kb.get("multiThreadMode"):
             self._lock.acquire()
 
-        self._outputBP.write(text)
-
-        if self._outputBP.tell() > BUFFERED_LOG_SIZE:
-            self.flush()
+        self._outputFP.write(text)
 
         if kb.get("multiThreadMode"):
             self._lock.release()
 
         kb.dataOutputFlag = True
 
-    def flush(self):
-        if self._outputBP and self._outputFP and self._outputBP.tell() > 0:
-            _ = self._outputBP.getvalue()
-            self._outputBP.truncate(0)
-            self._outputFP.write(_)
-
     def _formatString(self, inpStr):
         return restoreDumpMarkedChars(getUnicode(inpStr))
 
     def setOutputFile(self):
         self._outputFile = "%s%slog" % (conf.outputPath, os.sep)
         self._outputFP = codecs.open(self._outputFile, "ab", UNICODE_ENCODING)
-        self._outputBP = StringIO.StringIO()
 
     def getOutputFile(self):
-        self.flush()
         return self._outputFile
 
     def string(self, header, data, sort=True):
         if isinstance(data, (list, tuple, set)):
             self.lister(header, data, sort)
-
-            return
-
-        if data:
+        elif data:
             data = self._formatString(getUnicode(data))
 
             if data[-1] == '\n':
diff --git a/lib/core/settings.py b/lib/core/settings.py
index 1c27b716e..862eea66b 100644
--- a/lib/core/settings.py
+++ b/lib/core/settings.py
@@ -467,9 +467,6 @@ SLOW_ORDER_COUNT_THRESHOLD = 10000
 
 # Give up on hash recognition if nothing was found in first given number of rows
 HASH_RECOGNITION_QUIT_THRESHOLD = 10000
 
-# Size of a buffer used for log file output
-BUFFERED_LOG_SIZE = 10000
-
 # Maximum number of redirections to any single URL - this is needed because of the state that cookies introduce
 MAX_SINGLE_URL_REDIRECTIONS = 4