minor optimization

Miroslav Stampar 2011-12-28 15:59:30 +00:00
parent 6c49af090c
commit 37d78ffe01
3 changed files with 30 additions and 13 deletions

@@ -35,6 +35,7 @@ from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.data import paths
from lib.core.dump import dumper
from lib.core.common import unhandledExceptionMessage
from lib.core.exception import exceptionsTuple
from lib.core.exception import sqlmapSilentQuitException
@@ -44,7 +45,6 @@ from lib.core.profiling import profile
from lib.core.settings import LEGAL_DISCLAIMER
from lib.core.testing import smokeTest
from lib.core.testing import liveTest
from lib.core.xmldump import closeDumper
from lib.parse.cmdline import cmdLineParser
def modulePath():
@@ -85,27 +85,23 @@ def main():
    except sqlmapUserQuitException:
        errMsg = "user quit"
        logger.error(errMsg)
        closeDumper(False, errMsg)
    except sqlmapSilentQuitException:
        closeDumper(False)
        pass
    except exceptionsTuple, e:
        e = getUnicode(e)
        logger.critical(e)
        closeDumper(False, e)
    except KeyboardInterrupt:
        print
        errMsg = "user aborted"
        logger.error(errMsg)
        closeDumper(False, errMsg)
    except EOFError:
        print
        errMsg = "exit"
        logger.error(errMsg)
        closeDumper(False, errMsg)
    except SystemExit:
        pass
@@ -115,10 +111,6 @@ def main():
        errMsg = unhandledExceptionMessage()
        logger.critical(errMsg)
        traceback.print_exc()
        closeDumper(False, errMsg)
    else:
        closeDumper(True)
    finally:
        dataToStdout("\n[*] shutting down at %s\n\n" % time.strftime("%X"), forceOutput=True)
@@ -126,12 +118,14 @@ def main():
        kb.threadContinue = False
        kb.threadException = True
        if conf.get('hashDB', None):
        if conf.get("hashDB", None):
            try:
                conf.hashDB.flush(True)
            except KeyboardInterrupt:
                pass
        dumper.flush()
        # Reference: http://stackoverflow.com/questions/1635080/terminate-a-multi-thread-python-program
        if hasattr(conf, "threads") and conf.threads > 1:
            os._exit(0)

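As a quick illustration of the simplified shutdown handling above, here is a minimal, self-contained sketch (not sqlmap code; run() and the Dumper class are illustrative stand-ins): instead of calling closeDumper() from every exception branch, a single flush of the buffered dumper in the common shutdown path covers all exit paths.

import time

class Dumper:
    def __init__(self):
        self.buffer = []          # lines waiting to be written out

    def write(self, line):
        self.buffer.append(line)  # buffer instead of touching the disk per line

    def flush(self):
        if self.buffer:
            print("flushing %d buffered line(s)" % len(self.buffer))
            self.buffer = []

dumper = Dumper()

def run():
    dumper.write("some finding")  # stand-in for the scan producing output

def main():
    try:
        run()
    except KeyboardInterrupt:
        print("\nuser aborted")
    finally:
        # one flush in the common shutdown path replaces the per-branch
        # closeDumper() calls removed by this commit
        print("[*] shutting down at %s" % time.strftime("%X"))
        dumper.flush()

if __name__ == "__main__":
    main()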
@@ -8,8 +8,10 @@ See the file 'doc/COPYING' for copying permission
"""
import codecs
import cStringIO
import re
import os
import threading
from lib.core.common import Backend
from lib.core.common import dataToDumpFile
@@ -26,6 +28,7 @@ from lib.core.data import logger
from lib.core.enums import DBMS
from lib.core.exception import sqlmapValueException
from lib.core.replication import Replication
from lib.core.settings import BUFFERED_LOG_SIZE
from lib.core.settings import TRIM_STDOUT_DUMP_SIZE
from lib.core.settings import UNICODE_ENCODING
@@ -39,23 +42,40 @@ class Dump:
    def __init__(self):
        self.__outputFile = None
        self.__outputFP = None
        self.__outputBP = None
        self.__lock = threading.Lock()
    def __write(self, data, n=True, console=True):
        text = "%s%s" % (data, "\n" if n else " ")
        if console:
            dataToStdout(text)
        self.__outputFP.write(text)
        self.__outputFP.flush()
        if kb.get("multiThreadMode"):
            self.__lock.acquire()
        self.__outputBP.write(text)
        if self.__outputBP.tell() > BUFFERED_LOG_SIZE:
            self.flush()
        if kb.get("multiThreadMode"):
            self.__lock.release()
        kb.dataOutputFlag = True
    def flush(self):
        if self.__outputBP and self.__outputFP and self.__outputBP.tell() > 0:
            _ = self.__outputBP.getvalue()
            self.__outputBP.reset()
            self.__outputFP.write(_)
    def __formatString(self, inpStr):
        return restoreDumpMarkedChars(getUnicode(inpStr))
    def setOutputFile(self):
        self.__outputFile = "%s%slog" % (conf.outputPath, os.sep)
        self.__outputFP = codecs.open(self.__outputFile, "ab", UNICODE_ENCODING)
        self.__outputBP = cStringIO.StringIO()
    def getOutputFile(self):
        return self.__outputFile

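The buffered logging added to the Dump class above can be summarized with the following standalone sketch (Python 3 flavour: io.StringIO stands in for cStringIO, and BufferedLog, the file name and the helper names are illustrative assumptions, not sqlmap's API): output is accumulated in an in-memory buffer and only written to the log file once the buffer grows past BUFFERED_LOG_SIZE, or when flush() is called explicitly at shutdown.

import io
import threading

BUFFERED_LOG_SIZE = 10000  # flush threshold in characters, as in settings.py

class BufferedLog:
    def __init__(self, path):
        self._fp = open(path, "a", encoding="utf-8")  # backing log file
        self._buf = io.StringIO()                     # in-memory buffer
        self._lock = threading.Lock()                 # guards the buffer across threads

    def write(self, data, newline=True):
        text = "%s%s" % (data, "\n" if newline else " ")
        with self._lock:
            self._buf.write(text)
            # write to disk only once the buffer grows past the threshold
            if self._buf.tell() > BUFFERED_LOG_SIZE:
                self._flush_locked()

    def flush(self):
        with self._lock:
            self._flush_locked()

    def _flush_locked(self):
        if self._buf.tell() > 0:
            self._fp.write(self._buf.getvalue())
            self._fp.flush()
            self._buf.seek(0)      # reset the buffer (the patch uses StringIO.reset())
            self._buf.truncate(0)

log = BufferedLog("log.txt")
for i in range(5000):
    log.write("line %d" % i)      # most writes stay in memory
log.flush()                       # mirrors the explicit dumper.flush() at shutdown

The gain is simply fewer per-line disk writes and flush() calls during large dumps, which is presumably what the "minor optimization" in the commit message refers to.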
@@ -432,3 +432,6 @@ SLOW_ORDER_COUNT_THRESHOLD = 10000
# Give up on hash recognition if nothing was found in first given number of rows
HASH_RECOGNITION_QUIT_THRESHOLD = 10000
# Size of a buffer used for log file output
BUFFERED_LOG_SIZE = 10000