mirror of https://github.com/sqlmapproject/sqlmap.git

commit 37d78ffe01
parent 6c49af090c

    minor optimization

sqlmap.py | 16
diff --git a/sqlmap.py b/sqlmap.py
--- a/sqlmap.py
+++ b/sqlmap.py
@@ -35,6 +35,7 @@ from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.data import paths
+from lib.core.dump import dumper
 from lib.core.common import unhandledExceptionMessage
 from lib.core.exception import exceptionsTuple
 from lib.core.exception import sqlmapSilentQuitException
@@ -44,7 +45,6 @@ from lib.core.profiling import profile
 from lib.core.settings import LEGAL_DISCLAIMER
 from lib.core.testing import smokeTest
 from lib.core.testing import liveTest
-from lib.core.xmldump import closeDumper
 from lib.parse.cmdline import cmdLineParser
 
 def modulePath():
@@ -85,27 +85,23 @@ def main():
     except sqlmapUserQuitException:
         errMsg = "user quit"
         logger.error(errMsg)
-        closeDumper(False, errMsg)
 
     except sqlmapSilentQuitException:
-        closeDumper(False)
+        pass
 
     except exceptionsTuple, e:
         e = getUnicode(e)
         logger.critical(e)
-        closeDumper(False, e)
 
     except KeyboardInterrupt:
         print
         errMsg = "user aborted"
         logger.error(errMsg)
-        closeDumper(False, errMsg)
 
     except EOFError:
         print
         errMsg = "exit"
         logger.error(errMsg)
-        closeDumper(False, errMsg)
 
     except SystemExit:
         pass
@@ -115,10 +111,6 @@ def main():
         errMsg = unhandledExceptionMessage()
         logger.critical(errMsg)
         traceback.print_exc()
-        closeDumper(False, errMsg)
-
-    else:
-        closeDumper(True)
 
     finally:
         dataToStdout("\n[*] shutting down at %s\n\n" % time.strftime("%X"), forceOutput=True)
@@ -126,12 +118,14 @@ def main():
         kb.threadContinue = False
         kb.threadException = True
 
-        if conf.get('hashDB', None):
+        if conf.get("hashDB", None):
             try:
                 conf.hashDB.flush(True)
             except KeyboardInterrupt:
                 pass
 
+        dumper.flush()
+
         # Reference: http://stackoverflow.com/questions/1635080/terminate-a-multi-thread-python-program
         if hasattr(conf, "threads") and conf.threads > 1:
             os._exit(0)
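On the sqlmap.py side, the change removes the per-exception closeDumper() calls (and the lib.core.xmldump import that provided them) and instead performs a single dumper.flush() in the finally block, so output is finalized exactly once on every exit path. A minimal, self-contained sketch of that control-flow shape; DummyDumper and run() below are hypothetical stand-ins for illustration, not sqlmap code:

    import time

    class DummyDumper(object):
        """Hypothetical stand-in for sqlmap's buffered dumper (illustration only)."""
        def __init__(self):
            self.flushed = 0

        def flush(self):
            self.flushed += 1

    def run(dumper, work):
        try:
            work()
        except KeyboardInterrupt:
            print("user aborted")              # log only; no cleanup in the except branches
        except Exception as e:
            print("unhandled error: %s" % e)   # log only; no cleanup in the except branches
        finally:
            dumper.flush()                     # cleanup happens once, on every exit path
            print("shutting down at %s" % time.strftime("%X"))

    dumper = DummyDumper()
    run(dumper, lambda: None)                  # normal exit
    run(dumper, lambda: 1 / 0)                 # error exit
    assert dumper.flushed == 2                 # flushed exactly once per run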
diff --git a/lib/core/dump.py b/lib/core/dump.py
--- a/lib/core/dump.py
+++ b/lib/core/dump.py
@@ -8,8 +8,10 @@ See the file 'doc/COPYING' for copying permission
 """
 
 import codecs
+import cStringIO
 import re
 import os
+import threading
 
 from lib.core.common import Backend
 from lib.core.common import dataToDumpFile
@@ -26,6 +28,7 @@ from lib.core.data import logger
 from lib.core.enums import DBMS
 from lib.core.exception import sqlmapValueException
 from lib.core.replication import Replication
+from lib.core.settings import BUFFERED_LOG_SIZE
 from lib.core.settings import TRIM_STDOUT_DUMP_SIZE
 from lib.core.settings import UNICODE_ENCODING
 
@@ -39,23 +42,40 @@ class Dump:
     def __init__(self):
         self.__outputFile = None
         self.__outputFP = None
+        self.__outputBP = None
+        self.__lock = threading.Lock()
 
     def __write(self, data, n=True, console=True):
         text = "%s%s" % (data, "\n" if n else " ")
         if console:
             dataToStdout(text)
 
-        self.__outputFP.write(text)
-        self.__outputFP.flush()
+        if kb.get("multiThreadMode"):
+            self.__lock.acquire()
+
+        self.__outputBP.write(text)
+
+        if self.__outputBP.tell() > BUFFERED_LOG_SIZE:
+            self.flush()
+
+        if kb.get("multiThreadMode"):
+            self.__lock.release()
 
         kb.dataOutputFlag = True
 
+    def flush(self):
+        if self.__outputBP and self.__outputFP and self.__outputBP.tell() > 0:
+            _ = self.__outputBP.getvalue()
+            self.__outputBP.reset()
+            self.__outputFP.write(_)
+
     def __formatString(self, inpStr):
         return restoreDumpMarkedChars(getUnicode(inpStr))
 
     def setOutputFile(self):
         self.__outputFile = "%s%slog" % (conf.outputPath, os.sep)
         self.__outputFP = codecs.open(self.__outputFile, "ab", UNICODE_ENCODING)
+        self.__outputBP = cStringIO.StringIO()
 
     def getOutputFile(self):
         return self.__outputFile
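The rewritten Dump.__write() no longer writes and flushes the log file on every call: text is appended to an in-memory cStringIO buffer (self.__outputBP), the append is guarded by a threading.Lock() when kb reports multi-threaded mode, and the buffer is copied to the real file only once it grows past BUFFERED_LOG_SIZE or when flush() is invoked explicitly (e.g. from the finally block above). A compact sketch of the same scheme, using io.StringIO in place of the Python 2 cStringIO module; the BufferedLog class and its attribute names are illustrative, not sqlmap's:

    import io
    import threading

    BUFFERED_LOG_SIZE = 10000                 # same threshold value the commit adds to lib/core/settings.py

    class BufferedLog(object):
        """Illustrative re-creation of the buffering idea behind Dump.__write()/flush()."""

        def __init__(self, fp, threaded=False):
            self._fp = fp                     # the real (slow) log file object
            self._bp = io.StringIO()          # in-memory buffer, like self.__outputBP
            self._lock = threading.Lock()
            self._threaded = threaded

        def write(self, text):
            if self._threaded:
                self._lock.acquire()
            try:
                self._bp.write(text)
                if self._bp.tell() > BUFFERED_LOG_SIZE:
                    self.flush()              # spill to the file only when the buffer is large
            finally:
                if self._threaded:
                    self._lock.release()

        def flush(self):
            data = self._bp.getvalue()
            if data:
                self._fp.write(data)
                self._bp = io.StringIO()      # the patch uses cStringIO's reset(); re-creating the buffer is equivalent here

Releasing the lock in a try/finally is a small hardening in this sketch, not something the patch does; the flow is otherwise the same: append, test the size, spill past the threshold.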
diff --git a/lib/core/settings.py b/lib/core/settings.py
--- a/lib/core/settings.py
+++ b/lib/core/settings.py
@@ -432,3 +432,6 @@ SLOW_ORDER_COUNT_THRESHOLD = 10000
 
 # Give up on hash recognition if nothing was found in first given number of rows
 HASH_RECOGNITION_QUIT_THRESHOLD = 10000
+
+# Size of a buffer used for log file output
+BUFFERED_LOG_SIZE = 10000
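BUFFERED_LOG_SIZE caps how much log text accumulates in memory before it is spilled to the file. A back-of-the-envelope illustration in plain Python (nothing here is sqlmap code): with lines of roughly 100 bytes, the buffer reaches the file about once per hundred lines instead of once per line, which is the write-and-flush traffic the old per-line self.__outputFP.flush() generated.

    import io

    BUFFERED_LOG_SIZE = 10000                # the threshold added above

    flushes = 0
    buf = io.StringIO()

    for _ in range(1000):                    # 1000 log lines of 100 bytes each
        buf.write("x" * 99 + "\n")
        if buf.tell() > BUFFERED_LOG_SIZE:
            flushes += 1                     # this is where the real code copies the buffer to the log file
            buf = io.StringIO()

    print(flushes)                           # 9 buffered flushes versus 1000 per-line write+flush calls before the change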