Mirror of https://github.com/sqlmapproject/sqlmap.git
speed of --replicate is now vastly improved
parent 96da7ba4eb
commit 866cdb4cf7
@@ -177,7 +177,7 @@ def safechardecode(value):
             else:
                 break
-    elif isinstance(value, list):
+    elif isinstance(value, (list, tuple)):
         for i in xrange(len(value)):
             retVal[i] = safechardecode(value[i])
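
The hunk above broadens the recursive branch of safechardecode() so that tuples are decoded the same way as lists. A minimal sketch of that idea, not sqlmap's actual implementation (the string branch here is reduced to a toy escape-sequence decode, and a decoded copy is returned as a list because tuples are immutable):

# Minimal sketch, assuming a toy escape-sequence decode for the
# string branch; sqlmap's real function handles many more cases.
def safechardecode(value):
    if isinstance(value, str):
        return value.replace("\\n", "\n").replace("\\t", "\t")
    elif isinstance(value, (list, tuple)):
        # decode every element recursively, regardless of whether
        # the container is a list or a tuple
        return [safechardecode(item) for item in value]
    else:
        return value

print(safechardecode(("a\\tb", ["c\\nd"])))  # ['a\tb', ['c\nd']]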
@@ -358,6 +358,9 @@ class Dump:
         if not conf.multipleTargets and not conf.replicate:
             dataToDumpFile(dumpFP, "\n")
 
+        if conf.replicate:
+            rtable.beginTransaction()
+
         for i in range(count):
             field = 1
             values = []
@@ -398,11 +401,12 @@ class Dump:
                 self.__write("%s\n" % separator)
 
         if conf.replicate:
+            rtable.endTransaction()
             logger.info("Table '%s.%s' dumped to sqlite3 file '%s'" % (db, table, replication.dbpath))
 
         elif not conf.multipleTargets:
             dataToDumpFile(dumpFP, "\n")
             dumpFP.close()
 
             logger.info("Table '%s.%s' dumped to CSV file '%s'" % (db, table, dumpFileName))
 
     def dbColumns(self, dbColumns, colConsider, dbs):
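
Taken together, the two Dump hunks wrap the entire row-insertion loop in a single transaction: rtable.beginTransaction() is issued once before iterating over the dumped rows, and rtable.endTransaction() once after, so sqlite3 no longer commits after every INSERT. A standalone sketch of the same pattern (table and file names are made up for the example; isolation_level=None puts the connection in autocommit mode so the explicit BEGIN/END take effect):

import sqlite3

con = sqlite3.connect("dump.sqlite3", isolation_level=None)  # autocommit mode
cur = con.cursor()
cur.execute("CREATE TABLE IF NOT EXISTS users (id INTEGER, name TEXT)")

rows = [(i, "user%d" % i) for i in range(10000)]

cur.execute("BEGIN TRANSACTION")   # what rtable.beginTransaction() runs
for row in rows:
    cur.execute("INSERT INTO users VALUES (?, ?)", row)
cur.execute("END TRANSACTION")     # what rtable.endTransaction() runs
con.close()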
@@ -71,6 +71,16 @@ class Replication:
                 errMsg = "wrong number of columns used in replicating insert"
                 raise sqlmapValueException, errMsg
 
+        def beginTransaction(self):
+            """
+            Great speed improvement can be gained by using explicit transactions around multiple inserts.
+            Reference: http://stackoverflow.com/questions/4719836/python-and-sqlite3-adding-thousands-of-rows
+            """
+            self.parent.cursor.execute('BEGIN TRANSACTION')
+
+        def endTransaction(self):
+            self.parent.cursor.execute('END TRANSACTION')
+
         def select(self, condition=None):
             """
             This function is used for selecting row(s) from current table.
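
A rough micro-benchmark of the claim in the new docstring (file names and row count here are arbitrary): without an explicit transaction each INSERT is committed, and synced to disk, individually, while BEGIN/END turns the whole batch into a single commit, typically an order-of-magnitude difference on disk-backed databases:

import os, sqlite3, time

def timed_inserts(path, explicit, n=5000):
    con = sqlite3.connect(path, isolation_level=None)  # autocommit mode
    cur = con.cursor()
    cur.execute("CREATE TABLE t (id INTEGER, name TEXT)")
    start = time.time()
    if explicit:
        cur.execute("BEGIN TRANSACTION")
    for i in range(n):
        cur.execute("INSERT INTO t VALUES (?, ?)", (i, "row%d" % i))
    if explicit:
        cur.execute("END TRANSACTION")  # alias for COMMIT in SQLite
    elapsed = time.time() - start
    con.close()
    os.remove(path)
    return elapsed

print("autocommit:  %.2fs" % timed_inserts("bench_a.db", False))
print("transaction: %.2fs" % timed_inserts("bench_b.db", True))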