mirror of https://github.com/sqlmapproject/sqlmap.git (synced 2024-11-22 09:36:35 +03:00)
A few bug fixes (NTLM credential parsing was wrong), some switch reordering (a few moved from Miscellaneous to General), and an implemented --check-waf switch (the irony is that this too will be called highly experimental/unstable, while other things get called "major/turbo/super bug fix/implementation")
This commit is contained in: parent b8ffcf9495, commit 93b296e02c
@@ -57,6 +57,7 @@ from lib.core.settings import CONSTANT_RATIO
 from lib.core.settings import UNKNOWN_DBMS_VERSION
 from lib.core.settings import LOWER_RATIO_BOUND
 from lib.core.settings import UPPER_RATIO_BOUND
+from lib.core.settings import IDS_WAF_CHECK_PAYLOAD
 from lib.core.threads import getCurrentThreadData
 from lib.request.connect import Connect as Request
 from lib.request.inject import checkBooleanExpression
@@ -832,6 +833,60 @@ def checkRegexp():

     return True

+def checkWaf():
+    """
+    Reference: http://seclists.org/nmap-dev/2011/q2/att-1005/http-waf-detect.nse
+    """
+
+    if not conf.checkWaf:
+        return False
+
+    infoMsg = "testing if the target is protected by "
+    infoMsg += "some kind of WAF/IPS/IDS"
+    logger.info(infoMsg)
+
+    retVal = False
+
+    backup = dict(conf.parameters)
+
+    conf.parameters = dict(backup)
+    conf.parameters[PLACE.GET] = "" if not conf.parameters.get(PLACE.GET) else conf.parameters[PLACE.GET] + "&"
+    conf.parameters[PLACE.GET] += "%s=%d %s" % (randomStr(), randomInt(), IDS_WAF_CHECK_PAYLOAD)
+
+    kb.matchRatio = None
+    _ = Request.queryPage()
+
+    if kb.errorIsNone and kb.matchRatio is None:
+        kb.matchRatio = LOWER_RATIO_BOUND
+
+    conf.parameters = dict(backup)
+    conf.parameters[PLACE.GET] = "" if not conf.parameters.get(PLACE.GET) else conf.parameters[PLACE.GET] + "&"
+    conf.parameters[PLACE.GET] += "%s=%d" % (randomStr(), randomInt())
+
+    trueResult = Request.queryPage()
+
+    if trueResult:
+        conf.parameters = dict(backup)
+        conf.parameters[PLACE.GET] = "" if not conf.parameters.get(PLACE.GET) else conf.parameters[PLACE.GET] + "&"
+        conf.parameters[PLACE.GET] += "%s=%d %s" % (randomStr(), randomInt(), IDS_WAF_CHECK_PAYLOAD)
+
+        falseResult = Request.queryPage()
+
+        if not falseResult:
+            retVal = True
+
+    conf.parameters = dict(backup)
+
+    if retVal:
+        warnMsg = "it appears that the target is protected. "
+        warnMsg += "please consider usage of tampering scripts"
+        logger.warn(warnMsg)
+    else:
+        infoMsg = "it appears that the target is not protected"
+        logger.info(infoMsg)
+
+    return retVal
+
 def checkNullConnection():
     """
     Reference: http://www.wisec.it/sectou.php?id=472f952d79293
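In essence, the new --check-waf check sends one request with a random harmless GET parameter and one with the same kind of parameter plus the dummy IDS_WAF_CHECK_PAYLOAD appended; if the harmless request still matches the original page while the payloaded one does not, some WAF/IPS/IDS is probably blocking or rewriting the response. A minimal standalone sketch of that heuristic, assuming the requests library and a SequenceMatcher ratio as stand-ins for sqlmap's Request.queryPage()/kb.matchRatio comparison:

# Minimal sketch of the --check-waf heuristic, independent of sqlmap internals.
# Assumptions: the `requests` library is available and a SequenceMatcher ratio
# stands in for sqlmap's page-comparison logic (Request.queryPage/kb.matchRatio).
import random
import string
from difflib import SequenceMatcher

import requests

IDS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,2,3,table_name FROM information_schema.tables"

def _random_param():
    name = "".join(random.choice(string.ascii_lowercase) for _ in range(6))
    return name, str(random.randint(1, 10000))

def looks_protected(url, threshold=0.98):
    """Return True if the target seems to filter a dummy SQLi payload."""
    baseline = requests.get(url).text

    name, value = _random_param()
    benign = requests.get(url, params={name: value}).text

    name, value = _random_param()
    payloaded = requests.get(url, params={name: "%s %s" % (value, IDS_WAF_CHECK_PAYLOAD)}).text

    benign_ratio = SequenceMatcher(None, baseline, benign).ratio()
    payload_ratio = SequenceMatcher(None, baseline, payloaded).ratio()

    # A junk parameter barely changes the page, but the payloaded one does
    # (block page, 403, challenge page, ...) -> likely WAF/IPS/IDS in front.
    return benign_ratio >= threshold and payload_ratio < threshold

Inside sqlmap itself the check is simply enabled with the new switch, e.g. python sqlmap.py -u "http://target/page.php?id=1" --check-waf (the URL here is only an example).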
@@ -18,6 +18,7 @@ from lib.controller.checks import checkString
 from lib.controller.checks import checkRegexp
 from lib.controller.checks import checkConnection
 from lib.controller.checks import checkNullConnection
+from lib.controller.checks import checkWaf
 from lib.controller.checks import heuristicCheckSqlInjection
 from lib.controller.checks import simpletonCheckSqlInjection
 from lib.core.agent import agent
@@ -320,6 +321,9 @@ def start():
                 if not checkConnection(suppressOutput=conf.forms) or not checkString() or not checkRegexp():
                     continue

+                if conf.checkWaf:
+                    checkWaf()
+
                 if conf.nullConnection:
                     checkNullConnection()

@@ -8,6 +8,7 @@ See the file 'doc/COPYING' for copying permission
 """

 import codecs
+import copy
 import ctypes
 import inspect
 import logging
@@ -1924,7 +1925,7 @@ def pushValue(value):
     Push value to the stack (thread dependent)
     """

-    getCurrentThreadData().valueStack.append(value)
+    getCurrentThreadData().valueStack.append(copy.deepcopy(value))

 def popValue():
     """
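The switch to copy.deepcopy() matters because callers may push a mutable object (a list or dict) and then keep mutating the original; without the deep copy, popValue() would return the mutated object rather than a snapshot of the value as it was at push time. A small illustrative sketch with a standalone stack instead of sqlmap's thread-local one:

# Why pushValue() deep-copies: a shallow reference would track later mutations.
import copy

value_stack = []

def push_value(value):
    # Store a snapshot of the value, not a live reference to it.
    value_stack.append(copy.deepcopy(value))

def pop_value():
    return value_stack.pop()

params = {"GET": "id=1"}
push_value(params)
params["GET"] = "id=1&tainted=1"       # caller keeps mutating the original

assert pop_value() == {"GET": "id=1"}  # the pushed snapshot is unaffected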
@@ -85,6 +85,7 @@ class MOBILES:
     NOKIA = "Nokia N97;Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/10.0.012; Profile/MIDP-2.1 Configuration/CLDC-1.1; en-us) AppleWebKit/525 (KHTML, like Gecko) WicKed/7.1.12344"

 class HTTPHEADER:
+    ACCEPT = "Accept"
     ACCEPT_ENCODING = "Accept-Encoding"
     AUTHORIZATION = "Authorization"
     CONNECTION = "Connection"
@@ -981,8 +981,8 @@ def __setPrefixSuffix():
         else:
             boundary.ptype = 1

-        # Prepend user's provided boundaries to all others boundaries
-        conf.boundaries.insert(0, boundary)
+        # user who knows for --prefix/--suffix doesn't want other combinations
+        conf.boundaries = [boundary]

 def __setHTTPAuthentication():
     """
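Behavior change worth noting: a user-supplied --prefix/--suffix boundary now replaces the built-in boundaries instead of merely being tried first. A toy sketch of the before/after difference, with plain lists standing in for conf.boundaries:

# Before: the custom boundary was prepended, so built-in combinations still ran.
builtin_boundaries = ["boundary_a", "boundary_b"]
custom_boundary = "user_prefix_suffix"

old_style = list(builtin_boundaries)
old_style.insert(0, custom_boundary)
assert old_style == ["user_prefix_suffix", "boundary_a", "boundary_b"]

# After: a user who sets --prefix/--suffix gets exactly that combination, nothing else.
new_style = [custom_boundary]
assert new_style == ["user_prefix_suffix"]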
@@ -1021,7 +1021,7 @@ def __setHTTPAuthentication():
             errMsg = "HTTP %s authentication credentials " % aTypeLower
             errMsg += "value must be in format username:password"
         elif aTypeLower == "ntlm":
-            regExp = "^(.*?)\\\(.*?):(.*?)$"
+            regExp = "^(.*\\\\.*):(.*?)$"
             errMsg = "HTTP NTLM authentication credentials value must "
             errMsg += "be in format DOMAIN\username:password"
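The old pattern captured DOMAIN and username as separate groups (with a mangled backslash escape), so the credentials were split at the wrong place; the new pattern captures DOMAIN\username as a single group and the password as the other, which is presumably what the downstream NTLM handler expects. A quick check of the new pattern, written here as a raw string equivalent to "^(.*\\\\.*):(.*?)$":

# Behavior of the new NTLM credentials pattern: group 1 is DOMAIN\username,
# group 2 is the password (the old pattern split DOMAIN and username apart).
import re

NTLM_CREDENTIALS_REGEX = r"^(.*\\.*):(.*?)$"

match = re.search(NTLM_CREDENTIALS_REGEX, r"WORKGROUP\admin:s3cr3t")
assert match is not None
assert match.group(1) == r"WORKGROUP\admin"
assert match.group(2) == "s3cr3t"

# A value without a DOMAIN\ part no longer matches, which triggers the
# "must be in format DOMAIN\username:password" error message instead.
assert re.search(NTLM_CREDENTIALS_REGEX, "admin:s3cr3t") is None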
@@ -156,26 +156,25 @@ optDict = {
             "trafficFile": "string",
             "batch": "boolean",
             "charset": "string",
+            "crawlDepth": "integer",
             "eta": "boolean",
             "flushSession": "boolean",
             "forms": "boolean",
             "freshQueries": "boolean",
-            "updateAll": "boolean"
+            "parseErrors": "boolean",
+            "replicate": "boolean",
+            "updateAll": "boolean",
+            "tor": "boolean"
         },

         "Miscellaneous": {
             "beep": "boolean",
             "checkPayload": "boolean",
             "cleanup": "boolean",
-            "crawlDepth": "integer",
             "dependencies": "boolean",
-            "forms": "boolean",
             "googlePage": "integer",
             "mobile": "boolean",
             "pageRank": "boolean",
-            "parseErrors": "boolean",
-            "replicate": "boolean",
-            "tor": "boolean",
             "wizard": "boolean",
             "verbose": "integer"
         },
@@ -80,6 +80,9 @@ UNION_STDEV_COEFF = 7
 # length of queue for candidates for time delay adjustment
 TIME_DELAY_CANDIDATES = 3

+# standard value for HTTP Accept header
+HTTP_ACCEPT_HEADER_VALUE = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"
+
 # HTTP timeout in silent mode
 HTTP_SILENT_TIMEOUT = 3

@@ -370,3 +373,6 @@ BRUTE_TABLE_EXISTS_TEMPLATE = "EXISTS(SELECT %d FROM %s)"

 # Template used for common column existence check
 BRUTE_COLUMN_EXISTS_TEMPLATE = "EXISTS(SELECT %s FROM %s)"
+
+# Payload used for checking of existence of IDS/WAF (dummier the better)
+IDS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,2,3,table_name FROM information_schema.tables"
@@ -471,6 +471,9 @@ def cmdLineParser():
         general.add_option("--charset", dest="charset",
                            help="Force character encoding used for data retrieval")

+        general.add_option("--crawl", dest="crawlDepth", type="int",
+                           help="Crawl the website starting from the target url")
+
         general.add_option("--eta", dest="eta",
                            action="store_true",
                            help="Display for each output the "
@@ -480,14 +483,30 @@ def cmdLineParser():
                            action="store_true",
                            help="Flush session file for current target")

+        general.add_option("--forms", dest="forms",
+                           action="store_true",
+                           help="Parse and test forms on target url")
+
         general.add_option("--fresh-queries", dest="freshQueries",
                            action="store_true",
                            help="Ignores query results stored in session file")

+        general.add_option("--parse-errors", dest="parseErrors",
+                           action="store_true",
+                           help="Parse and display DBMS error messages from responses")
+
+        general.add_option("--replicate", dest="replicate",
+                           action="store_true",
+                           help="Replicate dumped data into a sqlite3 database")
+
         general.add_option("--save", dest="saveCmdline",
                            action="store_true",
                            help="Save options on a configuration INI file")

+        general.add_option("--tor", dest="tor",
+                           action="store_true",
+                           help="Use default Tor (Vidalia/Privoxy/Polipo) proxy address")
+
         general.add_option("--update", dest="updateAll",
                            action="store_true",
                            help="Update sqlmap")
@@ -504,24 +523,21 @@ def cmdLineParser():

         miscellaneous.add_option("--check-payload", dest="checkPayload",
                                  action="store_true",
-                                 help="IDS detection testing of injection payloads")
+                                 help="Offline WAF/IPS/IDS payload detection testing")
+
+        miscellaneous.add_option("--check-waf", dest="checkWaf",
+                                 action="store_true",
+                                 help="Check for existence of WAF/IPS/IDS protection")

         miscellaneous.add_option("--cleanup", dest="cleanup",
                                  action="store_true",
                                  help="Clean up the DBMS by sqlmap specific "
                                       "UDF and tables")

-        miscellaneous.add_option("--crawl", dest="crawlDepth", type="int",
-                                 help="Crawl the website starting from the target url")
-
         miscellaneous.add_option("--dependencies", dest="dependencies",
                                  action="store_true",
                                  help="Check for missing sqlmap dependencies")

-        miscellaneous.add_option("--forms", dest="forms",
-                                 action="store_true",
-                                 help="Parse and test forms on target url")
-
         miscellaneous.add_option("--gpage", dest="googlePage", type="int",
                                  help="Use Google dork results from specified page number")

@@ -533,18 +549,6 @@ def cmdLineParser():
                                  action="store_true",
                                  help="Display page rank (PR) for Google dork results")

-        miscellaneous.add_option("--parse-errors", dest="parseErrors",
-                                 action="store_true",
-                                 help="Parse and display DBMS error messages from responses")
-
-        miscellaneous.add_option("--replicate", dest="replicate",
-                                 action="store_true",
-                                 help="Replicate dumped data into a sqlite3 database")
-
-        miscellaneous.add_option("--tor", dest="tor",
-                                 action="store_true",
-                                 help="Use default Tor (Vidalia/Privoxy/Polipo) proxy address")
-
         miscellaneous.add_option("--wizard", dest="wizard",
                                  action="store_true",
                                  help="Simple wizard interface for beginner users")
@@ -27,6 +27,7 @@ from lib.core.common import singleTimeLogMessage
 from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
+from lib.core.exception import sqlmapDataException
 from lib.core.settings import ML
 from lib.core.settings import META_CHARSET_REGEX
 from lib.core.settings import UNICODE_ENCODING
@@ -172,7 +173,12 @@ def decodePage(page, contentEncoding, contentType):
         else:
             data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(page))

-        page = data.read()
+        try:
+            page = data.read()
+        except Exception, msg:
+            errMsg = "detected invalid data for declared content "
+            errMsg += "encoding '%s' ('%s')" % (contentEncoding, msg)
+            singleTimeLogMessage(errMsg, logging.ERROR)

     if not conf.charset:
         httpCharset, metaCharset = None, None
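The new try/except covers servers that declare a gzip/deflate Content-Encoding but return a body that does not actually decode, which previously raised straight out of decodePage(). A standalone modern-Python sketch of the same defensive pattern, with the logging module standing in for sqlmap's singleTimeLogMessage():

# Defensive decoding: a server may claim "Content-Encoding: gzip" yet send junk.
import gzip
import io
import logging

def decode_body(raw, content_encoding):
    page = raw

    if content_encoding and content_encoding.lower() == "gzip":
        data = gzip.GzipFile(fileobj=io.BytesIO(raw))

        try:
            page = data.read()
        except Exception as ex:
            logging.error("detected invalid data for declared content "
                          "encoding '%s' ('%s')", content_encoding, ex)
            # fall back to the raw body instead of aborting the request handling

    return page

# Valid gzip round-trips, bogus gzip degrades gracefully.
assert decode_body(gzip.compress(b"hello"), "gzip") == b"hello"
assert decode_body(b"not really gzip", "gzip") == b"not really gzip"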
@@ -45,6 +45,7 @@ from lib.core.enums import PAYLOAD
 from lib.core.enums import PLACE
 from lib.core.exception import sqlmapConnectionException
 from lib.core.exception import sqlmapSyntaxException
+from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE
 from lib.core.settings import HTTP_SILENT_TIMEOUT
 from lib.core.settings import META_REFRESH_REGEX
 from lib.core.settings import IS_WIN
@@ -224,6 +225,8 @@ class Connect:
             if kb.proxyAuthHeader:
                 headers[HTTPHEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

+            headers[HTTPHEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
+
             headers[HTTPHEADER.HOST] = urlparse.urlparse(url).netloc

             if any(map(lambda x: headers[HTTPHEADER.HOST].endswith(':%d' % x), [80, 443])):
@@ -498,10 +501,11 @@ class Connect:
         page = None
         pageLength = None
         uri = None
-        raise404 = place != PLACE.URI if raise404 is None else raise404

         if not place:
-            place = kb.injection.place
+            place = kb.injection.place or PLACE.GET
+
+        raise404 = place != PLACE.URI if raise404 is None else raise404

         payload = agent.extractPayload(value)
         threadData = getCurrentThreadData()
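The reordering matters because raise404 is derived from place: with the old order it was computed while place could still be None, before the kb.injection.place (now defaulting to PLACE.GET) fallback ran. A tiny sketch of the corrected derivation, with string constants standing in for sqlmap's PLACE enum:

# Derive raise404 only after `place` has been resolved, mirroring the reordering
# in Connect.queryPage(). The PLACE_* constants are stand-ins for lib.core.enums.PLACE.
PLACE_GET = "GET"
PLACE_URI = "URI"

def resolve_place_and_raise404(place, injection_place, raise404=None):
    if not place:
        place = injection_place or PLACE_GET   # new fallback to GET

    if raise404 is None:
        raise404 = place != PLACE_URI          # computed from the resolved place

    return place, raise404

# With the old ordering, place=None would have yielded raise404=True even when
# the injection point later resolved to URI.
assert resolve_place_and_raise404(None, PLACE_URI) == (PLACE_URI, False)
assert resolve_place_and_raise404(None, None) == (PLACE_GET, True)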
@@ -429,7 +429,7 @@ def dictionaryAttack(attack_dict):

     if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC):
         for suffix in suffix_list:
-            if not attack_info or processException:
+            if len(attack_info) == len(results) or processException:
                 break

             if suffix:
@@ -496,8 +496,9 @@ def dictionaryAttack(attack_dict):

             try:
                 if PYVERSION >= "2.6":
-                    infoMsg = "starting %d hash attack processes " % multiprocessing.cpu_count()
-                    singleTimeLogMessage(infoMsg)
+                    if multiprocessing.cpu_count() > 1:
+                        infoMsg = "starting %d processes " % multiprocessing.cpu_count()
+                        singleTimeLogMessage(infoMsg)

                     processes = []
                     retVal = multiprocessing.Queue()
@@ -523,7 +524,7 @@ def dictionaryAttack(attack_dict):
                 warnMsg = "user aborted during dictionary attack phase"
                 logger.warn(warnMsg)

-            results = [retVal.get() for i in xrange(retVal.qsize())] if retVal else []
+            results.extend([retVal.get() for i in xrange(retVal.qsize())] if retVal else [])

             clearConsoleLine()

@@ -599,8 +600,9 @@ def dictionaryAttack(attack_dict):

             try:
                 if PYVERSION >= "2.6":
-                    infoMsg = "starting %d hash attack processes " % multiprocessing.cpu_count()
-                    singleTimeLogMessage(infoMsg)
+                    if multiprocessing.cpu_count() > 1:
+                        infoMsg = "starting %d processes " % multiprocessing.cpu_count()
+                        singleTimeLogMessage(infoMsg)

                 processes = []
                 retVal = multiprocessing.Queue()
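Two related fixes in dictionaryAttack(): the "starting N processes" banner is only printed when more than one CPU is available, and the per-suffix results are now accumulated with results.extend() instead of being rebound each pass, so hashes cracked under an earlier suffix are kept (which is presumably also why the loop's early exit became len(attack_info) == len(results)). A minimal sketch of the accumulation pattern with multiprocessing:

# Sketch of the accumulation fix: drain the shared queue into `results` with
# extend() (instead of rebinding `results`) so earlier findings survive each pass.
# qsize() mirrors the original code and may not be available on every platform.
import multiprocessing

def crack_chunk(chunk, queue):
    # Placeholder "cracking" work: pretend every even number is a recovered hash.
    for item in chunk:
        if item % 2 == 0:
            queue.put(item)

if __name__ == "__main__":
    results = []

    for chunk in ([1, 2, 3, 4], [5, 6, 7, 8]):        # e.g. one pass per suffix
        queue = multiprocessing.Queue()
        worker = multiprocessing.Process(target=crack_chunk, args=(chunk, queue))
        worker.start()
        worker.join()

        # "results = [...]" would throw away the previous pass; extend() keeps it.
        results.extend(queue.get() for _ in range(queue.qsize()))

    assert sorted(results) == [2, 4, 6, 8]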
sqlmap.conf: 47 changed lines
@@ -513,6 +513,11 @@ batch = False
 # Force character encoding used for data retrieval.
 charset =

+# Crawl the website starting from the target url
+# Valid: integer
+# Default: 0
+crawlDepth = 0
+
 # Retrieve each query output length and calculate the estimated time of
 # arrival in real time.
 # Valid: True or False
@@ -522,10 +527,26 @@ eta = False
 # Valid: True or False
 flushSession = False

+# Parse and test forms on target url
+# Valid: True or False
+forms = False
+
 # Ignores query results stored in session file.
 # Valid: True or False
 freshQueries = False

+# Parse and display DBMS error messages from responses.
+# Valid: True or False
+parseErrors = False
+
+# Replicate dumped data into a sqlite3 database.
+# Valid: True or False
+replicate = False
+
+# Use default Tor (Vidalia/Privoxy/Polipo) proxy address.
+# Valid: True or False
+tor = False
+
 # Update sqlmap.
 # Valid: True or False
 updateAll = False
@@ -536,26 +557,20 @@ updateAll = False
 # Alert with audio beep when sql injection found.
 beep = False

-# IDS detection testing of injection payloads.
+# Offline WAF/IPS/IDS payload detection testing.
 checkPayload = False

+# Check for existence of WAF/IPS/IDS protection.
+checkWaf = False
+
 # Clean up the DBMS by sqlmap specific UDF and tables.
 # Valid: True or False
 cleanup = False

-# Crawl the website starting from the target url
-# Valid: integer
-# Default: 0
-crawlDepth = 0
-
 # Show which sqlmap dependencies are not available.
 # Valid: True or False
 dependencies = False

-# Parse and test forms on target url
-# Valid: True or False
-forms = False
-
 # Use Google dork results from specified page number.
 # Valid: integer
 # Default: 1
@@ -569,18 +584,6 @@ mobile = False
 # Valid: True or False
 pageRank = False

-# Parse and display DBMS error messages from responses.
-# Valid: True or False
-parseErrors = False
-
-# Replicate dumped data into a sqlite3 database.
-# Valid: True or False
-replicate = False
-
-# Use default Tor (Vidalia/Privoxy/Polipo) proxy address.
-# Valid: True or False
-tor = False
-
 # Simple wizard interface for beginner users.
 # Valid: True or False
 wizard = False