Mirror of https://github.com/sqlmapproject/sqlmap.git (synced 2024-11-22 17:46:37 +03:00)
More replacements for refactoring.
Minor layout adjustments. Alignment of conffile/optiondict/cmdline parameters.
This commit is contained in:
parent eb999de0f1
commit 78d7b17483
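This commit continues replacing hard-coded header names ("Cookie", "User-Agent", "Range") with constants such as PLACE.COOKIE, PLACE.UA and NULLCONNECTION.RANGE imported from lib.core.enums. The enums module itself is not part of this diff; as orientation only, here is a hedged sketch of what such constant containers could look like (the actual definitions in lib/core/enums.py may differ):

# Hedged sketch, not the real lib/core/enums.py: plain classes acting as
# namespaces for the constants referenced throughout the hunks below.
class PLACE:
    GET = "GET"
    POST = "POST"
    URI = "URI"
    COOKIE = "Cookie"
    UA = "User-Agent"

class NULLCONNECTION:
    HEAD = "HEAD"
    RANGE = "Range"

For the refactoring to be behavior-preserving, PLACE.COOKIE, PLACE.UA and NULLCONNECTION.RANGE must still evaluate to the literal header names they replace, since they end up in real HTTP headers.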
@@ -377,7 +377,7 @@ def checkNullConnection():
             infoMsg = "NULL connection is supported with HEAD header"
             logger.info(infoMsg)
         else:
-            page, headers = Request.getPage(auxHeaders={"Range":"bytes=-1"})
+            page, headers = Request.getPage(auxHeaders={NULLCONNECTION.RANGE: "bytes=-1"})
             if page and len(page) == 1 and 'Content-Range' in headers:
                 kb.nullConnection = NULLCONNECTION.RANGE
 
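The checkNullConnection() change above keeps the same detection logic: request only the last byte of the page ("Range: bytes=-1") and treat a one-byte body accompanied by a Content-Range header as proof that page sizes can later be obtained without full downloads. A standalone sketch of that probe, outside sqlmap's Request wrapper (URL handling and imports are illustrative, Python 3 standard library only):

# Standalone illustration of the Range-based NULL connection probe.
import urllib.request

def supports_range_null_connection(url):
    req = urllib.request.Request(url, headers={"Range": "bytes=-1"})
    with urllib.request.urlopen(req) as resp:
        body = resp.read()
        # A cooperating server answers with exactly one byte and reports the
        # full size in Content-Range, e.g. "bytes 1233-1233/1234".
        return len(body) == 1 and resp.headers.get("Content-Range") is not None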
@@ -21,8 +21,6 @@ from lib.controller.checks import checkNullConnection
 from lib.core.common import getUnicode
 from lib.core.common import paramToDict
 from lib.core.common import parseTargetUrl
-from lib.core.common import popValue
-from lib.core.common import pushValue
 from lib.core.common import readInput
 from lib.core.data import conf
 from lib.core.data import kb

@@ -197,7 +195,7 @@ def start():
                 setCookieAsInjectable = False
 
             if setCookieAsInjectable:
-                conf.httpHeaders.append(("Cookie", cookieStr))
+                conf.httpHeaders.append((PLACE.COOKIE, cookieStr))
                 conf.parameters[PLACE.COOKIE] = cookieStr
                 __paramDict = paramToDict(PLACE.COOKIE, cookieStr)
 

@@ -214,6 +212,7 @@ def start():
 
             # Do a little prioritization reorder of a testable parameter list
             parameters = conf.parameters.keys()
 
             for place in (PLACE.URI, PLACE.POST, PLACE.GET):
                 if place in parameters:
                     parameters.remove(place)
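In the start() hunks the cookie string is registered both as a raw header (now appended under PLACE.COOKIE) and, via paramToDict(PLACE.COOKIE, cookieStr), as a dictionary of individually testable parameters. paramToDict() itself is not shown in this diff; a simplified stand-in conveying the idea (the real helper in lib/core/common.py handles more cases):

# Simplified stand-in for paramToDict(); illustration only.
def param_to_dict(place, parameters):
    # Cookie pairs are separated by ';', query/body parameters by '&'.
    separator = ";" if place == "Cookie" else "&"
    result = {}

    for element in parameters.split(separator):
        element = element.strip()
        if "=" in element:
            name, value = element.split("=", 1)
            result[name.strip()] = value

    return result

# param_to_dict("Cookie", "PHPSESSID=abc123; security=low")
# -> {'PHPSESSID': 'abc123', 'security': 'low'}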
@@ -1570,7 +1570,9 @@ def runningAsAdmin():
 
 def logHTTPTraffic(requestLogMsg, responseLogMsg):
     kb.locks.reqLock.acquire()
 
     dataToTrafficFile("%s\n" % requestLogMsg)
     dataToTrafficFile("%s\n" % responseLogMsg)
-    dataToTrafficFile("%s\n" % (160*'#'))
+    dataToTrafficFile("\n%s\n\n" % (76 * '#'))
 
     kb.locks.reqLock.release()
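logHTTPTraffic() now pads the request/response separator with blank lines and shrinks it from 160 to 76 '#' characters, presumably to keep the rule within a typical 80-column width. dataToTrafficFile() and kb.locks are defined elsewhere in sqlmap; a minimal self-contained sketch of the same pattern (file path and lock object are assumptions):

# Minimal sketch of the traffic-logging pattern; not sqlmap's actual
# dataToTrafficFile()/kb.locks plumbing.
import threading

_traffic_lock = threading.Lock()
_traffic_path = "traffic.txt"     # sqlmap takes this from conf.trafficFile (-t)

def log_http_traffic(request_msg, response_msg):
    with _traffic_lock:                          # serialize concurrent writers
        with open(_traffic_path, "a") as fp:
            fp.write("%s\n" % request_msg)
            fp.write("%s\n" % response_msg)
            fp.write("\n%s\n\n" % (76 * '#'))    # the new, narrower separator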
@@ -43,6 +43,7 @@ from lib.core.data import paths
 from lib.core.data import queries
 from lib.core.datatype import advancedDict
 from lib.core.enums import HTTPMETHOD
+from lib.core.enums import PLACE
 from lib.core.enums import PRIORITY
 from lib.core.exception import sqlmapFilePathException
 from lib.core.exception import sqlmapGenericException

@@ -847,19 +848,19 @@ def __setHTTPUserAgent():
         debugMsg = "setting the HTTP User-Agent header"
         logger.debug(debugMsg)
 
-        conf.httpHeaders.append(("User-Agent", conf.agent))
+        conf.httpHeaders.append((PLACE.UA, conf.agent))
         return
 
     if not conf.userAgentsFile:
         addDefaultUserAgent = True
 
         for header, _ in conf.httpHeaders:
-            if header == "User-Agent":
+            if header == PLACE.UA:
                 addDefaultUserAgent = False
                 break
 
         if addDefaultUserAgent:
-            conf.httpHeaders.append(("User-Agent", __defaultHTTPUserAgent()))
+            conf.httpHeaders.append((PLACE.UA, __defaultHTTPUserAgent()))
 
         return
 

@@ -875,7 +876,7 @@ def __setHTTPUserAgent():
         warnMsg += "file '%s'" % conf.userAgentsFile
         logger.warn(warnMsg)
 
-        conf.httpHeaders.append(("User-Agent", __defaultHTTPUserAgent()))
+        conf.httpHeaders.append((PLACE.UA, __defaultHTTPUserAgent()))
 
         return
 

@@ -887,7 +888,7 @@ def __setHTTPUserAgent():
     __userAgent = kb.userAgents[randomRange(stop=__count)]
 
     __userAgent = sanitizeStr(__userAgent)
-    conf.httpHeaders.append(("User-Agent", __userAgent))
+    conf.httpHeaders.append((PLACE.UA, __userAgent))
 
     logMsg = "fetched random HTTP User-Agent header from "
     logMsg += "file '%s': %s" % (conf.userAgentsFile, __userAgent)

@@ -914,7 +915,7 @@ def __setHTTPCookies():
         logger.debug(debugMsg)
 
         conf.httpHeaders.append(("Connection", "Keep-Alive"))
-        conf.httpHeaders.append(("Cookie", conf.cookie))
+        conf.httpHeaders.append((PLACE.COOKIE, conf.cookie))
 
 def __setHTTPTimeout():
     """
@@ -59,14 +59,17 @@ optDict = {
                      "os": "string",
                      "prefix": "string",
                      "postfix": "string",
+                     "tamper": "string"
+                   },
+
+    "Detection": {
                      "string": "string",
                      "regexp": "string",
                      "eString": "string",
                      "eRegexp": "string",
                      "thold": "float",
                      "textOnly": "boolean",
-                     "longestCommon": "boolean",
-                     "tamper": "string"
+                     "longestCommon": "boolean"
                    },
 
    "Techniques": {

@@ -147,6 +150,7 @@ optDict = {
    "Miscellaneous": {
                      "xmlFile": "string",
                      "sessionFile": "string",
+                     "trafficFile": "string",
                      "flushSession": "boolean",
                      "forms": "boolean",
                      "eta": "boolean",

@@ -155,6 +159,8 @@ optDict = {
                      "batch": "boolean",
                      "cleanup": "boolean",
                      "replicate": "boolean",
+                     "checkPayload": "boolean",
+                     "beep": "boolean",
                      "verbose": "integer"
                    },
 }
 
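Each entry in optDict ties an option name to the primitive type it should carry ("string", "boolean", "integer", "float"); the "trafficFile", "checkPayload" and "beep" entries line up with the cmdline.py and sqlmap.conf changes elsewhere in this commit, while "tamper" stays with the injection options as the detection-related keys are split out into their own "Detection" group. As a hedged sketch of how such a type table can drive coercion of raw text values (sqlmap's own option handling may differ):

# Illustrative use of an optDict-style type table; not sqlmap's loader.
def coerce_option(name, raw_value, opt_types):
    kind = opt_types.get(name, "string")

    if kind == "boolean":
        return raw_value.strip().lower() in ("1", "true", "yes", "on")
    if kind == "integer":
        return int(raw_value)
    if kind == "float":
        return float(raw_value)

    return raw_value    # "string" and anything unknown stay as text

opt_types = {"trafficFile": "string", "checkPayload": "boolean",
             "beep": "boolean", "verbose": "integer"}

assert coerce_option("checkPayload", "False", opt_types) is False
assert coerce_option("verbose", "2", opt_types) == 2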
@@ -99,12 +99,12 @@ def __setRequestParams():
     # Perform checks on User-Agent header value
     if conf.httpHeaders:
         for httpHeader, headerValue in conf.httpHeaders:
-            if httpHeader == "User-Agent":
+            if httpHeader == PLACE.UA:
                 # No need for url encoding/decoding the user agent
                 conf.parameters[PLACE.UA] = headerValue
 
                 condition = not conf.testParameter
-                condition |= "User-Agent" in conf.testParameter
+                condition |= PLACE.UA in conf.testParameter
                 condition |= "user-agent" in conf.testParameter
                 condition |= "useragent" in conf.testParameter
                 condition |= "ua" in conf.testParameter
 

@@ -457,8 +457,8 @@ def cmdLineParser():
                                        "on a session file")
 
         miscellaneous.add_option("-t", dest="trafficFile",
-                                 help="Save all HTTP traffic data "
-                                      "into a textual file")
+                                 help="Log all HTTP traffic into a "
+                                      "textual file")
 
         miscellaneous.add_option("--flush-session", dest="flushSession",
                                  action="store_true", default=False,
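The -t option above is registered with optparse, the command line parser sqlmap used at the time; dest="trafficFile" presumably feeds conf.trafficFile and matches the optDict and sqlmap.conf entries of the same name. A self-contained sketch of that registration pattern (parser setup, help text for --flush-session and the sample arguments are illustrative):

# Standalone optparse sketch mirroring the registration pattern above.
from optparse import OptionParser, OptionGroup

parser = OptionParser(usage="usage: %prog [options]")
miscellaneous = OptionGroup(parser, "Miscellaneous")

miscellaneous.add_option("-t", dest="trafficFile",
                         help="Log all HTTP traffic into a "
                              "textual file")

miscellaneous.add_option("--flush-session", dest="flushSession",
                         action="store_true", default=False,
                         help="Flush session file for current target")

parser.add_option_group(miscellaneous)

(opts, _) = parser.parse_args(["-t", "/tmp/traffic.txt", "--flush-session"])
# opts.trafficFile == "/tmp/traffic.txt", opts.flushSession is True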
@@ -21,6 +21,7 @@ from lib.core.common import posixToNtSlashes
 from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
+from lib.core.enums import PLACE
 from lib.parse.headers import headersParser
 from lib.parse.html import htmlParser
 

@@ -33,9 +34,9 @@ def forgeHeaders(cookie, ua):
     headers = {}
 
     for header, value in conf.httpHeaders:
-        if cookie and header == "Cookie":
+        if cookie and header == PLACE.COOKIE:
             headers[header] = cookie
-        elif ua and header == "User-Agent":
+        elif ua and header == PLACE.UA:
             headers[header] = ua
         else:
             headers[header] = value
 
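Because __setHTTPUserAgent() and __setHTTPCookies() now append headers under PLACE.UA and PLACE.COOKIE, forgeHeaders() can compare against those same constants when deciding whether a per-request cookie or user agent overrides the configured one. A tiny standalone mimic of that precedence (header names and values are made up, and plain strings stand in for the PLACE constants):

# Standalone mimic of the forgeHeaders() precedence shown above.
def forge_headers(configured, cookie=None, ua=None):
    headers = {}

    for header, value in configured:
        if cookie and header == "Cookie":          # PLACE.COOKIE in sqlmap
            headers[header] = cookie
        elif ua and header == "User-Agent":        # PLACE.UA in sqlmap
            headers[header] = ua
        else:
            headers[header] = value

    return headers

configured = [("Connection", "Keep-Alive"),
              ("Cookie", "PHPSESSID=abc123"),
              ("User-Agent", "sqlmap/0.9-dev")]

print(forge_headers(configured, cookie="PHPSESSID=xyz789"))
# {'Connection': 'Keep-Alive', 'Cookie': 'PHPSESSID=xyz789', 'User-Agent': 'sqlmap/0.9-dev'}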
@@ -166,7 +166,7 @@ class Connect:
 
             requestHeaders += "\n".join(["%s: %s" % (header, value) for header, value in req.header_items()])
 
-            if not req.has_header("Cookie") and cookieStr:
+            if not req.has_header(PLACE.COOKIE) and cookieStr:
                 requestHeaders += "\n%s" % cookieStr[:-2]
 
             if not req.has_header("Connection"):

@@ -283,7 +283,7 @@ class Connect:
             page = sanitizeAsciiString(page)
             parseResponse(page, responseHeaders)
 
-            responseMsg += "[#%d] (%s - %d):\n" % (requestID, status, code)
+            responseMsg += "[#%d] (%d %s):\n" % (requestID, code, status)
             logHeaders = "\n".join(["%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, value) for (key, value) in responseHeaders.items()])
 
             logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, page))

@@ -370,7 +370,7 @@ class Connect:
         if not auxHeaders:
             auxHeaders = {}
 
-        auxHeaders["Range"] = "bytes=-1"
+        auxHeaders[NULLCONNECTION.RANGE] = "bytes=-1"
 
         _, headers = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404)
 

@@ -499,6 +499,9 @@ xmlFile =
 # Save and resume all data retrieved on a session file.
 sessionFile =
 
+# Log all HTTP traffic into a textual file.
+trafficFile =
+
 # Flush session file for current target.
 # Valid: True or False
 flushSession = False

@@ -533,6 +536,12 @@ cleanup = False
 # Valid: True or False
 replicate = False
 
+# IDS detection testing of injection payload.
+checkPayload = False
+
+# Alert with audio beep when sql injection found.
+beep = False
+
 # Verbosity level.
 # Valid: integer between 0 and 6
 # 0: Show only critical messages
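The new sqlmap.conf entries (trafficFile, checkPayload, beep) have matching keys in optDict above, which is the conffile/optiondict/cmdline alignment the commit message refers to. For orientation, a hedged sketch of reading such INI-style entries with the standard library (section name and sample values are assumptions; sqlmap's own configuration loader may behave differently):

# Hedged sketch: parsing INI-style options like the ones added above.
from configparser import ConfigParser

parser = ConfigParser(allow_no_value=True)
parser.read_string("""
[Miscellaneous]
trafficFile = /tmp/traffic.txt
checkPayload = False
beep = False
""")

traffic_file = parser.get("Miscellaneous", "trafficFile")            # "/tmp/traffic.txt"
check_payload = parser.getboolean("Miscellaneous", "checkPayload")   # False
beep = parser.getboolean("Miscellaneous", "beep")                    # False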