From 74477732370e604d30a562d70fa54301cff5c4e4 Mon Sep 17 00:00:00 2001
From: stamparm
Date: Wed, 20 Mar 2013 11:10:24 +0100
Subject: [PATCH] Update for consistency (all other enums are using _ in between words)

---
 lib/controller/checks.py       |  8 ++---
 lib/core/common.py             | 10 +++---
 lib/core/enums.py              |  2 +-
 lib/core/option.py             | 36 +++++++++----------
 lib/core/target.py             |  8 ++---
 lib/request/basic.py           | 16 ++++-----
 lib/request/connect.py         | 64 +++++++++++++++++-----------------
 lib/request/redirecthandler.py | 10 +++---
 waf/airlock.py                 |  4 +--
 waf/barracuda.py               |  4 +--
 waf/bigip.py                   |  6 ++--
 waf/binarysec.py               |  4 +--
 waf/ciscoacexml.py             |  4 +--
 waf/cloudflare.py              |  6 ++--
 waf/denyall.py                 |  4 +--
 waf/fortiweb.py                |  4 +--
 waf/hyperguard.py              |  4 +--
 waf/incapsula.py               |  4 +--
 waf/jiasule.py                 |  4 +--
 waf/modsecurity.py             |  4 +--
 waf/netcontinuum.py            |  4 +--
 waf/netscaler.py               |  6 ++--
 waf/profense.py                |  6 ++--
 waf/secureiis.py               |  4 +--
 waf/teros.py                   |  4 +--
 waf/trafficshield.py           |  6 ++--
 waf/uspses.py                  |  4 +--
 waf/webknight.py               |  4 +--
 28 files changed, 122 insertions(+), 122 deletions(-)

diff --git a/lib/controller/checks.py b/lib/controller/checks.py
index 15e3dc200..ae7de1840 100644
--- a/lib/controller/checks.py
+++ b/lib/controller/checks.py
@@ -48,7 +48,7 @@ from lib.core.decorators import cachedmethod
 from lib.core.dicts import FROM_DUMMY_TABLE
 from lib.core.enums import DBMS
 from lib.core.enums import HEURISTIC_TEST
-from lib.core.enums import HTTPHEADER
+from lib.core.enums import HTTP_HEADER
 from lib.core.enums import HTTPMETHOD
 from lib.core.enums import NULLCONNECTION
 from lib.core.enums import PAYLOAD
@@ -1118,15 +1118,15 @@
     try:
         page, headers, _ = Request.getPage(method=HTTPMETHOD.HEAD)

-        if not page and HTTPHEADER.CONTENT_LENGTH in (headers or {}):
+        if not page and HTTP_HEADER.CONTENT_LENGTH in (headers or {}):
             kb.nullConnection = NULLCONNECTION.HEAD

             infoMsg = "NULL connection is supported with HEAD header"
             logger.info(infoMsg)
         else:
-            page, headers, _ = Request.getPage(auxHeaders={HTTPHEADER.RANGE: "bytes=-1"})
+            page, headers, _ = Request.getPage(auxHeaders={HTTP_HEADER.RANGE: "bytes=-1"})

-            if page and len(page) == 1 and HTTPHEADER.CONTENT_RANGE in (headers or {}):
+            if page and len(page) == 1 and HTTP_HEADER.CONTENT_RANGE in (headers or {}):
                 kb.nullConnection = NULLCONNECTION.RANGE

                 infoMsg = "NULL connection is supported with GET header "
diff --git a/lib/core/common.py b/lib/core/common.py
index 66a3d9b69..0d3f0004f 100644
--- a/lib/core/common.py
+++ b/lib/core/common.py
@@ -64,7 +64,7 @@ from lib.core.enums import CHARSET_TYPE
 from lib.core.enums import DBMS
 from lib.core.enums import EXPECTED
 from lib.core.enums import HEURISTIC_TEST
-from lib.core.enums import HTTPHEADER
+from lib.core.enums import HTTP_HEADER
 from lib.core.enums import HTTPMETHOD
 from lib.core.enums import OS
 from lib.core.enums import PLACE
@@ -1188,14 +1188,14 @@
     if not conf.referer and intersect(REFERER_ALIASES, conf.testParameter, True):
         debugMsg = "setting the HTTP Referer header to the target url"
         logger.debug(debugMsg)
-        conf.httpHeaders = filter(lambda (key, value): key != HTTPHEADER.REFERER, conf.httpHeaders)
-        conf.httpHeaders.append((HTTPHEADER.REFERER, conf.url))
+        conf.httpHeaders = filter(lambda (key, value): key != HTTP_HEADER.REFERER, conf.httpHeaders)
+        conf.httpHeaders.append((HTTP_HEADER.REFERER, conf.url))

     if not conf.host and intersect(HOST_ALIASES, conf.testParameter, True):
         debugMsg = "setting the HTTP Host header to the target url"
         logger.debug(debugMsg)
-        conf.httpHeaders = filter(lambda (key, value): key != HTTPHEADER.HOST, conf.httpHeaders)
-        conf.httpHeaders.append((HTTPHEADER.HOST, getHostHeader(conf.url)))
+        conf.httpHeaders = filter(lambda (key, value): key != HTTP_HEADER.HOST, conf.httpHeaders)
+        conf.httpHeaders.append((HTTP_HEADER.HOST, getHostHeader(conf.url)))

     if conf.url != originalUrl:
         kb.originalUrls[conf.url] = originalUrl
diff --git a/lib/core/enums.py b/lib/core/enums.py
index 686297c7c..7efb9ef6f 100644
--- a/lib/core/enums.py
+++ b/lib/core/enums.py
@@ -133,7 +133,7 @@ class DUMP_FORMAT:
     HTML = "HTML"
     SQLITE = "SQLITE"

-class HTTPHEADER:
+class HTTP_HEADER:
     ACCEPT = "Accept"
     ACCEPT_CHARSET = "Accept-Charset"
     ACCEPT_ENCODING = "Accept-Encoding"
diff --git a/lib/core/option.py b/lib/core/option.py
index 242eab549..f503ede1a 100644
--- a/lib/core/option.py
+++ b/lib/core/option.py
@@ -67,7 +67,7 @@ from lib.core.enums import ADJUST_TIME_DELAY
 from lib.core.enums import AUTH_TYPE
 from lib.core.enums import CUSTOM_LOGGING
 from lib.core.enums import DUMP_FORMAT
-from lib.core.enums import HTTPHEADER
+from lib.core.enums import HTTP_HEADER
 from lib.core.enums import HTTPMETHOD
 from lib.core.enums import MOBILES
 from lib.core.enums import PAYLOAD
@@ -292,9 +292,9 @@ def _feedTargetsDict(reqFile, addedTargetUrls):
             key, value = line.split(": ", 1)

             # Cookie and Host headers
-            if key.upper() == HTTPHEADER.COOKIE.upper():
+            if key.upper() == HTTP_HEADER.COOKIE.upper():
                 cookie = value
-            elif key.upper() == HTTPHEADER.HOST.upper():
+            elif key.upper() == HTTP_HEADER.HOST.upper():
                 if '://' in value:
                     scheme, value = value.split('://')[:2]
                 splitValue = value.split(":")
@@ -306,11 +306,11 @@
             # Avoid to add a static content length header to
             # conf.httpHeaders and consider the following lines as
             # POSTed data
-            if key.upper() == HTTPHEADER.CONTENT_LENGTH.upper():
+            if key.upper() == HTTP_HEADER.CONTENT_LENGTH.upper():
                 params = True

             # Avoid proxy and connection type related headers
-            elif key not in (HTTPHEADER.PROXY_CONNECTION, HTTPHEADER.CONNECTION):
+            elif key not in (HTTP_HEADER.PROXY_CONNECTION, HTTP_HEADER.CONNECTION):
                 conf.httpHeaders.append((getUnicode(key), getUnicode(value)))

             if CUSTOM_INJECTION_MARK_CHAR in re.sub(PROBLEMATIC_CUSTOM_INJECTION_PATTERNS, "", value or ""):
@@ -1190,16 +1190,16 @@ def _setHTTPExtraHeaders():
             raise SqlmapSyntaxException(errMsg)

     elif not conf.httpHeaders or len(conf.httpHeaders) == 1:
-        conf.httpHeaders.append((HTTPHEADER.ACCEPT_LANGUAGE, "en-us,en;q=0.5"))
+        conf.httpHeaders.append((HTTP_HEADER.ACCEPT_LANGUAGE, "en-us,en;q=0.5"))
         if not conf.charset:
-            conf.httpHeaders.append((HTTPHEADER.ACCEPT_CHARSET, "ISO-8859-15,utf-8;q=0.7,*;q=0.7"))
+            conf.httpHeaders.append((HTTP_HEADER.ACCEPT_CHARSET, "ISO-8859-15,utf-8;q=0.7,*;q=0.7"))
         else:
-            conf.httpHeaders.append((HTTPHEADER.ACCEPT_CHARSET, "%s;q=0.7,*;q=0.1" % conf.charset))
+            conf.httpHeaders.append((HTTP_HEADER.ACCEPT_CHARSET, "%s;q=0.7,*;q=0.1" % conf.charset))

         # Invalidating any caching mechanism in between
         # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
-        conf.httpHeaders.append((HTTPHEADER.CACHE_CONTROL, "no-cache,no-store"))
-        conf.httpHeaders.append((HTTPHEADER.PRAGMA, "no-cache"))
+        conf.httpHeaders.append((HTTP_HEADER.CACHE_CONTROL, "no-cache,no-store"))
+        conf.httpHeaders.append((HTTP_HEADER.PRAGMA, "no-cache"))

 def _defaultHTTPUserAgent():
     """
@@ -1243,24 +1243,24 @@ def _setHTTPUserAgent():
         except:
             item = MOBILES.IPHONE

-        conf.httpHeaders.append((HTTPHEADER.USER_AGENT, item[1]))
+ conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, item[1])) elif conf.agent: debugMsg = "setting the HTTP User-Agent header" logger.debug(debugMsg) - conf.httpHeaders.append((HTTPHEADER.USER_AGENT, conf.agent)) + conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, conf.agent)) elif not conf.randomAgent: _ = True for header, _ in conf.httpHeaders: - if header == HTTPHEADER.USER_AGENT: + if header == HTTP_HEADER.USER_AGENT: _ = False break if _: - conf.httpHeaders.append((HTTPHEADER.USER_AGENT, _defaultHTTPUserAgent())) + conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, _defaultHTTPUserAgent())) else: if not kb.userAgents: @@ -1275,7 +1275,7 @@ def _setHTTPUserAgent(): warnMsg += "file '%s'" % paths.USER_AGENTS logger.warn(warnMsg) - conf.httpHeaders.append((HTTPHEADER.USER_AGENT, _defaultHTTPUserAgent())) + conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, _defaultHTTPUserAgent())) return count = len(kb.userAgents) @@ -1286,7 +1286,7 @@ def _setHTTPUserAgent(): userAgent = kb.userAgents[randomRange(stop=count - 1)] userAgent = sanitizeStr(userAgent) - conf.httpHeaders.append((HTTPHEADER.USER_AGENT, userAgent)) + conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, userAgent)) infoMsg = "fetched random HTTP User-Agent header from " infoMsg += "file '%s': %s" % (paths.USER_AGENTS, userAgent) @@ -1301,7 +1301,7 @@ def _setHTTPReferer(): debugMsg = "setting the HTTP Referer header" logger.debug(debugMsg) - conf.httpHeaders.append((HTTPHEADER.REFERER, conf.referer)) + conf.httpHeaders.append((HTTP_HEADER.REFERER, conf.referer)) def _setHTTPCookies(): """ @@ -1312,7 +1312,7 @@ def _setHTTPCookies(): debugMsg = "setting the HTTP Cookie header" logger.debug(debugMsg) - conf.httpHeaders.append((HTTPHEADER.COOKIE, conf.cookie)) + conf.httpHeaders.append((HTTP_HEADER.COOKIE, conf.cookie)) def _setHTTPTimeout(): """ diff --git a/lib/core/target.py b/lib/core/target.py index 8b5c029f4..abb7c3d08 100644 --- a/lib/core/target.py +++ b/lib/core/target.py @@ -28,7 +28,7 @@ from lib.core.data import paths from lib.core.dicts import DBMS_DICT from lib.core.dump import dumper from lib.core.enums import HASHDB_KEYS -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.enums import HTTPMETHOD from lib.core.enums import PLACE from lib.core.enums import POST_HINT @@ -247,7 +247,7 @@ def _setRequestParams(): httpHeader = httpHeader.title() - if httpHeader == HTTPHEADER.USER_AGENT: + if httpHeader == HTTP_HEADER.USER_AGENT: conf.parameters[PLACE.USER_AGENT] = urldecode(headerValue) condition = any((not conf.testParameter, intersect(conf.testParameter, USER_AGENT_ALIASES))) @@ -256,7 +256,7 @@ def _setRequestParams(): conf.paramDict[PLACE.USER_AGENT] = {PLACE.USER_AGENT: headerValue} testableParameters = True - elif httpHeader == HTTPHEADER.REFERER: + elif httpHeader == HTTP_HEADER.REFERER: conf.parameters[PLACE.REFERER] = urldecode(headerValue) condition = any((not conf.testParameter, intersect(conf.testParameter, REFERER_ALIASES))) @@ -265,7 +265,7 @@ def _setRequestParams(): conf.paramDict[PLACE.REFERER] = {PLACE.REFERER: headerValue} testableParameters = True - elif httpHeader == HTTPHEADER.HOST: + elif httpHeader == HTTP_HEADER.HOST: conf.parameters[PLACE.HOST] = urldecode(headerValue) condition = any((not conf.testParameter, intersect(conf.testParameter, HOST_ALIASES))) diff --git a/lib/request/basic.py b/lib/request/basic.py index 692b6ba07..0fbf1418d 100644 --- a/lib/request/basic.py +++ b/lib/request/basic.py @@ -23,7 +23,7 @@ from lib.core.common import singleTimeWarnMessage 
from lib.core.data import conf from lib.core.data import kb from lib.core.data import logger -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.enums import PLACE from lib.core.exception import SqlmapCompressionException from lib.core.htmlentities import htmlEntities @@ -56,28 +56,28 @@ def forgeHeaders(items=None): headers = dict(("-".join(_.capitalize() for _ in key.split('-')), value) for (key, value) in headers.items()) if conf.cj: - if HTTPHEADER.COOKIE in headers: + if HTTP_HEADER.COOKIE in headers: for cookie in conf.cj: - if ("%s=" % cookie.name) in headers[HTTPHEADER.COOKIE]: + if ("%s=" % cookie.name) in headers[HTTP_HEADER.COOKIE]: if kb.mergeCookies is None: - message = "you provided a HTTP %s header value. " % HTTPHEADER.COOKIE + message = "you provided a HTTP %s header value. " % HTTP_HEADER.COOKIE message += "The target url provided its own cookies within " - message += "the HTTP %s header which intersect with yours. " % HTTPHEADER.SET_COOKIE + message += "the HTTP %s header which intersect with yours. " % HTTP_HEADER.SET_COOKIE message += "Do you want to merge them in futher requests? [Y/n] " _ = readInput(message, default="Y") kb.mergeCookies = not _ or _[0] in ("y", "Y") if kb.mergeCookies: _ = lambda x: re.sub("(?i)%s=[^%s]+" % (cookie.name, DEFAULT_COOKIE_DELIMITER), "%s=%s" % (cookie.name, cookie.value), x) - headers[HTTPHEADER.COOKIE] = _(headers[HTTPHEADER.COOKIE]) + headers[HTTP_HEADER.COOKIE] = _(headers[HTTP_HEADER.COOKIE]) if PLACE.COOKIE in conf.parameters: conf.parameters[PLACE.COOKIE] = _(conf.parameters[PLACE.COOKIE]) - conf.httpHeaders = [(item[0], item[1] if item[0] != HTTPHEADER.COOKIE else _(item[1])) for item in conf.httpHeaders] + conf.httpHeaders = [(item[0], item[1] if item[0] != HTTP_HEADER.COOKIE else _(item[1])) for item in conf.httpHeaders] elif not kb.testMode: - headers[HTTPHEADER.COOKIE] += "%s %s=%s" % (DEFAULT_COOKIE_DELIMITER, cookie.name, cookie.value) + headers[HTTP_HEADER.COOKIE] += "%s %s=%s" % (DEFAULT_COOKIE_DELIMITER, cookie.name, cookie.value) if kb.testMode: resetCookieJar(conf.cj) diff --git a/lib/request/connect.py b/lib/request/connect.py index 5ab266838..44d3b2422 100644 --- a/lib/request/connect.py +++ b/lib/request/connect.py @@ -49,7 +49,7 @@ from lib.core.dicts import POST_HINT_CONTENT_TYPES from lib.core.enums import ADJUST_TIME_DELAY from lib.core.enums import AUTH_TYPE from lib.core.enums import CUSTOM_LOGGING -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.enums import HTTPMETHOD from lib.core.enums import NULLCONNECTION from lib.core.enums import PAYLOAD @@ -145,8 +145,8 @@ class Connect(object): if not kb.dnsMode and conn: headers = conn.info() - if headers and (headers.getheader(HTTPHEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate")\ - or "text" not in headers.getheader(HTTPHEADER.CONTENT_TYPE, "").lower()): + if headers and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate")\ + or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()): retVal = conn.read(MAX_CONNECTION_TOTAL_SIZE) if len(retVal) == MAX_CONNECTION_TOTAL_SIZE: warnMsg = "large compressed response detected. 
Disabling compression" @@ -267,7 +267,7 @@ class Connect(object): page = Connect._connReadProxy(conn) responseHeaders = conn.info() responseHeaders[URI_HTTP_HEADER] = conn.geturl() - page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE)) + page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE)) return page @@ -295,29 +295,29 @@ class Connect(object): requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str # Prepare HTTP headers - headers = forgeHeaders({HTTPHEADER.COOKIE: cookie, HTTPHEADER.USER_AGENT: ua, HTTPHEADER.REFERER: referer}) + headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer}) if kb.authHeader: - headers[HTTPHEADER.AUTHORIZATION] = kb.authHeader + headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader if kb.proxyAuthHeader: - headers[HTTPHEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader + headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader - headers[HTTPHEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE - headers[HTTPHEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if method != HTTPMETHOD.HEAD and kb.pageCompress else "identity" - headers[HTTPHEADER.HOST] = host or getHostHeader(url) + headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE + headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if method != HTTPMETHOD.HEAD and kb.pageCompress else "identity" + headers[HTTP_HEADER.HOST] = host or getHostHeader(url) - if post is not None and HTTPHEADER.CONTENT_TYPE not in headers: - headers[HTTPHEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE) + if post is not None and HTTP_HEADER.CONTENT_TYPE not in headers: + headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE) - if headers.get(HTTPHEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]: - warnMsg = "missing 'boundary parameter' in '%s' header. " % HTTPHEADER.CONTENT_TYPE + if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]: + warnMsg = "missing 'boundary parameter' in '%s' header. 
" % HTTP_HEADER.CONTENT_TYPE warnMsg += "Will try to reconstruct" singleTimeWarnMessage(warnMsg) boundary = findMultipartPostBoundary(conf.data) if boundary: - headers[HTTPHEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTPHEADER.CONTENT_TYPE], boundary) + headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary) if auxHeaders: for key, item in auxHeaders.items(): @@ -337,17 +337,17 @@ class Connect(object): requestHeaders += "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in req.header_items()) - if not getRequestHeader(req, HTTPHEADER.COOKIE) and conf.cj: + if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj: conf.cj._policy._now = conf.cj._now = int(time.time()) cookies = conf.cj._cookies_for_request(req) requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies)) if post is not None: - if not getRequestHeader(req, HTTPHEADER.CONTENT_LENGTH): - requestHeaders += "\n%s: %d" % (string.capwords(HTTPHEADER.CONTENT_LENGTH), len(post)) + if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH): + requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post)) - if not getRequestHeader(req, HTTPHEADER.CONNECTION): - requestHeaders += "\n%s: close" % HTTPHEADER.CONNECTION + if not getRequestHeader(req, HTTP_HEADER.CONNECTION): + requestHeaders += "\n%s: close" % HTTP_HEADER.CONNECTION requestMsg += "\n%s" % requestHeaders @@ -362,11 +362,11 @@ class Connect(object): conn = urllib2.urlopen(req) - if not kb.authHeader and getRequestHeader(req, HTTPHEADER.AUTHORIZATION) and conf.aType == AUTH_TYPE.BASIC: - kb.authHeader = getRequestHeader(req, HTTPHEADER.AUTHORIZATION) + if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and conf.aType == AUTH_TYPE.BASIC: + kb.authHeader = getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) - if not kb.proxyAuthHeader and getRequestHeader(req, HTTPHEADER.PROXY_AUTHORIZATION): - kb.proxyAuthHeader = getRequestHeader(req, HTTPHEADER.PROXY_AUTHORIZATION) + if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION): + kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION) # Return response object if response: @@ -384,7 +384,7 @@ class Connect(object): code = code or conn.code responseHeaders = conn.info() responseHeaders[URI_HTTP_HEADER] = conn.geturl() - page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE)) + page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE)) status = getUnicode(conn.msg) if extractRegexResult(META_REFRESH_REGEX, page) and not refreshing: @@ -436,7 +436,7 @@ class Connect(object): page = e.read() responseHeaders = e.info() responseHeaders[URI_HTTP_HEADER] = e.geturl() - page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE)) + page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE)) except socket.timeout: warnMsg = "connection timed out while trying " warnMsg += "to get error page information (%d)" % e.code @@ -599,7 +599,7 @@ class Connect(object): if skipUrlEncode is None and conf.httpHeaders: headers = dict(conf.httpHeaders) - _ = max(headers[_] if _.upper() == HTTPHEADER.CONTENT_TYPE.upper() else 
None for _ in headers.keys()) + _ = max(headers[_] if _.upper() == HTTP_HEADER.CONTENT_TYPE.upper() else None for _ in headers.keys()) if _ and "urlencoded" not in _: skipUrlEncode = True @@ -802,15 +802,15 @@ class Connect(object): if not auxHeaders: auxHeaders = {} - auxHeaders[HTTPHEADER.RANGE] = "bytes=-1" + auxHeaders[HTTP_HEADER.RANGE] = "bytes=-1" _, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404) if headers: - if kb.nullConnection == NULLCONNECTION.HEAD and HTTPHEADER.CONTENT_LENGTH in headers: - pageLength = int(headers[HTTPHEADER.CONTENT_LENGTH]) - elif kb.nullConnection == NULLCONNECTION.RANGE and HTTPHEADER.CONTENT_RANGE in headers: - pageLength = int(headers[HTTPHEADER.CONTENT_RANGE][headers[HTTPHEADER.CONTENT_RANGE].find('/') + 1:]) + if kb.nullConnection == NULLCONNECTION.HEAD and HTTP_HEADER.CONTENT_LENGTH in headers: + pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH]) + elif kb.nullConnection == NULLCONNECTION.RANGE and HTTP_HEADER.CONTENT_RANGE in headers: + pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:]) if not pageLength: try: diff --git a/lib/request/redirecthandler.py b/lib/request/redirecthandler.py index c6a382138..b4c08f8cf 100644 --- a/lib/request/redirecthandler.py +++ b/lib/request/redirecthandler.py @@ -15,7 +15,7 @@ from lib.core.common import getUnicode from lib.core.common import logHTTPTraffic from lib.core.common import readInput from lib.core.enums import CUSTOM_LOGGING -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.enums import HTTPMETHOD from lib.core.enums import REDIRECTION from lib.core.exception import SqlmapConnectionException @@ -82,7 +82,7 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler): except: pass - content = decodePage(content, headers.get(HTTPHEADER.CONTENT_ENCODING), headers.get(HTTPHEADER.CONTENT_TYPE)) + content = decodePage(content, headers.get(HTTP_HEADER.CONTENT_ENCODING), headers.get(HTTP_HEADER.CONTENT_TYPE)) threadData = getCurrentThreadData() threadData.lastRedirectMsg = (threadData.lastRequestUID, content) @@ -110,9 +110,9 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler): self._ask_redirect_choice(code, redurl, req.get_method()) if redurl and kb.redirectChoice == REDIRECTION.YES: - req.headers[HTTPHEADER.HOST] = getHostHeader(redurl) - if headers and HTTPHEADER.SET_COOKIE in headers: - req.headers[HTTPHEADER.COOKIE] = headers[HTTPHEADER.SET_COOKIE].split(DEFAULT_COOKIE_DELIMITER)[0] + req.headers[HTTP_HEADER.HOST] = getHostHeader(redurl) + if headers and HTTP_HEADER.SET_COOKIE in headers: + req.headers[HTTP_HEADER.COOKIE] = headers[HTTP_HEADER.SET_COOKIE].split(DEFAULT_COOKIE_DELIMITER)[0] result = urllib2.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers) else: result = fp diff --git a/waf/airlock.py b/waf/airlock.py index 5dc17c537..4038adbb1 100644 --- a/waf/airlock.py +++ b/waf/airlock.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "Airlock (Phion/Ergon)" @@ -17,7 +17,7 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) - retval = re.search(r"\AAL[_-]?(SESS|LB)=", headers.get(HTTPHEADER.SET_COOKIE, ""), re.I) is not None + retval 
= re.search(r"\AAL[_-]?(SESS|LB)=", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None if retval: break diff --git a/waf/barracuda.py b/waf/barracuda.py index 9f40a5853..740178ff1 100644 --- a/waf/barracuda.py +++ b/waf/barracuda.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "Barracuda Web Application Firewall (Barracuda Networks)" @@ -17,7 +17,7 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) - retval = re.search(r"\Abarra_counter_session=", headers.get(HTTPHEADER.SET_COOKIE, ""), re.I) is not None + retval = re.search(r"\Abarra_counter_session=", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None if retval: break diff --git a/waf/bigip.py b/waf/bigip.py index e5d5d6e0d..75e8c1d01 100644 --- a/waf/bigip.py +++ b/waf/bigip.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "BIG-IP Application Security Manager (F5 Networks)" @@ -18,8 +18,8 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) retval = headers.get("X-Cnection", "").lower() == "close" - retval |= re.search(r"\ATS[a-zA-Z0-9]{3,6}=", headers.get(HTTPHEADER.SET_COOKIE, ""), re.I) is not None - retval |= re.search(r"BigIP|BIGipServer", headers.get(HTTPHEADER.SERVER, ""), re.I) is not None + retval |= re.search(r"\ATS[a-zA-Z0-9]{3,6}=", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None + retval |= re.search(r"BigIP|BIGipServer", headers.get(HTTP_HEADER.SERVER, ""), re.I) is not None if retval: break diff --git a/waf/binarysec.py b/waf/binarysec.py index bdf0bf9ca..1f7e7ec95 100644 --- a/waf/binarysec.py +++ b/waf/binarysec.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "BinarySEC Web Application Firewall (BinarySEC)" @@ -18,7 +18,7 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) retval = any(headers.get(_) for _ in ("x-binarysec-via", "x-binarysec-nocache")) - retval |= re.search(r"BinarySec", headers.get(HTTPHEADER.SERVER, ""), re.I) is not None + retval |= re.search(r"BinarySec", headers.get(HTTP_HEADER.SERVER, ""), re.I) is not None if retval: break diff --git a/waf/ciscoacexml.py b/waf/ciscoacexml.py index 114756cb4..a384ccec0 100644 --- a/waf/ciscoacexml.py +++ b/waf/ciscoacexml.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "Cisco ACE XML Gateway (Cisco Systems)" @@ -17,7 +17,7 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) - retval = re.search(r"ACE XML Gateway", headers.get(HTTPHEADER.SERVER, ""), re.I) is not None + retval = re.search(r"ACE XML Gateway", headers.get(HTTP_HEADER.SERVER, ""), re.I) is not None if retval: break diff --git a/waf/cloudflare.py b/waf/cloudflare.py index 500dc858d..be293b7c6 100644 --- a/waf/cloudflare.py +++ b/waf/cloudflare.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying 
permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "CloudFlare Web Application Firewall (CloudFlare)" @@ -17,8 +17,8 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) - retval = re.search(r"cloudflare-nginx", headers.get(HTTPHEADER.SERVER, ""), re.I) is not None - retval |= re.search(r"\A__cfduid=", headers.get(HTTPHEADER.SET_COOKIE, ""), re.I) is not None + retval = re.search(r"cloudflare-nginx", headers.get(HTTP_HEADER.SERVER, ""), re.I) is not None + retval |= re.search(r"\A__cfduid=", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None if retval: break diff --git a/waf/denyall.py b/waf/denyall.py index 17a6ff4e9..8819206b5 100644 --- a/waf/denyall.py +++ b/waf/denyall.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "Deny All Web Application Firewall (DenyAll)" @@ -17,7 +17,7 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) - retval = re.search(r"\Asessioncookie=", headers.get(HTTPHEADER.SET_COOKIE, ""), re.I) is not None + retval = re.search(r"\Asessioncookie=", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None retval |= code == 200 and re.search(r"\ACondition Intercepted", page, re.I) is not None if retval: break diff --git a/waf/fortiweb.py b/waf/fortiweb.py index 360b2486f..2ad3d26fc 100644 --- a/waf/fortiweb.py +++ b/waf/fortiweb.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "FortiWeb Web Application Firewall (Fortinet Inc.)" @@ -17,7 +17,7 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) - retval = re.search(r"\AFORTIWAFSID=", headers.get(HTTPHEADER.SET_COOKIE, ""), re.I) is not None + retval = re.search(r"\AFORTIWAFSID=", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None if retval: break diff --git a/waf/hyperguard.py b/waf/hyperguard.py index e260c2df6..8061c2ef4 100644 --- a/waf/hyperguard.py +++ b/waf/hyperguard.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "Hyperguard Web Application Firewall (art of defence Inc.)" @@ -17,7 +17,7 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) - retval = re.search(r"\AODSESSION=", headers.get(HTTPHEADER.SET_COOKIE, ""), re.I) is not None + retval = re.search(r"\AODSESSION=", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None if retval: break diff --git a/waf/incapsula.py b/waf/incapsula.py index 2a42d6acd..0ba8138a4 100644 --- a/waf/incapsula.py +++ b/waf/incapsula.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "Incapsula Web Application Firewall (Incapsula/Imperva)" @@ -17,7 +17,7 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) - retval = 
re.search(r"incap_ses|visid_incap", headers.get(HTTPHEADER.SET_COOKIE, ""), re.I) is not None + retval = re.search(r"incap_ses|visid_incap", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None retval |= re.search(r"Incapsula", headers.get("X-CDN", ""), re.I) is not None if retval: break diff --git a/waf/jiasule.py b/waf/jiasule.py index c8976db26..fa180e32d 100644 --- a/waf/jiasule.py +++ b/waf/jiasule.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "Jiasule Web Application Firewall (Jiasule)" @@ -17,7 +17,7 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) - retval = re.search(r"jiasule-WAF", headers.get(HTTPHEADER.SERVER, ""), re.I) is not None + retval = re.search(r"jiasule-WAF", headers.get(HTTP_HEADER.SERVER, ""), re.I) is not None retval |= re.search(r"static\.jiasule\.com/static/js/http_error\.js", page, re.I) is not None if retval: break diff --git a/waf/modsecurity.py b/waf/modsecurity.py index dfecb1ff0..be056cc45 100644 --- a/waf/modsecurity.py +++ b/waf/modsecurity.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "ModSecurity: Open Source Web Application Firewall (Trustwave)" @@ -18,7 +18,7 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) retval = code == 501 and re.search(r"Reference #[0-9A-Fa-f.]+", page, re.I) is None - retval |= re.search(r"Mod_Security|NOYB", headers.get(HTTPHEADER.SERVER, ""), re.I) is not None + retval |= re.search(r"Mod_Security|NOYB", headers.get(HTTP_HEADER.SERVER, ""), re.I) is not None if retval: break diff --git a/waf/netcontinuum.py b/waf/netcontinuum.py index 7e83e2b6b..8ee0fc8c6 100644 --- a/waf/netcontinuum.py +++ b/waf/netcontinuum.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "NetContinuum Web Application Firewall (NetContinuum/Barracuda Networks)" @@ -17,7 +17,7 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) - retval = re.search(r"\ANCI__SessionId=", headers.get(HTTPHEADER.SET_COOKIE, ""), re.I) is not None + retval = re.search(r"\ANCI__SessionId=", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None if retval: break diff --git a/waf/netscaler.py b/waf/netscaler.py index 7301376b5..b62edfdc0 100644 --- a/waf/netscaler.py +++ b/waf/netscaler.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "NetScaler (Citrix Systems)" @@ -18,8 +18,8 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) retval = re.search(r"\Aclose", headers.get("Cneonction", "") or headers.get("nnCoection", ""), re.I) is not None - retval = re.search(r"\A(ns_af=|citrix_ns_id|NSC_)", headers.get(HTTPHEADER.SET_COOKIE, ""), re.I) is not None - retval |= re.search(r"\ANS-CACHE", headers.get(HTTPHEADER.VIA, ""), re.I) is not None + retval = re.search(r"\A(ns_af=|citrix_ns_id|NSC_)", 
headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None + retval |= re.search(r"\ANS-CACHE", headers.get(HTTP_HEADER.VIA, ""), re.I) is not None if retval: break diff --git a/waf/profense.py b/waf/profense.py index ff94fe787..d5346cac1 100644 --- a/waf/profense.py +++ b/waf/profense.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "Profense Web Application Firewall (Armorlogic)" @@ -17,8 +17,8 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) - retval = re.search(r"\APLBSID=", headers.get(HTTPHEADER.SET_COOKIE, ""), re.I) is not None - retval |= re.search(r"Profense", headers.get(HTTPHEADER.SERVER, ""), re.I) is not None + retval = re.search(r"\APLBSID=", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None + retval |= re.search(r"Profense", headers.get(HTTP_HEADER.SERVER, ""), re.I) is not None if retval: break diff --git a/waf/secureiis.py b/waf/secureiis.py index 541f95668..0082a8b62 100644 --- a/waf/secureiis.py +++ b/waf/secureiis.py @@ -5,13 +5,13 @@ Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER __product__ = "SecureIIS Web Server Security (BeyondTrust)" def detect(get_page): page, headers, code = get_page() retval = code != 404 - page, headers, code = get_page(auxHeaders={HTTPHEADER.TRANSFER_ENCODING: 'a' * 1025, HTTPHEADER.ACCEPT_ENCODING: "identity"}) + page, headers, code = get_page(auxHeaders={HTTP_HEADER.TRANSFER_ENCODING: 'a' * 1025, HTTP_HEADER.ACCEPT_ENCODING: "identity"}) retval = retval and code == 404 return retval diff --git a/waf/teros.py b/waf/teros.py index d2d571baa..c2e693d81 100644 --- a/waf/teros.py +++ b/waf/teros.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "Teros/Citrix Application Firewall Enterprise (Teros/Citrix Systems)" @@ -17,7 +17,7 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) - retval = re.search(r"\Ast8(id|_wat|_wlf)", headers.get(HTTPHEADER.SET_COOKIE, ""), re.I) is not None + retval = re.search(r"\Ast8(id|_wat|_wlf)", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None if retval: break diff --git a/waf/trafficshield.py b/waf/trafficshield.py index 5909f709d..bcc5b59a5 100644 --- a/waf/trafficshield.py +++ b/waf/trafficshield.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "TrafficShield (F5 Networks)" @@ -17,8 +17,8 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) - retval = re.search(r"F5-TrafficShield", headers.get(HTTPHEADER.SERVER, ""), re.I) is not None - retval |= re.search(r"\AASINFO=", headers.get(HTTPHEADER.SET_COOKIE, ""), re.I) is not None + retval = re.search(r"F5-TrafficShield", headers.get(HTTP_HEADER.SERVER, ""), re.I) is not None + retval |= re.search(r"\AASINFO=", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None if retval: break diff --git a/waf/uspses.py b/waf/uspses.py index 
f972a32b2..b6362242e 100644 --- a/waf/uspses.py +++ b/waf/uspses.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "USP Secure Entry Server (United Security Providers)" @@ -17,7 +17,7 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) - retval = re.search(r"Secure Entry Server", headers.get(HTTPHEADER.SERVER, ""), re.I) is not None + retval = re.search(r"Secure Entry Server", headers.get(HTTP_HEADER.SERVER, ""), re.I) is not None if retval: break diff --git a/waf/webknight.py b/waf/webknight.py index 051a64c6e..e17b18a6b 100644 --- a/waf/webknight.py +++ b/waf/webknight.py @@ -7,7 +7,7 @@ See the file 'doc/COPYING' for copying permission import re -from lib.core.enums import HTTPHEADER +from lib.core.enums import HTTP_HEADER from lib.core.settings import WAF_ATTACK_VECTORS __product__ = "WebKnight Application Firewall (AQTRONIX)" @@ -18,7 +18,7 @@ def detect(get_page): for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) retVal = code == 999 - retval |= re.search(r"WebKnight", headers.get(HTTPHEADER.SERVER, ""), re.I) is not None + retval |= re.search(r"WebKnight", headers.get(HTTP_HEADER.SERVER, ""), re.I) is not None if retVal: break
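
As an illustration of the convention this patch enforces (underscore-separated names for the multi-word constants classes in lib/core/enums.py, so HTTPHEADER becomes HTTP_HEADER in line with DUMP_FORMAT, POST_HINT and the rest) and of how call sites read after the rename, here is a minimal, self-contained Python sketch. It is not code from the sqlmap tree: the stubbed get_page() helper and the sample header value are assumptions made only so the example runs on its own.

```python
import re

class HTTP_HEADER:
    # Constants-class "enum" in the style of lib/core/enums.py; the class
    # name now uses '_' between words, matching the other enum classes.
    SERVER = "Server"
    SET_COOKIE = "Set-Cookie"
    USER_AGENT = "User-Agent"

def get_page(get=None):
    # Hypothetical stand-in for the real request helper: it returns the
    # (page, headers, code) triple that the waf/*.py scripts unpack.
    return "", {HTTP_HEADER.SERVER: "cloudflare-nginx"}, 200

def detect(get_page):
    # Same shape as the waf/*.py detect() functions touched by this patch:
    # response headers are looked up through the renamed constant rather
    # than a bare string literal.
    page, headers, code = get_page(get="?foo=1")
    return re.search(r"cloudflare", headers.get(HTTP_HEADER.SERVER, ""), re.I) is not None

if __name__ == "__main__":
    print(detect(get_page))  # True with the stubbed headers above
```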