mirror of https://github.com/sqlmapproject/sqlmap.git (synced 2025-05-21 13:26:07 +03:00)

commit 2f53014685 (parent 2dbd0267a1)

    God help us all with this Python3 non-sense

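Note: one pattern repeats through the whole commit. Python 2-only module names (httplib, urllib2, urlparse, cookielib) give way to the version-neutral aliases from the vendored six module. A minimal sketch of the mapping, assuming the bundled copy under thirdparty/ behaves like PyPI six:

    from six.moves import http_client as _http_client        # Py2: httplib,   Py3: http.client
    from six.moves import http_cookiejar as _http_cookiejar  # Py2: cookielib, Py3: http.cookiejar
    from six.moves import urllib as _urllib                  # Py2: urllib/urllib2/urlparse, Py3: urllib.*

    assert hasattr(_urllib, "request") and hasattr(_urllib, "parse") and hasattr(_urllib, "error")
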
@@ -6,7 +6,6 @@ See the file 'LICENSE' for copying permission
 """
 
 import copy
-import httplib
 import logging
 import os
 import random
@@ -106,6 +105,7 @@ from lib.request.inject import checkBooleanExpression
 from lib.request.templates import getPageTemplate
 from lib.techniques.union.test import unionTest
 from lib.techniques.union.use import configUnion
+from thirdparty.six.moves import http_client as _http_client
 
 def checkSqlInjection(place, parameter, value):
     # Store here the details about boundaries and payload used to
@@ -1337,7 +1337,7 @@ def checkWaf():
     if any((conf.string, conf.notString, conf.regexp, conf.dummy, conf.offline, conf.skipWaf)):
         return None
 
-    if kb.originalCode == httplib.NOT_FOUND:
+    if kb.originalCode == _http_client.NOT_FOUND:
         return None
 
     _ = hashDBRetrieve(HASHDB_KEYS.CHECK_WAF_RESULT, True)
@@ -1623,7 +1623,7 @@ def checkConnection(suppressOutput=False):
         warnMsg += "any addressing issues"
         singleTimeWarnMessage(warnMsg)
 
-        if any(code in kb.httpErrorCodes for code in (httplib.NOT_FOUND, )):
+        if any(code in kb.httpErrorCodes for code in (_http_client.NOT_FOUND, )):
            errMsg = getSafeExString(ex)
            logger.critical(errMsg)

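Note: the NOT_FOUND swap is behavior-preserving because the numeric status constants are identical in httplib and http.client; a quick sketch:

    from six.moves import http_client as _http_client

    assert _http_client.NOT_FOUND == 404
    assert _http_client.INTERNAL_SERVER_ERROR == 500
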
@@ -61,7 +61,6 @@ from lib.core.defaults import defaults
 from lib.core.dicts import DBMS_DICT
 from lib.core.dicts import DEFAULT_DOC_ROOTS
 from lib.core.dicts import DEPRECATED_OPTIONS
-from lib.core.dicts import HTTP_RESPONSES
 from lib.core.dicts import SQL_STATEMENTS
 from lib.core.enums import ADJUST_TIME_DELAY
 from lib.core.enums import CONTENT_STATUS
@@ -174,6 +173,7 @@ from thirdparty.colorama.initialise import init as coloramainit
 from thirdparty.magic import magic
 from thirdparty.odict import OrderedDict
 from thirdparty.six.moves import configparser as _configparser
+from thirdparty.six.moves import http_client as _http_client
 from thirdparty.six.moves import urllib as _urllib
 from thirdparty.termcolor.termcolor import colored
 
@@ -3301,9 +3301,9 @@ def showHttpErrorCodes():
 
     if kb.httpErrorCodes:
         warnMsg = "HTTP error codes detected during run:\n"
-        warnMsg += ", ".join("%d (%s) - %d times" % (code, HTTP_RESPONSES[code] if code in HTTP_RESPONSES else '?', count) for code, count in kb.httpErrorCodes.items())
+        warnMsg += ", ".join("%d (%s) - %d times" % (code, _http_client.responses[code] if code in _http_client.responses else '?', count) for code, count in kb.httpErrorCodes.items())
         logger.warn(warnMsg)
-        if any((str(_).startswith('4') or str(_).startswith('5')) and _ != 500 and _ != kb.originalCode for _ in kb.httpErrorCodes.keys()):
+        if any((str(_).startswith('4') or str(_).startswith('5')) and _ != _http_client.INTERNAL_SERVER_ERROR and _ != kb.originalCode for _ in kb.httpErrorCodes.keys()):
            msg = "too many 4xx and/or 5xx HTTP error codes "
            msg += "could mean that some kind of protection is involved (e.g. WAF)"
            logger.debug(msg)

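Note: _http_client.responses is the stdlib's own code-to-reason mapping, which is what lets the hand-rolled HTTP_RESPONSES dict be deleted below. Sketch:

    from six.moves import http_client as _http_client

    print(_http_client.responses[404])           # 'Not Found'
    print(_http_client.responses.get(599, '?'))  # unknown codes still need a fallback
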
@@ -330,47 +330,3 @@ PART_RUN_CONTENT_TYPES = {
     "osCmd": CONTENT_TYPE.OS_CMD,
     "regRead": CONTENT_TYPE.REG_READ
 }
-
-HTTP_RESPONSES = {
-    200: "OK",
-    201: "Created",
-    202: "Accepted",
-    203: "Non-Authoritative Information",
-    204: "No Content",
-    205: "Reset Content",
-    206: "Partial Content",
-    100: "Continue",
-    101: "Switching Protocols",
-    300: "Multiple Choices",
-    301: "Moved Permanently",
-    302: "Found",
-    303: "See Other",
-    304: "Not Modified",
-    305: "Use Proxy",
-    306: "(Unused)",
-    307: "Temporary Redirect",
-    400: "Bad Request",
-    401: "Unauthorized",
-    402: "Payment Required",
-    403: "Forbidden",
-    404: "Not Found",
-    405: "Method Not Allowed",
-    406: "Not Acceptable",
-    407: "Proxy Authentication Required",
-    408: "Request Timeout",
-    409: "Conflict",
-    410: "Gone",
-    411: "Length Required",
-    412: "Precondition Failed",
-    413: "Request Entity Too Large",
-    414: "Request-URI Too Long",
-    415: "Unsupported Media Type",
-    416: "Requested Range Not Satisfiable",
-    417: "Expectation Failed",
-    500: "Internal Server Error",
-    501: "Not Implemented",
-    502: "Bad Gateway",
-    503: "Service Unavailable",
-    504: "Gateway Timeout",
-    505: "HTTP Version Not Supported"
-}

@@ -5,9 +5,7 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
-import cookielib
 import glob
-import httplib
 import inspect
 import logging
 import os
@@ -19,7 +17,6 @@ import tempfile
 import threading
 import time
 import urllib2
-import urlparse
 
 import lib.controller.checks
 import lib.core.common
@@ -153,14 +150,17 @@ from lib.utils.purge import purge
 from thirdparty.keepalive import keepalive
 from thirdparty.multipart import multipartpost
 from thirdparty.oset.pyoset import oset
+from thirdparty.six.moves import http_client as _http_client
+from thirdparty.six.moves import http_cookiejar as _http_cookiejar
+from thirdparty.six.moves import urllib as _urllib
 from thirdparty.socks import socks
 from xml.etree.ElementTree import ElementTree
 
-authHandler = urllib2.BaseHandler()
+authHandler = _urllib.request.BaseHandler()
 chunkedHandler = ChunkedHandler()
 httpsHandler = HTTPSHandler()
 keepAliveHandler = keepalive.HTTPHandler()
-proxyHandler = urllib2.ProxyHandler()
+proxyHandler = _urllib.request.ProxyHandler()
 redirectHandler = SmartRedirectHandler()
 rangeHandler = HTTPRangeHandler()
 multipartPostHandler = multipartpost.MultipartPostHandler()
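
Note: Python 3 split urllib2 and urlparse across urllib.request, urllib.error and urllib.parse, and six.moves.urllib mirrors that layout on both interpreters; a hedged sketch of the calls this file now makes:

    from six.moves import urllib as _urllib

    opener = _urllib.request.build_opener(_urllib.request.ProxyHandler({}))  # was urllib2.*
    parts = _urllib.parse.urlsplit("http://example.com/a?b=1")               # was urlparse.*
    print(parts.netloc, parts.query)
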
@@ -1053,7 +1053,7 @@ def _setHTTPHandlers():
         logger.debug(debugMsg)
 
         try:
-            _ = urlparse.urlsplit(conf.proxy)
+            _ = _urllib.parse.urlsplit(conf.proxy)
         except Exception as ex:
             errMsg = "invalid proxy address '%s' ('%s')" % (conf.proxy, getSafeExString(ex))
             raise SqlmapSyntaxException(errMsg)
@@ -1090,9 +1090,9 @@ def _setHTTPHandlers():
             proxyHandler.proxies = {}
 
         socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if scheme == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, hostname, port, username=username, password=password)
-        socks.wrapmodule(urllib2)
+        socks.wrapmodule(_http_client)
     else:
-        socks.unwrapmodule(urllib2)
+        socks.unwrapmodule(_http_client)
 
     if conf.proxyCred:
         # Reference: http://stackoverflow.com/questions/34079/how-to-specify-an-authenticated-proxy-for-a-python-http-connection
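
Note: socks.wrapmodule() replaces the socket class referenced inside the module it is handed, so that module's outgoing connections ride the default proxy; on Python 3 the sockets are opened by http.client, hence the new wrap target. Sketch (setdefaultproxy/wrapmodule are standard SocksiPy API; the proxy address is hypothetical):

    import socks
    from six.moves import http_client as _http_client

    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 1080)
    socks.wrapmodule(_http_client)  # was: socks.wrapmodule(urllib2)
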
@@ -1112,12 +1112,12 @@ def _setHTTPHandlers():
 
     if not conf.dropSetCookie:
         if not conf.loadCookies:
-            conf.cj = cookielib.CookieJar()
+            conf.cj = _http_cookiejar.CookieJar()
         else:
-            conf.cj = cookielib.MozillaCookieJar()
+            conf.cj = _http_cookiejar.MozillaCookieJar()
         resetCookieJar(conf.cj)
 
-        handlers.append(urllib2.HTTPCookieProcessor(conf.cj))
+        handlers.append(_urllib.request.HTTPCookieProcessor(conf.cj))
 
     # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
     if conf.keepAlive:
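
Note: cookielib is http.cookiejar on Python 3; the jar still plugs into the opener chain through HTTPCookieProcessor exactly as before. Sketch:

    from six.moves import http_cookiejar as _http_cookiejar
    from six.moves import urllib as _urllib

    cj = _http_cookiejar.CookieJar()  # was: cookielib.CookieJar()
    opener = _urllib.request.build_opener(_urllib.request.HTTPCookieProcessor(cj))
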
@@ -1133,8 +1133,8 @@ def _setHTTPHandlers():
         else:
             handlers.append(keepAliveHandler)
 
-    opener = urllib2.build_opener(*handlers)
-    urllib2.install_opener(opener)
+    opener = _urllib.request.build_opener(*handlers)
+    _urllib.request.install_opener(opener)
 
 def _setSafeVisit():
     """
@@ -1166,7 +1166,7 @@ def _setSafeVisit():
                 if value.endswith(":443"):
                     scheme = "https"
                 value = "%s://%s" % (scheme, value)
-                kb.safeReq.url = urlparse.urljoin(value, kb.safeReq.url)
+                kb.safeReq.url = _urllib.parse.urljoin(value, kb.safeReq.url)
             else:
                 break
 
@@ -1289,7 +1289,7 @@ def _setHTTPAuthentication():
         conf.authUsername = aCredRegExp.group(1)
         conf.authPassword = aCredRegExp.group(2)
 
-        kb.passwordMgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
+        kb.passwordMgr = _urllib.request.HTTPPasswordMgrWithDefaultRealm()
 
         _setAuthCred()
 
@@ -1297,7 +1297,7 @@ def _setHTTPAuthentication():
             authHandler = SmartHTTPBasicAuthHandler(kb.passwordMgr)
 
         elif authType == AUTH_TYPE.DIGEST:
-            authHandler = urllib2.HTTPDigestAuthHandler(kb.passwordMgr)
+            authHandler = _urllib.request.HTTPDigestAuthHandler(kb.passwordMgr)
 
         elif authType == AUTH_TYPE.NTLM:
             try:
@@ -1459,7 +1459,7 @@ def _setHostname():
 
     if conf.url:
         try:
-            conf.hostname = urlparse.urlsplit(conf.url).netloc.split(':')[0]
+            conf.hostname = _urllib.parse.urlsplit(conf.url).netloc.split(':')[0]
         except ValueError as ex:
             errMsg = "problem occurred while "
             errMsg += "parsing an URL '%s' ('%s')" % (conf.url, getSafeExString(ex))
@@ -1783,8 +1783,8 @@ def _cleanupEnvironment():
     Cleanup environment (e.g. from leftovers after --sqlmap-shell).
     """
 
-    if issubclass(urllib2.socket.socket, socks.socksocket):
-        socks.unwrapmodule(urllib2)
+    if issubclass(_http_client.socket.socket, socks.socksocket):
+        socks.unwrapmodule(_http_client)
 
     if hasattr(socket, "_ready"):
         socket._ready.clear()
@@ -2312,11 +2312,11 @@ def _setTorSocksProxySettings():
 
     # SOCKS5 to prevent DNS leaks (http://en.wikipedia.org/wiki/Tor_%28anonymity_network%29)
     socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if conf.torType == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, LOCALHOST, port)
-    socks.wrapmodule(urllib2)
+    socks.wrapmodule(_http_client)
 
 def _setHttpChunked():
     if conf.chunked and conf.data:
-        httplib.HTTPConnection._set_content_length = lambda self, a, b: None
+        _http_client.HTTPConnection._set_content_length = lambda self, a, b: None
 
 def _checkWebSocket():
     if conf.url and (conf.url.startswith("ws:/") or conf.url.startswith("wss:/")):

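Note: with --chunked, sqlmap builds the Transfer-Encoding: chunked body itself, so the Content-Length header that HTTPConnection computes automatically has to be suppressed; _set_content_length is a private http.client helper (its signature varies across Python versions), which is why a no-op lambda is patched over it rather than any public API being used:

    from six.moves import http_client as _http_client

    # no-op: keeps http.client from adding Content-Length to chunked requests
    _http_client.HTTPConnection._set_content_length = lambda self, a, b: None
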
@@ -6,9 +6,9 @@ See the file 'LICENSE' for copying permission
 """
 
 import codecs
-import httplib
 
 from lib.core.settings import IS_WIN
+from thirdparty.six.moves import http_client as _http_client
 
 def dirtyPatches():
     """
@@ -16,7 +16,7 @@ def dirtyPatches():
     """
 
     # accept overly long result lines (e.g. SQLi results in HTTP header responses)
-    httplib._MAXLINE = 1 * 1024 * 1024
+    _http_client._MAXLINE = 1 * 1024 * 1024
 
     # add support for inet_pton() on Windows OS
     if IS_WIN:

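Note: http.client raises LineTooLong for any response line longer than the private _MAXLINE cap (65536 bytes by default); raising the cap keeps long injection results reflected in response headers parseable. Sketch:

    from six.moves import http_client as _http_client

    _http_client._MAXLINE = 1 * 1024 * 1024  # default is 65536
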
@@ -17,7 +17,7 @@ from lib.core.enums import DBMS_DIRECTORY_NAME
 from lib.core.enums import OS
 
 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.3.3.56"
+VERSION = "1.3.3.57"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)

@@ -12,7 +12,6 @@ import subprocess
 import sys
 import tempfile
 import time
-import urlparse
 
 from lib.core.common import Backend
 from lib.core.common import getSafeExString
@@ -74,6 +73,7 @@ from lib.core.settings import USER_AGENT_ALIASES
 from lib.core.settings import XML_RECOGNITION_REGEX
 from lib.utils.hashdb import HashDB
 from thirdparty.odict import OrderedDict
+from thirdparty.six.moves import urllib as _urllib
 
 def _setRequestParams():
     """
@@ -276,7 +276,7 @@ def _setRequestParams():
 
         if not kb.processUserMarks:
             if place == PLACE.URI:
-                query = urlparse.urlsplit(value).query
+                query = _urllib.parse.urlsplit(value).query
                 if query:
                     parameters = conf.parameters[PLACE.GET] = query
                     paramDict = paramToDict(PLACE.GET, parameters)

@@ -13,6 +13,7 @@ import threading
 import time
 import traceback
 
+from lib.core.compat import WichmannHill
 from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
@@ -57,7 +58,7 @@ class _ThreadData(threading.local):
         self.lastRequestMsg = None
         self.lastRequestUID = 0
         self.lastRedirectURL = None
-        self.random = random.WichmannHill()
+        self.random = WichmannHill()
         self.resumed = False
         self.retriesCount = 0
         self.seqMatcher = difflib.SequenceMatcher(None)

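Note: the random.WichmannHill generator class was removed from the Python 3 stdlib, so sqlmap now carries a compatible implementation in lib.core.compat and keeps one seedable instance per thread. A usage sketch, assuming the compat class mirrors the old interface:

    from lib.core.compat import WichmannHill

    rng = WichmannHill()
    rng.seed(0)
    print(rng.random())  # reproducible stream, independent of the global random state
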
@@ -5,7 +5,6 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
-import httplib
 import re
 
 from lib.core.common import readInput
@@ -14,6 +13,7 @@ from lib.core.data import logger
 from lib.core.exception import SqlmapSyntaxException
 from lib.request.connect import Connect as Request
 from thirdparty.oset.pyoset import oset
+from thirdparty.six.moves import http_client as _http_client
 
 abortedFlag = None
 
@@ -30,7 +30,7 @@ def parseSitemap(url, retVal=None):
 
     try:
         content = Request.getPage(url=url, raise404=True)[0] if not abortedFlag else ""
-    except httplib.InvalidURL:
+    except _http_client.InvalidURL:
         errMsg = "invalid URL given for sitemap ('%s')" % url
         raise SqlmapSyntaxException(errMsg)
 
@@ -5,15 +5,15 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
-import urllib2
+from thirdparty.six.moves import urllib as _urllib
 
-class SmartHTTPBasicAuthHandler(urllib2.HTTPBasicAuthHandler):
+class SmartHTTPBasicAuthHandler(_urllib.request.HTTPBasicAuthHandler):
     """
     Reference: http://selenic.com/hg/rev/6c51a5056020
     Fix for a: http://bugs.python.org/issue8797
     """
     def __init__(self, *args, **kwargs):
-        urllib2.HTTPBasicAuthHandler.__init__(self, *args, **kwargs)
+        _urllib.request.HTTPBasicAuthHandler.__init__(self, *args, **kwargs)
         self.retried_req = set()
         self.retried_count = 0
 
@@ -30,8 +30,8 @@ class SmartHTTPBasicAuthHandler(urllib2.HTTPBasicAuthHandler):
             self.retried_count = 0
         else:
             if self.retried_count > 5:
-                raise urllib2.HTTPError(req.get_full_url(), 401, "basic auth failed", headers, None)
+                raise _urllib.error.HTTPError(req.get_full_url(), 401, "basic auth failed", headers, None)
             else:
                 self.retried_count += 1
 
-        return urllib2.HTTPBasicAuthHandler.http_error_auth_reqed(self, auth_header, host, req, headers)
+        return _urllib.request.HTTPBasicAuthHandler.http_error_auth_reqed(self, auth_header, host, req, headers)

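Note: the handler base classes live in urllib.request while the exception moved to urllib.error, which is why the same file now imports from two submodules of _urllib. Sketch:

    from six.moves import urllib as _urllib

    try:
        _urllib.request.urlopen("http://example.com/missing")
    except _urllib.error.HTTPError as ex:  # was: urllib2.HTTPError
        print(ex.code, ex.reason)
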
@@ -5,37 +5,35 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
-import urllib2
-
 from lib.core.data import conf
+from thirdparty.six.moves import urllib as _urllib
 
-class ChunkedHandler(urllib2.HTTPHandler):
+class ChunkedHandler(_urllib.request.HTTPHandler):
     """
-    Ensures that urllib2.HTTPHandler is working properly in case of Chunked Transfer-Encoding
+    Ensures that HTTPHandler is working properly in case of Chunked Transfer-Encoding
     """
 
     def _http_request(self, request):
         host = request.get_host()
         if not host:
-            raise urllib2.URLError('no host given')
+            raise _urllib.error.URLError("no host given")
 
         if request.has_data():  # POST
             data = request.get_data()
-            if not request.has_header('Content-type'):
+            if not request.has_header("Content-type"):
                 request.add_unredirected_header(
-                    'Content-type',
-                    'application/x-www-form-urlencoded')
-            if not request.has_header('Content-length') and not conf.chunked:
+                    "Content-type",
+                    "application/x-www-form-urlencoded")
+            if not request.has_header("Content-length") and not conf.chunked:
                 request.add_unredirected_header(
-                    'Content-length', '%d' % len(data))
+                    "Content-length", "%d" % len(data))
 
         sel_host = host
         if request.has_proxy():
-            scheme, sel = urllib2.splittype(request.get_selector())
-            sel_host, sel_path = urllib2.splithost(sel)
+            sel_host = _urllib.parse.urlsplit(request.get_selector()).netloc
 
-        if not request.has_header('Host'):
-            request.add_unredirected_header('Host', sel_host)
+        if not request.has_header("Host"):
+            request.add_unredirected_header("Host", sel_host)
         for name, value in self.parent.addheaders:
             name = name.capitalize()
             if not request.has_header(name):

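Note: the undocumented urllib2.splittype()/splithost() helpers have no public Python 3 counterpart, so the two-step proxy-selector parse collapses into a single urlsplit() call whose netloc attribute carries the host[:port] part. Sketch with a hypothetical selector:

    from six.moves import urllib as _urllib

    selector = "http://proxy.example:8080/path"
    print(_urllib.parse.urlsplit(selector).netloc)  # 'proxy.example:8080'
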
@@ -62,7 +62,6 @@ from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.datatype import AttribDict
 from lib.core.decorators import stackedmethod
-from lib.core.dicts import HTTP_RESPONSES
 from lib.core.dicts import POST_HINT_CONTENT_TYPES
 from lib.core.enums import ADJUST_TIME_DELAY
 from lib.core.enums import AUTH_TYPE
@@ -425,7 +424,7 @@ class Connect(object):
             page = ws.recv()
             ws.close()
             code = ws.status
-            status = HTTP_RESPONSES[code]
+            status = _http_client.responses[code]
 
             class _(dict):
                 pass
@@ -641,7 +640,7 @@ class Connect(object):
             if ignoreTimeout:
                 return None if not conf.ignoreTimeouts else "", None, None
             else:
-                warnMsg = "unable to connect to the target URL (%d - %s)" % (ex.code, HTTP_RESPONSES[ex.code])
+                warnMsg = "unable to connect to the target URL (%d - %s)" % (ex.code, _http_client.responses[ex.code])
                 if threadData.retriesCount < conf.retries and not kb.threadException:
                     warnMsg += ". sqlmap is going to retry the request"
                     logger.critical(warnMsg)

@@ -6,10 +6,8 @@ See the file 'LICENSE' for copying permission
 """
 
 import distutils.version
-import httplib
 import re
 import socket
-import urllib2
 
 from lib.core.common import getSafeExString
 from lib.core.data import conf
@@ -17,6 +15,8 @@ from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.exception import SqlmapConnectionException
 from lib.core.settings import PYVERSION
+from thirdparty.six.moves import http_client as _http_client
+from thirdparty.six.moves import urllib as _urllib
 
 ssl = None
 try:
@@ -27,7 +27,7 @@ except ImportError:
 
 _protocols = filter(None, (getattr(ssl, _, None) for _ in ("PROTOCOL_TLSv1_2", "PROTOCOL_TLSv1_1", "PROTOCOL_TLSv1", "PROTOCOL_SSLv3", "PROTOCOL_SSLv23", "PROTOCOL_SSLv2")))
 
-class HTTPSConnection(httplib.HTTPSConnection):
+class HTTPSConnection(_http_client.HTTPSConnection):
     """
     Connection class that enables usage of newer SSL protocols.
 
@@ -35,7 +35,7 @@ class HTTPSConnection(httplib.HTTPSConnection):
     """
 
     def __init__(self, *args, **kwargs):
-        httplib.HTTPSConnection.__init__(self, *args, **kwargs)
+        _http_client.HTTPSConnection.__init__(self, *args, **kwargs)
 
     def connect(self):
         def create_sock():
@@ -63,7 +63,7 @@ class HTTPSConnection(httplib.HTTPSConnection):
                         break
                     else:
                         sock.close()
-                except (ssl.SSLError, socket.error, httplib.BadStatusLine) as ex:
+                except (ssl.SSLError, socket.error, _http_client.BadStatusLine) as ex:
                     self._tunnel_host = None
                     logger.debug("SSL connection error occurred ('%s')" % getSafeExString(ex))
 
@@ -83,7 +83,7 @@ class HTTPSConnection(httplib.HTTPSConnection):
                         break
                     else:
                         sock.close()
-                except (ssl.SSLError, socket.error, httplib.BadStatusLine) as ex:
+                except (ssl.SSLError, socket.error, _http_client.BadStatusLine) as ex:
                     self._tunnel_host = None
                     logger.debug("SSL connection error occurred ('%s')" % getSafeExString(ex))
 
@@ -94,14 +94,14 @@ class HTTPSConnection(httplib.HTTPSConnection):
             errMsg += " (please retry with Python >= 2.7.9)"
             raise SqlmapConnectionException(errMsg)
 
-class HTTPSHandler(urllib2.HTTPSHandler):
+class HTTPSHandler(_urllib.request.HTTPSHandler):
     def https_open(self, req):
-        return self.do_open(HTTPSConnection if ssl else httplib.HTTPSConnection, req)
+        return self.do_open(HTTPSConnection if ssl else _http_client.HTTPSConnection, req)
 
 # Bug fix (http://bugs.python.org/issue17849)
 
 def _(self, *args):
     return self._readline()
 
-httplib.LineAndFileWrapper._readline = httplib.LineAndFileWrapper.readline
-httplib.LineAndFileWrapper.readline = _
+_http_client.LineAndFileWrapper._readline = _http_client.LineAndFileWrapper.readline
+_http_client.LineAndFileWrapper.readline = _

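Note: LineAndFileWrapper (the target of the issue17849 workaround) exists only in the Python 2 httplib; http.client on Python 3 defines no such class, so the renamed assignment above would raise AttributeError there. A guarded variant, offered as an assumption about safer handling rather than what the commit does:

    from six.moves import http_client as _http_client

    def _patched_readline(self, *args):
        return self._readline()

    if hasattr(_http_client, "LineAndFileWrapper"):  # Python 2 only
        _http_client.LineAndFileWrapper._readline = _http_client.LineAndFileWrapper.readline
        _http_client.LineAndFileWrapper.readline = _patched_readline
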
@@ -5,15 +5,15 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
-import urllib2
+from thirdparty.six.moves import urllib as _urllib
 
-class MethodRequest(urllib2.Request):
+class MethodRequest(_urllib.request.Request):
     """
-    Used to create HEAD/PUT/DELETE/... requests with urllib2
+    Used to create HEAD/PUT/DELETE/... requests with urllib
     """
 
     def set_method(self, method):
         self.method = method.upper()
 
     def get_method(self):
-        return getattr(self, 'method', urllib2.Request.get_method(self))
+        return getattr(self, 'method', _urllib.request.Request.get_method(self))

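Note: a stock Request derives its verb from whether data is attached (GET or POST); overriding get_method() is what lets sqlmap issue HEAD/PUT/DELETE and friends. Usage sketch with a hypothetical URL:

    req = MethodRequest("http://example.com/resource")
    req.set_method("HEAD")
    assert req.get_method() == "HEAD"
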
@@ -5,16 +5,15 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
-import httplib
-import urllib2
-
 from lib.core.data import conf
 from lib.core.common import getSafeExString
 from lib.core.exception import SqlmapConnectionException
+from thirdparty.six.moves import http_client as _http_client
+from thirdparty.six.moves import urllib as _urllib
 
-class HTTPSPKIAuthHandler(urllib2.HTTPSHandler):
+class HTTPSPKIAuthHandler(_urllib.request.HTTPSHandler):
     def __init__(self, auth_file):
-        urllib2.HTTPSHandler.__init__(self)
+        _urllib.request.HTTPSHandler.__init__(self)
         self.auth_file = auth_file
 
     def https_open(self, req):
@@ -23,7 +22,7 @@ class HTTPSPKIAuthHandler(urllib2.HTTPSHandler):
     def getConnection(self, host, timeout=None):
         try:
             # Reference: https://docs.python.org/2/library/ssl.html#ssl.SSLContext.load_cert_chain
-            return httplib.HTTPSConnection(host, cert_file=self.auth_file, key_file=self.auth_file, timeout=conf.timeout)
+            return _http_client.HTTPSConnection(host, cert_file=self.auth_file, key_file=self.auth_file, timeout=conf.timeout)
         except IOError as ex:
             errMsg = "error occurred while using key "
             errMsg += "file '%s' ('%s')" % (self.auth_file, getSafeExString(ex))

@@ -5,41 +5,19 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
-import urllib
-import urllib2
-
 from lib.core.exception import SqlmapConnectionException
+from thirdparty.six.moves import urllib as _urllib
 
-class HTTPRangeHandler(urllib2.BaseHandler):
+class HTTPRangeHandler(_urllib.request.BaseHandler):
     """
     Handler that enables HTTP Range headers.
 
     Reference: http://stackoverflow.com/questions/1971240/python-seek-on-remote-file
-
-    This was extremely simple. The Range header is a HTTP feature to
-    begin with so all this class does is tell urllib2 that the
-    "206 Partial Content" response from the HTTP server is what we
-    expected.
-
-    Example:
-        import urllib2
-        import byterange
-
-        range_handler = range.HTTPRangeHandler()
-        opener = urllib2.build_opener(range_handler)
-
-        # install it
-        urllib2.install_opener(opener)
-
-        # create Request and set Range header
-        req = urllib2.Request('https://www.python.org/')
-        req.header['Range'] = 'bytes=30-50'
-        f = urllib2.urlopen(req)
     """
 
     def http_error_206(self, req, fp, code, msg, hdrs):
         # 206 Partial Content Response
-        r = urllib.addinfourl(fp, hdrs, req.get_full_url())
+        r = _urllib.response.addinfourl(fp, hdrs, req.get_full_url())
         r.code = code
         r.msg = msg
         return r

@@ -8,8 +8,6 @@ See the file 'LICENSE' for copying permission
 import io
 import time
 import types
-import urllib2
-import urlparse
 
 from lib.core.data import conf
 from lib.core.data import kb
@@ -32,8 +30,9 @@ from lib.core.settings import MAX_TOTAL_REDIRECTIONS
 from lib.core.threads import getCurrentThreadData
 from lib.request.basic import decodePage
 from lib.request.basic import parseResponse
+from thirdparty.six.moves import urllib as _urllib
 
-class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
+class SmartRedirectHandler(_urllib.request.HTTPRedirectHandler):
     def _get_header_redirect(self, headers):
         retVal = None
 
@@ -66,7 +65,7 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
 
     def _redirect_request(self, req, fp, code, msg, headers, newurl):
         newurl = newurl.replace(' ', '%20')
-        return urllib2.Request(newurl, data=req.data, headers=req.headers, origin_req_host=req.get_origin_req_host())
+        return _urllib.request.Request(newurl, data=req.data, headers=req.headers, origin_req_host=req.get_origin_req_host())
 
     def http_error_302(self, req, fp, code, msg, headers):
         start = time.time()
@@ -109,8 +108,8 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
 
         if redurl:
             try:
-                if not urlparse.urlsplit(redurl).netloc:
-                    redurl = urlparse.urljoin(req.get_full_url(), redurl)
+                if not _urllib.parse.urlsplit(redurl).netloc:
+                    redurl = _urllib.parse.urljoin(req.get_full_url(), redurl)
 
                 self._infinite_loop_check(req)
                 self._ask_redirect_choice(code, redurl, req.get_method())
@@ -139,8 +138,8 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
             req.headers[HTTP_HEADER.COOKIE] = delimiter.join("%s=%s" % (key, cookies[key]) for key in cookies)
 
         try:
-            result = urllib2.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers)
-        except urllib2.HTTPError as ex:
+            result = _urllib.request.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers)
+        except _urllib.error.HTTPError as ex:
             result = ex
 
         # Dirty hack for http://bugs.python.org/issue15701

@@ -10,7 +10,6 @@ import os
 import posixpath
 import re
 import tempfile
-import urlparse
 
 from extra.cloak.cloak import decloak
 from lib.core.agent import agent
@@ -52,6 +51,7 @@ from lib.core.settings import SHELL_WRITABLE_DIR_TAG
 from lib.core.settings import VIEWSTATE_REGEX
 from lib.request.connect import Connect as Request
 from thirdparty.oset.pyoset import oset
+from thirdparty.six.moves import urllib as _urllib
 
 class Web:
     """
@@ -256,7 +256,7 @@ class Web:
         directories.extend(getAutoDirectories())
         directories = list(oset(directories))
 
-        path = urlparse.urlparse(conf.url).path or '/'
+        path = _urllib.parse.urlparse(conf.url).path or '/'
         path = re.sub(r"/[^/]*\.\w+\Z", '/', path)
         if path != '/':
             _ = []
@@ -295,7 +295,7 @@ class Web:
 
             for match in re.finditer('/', directory):
                 self.webBaseUrl = "%s://%s:%d%s/" % (conf.scheme, conf.hostname, conf.port, directory[match.start():].rstrip('/'))
-                self.webStagerUrl = urlparse.urljoin(self.webBaseUrl, stagerName)
+                self.webStagerUrl = _urllib.parse.urljoin(self.webBaseUrl, stagerName)
                 debugMsg = "trying to see if the file is accessible from '%s'" % self.webStagerUrl
                 logger.debug(debugMsg)
 
@@ -332,7 +332,7 @@ class Web:
 
             for match in re.finditer('/', directory):
                 self.webBaseUrl = "%s://%s:%d%s/" % (conf.scheme, conf.hostname, conf.port, directory[match.start():].rstrip('/'))
-                self.webStagerUrl = urlparse.urljoin(self.webBaseUrl, stagerName)
+                self.webStagerUrl = _urllib.parse.urljoin(self.webBaseUrl, stagerName)
 
                 debugMsg = "trying to see if the file is accessible from '%s'" % self.webStagerUrl
                 logger.debug(debugMsg)

@@ -9,7 +9,6 @@ See the file 'LICENSE' for copying permission
 from __future__ import print_function
 
 import contextlib
-import httplib
 import logging
 import os
 import re
@@ -19,7 +18,6 @@ import sqlite3
 import sys
 import tempfile
 import time
-import urllib2
 
 from lib.core.common import dataToStdout
 from lib.core.common import getSafeExString
@@ -57,6 +55,8 @@ from thirdparty.bottle.bottle import request
 from thirdparty.bottle.bottle import response
 from thirdparty.bottle.bottle import run
 from thirdparty.bottle.bottle import server_names
+from thirdparty.six.moves import http_client as _http_client
+from thirdparty.six.moves import urllib as _urllib
 
 # Global data storage
 class DataStore(object):
@@ -716,8 +716,8 @@ def _client(url, options=None):
         if DataStore.username or DataStore.password:
             headers["Authorization"] = "Basic %s" % base64encode("%s:%s" % (DataStore.username or "", DataStore.password or ""))
 
-        req = urllib2.Request(url, data, headers)
-        response = urllib2.urlopen(req)
+        req = _urllib.request.Request(url, data, headers)
+        response = _urllib.request.urlopen(req)
         text = response.read()
     except:
         if options:
@@ -746,7 +746,7 @@ def client(host=RESTAPI_DEFAULT_ADDRESS, port=RESTAPI_DEFAULT_PORT, username=Non
     try:
         _client(addr)
     except Exception as ex:
-        if not isinstance(ex, urllib2.HTTPError) or ex.code == httplib.UNAUTHORIZED:
+        if not isinstance(ex, _urllib.error.HTTPError) or ex.code == _http_client.UNAUTHORIZED:
            errMsg = "There has been a problem while connecting to the "
            errMsg += "REST-JSON API server at '%s' " % addr
            errMsg += "(%s)" % ex

@@ -5,10 +5,8 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
-import httplib
 import os
 import re
-import urlparse
 import tempfile
 import time
 
@@ -34,6 +32,8 @@ from lib.parse.sitemap import parseSitemap
 from lib.request.connect import Connect as Request
 from thirdparty.beautifulsoup.beautifulsoup import BeautifulSoup
 from thirdparty.oset.pyoset import oset
+from thirdparty.six.moves import http_client as _http_client
+from thirdparty.six.moves import urllib as _urllib
 
 def crawl(target):
     try:
@@ -70,7 +70,7 @@ def crawl(target):
             except SqlmapSyntaxException:
                 errMsg = "invalid URL detected. skipping '%s'" % current
                 logger.critical(errMsg)
-            except httplib.InvalidURL as ex:
+            except _http_client.InvalidURL as ex:
                 errMsg = "invalid URL detected ('%s'). skipping " % getSafeExString(ex)
                 errMsg += "URL '%s'" % current
                 logger.critical(errMsg)
@@ -96,7 +96,7 @@ def crawl(target):
                 if href:
                     if threadData.lastRedirectURL and threadData.lastRedirectURL[0] == threadData.lastRequestUID:
                         current = threadData.lastRedirectURL[1]
-                    url = urlparse.urljoin(current, href)
+                    url = _urllib.parse.urljoin(current, href)
 
                     # flag to know if we are dealing with the same target host
                     _ = checkSameHost(url, target)
@@ -135,7 +135,7 @@ def crawl(target):
         if readInput(message, default='N', boolean=True):
             found = True
             items = None
-            url = urlparse.urljoin(target, "/sitemap.xml")
+            url = _urllib.parse.urljoin(target, "/sitemap.xml")
             try:
                 items = parseSitemap(url)
             except SqlmapConnectionException as ex:

@@ -8,7 +8,6 @@ See the file 'LICENSE' for copying permission
 import base64
 import BaseHTTPServer
 import datetime
-import httplib
 import io
 import re
 import time
@@ -157,12 +156,12 @@ class Response:
         altered = status_line + "\r\n" + remain
         comment = first_line
 
-        response = httplib.HTTPResponse(FakeSocket(altered))
+        response = _http_client.HTTPResponse(FakeSocket(altered))
         response.begin()
 
         try:
             content = response.read(-1)
-        except httplib.IncompleteRead:
+        except _http_client.IncompleteRead:
             content = raw[raw.find("\r\n\r\n") + 4:].rstrip("\r\n")
 
         return cls(httpVersion="HTTP/1.1" if response.version == 11 else "HTTP/1.0",

@@ -5,11 +5,8 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
-import httplib
 import re
 import socket
-import urllib
-import urllib2
 
 from lib.core.common import getSafeExString
 from lib.core.common import getUnicode
@@ -34,6 +31,8 @@ from lib.core.settings import GOOGLE_REGEX
 from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
 from lib.core.settings import UNICODE_ENCODING
 from lib.request.basic import decodePage
+from thirdparty.six.moves import http_client as _http_client
+from thirdparty.six.moves import urllib as _urllib
 from thirdparty.socks import socks
 
 def _search(dork):
@@ -52,8 +51,8 @@ def _search(dork):
     headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE
 
     try:
-        req = urllib2.Request("https://www.google.com/ncr", headers=headers)
-        conn = urllib2.urlopen(req)
+        req = _urllib.request.Request("https://www.google.com/ncr", headers=headers)
+        conn = _urllib.request.urlopen(req)
     except Exception as ex:
         errMsg = "unable to connect to Google ('%s')" % getSafeExString(ex)
         raise SqlmapConnectionException(errMsg)
@@ -67,11 +66,11 @@ def _search(dork):
         url += "&start=%d" % ((gpage - 1) * 100)
 
     try:
-        req = urllib2.Request(url, headers=headers)
-        conn = urllib2.urlopen(req)
+        req = _urllib.request.Request(url, headers=headers)
+        conn = _urllib.request.urlopen(req)
 
         requestMsg = "HTTP request:\nGET %s" % url
-        requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
+        requestMsg += " %s" % _http_client.HTTPException._http_vsn_str
         logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
 
         page = conn.read()
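
Note: _http_vsn_str ("HTTP/1.1") is an attribute of HTTPConnection, not of HTTPException, so this particular rename looks like a mechanical slip that would raise AttributeError when the traffic log line is built; the direct equivalent of the old expression is:

    from six.moves import http_client as _http_client

    print(_http_client.HTTPConnection._http_vsn_str)  # 'HTTP/1.1'
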
@@ -88,7 +87,7 @@ def _search(dork):
         responseMsg += "%s\n%s\n" % (responseHeaders, page)
 
         logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
-    except urllib2.HTTPError as ex:
+    except _urllib.error.HTTPError as ex:
         try:
             page = ex.read()
         except Exception as _:
@@ -96,11 +95,11 @@ def _search(dork):
         warnMsg += "an error page information (%s)" % getSafeExString(_)
         logger.critical(warnMsg)
         return None
-    except (urllib2.URLError, httplib.error, socket.error, socket.timeout, socks.ProxyError):
+    except (_urllib.error.URLError, _http_client.error, socket.error, socket.timeout, socks.ProxyError):
         errMsg = "unable to connect to Google"
         raise SqlmapConnectionException(errMsg)
 
-    retVal = [urllib.unquote(match.group(1) or match.group(2)) for match in re.finditer(GOOGLE_REGEX, page, re.I)]
+    retVal = [_urllib.parse.unquote(match.group(1) or match.group(2)) for match in re.finditer(GOOGLE_REGEX, page, re.I)]
 
     if not retVal and "detected unusual traffic" in page:
         warnMsg = "Google has detected 'unusual' traffic from "
@@ -129,11 +128,11 @@ def _search(dork):
         regex = DUCKDUCKGO_REGEX
 
     try:
-        req = urllib2.Request(url, data=data, headers=headers)
-        conn = urllib2.urlopen(req)
+        req = _urllib.request.Request(url, data=data, headers=headers)
+        conn = _urllib.request.urlopen(req)
 
         requestMsg = "HTTP request:\nGET %s" % url
-        requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
+        requestMsg += " %s" % _http_client.HTTPException._http_vsn_str
         logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
 
         page = conn.read()
@@ -150,7 +149,7 @@ def _search(dork):
         responseMsg += "%s\n%s\n" % (responseHeaders, page)
 
         logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
-    except urllib2.HTTPError as ex:
+    except _urllib.error.HTTPError as ex:
         try:
             page = ex.read()
             page = decodePage(page, ex.headers.get("Content-Encoding"), ex.headers.get("Content-Type"))
@@ -163,7 +162,7 @@ def _search(dork):
         errMsg = "unable to connect"
         raise SqlmapConnectionException(errMsg)
 
-    retVal = [urllib.unquote(match.group(1).replace("&amp;", "&")) for match in re.finditer(regex, page, re.I | re.S)]
+    retVal = [_urllib.parse.unquote(match.group(1).replace("&amp;", "&")) for match in re.finditer(regex, page, re.I | re.S)]
 
     if not retVal and "issue with the Tor Exit Node you are currently using" in page:
         warnMsg = "DuckDuckGo has detected 'unusual' traffic from "

thirdparty/clientform/clientform.py (vendored; 17 lines changed)

@@ -66,17 +66,6 @@ __all__ = ['AmbiguityError', 'CheckboxControl', 'Control',
            'SubmitButtonControl', 'SubmitControl', 'TextControl',
            'TextareaControl', 'XHTMLCompatibleFormParser']
 
-try: True
-except NameError:
-    True = 1
-    False = 0
-
-try: bool
-except NameError:
-    def bool(expr):
-        if expr: return True
-        else: return False
-
 try:
     import logging
     import inspect
@@ -792,7 +781,7 @@ else:
         def feed(self, data):
             try:
                 HTMLParser.HTMLParser.feed(self, data)
-            except HTMLParser.HTMLParseError, exc:
+            except HTMLParser.HTMLParseError as exc:
                 raise ParseError(exc)
 
         def start_option(self, attrs):
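
Note: the old "except ExceptionType, exc:" spelling is a syntax error on Python 3, while "except ExceptionType as exc:" parses on Python 2.6+ and 3.x alike, so these vendored-parser hunks need no six shim at all. Sketch:

    try:
        raise ValueError("boom")
    except ValueError as exc:  # was: except ValueError, exc:
        print(exc)
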
@@ -870,7 +859,7 @@ class FormParser(_AbstractSgmllibParser, sgmllib.SGMLParser):
     def feed(self, data):
         try:
             sgmllib.SGMLParser.feed(self, data)
-        except SGMLLIB_PARSEERROR, exc:
+        except SGMLLIB_PARSEERROR as exc:
            raise ParseError(exc)
 
     def close(self):
@@ -896,7 +885,7 @@ def _create_bs_classes(bs,
         def feed(self, data):
             try:
                 self.bs_base_class.feed(self, data)
-            except SGMLLIB_PARSEERROR, exc:
+            except SGMLLIB_PARSEERROR as exc:
                 raise ParseError(exc)
         def close(self):
             self.bs_base_class.close(self)
