God help us all with this Python3 non-sense

Miroslav Stampar 2019-03-27 13:33:46 +01:00
parent 2dbd0267a1
commit 2f53014685
23 changed files with 118 additions and 201 deletions

View File

@@ -6,7 +6,6 @@ See the file 'LICENSE' for copying permission
"""
import copy
import httplib
import logging
import os
import random
@@ -106,6 +105,7 @@ from lib.request.inject import checkBooleanExpression
from lib.request.templates import getPageTemplate
from lib.techniques.union.test import unionTest
from lib.techniques.union.use import configUnion
from thirdparty.six.moves import http_client as _http_client
def checkSqlInjection(place, parameter, value):
# Store here the details about boundaries and payload used to
@@ -1337,7 +1337,7 @@ def checkWaf():
if any((conf.string, conf.notString, conf.regexp, conf.dummy, conf.offline, conf.skipWaf)):
return None
if kb.originalCode == httplib.NOT_FOUND:
if kb.originalCode == _http_client.NOT_FOUND:
return None
_ = hashDBRetrieve(HASHDB_KEYS.CHECK_WAF_RESULT, True)
@@ -1623,7 +1623,7 @@ def checkConnection(suppressOutput=False):
warnMsg += "any addressing issues"
singleTimeWarnMessage(warnMsg)
if any(code in kb.httpErrorCodes for code in (httplib.NOT_FOUND, )):
if any(code in kb.httpErrorCodes for code in (_http_client.NOT_FOUND, )):
errMsg = getSafeExString(ex)
logger.critical(errMsg)
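The swap above is the whole migration pattern in miniature: thirdparty.six.moves.http_client resolves to httplib on Python 2 and to http.client on Python 3, and the status constants are plain integers in both. A minimal sketch, shown with the public six package rather than sqlmap's bundled copy:

    from six.moves import http_client as _http_client

    # same integer constants under either Python line
    assert _http_client.NOT_FOUND == 404
    assert _http_client.INTERNAL_SERVER_ERROR == 500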

View File

@@ -61,7 +61,6 @@ from lib.core.defaults import defaults
from lib.core.dicts import DBMS_DICT
from lib.core.dicts import DEFAULT_DOC_ROOTS
from lib.core.dicts import DEPRECATED_OPTIONS
from lib.core.dicts import HTTP_RESPONSES
from lib.core.dicts import SQL_STATEMENTS
from lib.core.enums import ADJUST_TIME_DELAY
from lib.core.enums import CONTENT_STATUS
@@ -174,6 +173,7 @@ from thirdparty.colorama.initialise import init as coloramainit
from thirdparty.magic import magic
from thirdparty.odict import OrderedDict
from thirdparty.six.moves import configparser as _configparser
from thirdparty.six.moves import http_client as _http_client
from thirdparty.six.moves import urllib as _urllib
from thirdparty.termcolor.termcolor import colored
@@ -3301,9 +3301,9 @@ def showHttpErrorCodes():
if kb.httpErrorCodes:
warnMsg = "HTTP error codes detected during run:\n"
warnMsg += ", ".join("%d (%s) - %d times" % (code, HTTP_RESPONSES[code] if code in HTTP_RESPONSES else '?', count) for code, count in kb.httpErrorCodes.items())
warnMsg += ", ".join("%d (%s) - %d times" % (code, _http_client.responses[code] if code in _http_client.responses else '?', count) for code, count in kb.httpErrorCodes.items())
logger.warn(warnMsg)
if any((str(_).startswith('4') or str(_).startswith('5')) and _ != 500 and _ != kb.originalCode for _ in kb.httpErrorCodes.keys()):
if any((str(_).startswith('4') or str(_).startswith('5')) and _ != _http_client.INTERNAL_SERVER_ERROR and _ != kb.originalCode for _ in kb.httpErrorCodes.keys()):
msg = "too many 4xx and/or 5xx HTTP error codes "
msg += "could mean that some kind of protection is involved (e.g. WAF)"
logger.debug(msg)
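The switch to _http_client.responses works because both httplib and http.client ship a ready-made code-to-reason-phrase mapping, which is also why the hand-rolled HTTP_RESPONSES dictionary disappears in the next file. A hedged sketch of the new warning line:

    from six.moves import http_client as _http_client

    code, count = 404, 3
    # dict.get() with a default mirrors the "if code in ... else '?'" expression above
    print("%d (%s) - %d times" % (code, _http_client.responses.get(code, '?'), count))
    # -> 404 (Not Found) - 3 times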

View File

@@ -330,47 +330,3 @@ PART_RUN_CONTENT_TYPES = {
"osCmd": CONTENT_TYPE.OS_CMD,
"regRead": CONTENT_TYPE.REG_READ
}
HTTP_RESPONSES = {
200: "OK",
201: "Created",
202: "Accepted",
203: "Non-Authoritative Information",
204: "No Content",
205: "Reset Content",
206: "Partial Content",
100: "Continue",
101: "Switching Protocols",
300: "Multiple Choices",
301: "Moved Permanently",
302: "Found",
303: "See Other",
304: "Not Modified",
305: "Use Proxy",
306: "(Unused)",
307: "Temporary Redirect",
400: "Bad Request",
401: "Unauthorized",
402: "Payment Required",
403: "Forbidden",
404: "Not Found",
405: "Method Not Allowed",
406: "Not Acceptable",
407: "Proxy Authentication Required",
408: "Request Timeout",
409: "Conflict",
410: "Gone",
411: "Length Required",
412: "Precondition Failed",
413: "Request Entity Too Large",
414: "Request-URI Too Long",
415: "Unsupported Media Type",
416: "Requested Range Not Satisfiable",
417: "Expectation Failed",
500: "Internal Server Error",
501: "Not Implemented",
502: "Bad Gateway",
503: "Service Unavailable",
504: "Gateway Timeout",
505: "HTTP Version Not Supported"
}

View File

@@ -5,9 +5,7 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
import cookielib
import glob
import httplib
import inspect
import logging
import os
@@ -19,7 +17,6 @@ import tempfile
import threading
import time
import urllib2
import urlparse
import lib.controller.checks
import lib.core.common
@@ -153,14 +150,17 @@ from lib.utils.purge import purge
from thirdparty.keepalive import keepalive
from thirdparty.multipart import multipartpost
from thirdparty.oset.pyoset import oset
from thirdparty.six.moves import http_client as _http_client
from thirdparty.six.moves import http_cookiejar as _http_cookiejar
from thirdparty.six.moves import urllib as _urllib
from thirdparty.socks import socks
from xml.etree.ElementTree import ElementTree
authHandler = urllib2.BaseHandler()
authHandler = _urllib.request.BaseHandler()
chunkedHandler = ChunkedHandler()
httpsHandler = HTTPSHandler()
keepAliveHandler = keepalive.HTTPHandler()
proxyHandler = urllib2.ProxyHandler()
proxyHandler = _urllib.request.ProxyHandler()
redirectHandler = SmartRedirectHandler()
rangeHandler = HTTPRangeHandler()
multipartPostHandler = multipartpost.MultipartPostHandler()
@@ -1053,7 +1053,7 @@ def _setHTTPHandlers():
logger.debug(debugMsg)
try:
_ = urlparse.urlsplit(conf.proxy)
_ = _urllib.parse.urlsplit(conf.proxy)
except Exception as ex:
errMsg = "invalid proxy address '%s' ('%s')" % (conf.proxy, getSafeExString(ex))
raise SqlmapSyntaxException(errMsg)
@@ -1090,9 +1090,9 @@ def _setHTTPHandlers():
proxyHandler.proxies = {}
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if scheme == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, hostname, port, username=username, password=password)
socks.wrapmodule(urllib2)
socks.wrapmodule(_http_client)
else:
socks.unwrapmodule(urllib2)
socks.unwrapmodule(_http_client)
if conf.proxyCred:
# Reference: http://stackoverflow.com/questions/34079/how-to-specify-an-authenticated-proxy-for-a-python-http-connection
@@ -1112,12 +1112,12 @@ def _setHTTPHandlers():
if not conf.dropSetCookie:
if not conf.loadCookies:
conf.cj = cookielib.CookieJar()
conf.cj = _http_cookiejar.CookieJar()
else:
conf.cj = cookielib.MozillaCookieJar()
conf.cj = _http_cookiejar.MozillaCookieJar()
resetCookieJar(conf.cj)
handlers.append(urllib2.HTTPCookieProcessor(conf.cj))
handlers.append(_urllib.request.HTTPCookieProcessor(conf.cj))
# Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
if conf.keepAlive:
@@ -1133,8 +1133,8 @@ def _setHTTPHandlers():
else:
handlers.append(keepAliveHandler)
opener = urllib2.build_opener(*handlers)
urllib2.install_opener(opener)
opener = _urllib.request.build_opener(*handlers)
_urllib.request.install_opener(opener)
def _setSafeVisit():
"""
@@ -1166,7 +1166,7 @@ def _setSafeVisit():
if value.endswith(":443"):
scheme = "https"
value = "%s://%s" % (scheme, value)
kb.safeReq.url = urlparse.urljoin(value, kb.safeReq.url)
kb.safeReq.url = _urllib.parse.urljoin(value, kb.safeReq.url)
else:
break
@@ -1289,7 +1289,7 @@ def _setHTTPAuthentication():
conf.authUsername = aCredRegExp.group(1)
conf.authPassword = aCredRegExp.group(2)
kb.passwordMgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
kb.passwordMgr = _urllib.request.HTTPPasswordMgrWithDefaultRealm()
_setAuthCred()
@@ -1297,7 +1297,7 @@ def _setHTTPAuthentication():
authHandler = SmartHTTPBasicAuthHandler(kb.passwordMgr)
elif authType == AUTH_TYPE.DIGEST:
authHandler = urllib2.HTTPDigestAuthHandler(kb.passwordMgr)
authHandler = _urllib.request.HTTPDigestAuthHandler(kb.passwordMgr)
elif authType == AUTH_TYPE.NTLM:
try:
@@ -1459,7 +1459,7 @@ def _setHostname():
if conf.url:
try:
conf.hostname = urlparse.urlsplit(conf.url).netloc.split(':')[0]
conf.hostname = _urllib.parse.urlsplit(conf.url).netloc.split(':')[0]
except ValueError as ex:
errMsg = "problem occurred while "
errMsg += "parsing an URL '%s' ('%s')" % (conf.url, getSafeExString(ex))
@@ -1783,8 +1783,8 @@ def _cleanupEnvironment():
Cleanup environment (e.g. from leftovers after --sqlmap-shell).
"""
if issubclass(urllib2.socket.socket, socks.socksocket):
socks.unwrapmodule(urllib2)
if issubclass(_http_client.socket.socket, socks.socksocket):
socks.unwrapmodule(_http_client)
if hasattr(socket, "_ready"):
socket._ready.clear()
@@ -2312,11 +2312,11 @@ def _setTorSocksProxySettings():
# SOCKS5 to prevent DNS leaks (http://en.wikipedia.org/wiki/Tor_%28anonymity_network%29)
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if conf.torType == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, LOCALHOST, port)
socks.wrapmodule(urllib2)
socks.wrapmodule(_http_client)
def _setHttpChunked():
if conf.chunked and conf.data:
httplib.HTTPConnection._set_content_length = lambda self, a, b: None
_http_client.HTTPConnection._set_content_length = lambda self, a, b: None
def _checkWebSocket():
if conf.url and (conf.url.startswith("ws:/") or conf.url.startswith("wss:/")):
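Retargeting socks.wrapmodule() from urllib2 to the http_client module is the load-bearing proxy change in this file: urllib2 no longer exists on Python 3, and the socket that actually needs wrapping is the one http.client creates. A sketch assuming a PySocks-compatible module (sqlmap bundles its own thirdparty.socks; the proxy address is a placeholder):

    import socks
    from six.moves import http_client as _http_client

    # every HTTPConnection created afterwards dials through the SOCKS proxy
    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 1080)
    socks.wrapmodule(_http_client)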

View File

@@ -6,9 +6,9 @@ See the file 'LICENSE' for copying permission
"""
import codecs
import httplib
from lib.core.settings import IS_WIN
from thirdparty.six.moves import http_client as _http_client
def dirtyPatches():
"""
@@ -16,7 +16,7 @@ def dirtyPatches():
"""
# accept overly long result lines (e.g. SQLi results in HTTP header responses)
httplib._MAXLINE = 1 * 1024 * 1024
_http_client._MAXLINE = 1 * 1024 * 1024
# add support for inet_pton() on Windows OS
if IS_WIN:
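The _MAXLINE tweak above pokes a private attribute, which is why it lives in dirtyPatches(): recent CPython builds of both httplib and http.client cap a single response line at 65536 bytes, and raising the cap lets overly long header lines through. A hedged illustration:

    from six.moves import http_client as _http_client

    print(_http_client._MAXLINE)          # 65536 on recent stock CPython builds
    _http_client._MAXLINE = 1024 * 1024   # accept response lines up to 1 MB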

View File

@@ -17,7 +17,7 @@ from lib.core.enums import DBMS_DIRECTORY_NAME
from lib.core.enums import OS
# sqlmap version (<major>.<minor>.<month>.<monthly commit>)
VERSION = "1.3.3.56"
VERSION = "1.3.3.57"
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)

View File

@@ -12,7 +12,6 @@ import subprocess
import sys
import tempfile
import time
import urlparse
from lib.core.common import Backend
from lib.core.common import getSafeExString
@@ -74,6 +73,7 @@ from lib.core.settings import USER_AGENT_ALIASES
from lib.core.settings import XML_RECOGNITION_REGEX
from lib.utils.hashdb import HashDB
from thirdparty.odict import OrderedDict
from thirdparty.six.moves import urllib as _urllib
def _setRequestParams():
"""
@@ -276,7 +276,7 @@ def _setRequestParams():
if not kb.processUserMarks:
if place == PLACE.URI:
query = urlparse.urlsplit(value).query
query = _urllib.parse.urlsplit(value).query
if query:
parameters = conf.parameters[PLACE.GET] = query
paramDict = paramToDict(PLACE.GET, parameters)
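urlparse was a top-level module on Python 2 and became urllib.parse on Python 3; six.moves covers both spellings, so urlsplit() behaves identically. A quick sketch of the call used above (the URL is a placeholder):

    from six.moves import urllib as _urllib

    value = "http://host/index.php?id=1&cat=2"
    print(_urllib.parse.urlsplit(value).query)   # id=1&cat=2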

View File

@@ -13,6 +13,7 @@ import threading
import time
import traceback
from lib.core.compat import WichmannHill
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
@@ -57,7 +58,7 @@ class _ThreadData(threading.local):
self.lastRequestMsg = None
self.lastRequestUID = 0
self.lastRedirectURL = None
self.random = random.WichmannHill()
self.random = WichmannHill()
self.resumed = False
self.retriesCount = 0
self.seqMatcher = difflib.SequenceMatcher(None)
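random.WichmannHill was dropped in Python 3, hence the new import from lib.core.compat. A hedged sketch of the shape such a shim can take (the real compat module reimplements the generator; this stand-in only preserves the interface):

    import random

    try:
        WichmannHill = random.WichmannHill       # Python 2
    except AttributeError:
        class WichmannHill(random.Random):       # Python 3 stand-in, interface only
            pass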

View File

@@ -5,7 +5,6 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
import httplib
import re
from lib.core.common import readInput
@@ -14,6 +13,7 @@ from lib.core.data import logger
from lib.core.exception import SqlmapSyntaxException
from lib.request.connect import Connect as Request
from thirdparty.oset.pyoset import oset
from thirdparty.six.moves import http_client as _http_client
abortedFlag = None
@@ -30,7 +30,7 @@ def parseSitemap(url, retVal=None):
try:
content = Request.getPage(url=url, raise404=True)[0] if not abortedFlag else ""
except httplib.InvalidURL:
except _http_client.InvalidURL:
errMsg = "invalid URL given for sitemap ('%s')" % url
raise SqlmapSyntaxException(errMsg)

View File

@@ -5,15 +5,15 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
import urllib2
from thirdparty.six.moves import urllib as _urllib
class SmartHTTPBasicAuthHandler(urllib2.HTTPBasicAuthHandler):
class SmartHTTPBasicAuthHandler(_urllib.request.HTTPBasicAuthHandler):
"""
Reference: http://selenic.com/hg/rev/6c51a5056020
Fix for a: http://bugs.python.org/issue8797
"""
def __init__(self, *args, **kwargs):
urllib2.HTTPBasicAuthHandler.__init__(self, *args, **kwargs)
_urllib.request.HTTPBasicAuthHandler.__init__(self, *args, **kwargs)
self.retried_req = set()
self.retried_count = 0
@@ -30,8 +30,8 @@ class SmartHTTPBasicAuthHandler(urllib2.HTTPBasicAuthHandler):
self.retried_count = 0
else:
if self.retried_count > 5:
raise urllib2.HTTPError(req.get_full_url(), 401, "basic auth failed", headers, None)
raise _urllib.error.HTTPError(req.get_full_url(), 401, "basic auth failed", headers, None)
else:
self.retried_count += 1
return urllib2.HTTPBasicAuthHandler.http_error_auth_reqed(self, auth_header, host, req, headers)
return _urllib.request.HTTPBasicAuthHandler.http_error_auth_reqed(self, auth_header, host, req, headers)
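For context, a hedged sketch of how a handler like this gets wired in (URL and credentials are placeholders):

    from six.moves import urllib as _urllib

    password_mgr = _urllib.request.HTTPPasswordMgrWithDefaultRealm()
    password_mgr.add_password(None, "http://example.com/", "user", "secret")
    opener = _urllib.request.build_opener(SmartHTTPBasicAuthHandler(password_mgr))
    _urllib.request.install_opener(opener)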

View File

@@ -5,37 +5,35 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
import urllib2
from lib.core.data import conf
from thirdparty.six.moves import urllib as _urllib
class ChunkedHandler(urllib2.HTTPHandler):
class ChunkedHandler(_urllib.request.HTTPHandler):
"""
Ensures that urllib2.HTTPHandler is working properly in case of Chunked Transfer-Encoding
Ensures that HTTPHandler is working properly in case of Chunked Transfer-Encoding
"""
def _http_request(self, request):
host = request.get_host()
if not host:
raise urllib2.URLError('no host given')
raise _urllib.error.URLError("no host given")
if request.has_data(): # POST
data = request.get_data()
if not request.has_header('Content-type'):
if not request.has_header("Content-type"):
request.add_unredirected_header(
'Content-type',
'application/x-www-form-urlencoded')
if not request.has_header('Content-length') and not conf.chunked:
"Content-type",
"application/x-www-form-urlencoded")
if not request.has_header("Content-length") and not conf.chunked:
request.add_unredirected_header(
'Content-length', '%d' % len(data))
"Content-length", "%d" % len(data))
sel_host = host
if request.has_proxy():
scheme, sel = urllib2.splittype(request.get_selector())
sel_host, sel_path = urllib2.splithost(sel)
sel_host = _urllib.parse.urlsplit(request.get_selector()).netloc
if not request.has_header('Host'):
request.add_unredirected_header('Host', sel_host)
if not request.has_header("Host"):
request.add_unredirected_header("Host", sel_host)
for name, value in self.parent.addheaders:
name = name.capitalize()
if not request.has_header(name):

View File

@@ -62,7 +62,6 @@ from lib.core.data import kb
from lib.core.data import logger
from lib.core.datatype import AttribDict
from lib.core.decorators import stackedmethod
from lib.core.dicts import HTTP_RESPONSES
from lib.core.dicts import POST_HINT_CONTENT_TYPES
from lib.core.enums import ADJUST_TIME_DELAY
from lib.core.enums import AUTH_TYPE
@@ -425,7 +424,7 @@ class Connect(object):
page = ws.recv()
ws.close()
code = ws.status
status = HTTP_RESPONSES[code]
status = _http_client.responses[code]
class _(dict):
pass
@@ -641,7 +640,7 @@ class Connect(object):
if ignoreTimeout:
return None if not conf.ignoreTimeouts else "", None, None
else:
warnMsg = "unable to connect to the target URL (%d - %s)" % (ex.code, HTTP_RESPONSES[ex.code])
warnMsg = "unable to connect to the target URL (%d - %s)" % (ex.code, _http_client.responses[ex.code])
if threadData.retriesCount < conf.retries and not kb.threadException:
warnMsg += ". sqlmap is going to retry the request"
logger.critical(warnMsg)

View File

@@ -6,10 +6,8 @@ See the file 'LICENSE' for copying permission
"""
import distutils.version
import httplib
import re
import socket
import urllib2
from lib.core.common import getSafeExString
from lib.core.data import conf
@@ -17,6 +15,8 @@ from lib.core.data import logger
from lib.core.data import logger
from lib.core.exception import SqlmapConnectionException
from lib.core.settings import PYVERSION
from thirdparty.six.moves import http_client as _http_client
from thirdparty.six.moves import urllib as _urllib
ssl = None
try:
@@ -27,7 +27,7 @@ except ImportError:
_protocols = filter(None, (getattr(ssl, _, None) for _ in ("PROTOCOL_TLSv1_2", "PROTOCOL_TLSv1_1", "PROTOCOL_TLSv1", "PROTOCOL_SSLv3", "PROTOCOL_SSLv23", "PROTOCOL_SSLv2")))
class HTTPSConnection(httplib.HTTPSConnection):
class HTTPSConnection(_http_client.HTTPSConnection):
"""
Connection class that enables usage of newer SSL protocols.
@@ -35,7 +35,7 @@ class HTTPSConnection(httplib.HTTPSConnection):
"""
def __init__(self, *args, **kwargs):
httplib.HTTPSConnection.__init__(self, *args, **kwargs)
_http_client.HTTPSConnection.__init__(self, *args, **kwargs)
def connect(self):
def create_sock():
@@ -63,7 +63,7 @@ class HTTPSConnection(httplib.HTTPSConnection):
break
else:
sock.close()
except (ssl.SSLError, socket.error, httplib.BadStatusLine) as ex:
except (ssl.SSLError, socket.error, _http_client.BadStatusLine) as ex:
self._tunnel_host = None
logger.debug("SSL connection error occurred ('%s')" % getSafeExString(ex))
@@ -83,7 +83,7 @@ class HTTPSConnection(httplib.HTTPSConnection):
break
else:
sock.close()
except (ssl.SSLError, socket.error, httplib.BadStatusLine) as ex:
except (ssl.SSLError, socket.error, _http_client.BadStatusLine) as ex:
self._tunnel_host = None
logger.debug("SSL connection error occurred ('%s')" % getSafeExString(ex))
@@ -94,14 +94,14 @@ class HTTPSConnection(httplib.HTTPSConnection):
errMsg += " (please retry with Python >= 2.7.9)"
raise SqlmapConnectionException(errMsg)
class HTTPSHandler(urllib2.HTTPSHandler):
class HTTPSHandler(_urllib.request.HTTPSHandler):
def https_open(self, req):
return self.do_open(HTTPSConnection if ssl else httplib.HTTPSConnection, req)
return self.do_open(HTTPSConnection if ssl else _http_client.HTTPSConnection, req)
# Bug fix (http://bugs.python.org/issue17849)
def _(self, *args):
return self._readline()
httplib.LineAndFileWrapper._readline = httplib.LineAndFileWrapper.readline
httplib.LineAndFileWrapper.readline = _
_http_client.LineAndFileWrapper._readline = _http_client.LineAndFileWrapper.readline
_http_client.LineAndFileWrapper.readline = _
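One caveat with the last two lines: LineAndFileWrapper exists only in Python 2's httplib, so on Python 3 (where http.client has no such class) the attribute access raises AttributeError at import time. A hedged, guarded variant of the same patch:

    from six.moves import http_client as _http_client

    if hasattr(_http_client, "LineAndFileWrapper"):   # Python 2 only
        def _(self, *args):
            return self._readline()

        _http_client.LineAndFileWrapper._readline = _http_client.LineAndFileWrapper.readline
        _http_client.LineAndFileWrapper.readline = _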

View File

@@ -5,15 +5,15 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
import urllib2
from thirdparty.six.moves import urllib as _urllib
class MethodRequest(urllib2.Request):
class MethodRequest(_urllib.request.Request):
"""
Used to create HEAD/PUT/DELETE/... requests with urllib2
Used to create HEAD/PUT/DELETE/... requests with urllib
"""
def set_method(self, method):
self.method = method.upper()
def get_method(self):
return getattr(self, 'method', urllib2.Request.get_method(self))
return getattr(self, 'method', _urllib.request.Request.get_method(self))
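A hedged usage sketch (URL and payload are placeholders); note that urllib.request.Request accepts a method= keyword since Python 3.3, so this subclass mainly keeps the Python 2 path working:

    req = MethodRequest("http://example.com/resource", data=b"payload")
    req.set_method("PUT")
    assert req.get_method() == "PUT"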

View File

@@ -5,16 +5,15 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
import httplib
import urllib2
from lib.core.data import conf
from lib.core.common import getSafeExString
from lib.core.exception import SqlmapConnectionException
from thirdparty.six.moves import http_client as _http_client
from thirdparty.six.moves import urllib as _urllib
class HTTPSPKIAuthHandler(urllib2.HTTPSHandler):
class HTTPSPKIAuthHandler(_urllib.request.HTTPSHandler):
def __init__(self, auth_file):
urllib2.HTTPSHandler.__init__(self)
_urllib.request.HTTPSHandler.__init__(self)
self.auth_file = auth_file
def https_open(self, req):
@@ -23,7 +22,7 @@ class HTTPSPKIAuthHandler(urllib2.HTTPSHandler):
def getConnection(self, host, timeout=None):
try:
# Reference: https://docs.python.org/2/library/ssl.html#ssl.SSLContext.load_cert_chain
return httplib.HTTPSConnection(host, cert_file=self.auth_file, key_file=self.auth_file, timeout=conf.timeout)
return _http_client.HTTPSConnection(host, cert_file=self.auth_file, key_file=self.auth_file, timeout=conf.timeout)
except IOError as ex:
errMsg = "error occurred while using key "
errMsg += "file '%s' ('%s')" % (self.auth_file, getSafeExString(ex))

View File

@@ -5,41 +5,19 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
import urllib
import urllib2
from lib.core.exception import SqlmapConnectionException
from thirdparty.six.moves import urllib as _urllib
class HTTPRangeHandler(urllib2.BaseHandler):
class HTTPRangeHandler(_urllib.request.BaseHandler):
"""
Handler that enables HTTP Range headers.
Reference: http://stackoverflow.com/questions/1971240/python-seek-on-remote-file
This was extremely simple. The Range header is a HTTP feature to
begin with so all this class does is tell urllib2 that the
"206 Partial Content" response from the HTTP server is what we
expected.
Example:
import urllib2
import byterange
range_handler = byterange.HTTPRangeHandler()
opener = urllib2.build_opener(range_handler)
# install it
urllib2.install_opener(opener)
# create Request and set Range header
req = urllib2.Request('https://www.python.org/')
req.add_header('Range', 'bytes=30-50')
f = urllib2.urlopen(req)
"""
def http_error_206(self, req, fp, code, msg, hdrs):
# 206 Partial Content Response
r = urllib.addinfourl(fp, hdrs, req.get_full_url())
r = _urllib.response.addinfourl(fp, hdrs, req.get_full_url())
r.code = code
r.msg = msg
return r

View File

@@ -8,8 +8,6 @@ See the file 'LICENSE' for copying permission
import io
import time
import types
import urllib2
import urlparse
from lib.core.data import conf
from lib.core.data import kb
@@ -32,8 +30,9 @@ from lib.core.settings import MAX_TOTAL_REDIRECTIONS
from lib.core.threads import getCurrentThreadData
from lib.request.basic import decodePage
from lib.request.basic import parseResponse
from thirdparty.six.moves import urllib as _urllib
class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
class SmartRedirectHandler(_urllib.request.HTTPRedirectHandler):
def _get_header_redirect(self, headers):
retVal = None
@@ -66,7 +65,7 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
def _redirect_request(self, req, fp, code, msg, headers, newurl):
newurl = newurl.replace(' ', '%20')
return urllib2.Request(newurl, data=req.data, headers=req.headers, origin_req_host=req.get_origin_req_host())
return _urllib.request.Request(newurl, data=req.data, headers=req.headers, origin_req_host=req.get_origin_req_host())
def http_error_302(self, req, fp, code, msg, headers):
start = time.time()
@@ -109,8 +108,8 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
if redurl:
try:
if not urlparse.urlsplit(redurl).netloc:
redurl = urlparse.urljoin(req.get_full_url(), redurl)
if not _urllib.parse.urlsplit(redurl).netloc:
redurl = _urllib.parse.urljoin(req.get_full_url(), redurl)
self._infinite_loop_check(req)
self._ask_redirect_choice(code, redurl, req.get_method())
@@ -139,8 +138,8 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
req.headers[HTTP_HEADER.COOKIE] = delimiter.join("%s=%s" % (key, cookies[key]) for key in cookies)
try:
result = urllib2.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers)
except urllib2.HTTPError as ex:
result = _urllib.request.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers)
except _urllib.error.HTTPError as ex:
result = ex
# Dirty hack for http://bugs.python.org/issue15701

View File

@@ -10,7 +10,6 @@ import os
import posixpath
import re
import tempfile
import urlparse
from extra.cloak.cloak import decloak
from lib.core.agent import agent
@@ -52,6 +51,7 @@ from lib.core.settings import SHELL_WRITABLE_DIR_TAG
from lib.core.settings import VIEWSTATE_REGEX
from lib.request.connect import Connect as Request
from thirdparty.oset.pyoset import oset
from thirdparty.six.moves import urllib as _urllib
class Web:
"""
@@ -256,7 +256,7 @@ class Web:
directories.extend(getAutoDirectories())
directories = list(oset(directories))
path = urlparse.urlparse(conf.url).path or '/'
path = _urllib.parse.urlparse(conf.url).path or '/'
path = re.sub(r"/[^/]*\.\w+\Z", '/', path)
if path != '/':
_ = []
@@ -295,7 +295,7 @@ class Web:
for match in re.finditer('/', directory):
self.webBaseUrl = "%s://%s:%d%s/" % (conf.scheme, conf.hostname, conf.port, directory[match.start():].rstrip('/'))
self.webStagerUrl = urlparse.urljoin(self.webBaseUrl, stagerName)
self.webStagerUrl = _urllib.parse.urljoin(self.webBaseUrl, stagerName)
debugMsg = "trying to see if the file is accessible from '%s'" % self.webStagerUrl
logger.debug(debugMsg)
@@ -332,7 +332,7 @@ class Web:
for match in re.finditer('/', directory):
self.webBaseUrl = "%s://%s:%d%s/" % (conf.scheme, conf.hostname, conf.port, directory[match.start():].rstrip('/'))
self.webStagerUrl = urlparse.urljoin(self.webBaseUrl, stagerName)
self.webStagerUrl = _urllib.parse.urljoin(self.webBaseUrl, stagerName)
debugMsg = "trying to see if the file is accessible from '%s'" % self.webStagerUrl
logger.debug(debugMsg)

View File

@@ -9,7 +9,6 @@ See the file 'LICENSE' for copying permission
from __future__ import print_function
import contextlib
import httplib
import logging
import os
import re
@@ -19,7 +18,6 @@ import sqlite3
import sys
import tempfile
import time
import urllib2
from lib.core.common import dataToStdout
from lib.core.common import getSafeExString
@@ -57,6 +55,8 @@ from thirdparty.bottle.bottle import request
from thirdparty.bottle.bottle import response
from thirdparty.bottle.bottle import run
from thirdparty.bottle.bottle import server_names
from thirdparty.six.moves import http_client as _http_client
from thirdparty.six.moves import urllib as _urllib
# Global data storage
class DataStore(object):
@@ -716,8 +716,8 @@ def _client(url, options=None):
if DataStore.username or DataStore.password:
headers["Authorization"] = "Basic %s" % base64encode("%s:%s" % (DataStore.username or "", DataStore.password or ""))
req = urllib2.Request(url, data, headers)
response = urllib2.urlopen(req)
req = _urllib.request.Request(url, data, headers)
response = _urllib.request.urlopen(req)
text = response.read()
except:
if options:
@@ -746,7 +746,7 @@ def client(host=RESTAPI_DEFAULT_ADDRESS, port=RESTAPI_DEFAULT_PORT, username=Non
try:
_client(addr)
except Exception as ex:
if not isinstance(ex, urllib2.HTTPError) or ex.code == httplib.UNAUTHORIZED:
if not isinstance(ex, _urllib.error.HTTPError) or ex.code == _http_client.UNAUTHORIZED:
errMsg = "There has been a problem while connecting to the "
errMsg += "REST-JSON API server at '%s' " % addr
errMsg += "(%s)" % ex

View File

@@ -5,10 +5,8 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
import httplib
import os
import re
import urlparse
import tempfile
import time
@@ -34,6 +32,8 @@ from lib.parse.sitemap import parseSitemap
from lib.request.connect import Connect as Request
from thirdparty.beautifulsoup.beautifulsoup import BeautifulSoup
from thirdparty.oset.pyoset import oset
from thirdparty.six.moves import http_client as _http_client
from thirdparty.six.moves import urllib as _urllib
def crawl(target):
try:
@@ -70,7 +70,7 @@ def crawl(target):
except SqlmapSyntaxException:
errMsg = "invalid URL detected. skipping '%s'" % current
logger.critical(errMsg)
except httplib.InvalidURL as ex:
except _http_client.InvalidURL as ex:
errMsg = "invalid URL detected ('%s'). skipping " % getSafeExString(ex)
errMsg += "URL '%s'" % current
logger.critical(errMsg)
@@ -96,7 +96,7 @@ def crawl(target):
if href:
if threadData.lastRedirectURL and threadData.lastRedirectURL[0] == threadData.lastRequestUID:
current = threadData.lastRedirectURL[1]
url = urlparse.urljoin(current, href)
url = _urllib.parse.urljoin(current, href)
# flag to know if we are dealing with the same target host
_ = checkSameHost(url, target)
@@ -135,7 +135,7 @@ def crawl(target):
if readInput(message, default='N', boolean=True):
found = True
items = None
url = urlparse.urljoin(target, "/sitemap.xml")
url = _urllib.parse.urljoin(target, "/sitemap.xml")
try:
items = parseSitemap(url)
except SqlmapConnectionException as ex:

View File

@@ -8,7 +8,6 @@ See the file 'LICENSE' for copying permission
import base64
import BaseHTTPServer
import datetime
import httplib
import io
import re
import time
@@ -157,12 +156,12 @@ class Response:
altered = status_line + "\r\n" + remain
comment = first_line
response = httplib.HTTPResponse(FakeSocket(altered))
response = _http_client.HTTPResponse(FakeSocket(altered))
response.begin()
try:
content = response.read(-1)
except httplib.IncompleteRead:
except _http_client.IncompleteRead:
content = raw[raw.find("\r\n\r\n") + 4:].rstrip("\r\n")
return cls(httpVersion="HTTP/1.1" if response.version == 11 else "HTTP/1.0",
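The FakeSocket trick above parses a raw response string by handing HTTPResponse something file-shaped. A self-contained sketch of the idea (this minimal FakeSocket is an assumption; the real helper is defined elsewhere in the file):

    import io
    from six.moves import http_client as _http_client

    class FakeSocket(object):
        def __init__(self, data):
            self._file = io.BytesIO(data)

        def makefile(self, *args, **kwargs):   # the only method HTTPResponse needs
            return self._file

    raw = b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nhi"
    response = _http_client.HTTPResponse(FakeSocket(raw))
    response.begin()
    print(response.status, response.read())   # 200 b'hi'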

View File

@@ -5,11 +5,8 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
import httplib
import re
import socket
import urllib
import urllib2
from lib.core.common import getSafeExString
from lib.core.common import getUnicode
@@ -34,6 +31,8 @@ from lib.core.settings import GOOGLE_REGEX
from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
from lib.core.settings import UNICODE_ENCODING
from lib.request.basic import decodePage
from thirdparty.six.moves import http_client as _http_client
from thirdparty.six.moves import urllib as _urllib
from thirdparty.socks import socks
def _search(dork):
@@ -52,8 +51,8 @@ def _search(dork):
headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE
try:
req = urllib2.Request("https://www.google.com/ncr", headers=headers)
conn = urllib2.urlopen(req)
req = _urllib.request.Request("https://www.google.com/ncr", headers=headers)
conn = _urllib.request.urlopen(req)
except Exception as ex:
errMsg = "unable to connect to Google ('%s')" % getSafeExString(ex)
raise SqlmapConnectionException(errMsg)
@@ -67,11 +66,11 @@ def _search(dork):
url += "&start=%d" % ((gpage - 1) * 100)
try:
req = urllib2.Request(url, headers=headers)
conn = urllib2.urlopen(req)
req = _urllib.request.Request(url, headers=headers)
conn = _urllib.request.urlopen(req)
requestMsg = "HTTP request:\nGET %s" % url
requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
requestMsg += " %s" % _http_client.HTTPException._http_vsn_str
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
page = conn.read()
@@ -88,7 +87,7 @@ def _search(dork):
responseMsg += "%s\n%s\n" % (responseHeaders, page)
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
except urllib2.HTTPError as ex:
except _urllib.error.HTTPError as ex:
try:
page = ex.read()
except Exception as _:
@@ -96,11 +95,11 @@ def _search(dork):
warnMsg += "an error page information (%s)" % getSafeExString(_)
logger.critical(warnMsg)
return None
except (urllib2.URLError, httplib.error, socket.error, socket.timeout, socks.ProxyError):
except (_urllib.error.URLError, _http_client.error, socket.error, socket.timeout, socks.ProxyError):
errMsg = "unable to connect to Google"
raise SqlmapConnectionException(errMsg)
retVal = [urllib.unquote(match.group(1) or match.group(2)) for match in re.finditer(GOOGLE_REGEX, page, re.I)]
retVal = [_urllib.parse.unquote(match.group(1) or match.group(2)) for match in re.finditer(GOOGLE_REGEX, page, re.I)]
if not retVal and "detected unusual traffic" in page:
warnMsg = "Google has detected 'unusual' traffic from "
@@ -129,11 +128,11 @@ def _search(dork):
regex = DUCKDUCKGO_REGEX
try:
req = urllib2.Request(url, data=data, headers=headers)
conn = urllib2.urlopen(req)
req = _urllib.request.Request(url, data=data, headers=headers)
conn = _urllib.request.urlopen(req)
requestMsg = "HTTP request:\nGET %s" % url
requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
requestMsg += " %s" % _http_client.HTTPException._http_vsn_str
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
page = conn.read()
@@ -150,7 +149,7 @@ def _search(dork):
responseMsg += "%s\n%s\n" % (responseHeaders, page)
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
except urllib2.HTTPError as ex:
except _urllib.error.HTTPError as ex:
try:
page = ex.read()
page = decodePage(page, ex.headers.get("Content-Encoding"), ex.headers.get("Content-Type"))
@@ -163,7 +162,7 @@ def _search(dork):
errMsg = "unable to connect"
raise SqlmapConnectionException(errMsg)
retVal = [urllib.unquote(match.group(1).replace("&amp;", "&")) for match in re.finditer(regex, page, re.I | re.S)]
retVal = [_urllib.parse.unquote(match.group(1).replace("&amp;", "&")) for match in re.finditer(regex, page, re.I | re.S)]
if not retVal and "issue with the Tor Exit Node you are currently using" in page:
warnMsg = "DuckDuckGo has detected 'unusual' traffic from "

View File

@@ -66,17 +66,6 @@ __all__ = ['AmbiguityError', 'CheckboxControl', 'Control',
'SubmitButtonControl', 'SubmitControl', 'TextControl',
'TextareaControl', 'XHTMLCompatibleFormParser']
try: True
except NameError:
True = 1
False = 0
try: bool
except NameError:
def bool(expr):
if expr: return True
else: return False
try:
import logging
import inspect
@@ -792,7 +781,7 @@ else:
def feed(self, data):
try:
HTMLParser.HTMLParser.feed(self, data)
except HTMLParser.HTMLParseError, exc:
except HTMLParser.HTMLParseError as exc:
raise ParseError(exc)
def start_option(self, attrs):
@@ -870,7 +859,7 @@ class FormParser(_AbstractSgmllibParser, sgmllib.SGMLParser):
def feed(self, data):
try:
sgmllib.SGMLParser.feed(self, data)
except SGMLLIB_PARSEERROR, exc:
except SGMLLIB_PARSEERROR as exc:
raise ParseError(exc)
def close(self):
@@ -896,7 +885,7 @@ def _create_bs_classes(bs,
def feed(self, data):
try:
self.bs_base_class.feed(self, data)
except SGMLLIB_PARSEERROR, exc:
except SGMLLIB_PARSEERROR as exc:
raise ParseError(exc)
def close(self):
self.bs_base_class.close(self)
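The except rewrites in this file are the commit's pure syntax fixes: "except ExcType, exc" is a SyntaxError on Python 3, while the "as" form parses on Python 2.6+ and Python 3 alike:

    try:
        1 // 0
    except ZeroDivisionError as exc:   # valid on Python 2.6+ and Python 3
        print(exc)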