commit 8d89389c36 (parent 4b75ca15e8)
StringIO is bad m'kay (python3 this and that)
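
The whole commit is a mechanical swap of the Python-2-only StringIO module for io.StringIO (text) and io.BytesIO (bytes), which behave the same on Python 2 and 3. A minimal standalone sketch, not taken from sqlmap, of the text/bytes split that decides which replacement each call site gets:

    import io

    # io.StringIO holds unicode text; io.BytesIO holds raw bytes.
    text_buf = io.StringIO()
    text_buf.write(u"SELECT * FROM users")        # unicode in, unicode out
    assert text_buf.getvalue() == u"SELECT * FROM users"

    byte_buf = io.BytesIO()
    byte_buf.write(b"\x1f\x8b\x08")               # e.g. a gzip magic header
    assert byte_buf.getvalue().startswith(b"\x1f\x8b")

    # Mixing them up is exactly what breaks on Python 3:
    try:
        io.StringIO().write(b"bytes")             # TypeError: text buffer, bytes argument
    except TypeError:
        pass
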
@@ -15,6 +15,7 @@ import getpass
 import hashlib
 import httplib
 import inspect
+import io
 import json
 import keyword
 import locale

@@ -40,7 +41,6 @@ import unicodedata

 from ConfigParser import DEFAULTSECT
 from ConfigParser import RawConfigParser
-from StringIO import StringIO
 from difflib import SequenceMatcher
 from math import sqrt
 from optparse import OptionValueError

@@ -158,7 +158,6 @@ from lib.core.settings import REFLECTED_REPLACEMENT_REGEX
 from lib.core.settings import REFLECTED_REPLACEMENT_TIMEOUT
 from lib.core.settings import REFLECTED_VALUE_MARKER
 from lib.core.settings import REFLECTIVE_MISS_THRESHOLD
-from lib.core.settings import SAFE_VARIABLE_MARKER
 from lib.core.settings import SENSITIVE_DATA_REGEX
 from lib.core.settings import SENSITIVE_OPTIONS
 from lib.core.settings import STDIN_PIPE_DASH

@@ -2079,7 +2078,7 @@ def parseXmlFile(xmlFile, handler):
     """

     try:
-        with contextlib.closing(StringIO(readCachedFileContent(xmlFile))) as stream:
+        with contextlib.closing(io.StringIO(readCachedFileContent(xmlFile))) as stream:
             parse(stream, handler)
     except (SAXParseException, UnicodeError) as ex:
         errMsg = "something appears to be wrong with "

@@ -3322,7 +3321,7 @@ def openFile(filename, mode='r', encoding=UNICODE_ENCODING, errors="replace", bu
         if filename not in kb.cache.content:
             kb.cache.content[filename] = sys.stdin.read()

-        return contextlib.closing(StringIO(readCachedFileContent(filename)))
+        return contextlib.closing(io.StringIO(readCachedFileContent(filename)))
     else:
         try:
             return codecs.open(filename, mode, encoding, errors, buffering)

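Both call sites above keep the old contextlib.closing(...) wrapper around the new io.StringIO buffer so the result can still be used in a with statement. A rough standalone sketch of the same pattern; the XML content and the handler below are made up, since readCachedFileContent and sqlmap's real handlers are not shown in this diff:

    import contextlib
    import io
    from xml.sax import parse
    from xml.sax.handler import ContentHandler

    class ElementRecorder(ContentHandler):
        # Hypothetical handler: just records element names it sees.
        def __init__(self):
            ContentHandler.__init__(self)
            self.seen = []

        def startElement(self, name, attrs):
            self.seen.append(name)

    xml_text = u"<root><boundary/><test/></root>"   # stand-in for readCachedFileContent(xmlFile)

    # contextlib.closing() guarantees close() even for file-like objects that are
    # not context managers; io.StringIO also works directly in a "with" block,
    # so closing() mainly preserves the old call shape.
    with contextlib.closing(io.StringIO(xml_text)) as stream:
        handler = ElementRecorder()
        parse(stream, handler)

    print(handler.seen)    # ['root', 'boundary', 'test']
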
@@ -4107,9 +4106,9 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
     set([(u'/input.php', 'POST', u'id=1', None, None)])
     """

-    class _(StringIO):
+    class _(io.BytesIO):
         def __init__(self, content, url):
-            StringIO.__init__(self, unicodeencode(content, kb.pageEncoding) if isinstance(content, unicode) else content)
+            io.BytesIO.__init__(self, unicodeencode(content, kb.pageEncoding) if isinstance(content, unicode) else content)
             self._url = url

         def geturl(self):

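The throwaway class _ turns a fetched page into something that looks enough like a urllib response: a readable bytes buffer that also answers geturl(). A standalone sketch of that idea, with an assumed class name and a plain UTF-8 encode standing in for sqlmap's unicodeencode():

    import io

    class ResponseLikeBytes(io.BytesIO):
        # A BytesIO that also reports the URL it was fetched from, which is
        # all that many HTML-form parsers need from a urllib response object.
        def __init__(self, content, url):
            if isinstance(content, str):            # encode text pages to bytes
                content = content.encode("utf-8", "replace")
            io.BytesIO.__init__(self, content)
            self._url = url

        def geturl(self):
            return self._url

    fake_response = ResponseLikeBytes("<form action='/input.php'></form>", "http://target/")
    print(fake_response.geturl())        # http://target/
    print(fake_response.read()[:5])      # b'<form'
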
@@ -13,9 +13,9 @@ finally:
     import pickle as picklePy

 import base64
+import io
 import json
 import re
-import StringIO
 import sys

 from lib.core.settings import IS_WIN

@@ -84,7 +84,7 @@ def base64unpickle(value, unsafe=False):
         self.load_reduce()

     def loads(str):
-        f = StringIO.StringIO(str)
+        f = io.BytesIO(str)
         if unsafe:
             unpickler = picklePy.Unpickler(f)
             unpickler.dispatch[picklePy.REDUCE] = _

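The loads() helper above feeds an io.BytesIO buffer to a pure-Python Unpickler whose REDUCE dispatch entry has been replaced, so a hostile pickle cannot trigger arbitrary calls. That dispatch-table trick is specific to the Python 2 pickle module; a rough Python 3 equivalent, shown here only as an illustration, restricts find_class instead:

    import io
    import pickle

    class RestrictedUnpickler(pickle.Unpickler):
        # Refuse to resolve any global, so a pickle cannot make the loader
        # call arbitrary code during load().
        def find_class(self, module, name):
            raise pickle.UnpicklingError("global '%s.%s' is forbidden" % (module, name))

    def safe_loads(data):
        return RestrictedUnpickler(io.BytesIO(data)).load()

    class Evil(object):
        def __reduce__(self):                  # a booby-trapped object
            return (print, ("this would run on load",))

    print(safe_loads(pickle.dumps({"id": 1})))   # harmless data still loads: {'id': 1}

    try:
        safe_loads(pickle.dumps(Evil()))
    except pickle.UnpicklingError as ex:
        print("blocked:", ex)
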
@@ -125,7 +125,6 @@ from lib.core.settings import SQLMAP_ENVIRONMENT_PREFIX
 from lib.core.settings import SUPPORTED_DBMS
 from lib.core.settings import SUPPORTED_OS
 from lib.core.settings import TIME_DELAY_CANDIDATES
-from lib.core.settings import UNICODE_ENCODING
 from lib.core.settings import UNION_CHAR_REGEX
 from lib.core.settings import UNKNOWN_DBMS_VERSION
 from lib.core.settings import URI_INJECTABLE_REGEX

@@ -19,7 +19,7 @@ from lib.core.enums import DBMS_DIRECTORY_NAME
 from lib.core.enums import OS

 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.3.3.49"
+VERSION = "1.3.3.50"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)

@@ -7,9 +7,9 @@ See the file 'LICENSE' for copying permission

 import codecs
 import gzip
+import io
 import logging
 import re
-import StringIO
 import struct
 import zlib

@@ -273,9 +273,9 @@ def decodePage(page, contentEncoding, contentType):

         try:
             if contentEncoding == "deflate":
-                data = StringIO.StringIO(zlib.decompress(page, -15)) # Reference: http://stackoverflow.com/questions/1089662/python-inflate-and-deflate-implementations
+                data = io.BytesIO(zlib.decompress(page, -15)) # Reference: http://stackoverflow.com/questions/1089662/python-inflate-and-deflate-implementations
             else:
-                data = gzip.GzipFile("", "rb", 9, StringIO.StringIO(page))
+                data = gzip.GzipFile("", "rb", 9, io.BytesIO(page))
                 size = struct.unpack("<l", page[-4:])[0] # Reference: http://pydoc.org/get.cgi/usr/local/lib/python2.5/gzip.py
                 if size > MAX_CONNECTION_TOTAL_SIZE:
                     raise Exception("size too large")

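decodePage() now decompresses into and out of io.BytesIO: raw deflate bodies go through zlib with a -15 window and gzip bodies through gzip.GzipFile, with the uncompressed size read from the gzip trailer as a sanity check. A standalone sketch of the same flow; MAX_SIZE is a stand-in for sqlmap's MAX_CONNECTION_TOTAL_SIZE:

    import gzip
    import io
    import struct
    import zlib

    MAX_SIZE = 10 * 1024 * 1024          # stand-in for MAX_CONNECTION_TOTAL_SIZE

    def decode_body(page, content_encoding):
        # page is the raw bytes of an HTTP body compressed per Content-Encoding.
        if content_encoding == "deflate":
            # wbits=-15 parses a raw deflate stream (many servers omit the zlib header)
            data = io.BytesIO(zlib.decompress(page, -15))
        else:
            data = gzip.GzipFile(fileobj=io.BytesIO(page))
            # the last 4 bytes of a gzip stream hold the uncompressed length (mod 2**32)
            size = struct.unpack("<I", page[-4:])[0]
            if size > MAX_SIZE:
                raise Exception("size too large")
        return data.read()

    original = b"<html>hello</html>"
    gz = io.BytesIO()
    with gzip.GzipFile(fileobj=gz, mode="wb") as f:
        f.write(original)

    assert decode_body(gz.getvalue(), "gzip") == original

    raw_deflate = zlib.compress(original)[2:-4]   # strip zlib header/checksum to mimic raw deflate
    assert decode_body(raw_deflate, "deflate") == original
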
@@ -5,13 +5,12 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

+import io
 import time
 import types
 import urllib2
 import urlparse

-from StringIO import StringIO
-
 from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger

@@ -165,7 +164,7 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
             except:
                 redurl = None
                 result = fp
-                fp.read = StringIO("").read
+                fp.read = io.BytesIO("").read
         else:
             result = fp

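Rebinding fp.read to the read of an empty buffer keeps the response object usable while guaranteeing that any later read returns nothing. A tiny illustration (note that once this code runs on Python 3 the buffer has to be built from b"" rather than ""):

    import io

    class FakeResponse(object):
        # stand-in for the urllib2 response object "fp" in the handler
        def __init__(self, body):
            self._buf = io.BytesIO(body)
            self.read = self._buf.read

    resp = FakeResponse(b"<html>original body</html>")

    # Blank out the body the same way the handler does: rebind read() to an
    # empty buffer's read.
    resp.read = io.BytesIO(b"").read

    print(resp.read())    # b''
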
@@ -5,10 +5,10 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

+import io
 import os
 import posixpath
 import re
-import StringIO
 import tempfile
 import urlparse

@@ -97,7 +97,7 @@ class Web:
             content = f.read()

         if content is not None:
-            stream = StringIO.StringIO(content) # string content
+            stream = io.BytesIO(content) # string content

             return self._webFileStreamUpload(stream, destFileName, directory)

@@ -9,8 +9,8 @@ import base64
 import BaseHTTPServer
 import datetime
 import httplib
+import io
 import re
-import StringIO
 import time

 from lib.core.bigarray import BigArray

@@ -149,11 +149,11 @@ class Response:
         comment = ""

         if altered.startswith("HTTP response [") or altered.startswith("HTTP redirect ["):
-            io = StringIO.StringIO(raw)
-            first_line = io.readline()
+            stream = io.StringIO(raw)
+            first_line = stream.readline()
             parts = cls.extract_status.search(first_line)
             status_line = "HTTP/1.0 %s %s" % (parts.group(1), parts.group(2))
-            remain = io.read()
+            remain = stream.read()
             altered = status_line + "\r\n" + remain
             comment = first_line

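The HAR writer rebuilds a proper status line from sqlmap's own log banner: it reads the first line off an io.StringIO, pulls the status code and reason out with a regex, and prepends an HTTP/1.0 line to the rest. A standalone sketch; the regex below only approximates cls.extract_status, which is not shown in this diff:

    import io
    import re

    # assumed to be similar in spirit to the class attribute cls.extract_status
    extract_status = re.compile(r"\((\d{3}) (.*)\)")

    raw = "HTTP response [#1] (200 OK):\r\nContent-Type: text/html\r\n\r\n<html></html>"

    stream = io.StringIO(raw)
    first_line = stream.readline()                  # the log banner line
    parts = extract_status.search(first_line)
    status_line = "HTTP/1.0 %s %s" % (parts.group(1), parts.group(2))
    remain = stream.read()                          # everything after the banner
    altered = status_line + "\r\n" + remain

    print(altered.splitlines()[0])                  # HTTP/1.0 200 OK
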
@@ -203,7 +203,7 @@ class FakeSocket:
     # https://stackoverflow.com/questions/24728088/python-parse-http-response-string

     def __init__(self, response_text):
-        self._file = StringIO.StringIO(response_text)
+        self._file = io.StringIO(response_text)

     def makefile(self, *args, **kwargs):
         return self._file

@@ -214,7 +214,7 @@ class HTTPRequest(BaseHTTPServer.BaseHTTPRequestHandler):

     def __init__(self, request_text):
         self.comment = None
-        self.rfile = StringIO.StringIO(request_text)
+        self.rfile = io.StringIO(request_text)
         self.raw_requestline = self.rfile.readline()

         if self.raw_requestline.startswith("HTTP request ["):

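FakeSocket and HTTPRequest exist to push canned request/response text through the stdlib HTTP parsers, which only ever ask the "socket" for makefile() or read from rfile. A Python 3 flavoured sketch of the same trick using http.client (the code above still targets httplib/BaseHTTPServer on Python 2):

    import io
    from http.client import HTTPResponse    # Python 3 counterpart of httplib

    class FakeSocket(object):
        # Pretend to be a socket whose makefile() returns the canned response,
        # so the stdlib parser does all the work.
        def __init__(self, response_bytes):
            self._file = io.BytesIO(response_bytes)

        def makefile(self, *args, **kwargs):
            return self._file

    raw = (b"HTTP/1.1 200 OK\r\n"
           b"Content-Type: text/html\r\n"
           b"Content-Length: 6\r\n"
           b"\r\n"
           b"<html>")

    response = HTTPResponse(FakeSocket(raw))
    response.begin()                                              # parse status line and headers
    print(response.status, response.getheader("Content-Type"))   # 200 text/html
    print(response.read())                                        # b'<html>'
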
thirdparty/multipart/multipartpost.py (vendored): 4 lines changed

@@ -20,11 +20,11 @@ License along with this library; if not, write to the Free Software
 Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 """

+import io
 import mimetools
 import mimetypes
 import os
 import stat
-import StringIO
 import sys
 import urllib
 import urllib2

@@ -53,7 +53,7 @@ class MultipartPostHandler(urllib2.BaseHandler):

             try:
                 for(key, value) in data.items():
-                    if isinstance(value, file) or hasattr(value, "file") or isinstance(value, StringIO.StringIO):
+                    if isinstance(value, file) or hasattr(value, "file") or isinstance(value, io.IOBase):
                         v_files.append((key, value))
                     else:
                         v_vars.append((key, value))

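In the vendored multipart handler the file-or-field decision now leans on io.IOBase, which covers io.BytesIO, io.StringIO and the objects returned by open(), instead of the old StringIO class; the Python 2 built-in file type is still checked separately above. A standalone sketch of that classification under those assumptions:

    import io

    def is_file_like(value):
        # rough equivalent of the updated check (the Python 2 "file" test is
        # dropped here because the built-in file type no longer exists on Python 3)
        return hasattr(value, "file") or isinstance(value, io.IOBase)

    data = {
        "name": "admin",                              # ordinary form field
        "avatar": io.BytesIO(b"\x89PNG..."),          # in-memory upload
        "report": open(__file__, "rb"),               # real file object
    }

    v_files = [(k, v) for k, v in data.items() if is_file_like(v)]
    v_vars = [(k, v) for k, v in data.items() if not is_file_like(v)]

    print(sorted(k for k, _ in v_files))    # ['avatar', 'report']
    print(sorted(k for k, _ in v_vars))     # ['name']
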