"""
Helper functions.
"""

import logging
import os
import shutil
import sys
import sysconfig
import tempfile
from io import BytesIO

import pytest

from PIL import Image, ImageMath, features

logger = logging.getLogger(__name__)


HAS_UPLOADER = False
if os.environ.get("SHOW_ERRORS", None):
    # local img.show for errors.
    HAS_UPLOADER = True

    class test_image_results:
        @staticmethod
        def upload(a, b):
            a.show()
            b.show()


elif "GITHUB_ACTIONS" in os.environ:
    HAS_UPLOADER = True

    class test_image_results:
        @staticmethod
        def upload(a, b):
            dir_errors = os.path.join(os.path.dirname(__file__), "errors")
            os.makedirs(dir_errors, exist_ok=True)
            tmpdir = tempfile.mkdtemp(dir=dir_errors)
            a.save(os.path.join(tmpdir, "a.png"))
            b.save(os.path.join(tmpdir, "b.png"))
            return tmpdir


else:
    try:
        import test_image_results

        HAS_UPLOADER = True
    except ImportError:
        pass


def convert_to_comparable(a, b):
    new_a, new_b = a, b
    if a.mode == "P":
        new_a = Image.new("L", a.size)
        new_b = Image.new("L", b.size)
        new_a.putdata(a.getdata())
        new_b.putdata(b.getdata())
    elif a.mode == "I;16":
        new_a = a.convert("I")
        new_b = b.convert("I")
    return new_a, new_b
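
# Usage sketch (illustrative only; the two 8x8 palette images below are made
# up for the example): convert_to_comparable() maps "P" and "I;16" pairs onto
# modes that ImageMath can diff channel by channel.
#
#     a = Image.new("P", (8, 8))
#     b = Image.new("P", (8, 8))
#     a, b = convert_to_comparable(a, b)
#     assert a.mode == b.mode == "L"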


def assert_deep_equal(a, b, msg=None):
    try:
        assert len(a) == len(b), msg or f"got length {len(a)}, expected {len(b)}"
    except Exception:
        assert a == b, msg


def assert_image(im, mode, size, msg=None):
    if mode is not None:
        assert im.mode == mode, (
            msg or f"got mode {repr(im.mode)}, expected {repr(mode)}"
        )

    if size is not None:
        assert im.size == size, (
            msg or f"got size {repr(im.size)}, expected {repr(size)}"
        )


def assert_image_equal(a, b, msg=None):
    assert a.mode == b.mode, msg or f"got mode {repr(a.mode)}, expected {repr(b.mode)}"
    assert a.size == b.size, msg or f"got size {repr(a.size)}, expected {repr(b.size)}"
    if a.tobytes() != b.tobytes():
        if HAS_UPLOADER:
            try:
                url = test_image_results.upload(a, b)
                logger.error(f"Url for test images: {url}")
            except Exception:
                pass

        assert False, msg or "got different content"


def assert_image_equal_tofile(a, filename, msg=None, mode=None):
    with Image.open(filename) as img:
        if mode:
            img = img.convert(mode)
        assert_image_equal(a, img, msg)
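
# Typical call from a test (a sketch; pytest's tmp_path fixture is assumed, and
# hopper() is the reference-image helper defined further down in this module):
#
#     def test_saved_copy_matches_reference(tmp_path):
#         im = hopper()
#         out = str(tmp_path / "out.png")
#         im.save(out)
#         assert_image_equal_tofile(im, out)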


def assert_image_similar(a, b, epsilon, msg=None):
    assert a.mode == b.mode, msg or f"got mode {repr(a.mode)}, expected {repr(b.mode)}"
    assert a.size == b.size, msg or f"got size {repr(a.size)}, expected {repr(b.size)}"

    a, b = convert_to_comparable(a, b)

    diff = 0
    for ach, bch in zip(a.split(), b.split()):
        chdiff = ImageMath.eval("abs(a - b)", a=ach, b=bch).convert("L")
        diff += sum(i * num for i, num in enumerate(chdiff.histogram()))

    ave_diff = diff / (a.size[0] * a.size[1])
    try:
        assert epsilon >= ave_diff, (
            (msg or "")
            + f" average pixel value difference {ave_diff:.4f} > epsilon {epsilon:.4f}"
        )
    except Exception as e:
        if HAS_UPLOADER:
            try:
                url = test_image_results.upload(a, b)
                logger.error(f"Url for test images: {url}")
            except Exception:
                pass
        raise e
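
# How epsilon is interpreted (an illustrative sketch, not part of the helper
# API): the absolute difference is summed over every channel of every pixel
# and divided by the pixel count, so epsilon bounds the average per-pixel
# difference. For a 2x1 "L" pair that differs by 3 in a single pixel:
#
#     a = Image.new("L", (2, 1), 0)
#     b = a.copy()
#     b.putpixel((0, 0), 3)
#     assert_image_similar(a, b, epsilon=1.5)   # passes: 3 / 2 pixels = 1.5
#     # assert_image_similar(a, b, 1.4)         # would fail: 1.5 > 1.4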


def assert_image_similar_tofile(a, filename, epsilon, msg=None, mode=None):
    with Image.open(filename) as img:
        if mode:
            img = img.convert(mode)
        assert_image_similar(a, img, epsilon, msg)


def assert_all_same(items, msg=None):
    assert items.count(items[0]) == len(items), msg


def assert_not_all_same(items, msg=None):
    assert items.count(items[0]) != len(items), msg


def assert_tuple_approx_equal(actuals, targets, threshold, msg):
    """Tests whether each value in actuals is within threshold of the
    corresponding value in targets."""
    value = True
    for i, target in enumerate(targets):
        value *= target - threshold <= actuals[i] <= target + threshold

    assert value, msg + ": " + repr(actuals) + " != " + repr(targets)
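
# Example of the tolerance check above (values are illustrative only):
#
#     assert_tuple_approx_equal((127, 128, 129), (128, 128, 128), threshold=1,
#                               msg="color drifted")
#     # Each component is within +/-1 of its target, so the assertion passes.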


def skip_unless_feature(feature):
    reason = f"{feature} not available"
    return pytest.mark.skipif(not features.check(feature), reason=reason)
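
# skip_unless_feature() is meant to be used as a test decorator; a sketch
# (the test body below is made up for illustration):
#
#     @skip_unless_feature("webp")
#     def test_webp_roundtrip(tmp_path):
#         hopper().save(str(tmp_path / "out.webp"))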


@pytest.mark.skipif(sys.platform.startswith("win32"), reason="Requires Unix or macOS")
class PillowLeakTestCase:
    # requires unix/macOS
    iterations = 100  # count
    mem_limit = 512  # kilobytes

    def _get_mem_usage(self):
        """
        Gets the RUSAGE memory usage and returns it in kilobytes. Encapsulates
        the difference between macOS and Linux rss reporting.

        :returns: memory usage in kilobytes
        """

        from resource import RUSAGE_SELF, getrusage

        mem = getrusage(RUSAGE_SELF).ru_maxrss
        if sys.platform == "darwin":
            # man 2 getrusage:
            #     ru_maxrss
            #     This is the maximum resident set size utilized (in bytes).
            return mem / 1024  # KB
        else:
            # linux
            # man 2 getrusage:
            #     ru_maxrss (since Linux 2.6.32)
            #     This is the maximum resident set size used (in kilobytes).
            return mem  # KB

    def _test_leak(self, core):
        start_mem = self._get_mem_usage()
        for cycle in range(self.iterations):
            core()
            mem = self._get_mem_usage() - start_mem
            msg = f"memory usage limit exceeded in iteration {cycle}"
            assert mem < self.mem_limit, msg
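
# A minimal sketch of how a leak test is expected to subclass this (the
# in-memory PNG roundtrip is just a stand-in for the operation under test):
#
#     class TestPngLeak(PillowLeakTestCase):
#         iterations = 50
#         mem_limit = 1024  # KB
#
#         def test_leak(self):
#             def core():
#                 tostring(hopper(), "PNG")
#
#             self._test_leak(core)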


# helpers


def fromstring(data):
    return Image.open(BytesIO(data))


def tostring(im, string_format, **options):
    out = BytesIO()
    im.save(out, string_format, **options)
    return out.getvalue()
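
# The two helpers above are inverses of each other; a roundtrip sketch
# (the format and save option are illustrative):
#
#     data = tostring(hopper(), "PNG", optimize=True)
#     reopened = fromstring(data)
#     assert_image_equal(hopper(), reopened)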


def hopper(mode=None, cache={}):
    if mode is None:
        # Always return a fresh, not-yet-loaded version of the image.
        # Operations on not-yet-loaded images are a separate class of errors
        # that we should catch.
        return Image.open("Tests/images/hopper.ppm")
    # Use caching to reduce reading from disk, but return a copy each time so
    # the cached image isn't modified by tests
    # (for fast, isolated, repeatable tests).
    im = cache.get(mode)
    if im is None:
        if mode == "F":
            im = hopper("L").convert(mode)
        elif mode[:4] == "I;16":
            im = hopper("I").convert(mode)
        else:
            im = hopper().convert(mode)
        cache[mode] = im
    return im.copy()
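
# hopper() is the standard reference image for these tests; a usage sketch
# (the modes shown are chosen for illustration):
#
#     rgb = hopper()      # fresh, not-yet-loaded RGB image
#     gray = hopper("L")  # converted copy, served from the module-level cache
#     assert rgb.size == gray.size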


def djpeg_available():
    return bool(shutil.which("djpeg"))


def cjpeg_available():
    return bool(shutil.which("cjpeg"))


def netpbm_available():
    return bool(shutil.which("ppmquant") and shutil.which("ppmtogif"))


def imagemagick_available():
    return bool(IMCONVERT and shutil.which(IMCONVERT))


def on_appveyor():
    return "APPVEYOR" in os.environ


def on_github_actions():
    return "GITHUB_ACTIONS" in os.environ


def on_ci():
    # GitHub Actions, Travis and AppVeyor have "CI"
    return "CI" in os.environ


def is_big_endian():
    return sys.byteorder == "big"


def is_win32():
    return sys.platform.startswith("win32")


def is_pypy():
    return hasattr(sys, "pypy_translation_info")


def is_mingw():
    return sysconfig.get_platform() == "mingw"
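
# These predicates are typically consumed by pytest markers; a sketch (the
# test name and reasons are illustrative):
#
#     @pytest.mark.skipif(is_win32(), reason="does not run on Windows")
#     @pytest.mark.skipif(not netpbm_available(), reason="netpbm not installed")
#     def test_ppmquant_pipeline():
#         ...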


if sys.platform == "win32":
    IMCONVERT = os.environ.get("MAGICK_HOME", "")
    if IMCONVERT:
        IMCONVERT = os.path.join(IMCONVERT, "convert.exe")
else:
    IMCONVERT = "convert"


class cached_property:
    def __init__(self, func):
        self.func = func

    def __get__(self, instance, cls=None):
        result = instance.__dict__[self.func.__name__] = self.func(instance)
        return result
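
# cached_property computes the wrapped method once per instance, then stores
# the result in the instance __dict__, which shadows the descriptor on later
# lookups. A usage sketch (the class below is hypothetical):
#
#     class Frames:
#         @cached_property
#         def count(self):
#             print("computed once")
#             return 42
#
#     f = Frames()
#     f.count  # prints and computes
#     f.count  # served from __dict__, no print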