"""
Helper functions.
"""

import logging
import os
import sys
import tempfile
import unittest

from PIL import Image, ImageMath

logger = logging.getLogger(__name__)


HAS_UPLOADER = False

if os.environ.get("SHOW_ERRORS", None):
    # local img.show for errors.
    HAS_UPLOADER = True

    class test_image_results:
        @staticmethod
        def upload(a, b):
            a.show()
            b.show()

else:
    try:
        import test_image_results

        HAS_UPLOADER = True
    except ImportError:
        pass
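
# When SHOW_ERRORS is set, the stub class above simply displays both images
# locally; otherwise the optional test_image_results module (if importable)
# is expected to upload the two images and return a URL, which the
# assert_image_* helpers below log on failure.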


def convert_to_comparable(a, b):
    new_a, new_b = a, b
    if a.mode == "P":
        new_a = Image.new("L", a.size)
        new_b = Image.new("L", b.size)
        new_a.putdata(a.getdata())
        new_b.putdata(b.getdata())
    elif a.mode == "I;16":
        new_a = a.convert("I")
        new_b = b.convert("I")
    return new_a, new_b
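
# Rough sketch of the intent (illustrative only): palette ("P") images are
# compared on their raw palette indices, copied into "L" images, and 16-bit
# ("I;16") images are widened to 32-bit "I" so that the channel-wise
# arithmetic in assert_image_similar is well defined, e.g.:
#
#     a, b = convert_to_comparable(hopper("P"), hopper("P"))
#     # a.mode == b.mode == "L"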


class PillowTestCase(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        unittest.TestCase.__init__(self, *args, **kwargs)
        # holds last result object passed to run method:
        self.currentResult = None

    def run(self, result=None):
        self.currentResult = result  # remember result for use later
        unittest.TestCase.run(self, result)  # call superclass run method

    def delete_tempfile(self, path):
        try:
            ok = self.currentResult.wasSuccessful()
        except AttributeError:  # for pytest
            ok = True

        if ok:
            # only clean out tempfiles if test passed
            try:
                os.remove(path)
            except OSError:
                pass  # report?
        else:
            print("=== orphaned temp file: %s" % path)

    def assert_deep_equal(self, a, b, msg=None):
        try:
            self.assertEqual(
                len(a),
                len(b),
                msg or "got length {}, expected {}".format(len(a), len(b)),
            )
            self.assertTrue(
                all(x == y for x, y in zip(a, b)),
                msg or "got {}, expected {}".format(a, b),
            )
        except Exception:
            self.assertEqual(a, b, msg)

    def assert_image(self, im, mode, size, msg=None):
        if mode is not None:
            self.assertEqual(
                im.mode,
                mode,
                msg or "got mode {!r}, expected {!r}".format(im.mode, mode),
            )

        if size is not None:
            self.assertEqual(
                im.size,
                size,
                msg or "got size {!r}, expected {!r}".format(im.size, size),
            )

    def assert_image_equal(self, a, b, msg=None):
        self.assertEqual(
            a.mode, b.mode, msg or "got mode {!r}, expected {!r}".format(a.mode, b.mode)
        )
        self.assertEqual(
            a.size, b.size, msg or "got size {!r}, expected {!r}".format(a.size, b.size)
        )
        if a.tobytes() != b.tobytes():
            if HAS_UPLOADER:
                try:
                    url = test_image_results.upload(a, b)
                    logger.error("Url for test images: %s" % url)
                except Exception:
                    pass

            self.fail(msg or "got different content")
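
    # Illustrative usage inside a test (hypothetical test code):
    #
    #     def test_flip(self):
    #         im = hopper()
    #         flipped = im.transpose(Image.FLIP_LEFT_RIGHT)
    #         self.assert_image_equal(
    #             flipped.transpose(Image.FLIP_LEFT_RIGHT), im
    #         )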

    def assert_image_equal_tofile(self, a, filename, msg=None, mode=None):
        with Image.open(filename) as img:
            if mode:
                img = img.convert(mode)
            self.assert_image_equal(a, img, msg)

    def assert_image_similar(self, a, b, epsilon, msg=None):
        epsilon = float(epsilon)
        self.assertEqual(
            a.mode, b.mode, msg or "got mode {!r}, expected {!r}".format(a.mode, b.mode)
        )
        self.assertEqual(
            a.size, b.size, msg or "got size {!r}, expected {!r}".format(a.size, b.size)
        )

        a, b = convert_to_comparable(a, b)

        # Sum of per-pixel absolute differences, accumulated channel by
        # channel from the histogram of abs(a - b).
        diff = 0
        for ach, bch in zip(a.split(), b.split()):
            chdiff = ImageMath.eval("abs(a - b)", a=ach, b=bch).convert("L")
            diff += sum(i * num for i, num in enumerate(chdiff.histogram()))

        ave_diff = float(diff) / (a.size[0] * a.size[1])
        try:
            self.assertGreaterEqual(
                epsilon,
                ave_diff,
                (msg or "")
                + " average pixel value difference %.4f > epsilon %.4f"
                % (ave_diff, epsilon),
            )
        except Exception as e:
            if HAS_UPLOADER:
                try:
                    url = test_image_results.upload(a, b)
                    logger.error("Url for test images: %s" % url)
                except Exception:
                    pass
            raise e
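
    # Worked example of the metric (illustrative numbers, not from a real
    # test): for a pair of 2x2 "L" images whose per-pixel absolute
    # differences are [0, 1, 2, 1], diff == 4 and ave_diff == 4 / 4 == 1.0,
    # so assert_image_similar(a, b, epsilon=1.0) passes while epsilon=0.5
    # fails.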

    def assert_image_similar_tofile(self, a, filename, epsilon, msg=None, mode=None):
        with Image.open(filename) as img:
            if mode:
                img = img.convert(mode)
            self.assert_image_similar(a, img, epsilon, msg)

    def assert_warning(self, warn_class, func, *args, **kwargs):
        import warnings

        with warnings.catch_warnings(record=True) as w:
            # Cause all warnings to always be triggered.
            warnings.simplefilter("always")

            # Hopefully trigger a warning.
            result = func(*args, **kwargs)

            # Verify some things.
            if warn_class is None:
                self.assertEqual(
                    len(w), 0, "Expected no warnings, got %s" % [v.category for v in w]
                )
            else:
                self.assertGreaterEqual(len(w), 1)
                found = False
                for v in w:
                    if issubclass(v.category, warn_class):
                        found = True
                        break
                self.assertTrue(found)
        return result
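
    # Typical call shape (hypothetical test code): run func(*args, **kwargs),
    # assert that a warning of the given class was emitted, and hand back the
    # wrapped call's return value:
    #
    #     result = self.assert_warning(UserWarning, my_func, arg)
    #     result = self.assert_warning(None, my_func, arg)  # expect no warning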

    def assert_all_same(self, items, msg=None):
        self.assertEqual(items.count(items[0]), len(items), msg)

    def assert_not_all_same(self, items, msg=None):
        self.assertNotEqual(items.count(items[0]), len(items), msg)

    def assert_tuple_approx_equal(self, actuals, targets, threshold, msg):
        """Tests that each value in actuals is within threshold of the
        corresponding target."""

        value = True
        for i, target in enumerate(targets):
            value = value and (target - threshold <= actuals[i] <= target + threshold)

        self.assertTrue(value, msg + ": " + repr(actuals) + " != " + repr(targets))

    def skipKnownBadTest(self, msg=None, platform=None, travis=None, interpreter=None):
        # Skip if platform/travis matches, and
        # PILLOW_RUN_KNOWN_BAD is not true in the environment.
        if os.environ.get("PILLOW_RUN_KNOWN_BAD", False):
            print(os.environ.get("PILLOW_RUN_KNOWN_BAD", False))
            return

        skip = True
        if platform is not None:
            skip = sys.platform.startswith(platform)
        if travis is not None:
            skip = skip and (travis == bool(os.environ.get("TRAVIS", False)))
        if interpreter is not None:
            skip = skip and (
                interpreter == "pypy" and hasattr(sys, "pypy_version_info")
            )
        if skip:
            self.skipTest(msg or "Known Bad Test")

    def tempfile(self, template):
        assert template[:5] in ("temp.", "temp_")
        fd, path = tempfile.mkstemp(template[4:], template[:4])
        os.close(fd)

        self.addCleanup(self.delete_tempfile, path)
        return path
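
    # For a template such as "temp.png" this creates a file named like
    # "temp<random>.png" via tempfile.mkstemp (prefix "temp", suffix ".png")
    # and registers delete_tempfile as a cleanup, so the file is only removed
    # automatically when the test passes.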

    def open_withImagemagick(self, f):
        if not imagemagick_available():
            raise OSError()

        outfile = self.tempfile("temp.png")
        if command_succeeds([IMCONVERT, f, outfile]):
            return Image.open(outfile)
        raise OSError()


@unittest.skipIf(sys.platform.startswith("win32"), "requires Unix or macOS")
class PillowLeakTestCase(PillowTestCase):
    # requires unix/macOS
    iterations = 100  # count
    mem_limit = 512  # k

    def _get_mem_usage(self):
        """
        Gets the RUSAGE memory usage, returns in K. Encapsulates the
        difference between macOS and Linux rss reporting.

        :returns: memory usage in kilobytes
        """

        from resource import getrusage, RUSAGE_SELF

        mem = getrusage(RUSAGE_SELF).ru_maxrss
        if sys.platform == "darwin":
            # man 2 getrusage:
            #     ru_maxrss
            #         This is the maximum resident set size utilized (in bytes).
            return mem / 1024  # Kb
        else:
            # linux
            # man 2 getrusage
            #     ru_maxrss (since Linux 2.6.32)
            #         This is the maximum resident set size used (in kilobytes).
            return mem  # Kb

    def _test_leak(self, core):
        start_mem = self._get_mem_usage()
        for cycle in range(self.iterations):
            core()
            mem = self._get_mem_usage() - start_mem
            msg = "memory usage limit exceeded in iteration %d" % cycle
            self.assertLess(mem, self.mem_limit, msg)
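
    # Sketch of a leak test built on this class (hypothetical test code):
    #
    #     class TestCoreLeak(PillowLeakTestCase):
    #         iterations = 1000
    #         mem_limit = 2048  # kB of allowed RSS growth
    #
    #         def test_open(self):
    #             self._test_leak(lambda: hopper().load())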


# helpers


def fromstring(data):
    from io import BytesIO

    return Image.open(BytesIO(data))


def tostring(im, string_format, **options):
    from io import BytesIO

    out = BytesIO()
    im.save(out, string_format, **options)
    return out.getvalue()


def hopper(mode=None, cache={}):
    if mode is None:
        # Always return a fresh, not-yet-loaded version of the image.
        # Operations on not-yet-loaded images are a separate class of errors
        # that we want to catch.
        return Image.open("Tests/images/hopper.ppm")
    # Use caching to reduce reading from disk, but return a fresh copy each
    # time so the cached image isn't modified by tests
    # (for fast, isolated, repeatable tests).
    im = cache.get(mode)
    if im is None:
        if mode == "F":
            im = hopper("L").convert(mode)
        elif mode[:4] == "I;16":
            im = hopper("I").convert(mode)
        else:
            im = hopper().convert(mode)
        cache[mode] = im
    return im.copy()
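
# Typical uses (illustrative): hopper() gives an unloaded RGB test image,
# hopper("L") a cached-and-copied greyscale version; because a copy is
# returned, a test may freely do e.g. hopper("L").putpixel((0, 0), 0)
# without affecting later tests.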


def command_succeeds(cmd):
    """
    Runs the command, which must be a list of strings. Returns True if the
    command succeeds, or False if an OSError was raised by subprocess.Popen.
    """
    import subprocess

    with open(os.devnull, "wb") as f:
        try:
            subprocess.call(cmd, stdout=f, stderr=subprocess.STDOUT)
        except OSError:
            return False
    return True


def djpeg_available():
    return command_succeeds(["djpeg", "-version"])


def cjpeg_available():
    return command_succeeds(["cjpeg", "-version"])


def netpbm_available():
    return command_succeeds(["ppmquant", "--version"]) and command_succeeds(
        ["ppmtogif", "--version"]
    )


def imagemagick_available():
    return IMCONVERT and command_succeeds([IMCONVERT, "-version"])


def on_appveyor():
    return "APPVEYOR" in os.environ


def on_ci():
    # Travis and AppVeyor have "CI"
    # Azure Pipelines has "TF_BUILD"
    return "CI" in os.environ or "TF_BUILD" in os.environ


if sys.platform == "win32":
    IMCONVERT = os.environ.get("MAGICK_HOME", "")
    if IMCONVERT:
        IMCONVERT = os.path.join(IMCONVERT, "convert.exe")
else:
    IMCONVERT = "convert"
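
# On Windows, IMCONVERT is only usable when MAGICK_HOME points at an
# ImageMagick install (otherwise it stays "" and imagemagick_available()
# returns a falsy value); elsewhere the plain "convert" binary on PATH is
# assumed.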


def distro():
    if os.path.exists("/etc/os-release"):
        with open("/etc/os-release", "r") as f:
            for line in f:
                if "ID=" in line:
                    return line.strip().split("=")[1]


class cached_property:
    def __init__(self, func):
        self.func = func

    def __get__(self, instance, cls=None):
        result = instance.__dict__[self.func.__name__] = self.func(instance)
        return result
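
# Minimal sketch of how cached_property behaves (illustrative): the wrapped
# method runs once per instance, and its value is then stored in the
# instance's __dict__, which shadows the descriptor on later lookups.
#
#     class Loader:
#         @cached_property
#         def data(self):
#             return expensive_read()  # hypothetical helper
#
#     loader = Loader()
#     loader.data  # calls expensive_read()
#     loader.data  # returns the cached value, no second call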