# Pillow/Tests/test_file_libtiff.py

import base64
import io
import itertools
import os
import re
import sys
from collections import namedtuple
import pytest
from PIL import Image, ImageFilter, ImageOps, TiffImagePlugin, TiffTags, features
from PIL.TiffImagePlugin import SAMPLEFORMAT, STRIPOFFSETS, SUBIFD
from .helper import (
assert_image_equal,
assert_image_equal_tofile,
assert_image_similar,
assert_image_similar_tofile,
hopper,
mark_if_feature_version,
skip_unless_feature,
)
@skip_unless_feature("libtiff")
class LibTiffTestCase:
def _assert_noerr(self, tmp_path, im):
"""Helper tests that assert basic sanity about the g4 tiff reading"""
# 1 bit
assert im.mode == "1"
# Does the data actually load
im.load()
im.getdata()
try:
assert im._compression == "group4"
except AttributeError:
print("No _compression")
print(dir(im))
# Can we write it back out in a different format?
out = str(tmp_path / "temp.png")
im.save(out)
out_bytes = io.BytesIO()
im.save(out_bytes, format="tiff", compression="group4")
class TestFileLibTiff(LibTiffTestCase):
def test_version(self):
assert re.search(r"\d+\.\d+\.\d+$", features.version_codec("libtiff"))
def test_g4_tiff(self, tmp_path):
"""Test the ordinary file path load path"""
test_file = "Tests/images/hopper_g4_500.tif"
with Image.open(test_file) as im:
assert im.size == (500, 500)
self._assert_noerr(tmp_path, im)
def test_g4_large(self, tmp_path):
test_file = "Tests/images/pport_g4.tif"
with Image.open(test_file) as im:
self._assert_noerr(tmp_path, im)
def test_g4_tiff_file(self, tmp_path):
"""Testing the string load path"""
test_file = "Tests/images/hopper_g4_500.tif"
with open(test_file, "rb") as f:
with Image.open(f) as im:
assert im.size == (500, 500)
self._assert_noerr(tmp_path, im)
def test_g4_tiff_bytesio(self, tmp_path):
"""Testing the stringio loading code path"""
test_file = "Tests/images/hopper_g4_500.tif"
s = io.BytesIO()
with open(test_file, "rb") as f:
s.write(f.read())
s.seek(0)
with Image.open(s) as im:
assert im.size == (500, 500)
self._assert_noerr(tmp_path, im)
def test_g4_non_disk_file_object(self, tmp_path):
"""Testing loading from non-disk non-BytesIO file object"""
test_file = "Tests/images/hopper_g4_500.tif"
s = io.BytesIO()
with open(test_file, "rb") as f:
s.write(f.read())
s.seek(0)
r = io.BufferedReader(s)
with Image.open(r) as im:
assert im.size == (500, 500)
self._assert_noerr(tmp_path, im)
def test_g4_eq_png(self):
"""Checking that we're actually getting the data that we expect"""
with Image.open("Tests/images/hopper_bw_500.png") as png:
assert_image_equal_tofile(png, "Tests/images/hopper_g4_500.tif")
# see https://github.com/python-pillow/Pillow/issues/279
def test_g4_fillorder_eq_png(self):
"""Checking that we're actually getting the data that we expect"""
with Image.open("Tests/images/g4-fillorder-test.tif") as g4:
assert_image_equal_tofile(g4, "Tests/images/g4-fillorder-test.png")
def test_g4_write(self, tmp_path):
"""Checking to see that the saved image is the same as what we wrote"""
test_file = "Tests/images/hopper_g4_500.tif"
with Image.open(test_file) as orig:
out = str(tmp_path / "temp.tif")
rot = orig.transpose(Image.Transpose.ROTATE_90)
assert rot.size == (500, 500)
rot.save(out)
with Image.open(out) as reread:
assert reread.size == (500, 500)
self._assert_noerr(tmp_path, reread)
assert_image_equal(reread, rot)
assert reread.info["compression"] == "group4"
assert reread.info["compression"] == orig.info["compression"]
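# The re-read copy was rotated before saving, so its raw pixel data
# deliberately differs from the unrotated original.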
assert orig.tobytes() != reread.tobytes()
def test_adobe_deflate_tiff(self):
test_file = "Tests/images/tiff_adobe_deflate.tif"
with Image.open(test_file) as im:
assert im.mode == "RGB"
assert im.size == (278, 374)
assert im.tile[0][:3] == ("libtiff", (0, 0, 278, 374), 0)
im.load()
assert_image_equal_tofile(im, "Tests/images/tiff_adobe_deflate.png")
@pytest.mark.parametrize("legacy_api", (False, True))
def test_write_metadata(self, legacy_api, tmp_path):
"""Test metadata writing through libtiff"""
f = str(tmp_path / "temp.tiff")
with Image.open("Tests/images/hopper_g4.tif") as img:
img.save(f, tiffinfo=img.tag)
if legacy_api:
original = img.tag.named()
else:
original = img.tag_v2.named()
# PhotometricInterpretation is set from SAVE_INFO,
# not the original image.
ignored = [
"StripByteCounts",
"RowsPerStrip",
"PageNumber",
"PhotometricInterpretation",
]
with Image.open(f) as loaded:
if legacy_api:
reloaded = loaded.tag.named()
else:
reloaded = loaded.tag_v2.named()
for tag, value in itertools.chain(reloaded.items(), original.items()):
if tag not in ignored:
val = original[tag]
if tag.endswith("Resolution"):
if legacy_api:
assert val[0][0] / val[0][1] == (
4294967295 / 113653537
), f"{tag} didn't roundtrip"
else:
assert val == 37.79000115940079, f"{tag} didn't roundtrip"
else:
assert val == value, f"{tag} didn't roundtrip"
# https://github.com/python-pillow/Pillow/issues/1561
requested_fields = ["StripByteCounts", "RowsPerStrip", "StripOffsets"]
for field in requested_fields:
assert field in reloaded, f"{field} not in metadata"
@pytest.mark.valgrind_known_error(reason="Known invalid metadata")
def test_additional_metadata(self, tmp_path):
# these should not crash. Seriously dummy data, most of it doesn't make
# any sense, so we're running up against limits where we're asking
# libtiff to do stupid things.
# Get the list of the ones that we should be able to write
core_items = {
tag: info
for tag, info in ((s, TiffTags.lookup(s)) for s in TiffTags.LIBTIFF_CORE)
if info.type is not None
}
# Exclude tags that have a special meaning
# and that we're already testing
with Image.open("Tests/images/hopper_g4.tif") as im:
for tag in im.tag_v2:
try:
del core_items[tag]
except KeyError:
pass
del core_items[320] # colormap is special, tested below
# Type codes:
# 2: "ascii",
# 3: "short",
# 4: "long",
# 5: "rational",
# 12: "double",
# Type: dummy value
values = {
2: "test",
3: 1,
4: 2**20,
5: TiffImagePlugin.IFDRational(100, 1),
12: 1.05,
}
new_ifd = TiffImagePlugin.ImageFileDirectory_v2()
for tag, info in core_items.items():
if info.length == 1:
new_ifd[tag] = values[info.type]
elif info.length == 0:
new_ifd[tag] = tuple(values[info.type] for _ in range(3))
else:
new_ifd[tag] = tuple(values[info.type] for _ in range(info.length))
# The ExtraSamples tag doesn't really make sense in this application.
del new_ifd[338]
out = str(tmp_path / "temp.tif")
TiffImagePlugin.WRITE_LIBTIFF = True
im.save(out, tiffinfo=new_ifd)
TiffImagePlugin.WRITE_LIBTIFF = False
def test_custom_metadata(self, tmp_path):
tc = namedtuple("test_case", "value,type,supported_by_default")
custom = {
37000 + k: v
for k, v in enumerate(
[
tc(4, TiffTags.SHORT, True),
tc(123456789, TiffTags.LONG, True),
tc(-4, TiffTags.SIGNED_BYTE, False),
tc(-4, TiffTags.SIGNED_SHORT, False),
tc(-123456789, TiffTags.SIGNED_LONG, False),
tc(TiffImagePlugin.IFDRational(4, 7), TiffTags.RATIONAL, True),
tc(4.25, TiffTags.FLOAT, True),
tc(4.25, TiffTags.DOUBLE, True),
tc("custom tag value", TiffTags.ASCII, True),
tc(b"custom tag value", TiffTags.BYTE, True),
tc((4, 5, 6), TiffTags.SHORT, True),
tc((123456789, 9, 34, 234, 219387, 92432323), TiffTags.LONG, True),
tc((-4, 9, 10), TiffTags.SIGNED_BYTE, False),
tc((-4, 5, 6), TiffTags.SIGNED_SHORT, False),
tc(
(-123456789, 9, 34, 234, 219387, -92432323),
TiffTags.SIGNED_LONG,
False,
),
tc((4.25, 5.25), TiffTags.FLOAT, True),
tc((4.25, 5.25), TiffTags.DOUBLE, True),
# array of TIFF_BYTE requires bytes instead of tuple for backwards
# compatibility
tc(bytes([4]), TiffTags.BYTE, True),
tc(bytes((4, 9, 10)), TiffTags.BYTE, True),
]
)
}
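# Exercise the default writer, and also the libtiff writer when the
# installed libtiff supports custom tags.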
libtiffs = [False]
if Image.core.libtiff_support_custom_tags:
libtiffs.append(True)
for libtiff in libtiffs:
TiffImagePlugin.WRITE_LIBTIFF = libtiff
def check_tags(tiffinfo):
im = hopper()
out = str(tmp_path / "temp.tif")
im.save(out, tiffinfo=tiffinfo)
with Image.open(out) as reloaded:
for tag, value in tiffinfo.items():
reloaded_value = reloaded.tag_v2[tag]
if (
isinstance(reloaded_value, TiffImagePlugin.IFDRational)
and libtiff
):
# libtiff does not support real RATIONALS
assert (
round(abs(float(reloaded_value) - float(value)), 7) == 0
)
continue
assert reloaded_value == value
# Test with types
ifd = TiffImagePlugin.ImageFileDirectory_v2()
for tag, tagdata in custom.items():
ifd[tag] = tagdata.value
ifd.tagtype[tag] = tagdata.type
check_tags(ifd)
# Test without types. This only works for some types; ints, for example, are
# always encoded as LONG and not SIGNED_LONG.
check_tags(
{
tag: tagdata.value
for tag, tagdata in custom.items()
if tagdata.supported_by_default
}
)
TiffImagePlugin.WRITE_LIBTIFF = False
def test_subifd(self, tmp_path):
outfile = str(tmp_path / "temp.tif")
with Image.open("Tests/images/g4_orientation_6.tif") as im:
im.tag_v2[SUBIFD] = 10000
# Should not segfault
im.save(outfile)
def test_xmlpacket_tag(self, tmp_path):
TiffImagePlugin.WRITE_LIBTIFF = True
out = str(tmp_path / "temp.tif")
hopper().save(out, tiffinfo={700: b"xmlpacket tag"})
TiffImagePlugin.WRITE_LIBTIFF = False
with Image.open(out) as reloaded:
if 700 in reloaded.tag_v2:
assert reloaded.tag_v2[700] == b"xmlpacket tag"
def test_int_dpi(self, tmp_path):
# issue #1765
im = hopper("RGB")
out = str(tmp_path / "temp.tif")
TiffImagePlugin.WRITE_LIBTIFF = True
im.save(out, dpi=(72, 72))
TiffImagePlugin.WRITE_LIBTIFF = False
with Image.open(out) as reloaded:
assert reloaded.info["dpi"] == (72.0, 72.0)
def test_g3_compression(self, tmp_path):
with Image.open("Tests/images/hopper_g4_500.tif") as i:
out = str(tmp_path / "temp.tif")
i.save(out, compression="group3")
with Image.open(out) as reread:
assert reread.info["compression"] == "group3"
assert_image_equal(reread, i)
def test_little_endian(self, tmp_path):
with Image.open("Tests/images/16bit.deflate.tif") as im:
assert im.getpixel((0, 0)) == 480
assert im.mode == "I;16"
b = im.tobytes()
# Bytes are in image native order (little endian)
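# 480 == 0x01E0, so the low byte 0xE0 comes first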
assert b[0] == ord(b"\xe0")
assert b[1] == ord(b"\x01")
out = str(tmp_path / "temp.tif")
# out = "temp.le.tif"
im.save(out)
with Image.open(out) as reread:
assert reread.info["compression"] == im.info["compression"]
assert reread.getpixel((0, 0)) == 480
# UNDONE - libtiff defaults to writing in native endian, so
# on big endian, we'll get back mode = 'I;16B' here.
def test_big_endian(self, tmp_path):
with Image.open("Tests/images/16bit.MM.deflate.tif") as im:
assert im.getpixel((0, 0)) == 480
assert im.mode == "I;16B"
b = im.tobytes()
# Bytes are in image native order (big endian)
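# 480 == 0x01E0, so the high byte 0x01 comes first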
assert b[0] == ord(b"\x01")
assert b[1] == ord(b"\xe0")
out = str(tmp_path / "temp.tif")
im.save(out)
with Image.open(out) as reread:
assert reread.info["compression"] == im.info["compression"]
assert reread.getpixel((0, 0)) == 480
def test_g4_string_info(self, tmp_path):
"""Tests String data in info directory"""
test_file = "Tests/images/hopper_g4_500.tif"
with Image.open(test_file) as orig:
out = str(tmp_path / "temp.tif")
orig.tag[269] = "temp.tif"
orig.save(out)
with Image.open(out) as reread:
assert "temp.tif" == reread.tag_v2[269]
assert "temp.tif" == reread.tag[269][0]
def test_12bit_rawmode(self):
"""Are we generating the same interpretation
of the image as Imagemagick is?"""
TiffImagePlugin.READ_LIBTIFF = True
with Image.open("Tests/images/12bit.cropped.tif") as im:
im.load()
TiffImagePlugin.READ_LIBTIFF = False
# to make the target --
# convert 12bit.cropped.tif -depth 16 tmp.tif
# convert tmp.tif -evaluate RightShift 4 12in16bit2.tif
# ImageMagick will auto-scale so that a 12-bit FFF becomes a 16-bit FFF0,
# so we need to unshift so that the integer values are the same.
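# (a 12-bit sample 0xFFF is stored as 0xFFF0, and 0xFFF0 >> 4 == 0xFFF
# recovers the original value)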
assert_image_equal_tofile(im, "Tests/images/12in16bit.tif")
def test_blur(self, tmp_path):
# Test case from IRC: how to blur a b/w image
# and save it as a compressed TIFF.
out = str(tmp_path / "temp.tif")
with Image.open("Tests/images/pport_g4.tif") as im:
im = im.convert("L")
im = im.filter(ImageFilter.GaussianBlur(4))
im.save(out, compression="tiff_adobe_deflate")
assert_image_equal_tofile(im, out)
def test_compressions(self, tmp_path):
# Test various tiff compressions and assert similar image content but reduced
# file sizes.
im = hopper("RGB")
out = str(tmp_path / "temp.tif")
im.save(out)
size_raw = os.path.getsize(out)
for compression in ("packbits", "tiff_lzw"):
im.save(out, compression=compression)
size_compressed = os.path.getsize(out)
assert_image_equal_tofile(im, out)
im.save(out, compression="jpeg")
size_jpeg = os.path.getsize(out)
with Image.open(out) as im2:
assert_image_similar(im, im2, 30)
im.save(out, compression="jpeg", quality=30)
size_jpeg_30 = os.path.getsize(out)
assert_image_similar_tofile(im2, out, 30)
assert size_raw > size_compressed
assert size_compressed > size_jpeg
assert size_jpeg > size_jpeg_30
def test_tiff_jpeg_compression(self, tmp_path):
im = hopper("RGB")
out = str(tmp_path / "temp.tif")
im.save(out, compression="tiff_jpeg")
with Image.open(out) as reloaded:
assert reloaded.info["compression"] == "jpeg"
def test_tiff_deflate_compression(self, tmp_path):
im = hopper("RGB")
out = str(tmp_path / "temp.tif")
im.save(out, compression="tiff_deflate")
with Image.open(out) as reloaded:
assert reloaded.info["compression"] == "tiff_adobe_deflate"
def test_quality(self, tmp_path):
im = hopper("RGB")
out = str(tmp_path / "temp.tif")
with pytest.raises(ValueError):
im.save(out, compression="tiff_lzw", quality=50)
with pytest.raises(ValueError):
im.save(out, compression="jpeg", quality=-1)
with pytest.raises(ValueError):
im.save(out, compression="jpeg", quality=101)
with pytest.raises(ValueError):
im.save(out, compression="jpeg", quality="good")
im.save(out, compression="jpeg", quality=0)
im.save(out, compression="jpeg", quality=100)
def test_cmyk_save(self, tmp_path):
im = hopper("CMYK")
out = str(tmp_path / "temp.tif")
im.save(out, compression="tiff_adobe_deflate")
assert_image_equal_tofile(im, out)
@pytest.mark.parametrize("im", (hopper("P"), Image.new("P", (1, 1), "#000")))
def test_palette_save(self, im, tmp_path):
out = str(tmp_path / "temp.tif")
TiffImagePlugin.WRITE_LIBTIFF = True
im.save(out)
TiffImagePlugin.WRITE_LIBTIFF = False
with Image.open(out) as reloaded:
# colormap/palette tag
assert len(reloaded.tag_v2[320]) == 768
@pytest.mark.parametrize("compression", ("tiff_ccitt", "group3", "group4"))
def test_bw_compression_w_rgb(self, compression, tmp_path):
im = hopper("RGB")
out = str(tmp_path / "temp.tif")
with pytest.raises(OSError):
im.save(out, compression=compression)
def test_fp_leak(self):
im = Image.open("Tests/images/hopper_g4_500.tif")
fn = im.fp.fileno()
os.fstat(fn)
im.load() # this should close it.
with pytest.raises(OSError):
os.fstat(fn)
im = None  # dropping the reference; the descriptor should remain closed
with pytest.raises(OSError):
os.fstat(fn)
with pytest.raises(OSError):
os.close(fn)
def test_multipage(self):
# issue #862
TiffImagePlugin.READ_LIBTIFF = True
with Image.open("Tests/images/multipage.tiff") as im:
# file is a multipage tiff, 10x10 green, 10x10 red, 20x20 blue
im.seek(0)
assert im.size == (10, 10)
assert im.convert("RGB").getpixel((0, 0)) == (0, 128, 0)
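# a non-zero next-IFD offset means another page follows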
assert im.tag.next
im.seek(1)
assert im.size == (10, 10)
assert im.convert("RGB").getpixel((0, 0)) == (255, 0, 0)
assert im.tag.next
im.seek(2)
assert not im.tag.next
assert im.size == (20, 20)
assert im.convert("RGB").getpixel((0, 0)) == (0, 0, 255)
TiffImagePlugin.READ_LIBTIFF = False
def test_multipage_nframes(self):
# issue #862
TiffImagePlugin.READ_LIBTIFF = True
with Image.open("Tests/images/multipage.tiff") as im:
frames = im.n_frames
assert frames == 3
for _ in range(frames):
im.seek(0)
# Should not raise ValueError: I/O operation on closed file
im.load()
TiffImagePlugin.READ_LIBTIFF = False
def test_multipage_seek_backwards(self):
TiffImagePlugin.READ_LIBTIFF = True
with Image.open("Tests/images/multipage.tiff") as im:
im.seek(1)
im.load()
im.seek(0)
assert im.convert("RGB").getpixel((0, 0)) == (0, 128, 0)
TiffImagePlugin.READ_LIBTIFF = False
def test__next(self):
TiffImagePlugin.READ_LIBTIFF = True
with Image.open("Tests/images/hopper.tif") as im:
assert not im.tag.next
im.load()
assert not im.tag.next
def test_4bit(self):
# Arrange
test_file = "Tests/images/hopper_gray_4bpp.tif"
original = hopper("L")
# Act
TiffImagePlugin.READ_LIBTIFF = True
with Image.open(test_file) as im:
TiffImagePlugin.READ_LIBTIFF = False
# Assert
assert im.size == (128, 128)
assert im.mode == "L"
assert_image_similar(im, original, 7.3)
def test_gray_semibyte_per_pixel(self):
test_files = (
(
24.8, # epsilon
( # group
"Tests/images/tiff_gray_2_4_bpp/hopper2.tif",
"Tests/images/tiff_gray_2_4_bpp/hopper2I.tif",
"Tests/images/tiff_gray_2_4_bpp/hopper2R.tif",
"Tests/images/tiff_gray_2_4_bpp/hopper2IR.tif",
),
),
(
7.3, # epsilon
( # group
"Tests/images/tiff_gray_2_4_bpp/hopper4.tif",
"Tests/images/tiff_gray_2_4_bpp/hopper4I.tif",
"Tests/images/tiff_gray_2_4_bpp/hopper4R.tif",
"Tests/images/tiff_gray_2_4_bpp/hopper4IR.tif",
),
),
)
original = hopper("L")
for epsilon, group in test_files:
with Image.open(group[0]) as im:
assert im.size == (128, 128)
assert im.mode == "L"
assert_image_similar(im, original, epsilon)
for file in group[1:]:
with Image.open(file) as im2:
assert im2.size == (128, 128)
assert im2.mode == "L"
assert_image_equal(im, im2)
def test_save_bytesio(self):
# PR 1011
# Test TIFF saving to io.BytesIO() object.
TiffImagePlugin.WRITE_LIBTIFF = True
TiffImagePlugin.READ_LIBTIFF = True
# Generate test image
pilim = hopper()
def save_bytesio(compression=None):
buffer_io = io.BytesIO()
pilim.save(buffer_io, format="tiff", compression=compression)
buffer_io.seek(0)
assert_image_similar_tofile(pilim, buffer_io, 0)
save_bytesio()
save_bytesio("raw")
save_bytesio("packbits")
save_bytesio("tiff_lzw")
TiffImagePlugin.WRITE_LIBTIFF = False
TiffImagePlugin.READ_LIBTIFF = False
def test_save_ycbcr(self, tmp_path):
im = hopper("YCbCr")
outfile = str(tmp_path / "temp.tif")
im.save(outfile, compression="jpeg")
with Image.open(outfile) as reloaded:
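# 530 is YCbCrSubSampling, 532 is ReferenceBlackWhite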
assert reloaded.tag_v2[530] == (1, 1)
assert reloaded.tag_v2[532] == (0, 255, 128, 255, 128, 255)
def test_exif_ifd(self, tmp_path):
outfile = str(tmp_path / "temp.tif")
with Image.open("Tests/images/tiff_adobe_deflate.tif") as im:
assert im.tag_v2[34665] == 125456
im.save(outfile)
with Image.open(outfile) as reloaded:
if Image.core.libtiff_support_custom_tags:
assert reloaded.tag_v2[34665] == 125456
def test_crashing_metadata(self, tmp_path):
# issue 1597
with Image.open("Tests/images/rdf.tif") as im:
out = str(tmp_path / "temp.tif")
TiffImagePlugin.WRITE_LIBTIFF = True
# this shouldn't crash
im.save(out, format="TIFF")
TiffImagePlugin.WRITE_LIBTIFF = False
def test_page_number_x_0(self, tmp_path):
# Issue 973
# Test TIFF with tag 297 (Page Number) having value of 0 0.
# The first number is the current page number.
# The second is the total number of pages, zero means not available.
outfile = str(tmp_path / "temp.tif")
# Created by printing a page in Chrome to PDF, then:
# /usr/bin/gs -q -sDEVICE=tiffg3 -sOutputFile=total-pages-zero.tif
# -dNOPAUSE /tmp/test.pdf -c quit
infile = "Tests/images/total-pages-zero.tif"
with Image.open(infile) as im:
# Should not divide by zero
im.save(outfile)
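# Illustrative sketch added for clarity (not part of the original suite):
# reading the PageNumber tag back directly. It assumes tag 297 decodes to a
# (current page, total pages) tuple and that this file carries the "0 0"
# value described above.
def test_page_number_tag_value_sketch(self):
    with Image.open("Tests/images/total-pages-zero.tif") as im:
        assert im.tag_v2[297] == (0, 0)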
def test_fd_duplication(self, tmp_path):
# https://github.com/python-pillow/Pillow/issues/1651
tmpfile = str(tmp_path / "temp.tif")
with open(tmpfile, "wb") as f:
with open("Tests/images/g4-multi.tiff", "rb") as src:
f.write(src.read())
im = Image.open(tmpfile)
im.n_frames
im.close()
# Should not raise PermissionError.
os.remove(tmpfile)
def test_read_icc(self):
with Image.open("Tests/images/hopper.iccprofile.tif") as img:
icc = img.info.get("icc_profile")
assert icc is not None
TiffImagePlugin.READ_LIBTIFF = True
with Image.open("Tests/images/hopper.iccprofile.tif") as img:
icc_libtiff = img.info.get("icc_profile")
assert icc_libtiff is not None
TiffImagePlugin.READ_LIBTIFF = False
assert icc == icc_libtiff
def test_write_icc(self, tmp_path):
def check_write(libtiff):
TiffImagePlugin.WRITE_LIBTIFF = libtiff
with Image.open("Tests/images/hopper.iccprofile.tif") as img:
icc_profile = img.info["icc_profile"]
out = str(tmp_path / "temp.tif")
img.save(out, icc_profile=icc_profile)
with Image.open(out) as reloaded:
assert icc_profile == reloaded.info["icc_profile"]
libtiffs = []
if Image.core.libtiff_support_custom_tags:
libtiffs.append(True)
libtiffs.append(False)
for libtiff in libtiffs:
check_write(libtiff)
def test_multipage_compression(self):
with Image.open("Tests/images/compression.tif") as im:
im.seek(0)
assert im._compression == "tiff_ccitt"
assert im.size == (10, 10)
im.seek(1)
assert im._compression == "packbits"
assert im.size == (10, 10)
im.load()
im.seek(0)
assert im._compression == "tiff_ccitt"
assert im.size == (10, 10)
im.load()
def test_save_tiff_with_jpegtables(self, tmp_path):
# Arrange
outfile = str(tmp_path / "temp.tif")
# Created with ImageMagick: convert hopper.jpg hopper_jpg.tif
# Contains JPEGTables (347) tag
infile = "Tests/images/hopper_jpg.tif"
with Image.open(infile) as im:
# Act / Assert
# Should not raise UnicodeDecodeError or anything else
im.save(outfile)
def test_16bit_RGB_tiff(self):
with Image.open("Tests/images/tiff_16bit_RGB.tiff") as im:
assert im.mode == "RGB"
assert im.size == (100, 40)
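# "RGB;16N" in the expected tile means 16 bits per sample in native byte order, decoded by libtiff.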
assert im.tile == [
(
"libtiff",
(0, 0, 100, 40),
0,
("RGB;16N", "tiff_adobe_deflate", False, 8),
)
]
im.load()
assert_image_equal_tofile(im, "Tests/images/tiff_16bit_RGB_target.png")
def test_16bit_RGBa_tiff(self):
with Image.open("Tests/images/tiff_16bit_RGBa.tiff") as im:
assert im.mode == "RGBA"
assert im.size == (100, 40)
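# "RGBa;16N": premultiplied alpha, 16 bits per sample, native byte order.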
assert im.tile == [
("libtiff", (0, 0, 100, 40), 0, ("RGBa;16N", "tiff_lzw", False, 38236))
]
im.load()
assert_image_equal_tofile(im, "Tests/images/tiff_16bit_RGBa_target.png")
@skip_unless_feature("jpg")
def test_gimp_tiff(self):
# Read TIFF JPEG images from GIMP [@PIL168]
filename = "Tests/images/pil168.tif"
with Image.open(filename) as im:
assert im.mode == "RGB"
assert im.size == (256, 256)
assert im.tile == [
("libtiff", (0, 0, 256, 256), 0, ("RGB", "jpeg", False, 5122))
]
im.load()
assert_image_equal_tofile(im, "Tests/images/pil168.png")
def test_sampleformat(self):
# https://github.com/python-pillow/Pillow/issues/1466
with Image.open("Tests/images/copyleft.tiff") as im:
assert im.mode == "RGB"
assert_image_equal_tofile(im, "Tests/images/copyleft.png", mode="RGB")
def test_sampleformat_write(self, tmp_path):
im = Image.new("F", (1, 1))
out = str(tmp_path / "temp.tif")
TiffImagePlugin.WRITE_LIBTIFF = True
im.save(out)
TiffImagePlugin.WRITE_LIBTIFF = False
with Image.open(out) as reloaded:
assert reloaded.mode == "F"
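# SampleFormat 3 is SAMPLEFORMAT_IEEEFP (IEEE floating point), matching the "F" mode image.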
assert reloaded.getexif()[SAMPLEFORMAT] == 3
def test_lzma(self, capfd):
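# If libtiff lacks LZMA support, the failure is only reported on stderr;
# skip in that case, otherwise re-emit the captured output and re-raise.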
try:
with Image.open("Tests/images/hopper_lzma.tif") as im:
assert im.mode == "RGB"
assert im.size == (128, 128)
assert im.format == "TIFF"
im2 = hopper()
assert_image_similar(im, im2, 5)
except OSError:
captured = capfd.readouterr()
if "LZMA compression support is not configured" in captured.err:
pytest.skip("LZMA compression support is not configured")
sys.stdout.write(captured.out)
sys.stderr.write(captured.err)
raise
def test_webp(self, capfd):
try:
with Image.open("Tests/images/hopper_webp.tif") as im:
assert im.mode == "RGB"
assert im.size == (128, 128)
assert im.format == "TIFF"
assert_image_similar_tofile(im, "Tests/images/hopper_webp.png", 1)
except OSError:
captured = capfd.readouterr()
if "WEBP compression support is not configured" in captured.err:
pytest.skip("WEBP compression support is not configured")
if (
"Compression scheme 50001 strip decoding is not implemented"
in captured.err
):
pytest.skip(
"Compression scheme 50001 strip decoding is not implemented"
)
sys.stdout.write(captured.out)
sys.stderr.write(captured.err)
raise
def test_lzw(self):
with Image.open("Tests/images/hopper_lzw.tif") as im:
assert im.mode == "RGB"
assert im.size == (128, 128)
assert im.format == "TIFF"
im2 = hopper()
assert_image_similar(im, im2, 5)
def test_strip_cmyk_jpeg(self):
infile = "Tests/images/tiff_strip_cmyk_jpeg.tif"
with Image.open(infile) as im:
assert_image_similar_tofile(im, "Tests/images/pil_sample_cmyk.jpg", 0.5)
def test_strip_cmyk_16l_jpeg(self):
infile = "Tests/images/tiff_strip_cmyk_16l_jpeg.tif"
with Image.open(infile) as im:
assert_image_similar_tofile(im, "Tests/images/pil_sample_cmyk.jpg", 0.5)
@mark_if_feature_version(
pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
)
def test_strip_ycbcr_jpeg_2x2_sampling(self):
infile = "Tests/images/tiff_strip_ycbcr_jpeg_2x2_sampling.tif"
with Image.open(infile) as im:
assert_image_similar_tofile(im, "Tests/images/flower.jpg", 1.2)
@mark_if_feature_version(
pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
)
def test_strip_ycbcr_jpeg_1x1_sampling(self):
infile = "Tests/images/tiff_strip_ycbcr_jpeg_1x1_sampling.tif"
with Image.open(infile) as im:
assert_image_similar_tofile(im, "Tests/images/flower2.jpg", 0.01)
def test_tiled_cmyk_jpeg(self):
infile = "Tests/images/tiff_tiled_cmyk_jpeg.tif"
with Image.open(infile) as im:
assert_image_similar_tofile(im, "Tests/images/pil_sample_cmyk.jpg", 0.5)
@mark_if_feature_version(
pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
)
def test_tiled_ycbcr_jpeg_1x1_sampling(self):
infile = "Tests/images/tiff_tiled_ycbcr_jpeg_1x1_sampling.tif"
with Image.open(infile) as im:
assert_image_similar_tofile(im, "Tests/images/flower2.jpg", 0.01)
@mark_if_feature_version(
pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
)
def test_tiled_ycbcr_jpeg_2x2_sampling(self):
infile = "Tests/images/tiff_tiled_ycbcr_jpeg_2x2_sampling.tif"
with Image.open(infile) as im:
assert_image_similar_tofile(im, "Tests/images/flower.jpg", 1.5)
def test_strip_planar_rgb(self):
# gdal_translate -co TILED=no -co INTERLEAVE=BAND -co COMPRESS=LZW \
# tiff_strip_raw.tif tiff_strip_planar_lzw.tiff
infile = "Tests/images/tiff_strip_planar_lzw.tiff"
with Image.open(infile) as im:
assert_image_equal_tofile(im, "Tests/images/tiff_adobe_deflate.png")
def test_tiled_planar_rgb(self):
# gdal_translate -co TILED=yes -co INTERLEAVE=BAND -co COMPRESS=LZW \
# tiff_tiled_raw.tif tiff_tiled_planar_lzw.tiff
infile = "Tests/images/tiff_tiled_planar_lzw.tiff"
with Image.open(infile) as im:
assert_image_equal_tofile(im, "Tests/images/tiff_adobe_deflate.png")
def test_tiled_planar_16bit_RGB(self):
# gdal_translate -co TILED=yes -co INTERLEAVE=BAND -co COMPRESS=LZW \
# tiff_16bit_RGB.tiff tiff_tiled_planar_16bit_RGB.tiff
with Image.open("Tests/images/tiff_tiled_planar_16bit_RGB.tiff") as im:
assert_image_equal_tofile(im, "Tests/images/tiff_16bit_RGB_target.png")
def test_strip_planar_16bit_RGB(self):
# gdal_translate -co TILED=no -co INTERLEAVE=BAND -co COMPRESS=LZW \
# tiff_16bit_RGB.tiff tiff_strip_planar_16bit_RGB.tiff
with Image.open("Tests/images/tiff_strip_planar_16bit_RGB.tiff") as im:
assert_image_equal_tofile(im, "Tests/images/tiff_16bit_RGB_target.png")
def test_tiled_planar_16bit_RGBa(self):
# gdal_translate -co TILED=yes \
# -co INTERLEAVE=BAND -co COMPRESS=LZW -co ALPHA=PREMULTIPLIED \
# tiff_16bit_RGBa.tiff tiff_tiled_planar_16bit_RGBa.tiff
with Image.open("Tests/images/tiff_tiled_planar_16bit_RGBa.tiff") as im:
assert_image_equal_tofile(im, "Tests/images/tiff_16bit_RGBa_target.png")
def test_strip_planar_16bit_RGBa(self):
# gdal_translate -co TILED=no \
# -co INTERLEAVE=BAND -co COMPRESS=LZW -co ALPHA=PREMULTIPLIED \
# tiff_16bit_RGBa.tiff tiff_strip_planar_16bit_RGBa.tiff
with Image.open("Tests/images/tiff_strip_planar_16bit_RGBa.tiff") as im:
assert_image_equal_tofile(im, "Tests/images/tiff_16bit_RGBa_target.png")
@pytest.mark.parametrize("compression", (None, "jpeg"))
def test_block_tile_tags(self, compression, tmp_path):
im = hopper()
out = str(tmp_path / "temp.tif")
tags = {
TiffImagePlugin.TILEWIDTH: 256,
TiffImagePlugin.TILELENGTH: 256,
TiffImagePlugin.TILEOFFSETS: 256,
TiffImagePlugin.TILEBYTECOUNTS: 256,
}
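# Tile layout tags supplied via exif= should not be carried over into the saved file.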
im.save(out, exif=tags, compression=compression)
with Image.open(out) as reloaded:
for tag in tags:
assert tag not in reloaded.getexif()
def test_old_style_jpeg(self):
with Image.open("Tests/images/old-style-jpeg-compression.tif") as im:
assert_image_equal_tofile(im, "Tests/images/old-style-jpeg-compression.png")
def test_open_missing_samplesperpixel(self):
with Image.open(
"Tests/images/old-style-jpeg-compression-no-samplesperpixel.tif"
) as im:
assert_image_equal_tofile(im, "Tests/images/old-style-jpeg-compression.png")
@pytest.mark.parametrize(
"file_name, mode, size, tile",
[
(
"tiff_wrong_bits_per_sample.tiff",
"RGBA",
(52, 53),
[("raw", (0, 0, 52, 53), 160, ("RGBA", 0, 1))],
),
(
"tiff_wrong_bits_per_sample_2.tiff",
"RGB",
(16, 16),
[("raw", (0, 0, 16, 16), 8, ("RGB", 0, 1))],
),
(
"tiff_wrong_bits_per_sample_3.tiff",
"RGBA",
(512, 256),
[("libtiff", (0, 0, 512, 256), 0, ("RGBA", "tiff_lzw", False, 48782))],
),
],
)
def test_wrong_bits_per_sample(self, file_name, mode, size, tile):
with Image.open("Tests/images/" + file_name) as im:
assert im.mode == mode
assert im.size == size
assert im.tile == tile
im.load()
def test_no_rows_per_strip(self):
# This image does not have a RowsPerStrip TIFF tag
infile = "Tests/images/no_rows_per_strip.tif"
with Image.open(infile) as im:
im.load()
assert im.size == (950, 975)
def test_orientation(self):
with Image.open("Tests/images/g4_orientation_1.tif") as base_im:
for i in range(2, 9):
with Image.open("Tests/images/g4_orientation_" + str(i) + ".tif") as im:
assert 274 in im.tag_v2
im.load()
assert 274 not in im.tag_v2
assert_image_similar(base_im, im, 0.7)
def test_exif_transpose(self):
with Image.open("Tests/images/g4_orientation_1.tif") as base_im:
for i in range(2, 9):
with Image.open("Tests/images/g4_orientation_" + str(i) + ".tif") as im:
im = ImageOps.exif_transpose(im)
assert_image_similar(base_im, im, 0.7)
@pytest.mark.valgrind_known_error(reason="Backtrace in Python Core")
def test_sampleformat_not_corrupted(self):
# Assert that a TIFF image with SampleFormat=UINT tag is not corrupted
# when saving to a new file.
# Pillow 6.0 fails with "OSError: cannot identify image file".
tiff = io.BytesIO(
base64.b64decode(
b"SUkqAAgAAAAPAP4ABAABAAAAAAAAAAABBAABAAAAAQAAAAEBBAABAAAAAQAA"
b"AAIBAwADAAAAwgAAAAMBAwABAAAACAAAAAYBAwABAAAAAgAAABEBBAABAAAA"
b"4AAAABUBAwABAAAAAwAAABYBBAABAAAAAQAAABcBBAABAAAACwAAABoBBQAB"
b"AAAAyAAAABsBBQABAAAA0AAAABwBAwABAAAAAQAAACgBAwABAAAAAQAAAFMB"
b"AwADAAAA2AAAAAAAAAAIAAgACAABAAAAAQAAAAEAAAABAAAAAQABAAEAAAB4"
b"nGNgYAAAAAMAAQ=="
)
)
out = io.BytesIO()
with Image.open(tiff) as im:
im.save(out, format="tiff")
out.seek(0)
with Image.open(out) as im:
im.load()
def test_realloc_overflow(self):
TiffImagePlugin.READ_LIBTIFF = True
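# Force reading through libtiff so the oversized RowsPerStrip value reaches the libtiff decoder.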
with Image.open("Tests/images/tiff_overflow_rows_per_strip.tif") as im:
with pytest.raises(OSError) as e:
im.load()
# Assert that the error code is IMAGING_CODEC_MEMORY
assert str(e.value) == "-9"
TiffImagePlugin.READ_LIBTIFF = False
@pytest.mark.parametrize("compression", ("tiff_adobe_deflate", "jpeg"))
def test_save_multistrip(self, compression, tmp_path):
im = hopper("RGB").resize((256, 256))
out = str(tmp_path / "temp.tif")
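# A 256x256 RGB image is larger than the default 64 KiB strip size,
# so it should be written as more than one strip.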
im.save(out, compression=compression)
with Image.open(out) as im:
# Assert that there are multiple strips
assert len(im.tag_v2[STRIPOFFSETS]) > 1
@pytest.mark.parametrize("argument", (True, False))
def test_save_single_strip(self, argument, tmp_path):
im = hopper("RGB").resize((256, 256))
out = str(tmp_path / "temp.tif")
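# Raise the strip size to 2**18 bytes, either globally or via the strip_size
# argument, so the whole image fits in a single strip.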
if not argument:
TiffImagePlugin.STRIP_SIZE = 2**18
try:
arguments = {"compression": "tiff_adobe_deflate"}
if argument:
arguments["strip_size"] = 2**18
im.save(out, **arguments)
with Image.open(out) as im:
assert len(im.tag_v2[STRIPOFFSETS]) == 1
finally:
TiffImagePlugin.STRIP_SIZE = 65536
@pytest.mark.parametrize("compression", ("tiff_adobe_deflate", None))
def test_save_zero(self, compression, tmp_path):
im = Image.new("RGB", (0, 0))
out = str(tmp_path / "temp.tif")
with pytest.raises(SystemError):
im.save(out, compression=compression)
def test_save_many_compressed(self, tmp_path):
im = hopper()
out = str(tmp_path / "temp.tif")
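# Repeatedly saving with JPEG compression should not leak resources or fail after many iterations.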
for _ in range(10000):
im.save(out, compression="jpeg")
@pytest.mark.parametrize(
"path, sizes",
(
("Tests/images/hopper.tif", ()),
("Tests/images/child_ifd.tiff", (16, 8)),
("Tests/images/child_ifd_jpeg.tiff", (20,)),
),
)
def test_get_child_images(self, path, sizes):
with Image.open(path) as im:
ims = im.get_child_images()
assert len(ims) == len(sizes)
for i, im in enumerate(ims):
w = sizes[i]
expected = Image.new("RGB", (w, w), "#f00")
assert_image_similar(im, expected, 1)