Pillow/Tests/test_image_getextrema.py
Martin Packman 0b3036454c Give correct extrema for I;16 format images
Currently gives None for a 16 bit greyscale image rather than the
true min and max values in the 0-65535 range. The internal
ImagingGetProjection function already supports I;16, but
_getextrema needs to know how to unpack the result.
2018-09-18 13:48:12 +01:00
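
A minimal sketch of the behaviour this commit addresses (not part of the repository; the pixel values below are arbitrary): building an I;16 image from raw little-endian 16-bit data and reading its extrema, which should be the true 16-bit minimum and maximum rather than None once the fix is in place.

# Illustrative sketch only; assumes a Pillow build containing this fix.
import struct

from PIL import Image

# Two little-endian 16-bit pixels with arbitrary values above the 8-bit range.
data = struct.pack("<2H", 300, 40000)
im = Image.frombytes("I;16", (2, 1), data)

print(im.mode)          # "I;16"
print(im.getextrema())  # (300, 40000) once _getextrema unpacks the projection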

from PIL import Image

from helper import unittest, PillowTestCase, hopper


class TestImageGetExtrema(PillowTestCase):

    def test_extrema(self):
        def extrema(mode):
            return hopper(mode).getextrema()

        self.assertEqual(extrema("1"), (0, 255))
        self.assertEqual(extrema("L"), (0, 255))
        self.assertEqual(extrema("I"), (0, 255))
        self.assertEqual(extrema("F"), (0, 255))
        self.assertEqual(extrema("P"), (0, 225))  # fixed palette
        self.assertEqual(
            extrema("RGB"), ((0, 255), (0, 255), (0, 255)))
        self.assertEqual(
            extrema("RGBA"), ((0, 255), (0, 255), (0, 255), (255, 255)))
        self.assertEqual(
            extrema("CMYK"), ((0, 255), (0, 255), (0, 255), (0, 0)))
        self.assertEqual(extrema("I;16"), (0, 255))

    def test_true_16(self):
        # A genuine 16-bit image should report extrema beyond the
        # 8-bit range, not None.
        im = Image.open("Tests/images/16_bit_noise.tif")
        self.assertEqual(im.mode, 'I;16')
        extrema = im.getextrema()
        self.assertEqual(extrema, (106, 285))


if __name__ == '__main__':
    unittest.main()