Mirror of https://github.com/python-pillow/Pillow.git
Merge pull request #574 from wiredfool/convert_trns
Indexed Transparency handled for conversions between L, RGB, and P modes
commit 6e2075e25d
@@ -4,6 +4,12 @@ Changelog (Pillow)
 2.4.0 (unreleased)
 ------------------
 
+- Indexed Transparency handled for conversions between L, RGB, and P modes. Fixes #510
+  [wiredfool]
+
+- Conversions enabled from RGBA->P, Fixes #544
+  [wiredfool]
+
 - Improved icns support
   [al45tair]
 
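The sketch below is not part of the commit; it is a minimal usage illustration of the behaviour described by the two new changelog entries. The file name "transparent.gif" and the transparency index 0 are placeholders, and the exact converted transparency values depend on the source palette.

from PIL import Image

# Not part of the commit: a minimal usage sketch. "transparent.gif" and the
# transparency index 0 are placeholders; any palette ("P" mode) image will do.
im = Image.open("transparent.gif").convert("P")
im.info["transparency"] = 0                 # treat palette entry 0 as transparent

grey = im.convert("L")                      # info["transparency"] becomes a grey level
rgb = im.convert("RGB")                     # info["transparency"] becomes an (r, g, b) triple

# L/RGB images carrying a transparent colour promote it to an alpha channel:
rgba = rgb.convert("RGBA")

# RGBA -> P is now allowed as well (quantized internally):
p = rgba.convert("P")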
PIL/Image.py (80 lines changed)
@@ -735,21 +735,65 @@ class Image:
             im = self.im.convert_matrix(mode, matrix)
             return self._new(im)
 
+        if mode == "P" and self.mode == "RGBA":
+            return self.quantize(colors)
+
+        trns = None
+        delete_trns = False
+        # transparency handling
+        if "transparency" in self.info and self.info['transparency'] is not None:
+            if self.mode in ('L', 'RGB') and mode == 'RGBA':
+                # Use transparent conversion to promote from transparent
+                # color to an alpha channel.
+                return self._new(self.im.convert_transparent(
+                    mode, self.info['transparency']))
+            elif self.mode in ('L', 'RGB', 'P') and mode in ('L', 'RGB', 'P'):
+                t = self.info['transparency']
+                if isinstance(t, bytes):
+                    # Dragons. This can't be represented by a single color
+                    warnings.warn('Palette images with Transparency expressed '+
+                                  ' in bytes should be converted to RGBA images')
+                    delete_trns = True
+                else:
+                    # get the new transparency color.
+                    # use existing conversions
+                    trns_im = Image()._new(core.new(self.mode, (1,1)))
+                    if self.mode == 'P':
+                        trns_im.putpalette(self.palette)
+                    trns_im.putpixel((0,0), t)
+
+                    if mode in ('L','RGB'):
+                        trns_im = trns_im.convert(mode)
+                    else:
+                        # can't just retrieve the palette number, got to do it
+                        # after quantization.
+                        trns_im = trns_im.convert('RGB')
+                    trns = trns_im.getpixel((0,0))
+
         if mode == "P" and palette == ADAPTIVE:
             im = self.im.quantize(colors)
             new = self._new(im)
             from PIL import ImagePalette
             new.palette = ImagePalette.raw("RGB", new.im.getpalette("RGB"))
+            if delete_trns:
+                # This could possibly happen if we requantize to fewer colors.
+                # The transparency would be totally off in that case.
+                del(new.info['transparency'])
+            if trns is not None:
+                try:
+                    new.info['transparency'] = new.palette.getcolor(trns)
+                except:
+                    # if we can't make a transparent color, don't leave the old
+                    # transparency hanging around to mess us up.
+                    del(new.info['transparency'])
+                    warnings.warn("Couldn't allocate palette entry for transparency")
             return new
 
         # colorspace conversion
         if dither is None:
             dither = FLOYDSTEINBERG
 
-        # Use transparent conversion to promote from transparent color to an alpha channel.
-        if self.mode in ("L", "RGB") and mode == "RGBA" and "transparency" in self.info:
-            return self._new(self.im.convert_transparent(mode, self.info['transparency']))
-
         try:
             im = self.im.convert(mode, dither)
         except ValueError:
@@ -760,9 +804,22 @@ class Image:
             except KeyError:
                 raise ValueError("illegal conversion")
 
-        return self._new(im)
+        new_im = self._new(im)
+        if delete_trns:
+            #crash fail if we leave a bytes transparency in an rgb/l mode.
+            del(new.info['transparency'])
+        if trns is not None:
+            if new_im.mode == 'P':
+                try:
+                    new_im.info['transparency'] = new_im.palette.getcolor(trns)
+                except:
+                    del(new_im.info['transparency'])
+                    warnings.warn("Couldn't allocate palette entry for transparency")
+            else:
+                new_im.info['transparency'] = trns
+        return new_im
 
-    def quantize(self, colors=256, method=0, kmeans=0, palette=None):
+    def quantize(self, colors=256, method=None, kmeans=0, palette=None):
 
         # methods:
         # 0 = median cut
@@ -774,6 +831,17 @@ class Image:
 
         self.load()
 
+        if method is None:
+            # defaults:
+            method = 0
+            if self.mode == 'RGBA':
+                method = 2
+
+        if self.mode == 'RGBA' and method != 2:
+            # Caller specified an invalid mode.
+            raise ValueError('Fast Octree (method == 2) is the ' +
+                             ' only valid method for quantizing RGBA images')
+
         if palette:
             # use palette from reference image
             palette.load()
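Not part of the diff: a brief sketch of the new quantize() defaults shown above, assuming an arbitrary RGBA source image ("rgba_example.png" is a placeholder file name).

from PIL import Image

# Sketch only: for RGBA sources, quantize() now picks the fast octree method.
im = Image.open("rgba_example.png").convert("RGBA")

p = im.quantize()            # method defaults to 2 (fast octree) for RGBA sources
print(p.mode)                # "P"

try:
    im.quantize(method=0)    # median cut is rejected for RGBA sources
except ValueError as err:
    print(err)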
@@ -46,5 +46,67 @@ def test_16bit_workaround():
     im = Image.open('Tests/images/16bit.cropped.tif')
     _test_float_conversion(im.convert('I'))
 
+
+def test_rgba_p():
+    im = lena('RGBA')
+    im.putalpha(lena('L'))
+
+    converted = im.convert('P')
+    comparable = converted.convert('RGBA')
+
+    assert_image_similar(im, comparable, 20)
+
+
+def test_trns_p():
+    im = lena('P')
+    im.info['transparency']=0
+
+    f = tempfile('temp.png')
+
+    l = im.convert('L')
+    assert_equal(l.info['transparency'], 0) # undone
+    assert_no_exception(lambda: l.save(f))
+
+    rgb = im.convert('RGB')
+    assert_equal(rgb.info['transparency'], (0,0,0)) # undone
+    assert_no_exception(lambda: rgb.save(f))
+
+
+def test_trns_l():
+    im = lena('L')
+    im.info['transparency'] = 128
+
+    f = tempfile('temp.png')
+
+    rgb = im.convert('RGB')
+    assert_equal(rgb.info['transparency'], (128,128,128)) # undone
+    assert_no_exception(lambda: rgb.save(f))
+
+    p = im.convert('P')
+    assert_true('transparency' in p.info)
+    assert_no_exception(lambda: p.save(f))
+
+    p = assert_warning(UserWarning,
+                       lambda: im.convert('P', palette = Image.ADAPTIVE))
+    assert_false('transparency' in p.info)
+    assert_no_exception(lambda: p.save(f))
+
+
+def test_trns_RGB():
+    im = lena('RGB')
+    im.info['transparency'] = im.getpixel((0,0))
+
+    f = tempfile('temp.png')
+
+    l = im.convert('L')
+    assert_equal(l.info['transparency'], l.getpixel((0,0))) # undone
+    assert_no_exception(lambda: l.save(f))
+
+    p = im.convert('P')
+    assert_true('transparency' in p.info)
+    assert_no_exception(lambda: p.save(f))
+
+    p = assert_warning(UserWarning,
+                       lambda: im.convert('P', palette = Image.ADAPTIVE))
+    assert_false('transparency' in p.info)
+    assert_no_exception(lambda: p.save(f))
@@ -20,3 +20,8 @@ def test_octree_quantize():
     assert_image(im, "P", im.size)
 
     assert len(im.getcolors()) == 100
+
+def test_rgba_quantize():
+    im = lena('RGBA')
+    assert_no_exception(lambda: im.quantize())
+    assert_exception(Exception, lambda: im.quantize(method=0))
@@ -121,9 +121,9 @@ def assert_no_exception(func):
 def assert_warning(warn_class, func):
     # note: this assert calls func three times!
     import warnings
-    def warn_error(message, category, **options):
+    def warn_error(message, category=UserWarning, **options):
         raise category(message)
-    def warn_ignore(message, category, **options):
+    def warn_ignore(message, category=UserWarning, **options):
         pass
     warn = warnings.warn
     result = None