Merge remote-tracking branch 'radarhere/icns'

This commit is contained in:
Andrew Murray 2020-05-11 20:06:48 +10:00
commit 8c37960412
2 changed files with 21 additions and 16 deletions

View File

@@ -200,12 +200,16 @@ attributes before loading the file::
ICNS
^^^^
Pillow reads and (macOS only) writes macOS ``.icns`` files. By default, the
Pillow reads and writes macOS ``.icns`` files. By default, the
largest available icon is read, though you can override this by setting the
:py:attr:`~PIL.Image.Image.size` property before calling
:py:meth:`~PIL.Image.Image.load`. The :py:meth:`~PIL.Image.Image.open` method
sets the following :py:attr:`~PIL.Image.Image.info` property:
.. note::
Prior to version 7.2.0, Pillow could only write ICNS files on macOS.
**sizes**
A list of supported sizes found in this icon file; these are a
3-tuple, ``(width, height, scale)``, where ``scale`` is 2 for a retina

View File

@@ -300,13 +300,12 @@ class IcnsImageFile(ImageFile.ImageFile):
self.load_end()
def to_int(s):
def _to_int(s):
b = s.encode("ascii")
return (b[0] << 24) | (b[1] << 16) | (b[2] << 8) | b[3]
MAGIC = to_int("icns")
HEADER_SIZE = 8
MAGIC = _to_int("icns")
TOC = "TOC "
@@ -326,32 +326,34 @@ def _save(im, fp, filename):
file_size = 0
entries = []
provided_images = {im.width: im for im in im.encoderinfo.get("append_images", [])}
for index, s in enumerate(sizes):
temp = io.BytesIO()
temp_sizes = {s: io.BytesIO() for s in set(sizes)}
for s, temp in temp_sizes.items():
nb = provided_images[s] if s in provided_images else im.resize((s, s))
nb.save(temp, "png")
for index, s in enumerate(sizes):
temp = temp_sizes[s]
file_size += len(temp.getvalue())
entries.append(
{"type": size_str[index], "size": len(temp.getvalue()), "stream": temp}
)
# Header
fp.write(struct.pack("i", MAGIC)[::-1])
fp.write(struct.pack("i", file_size)[::-1])
fp.write(struct.pack("<i", MAGIC)[::-1])
fp.write(struct.pack("<i", file_size)[::-1])
# TOC
toc_size = HEADER_SIZE + (len(entries) * HEADER_SIZE)
fp.write(struct.pack("i", to_int(TOC))[::-1])
fp.write(struct.pack("i", toc_size)[::-1])
toc_size = HEADERSIZE + (len(entries) * HEADERSIZE)
fp.write(struct.pack("<i", _to_int(TOC))[::-1])
fp.write(struct.pack("<i", toc_size)[::-1])
for e in entries:
fp.write(struct.pack("i", to_int(e.get("type")))[::-1])
fp.write(struct.pack("i", HEADER_SIZE + e.get("size"))[::-1])
fp.write(struct.pack("<i", _to_int(e["type"]))[::-1])
fp.write(struct.pack("<i", HEADERSIZE + e["size"])[::-1])
# Data
for index, e in enumerate(entries):
fp.write(struct.pack("i", to_int(e.get("type")))[::-1])
fp.write(struct.pack("i", HEADER_SIZE + e.get("size"))[::-1])
fp.write(e.get("stream").getvalue())
fp.write(struct.pack("<i", _to_int(e["type"]))[::-1])
fp.write(struct.pack("<i", HEADERSIZE + e["size"])[::-1])
fp.write(e["stream"].getvalue())
fp.flush()