#
# The Python Imaging Library.
# $Id$
#
# TIFF file handling
#
# TIFF is a flexible, if somewhat aged, image file format originally
# defined by Aldus. Although TIFF supports a wide variety of pixel
# layouts and compression methods, the name doesn't really stand for
# "thousands of incompatible file formats," it just feels that way.
#
# To read TIFF data from a stream, the stream must be seekable. For
# progressive decoding, make sure to use TIFF files where the tag
# directory is placed first in the file.
#
# History:
# 1995-09-01 fl   Created
# 1996-05-04 fl   Handle JPEGTABLES tag
# 1996-05-18 fl   Fixed COLORMAP support
# 1997-01-05 fl   Fixed PREDICTOR support
# 1997-08-27 fl   Added support for rational tags (from Perry Stoll)
# 1998-01-10 fl   Fixed seek/tell (from Jan Blom)
# 1998-07-15 fl   Use private names for internal variables
# 1999-06-13 fl   Rewritten for PIL 1.0 (1.0)
# 2000-10-11 fl   Additional fixes for Python 2.0 (1.1)
# 2001-04-17 fl   Fixed rewind support (seek to frame 0) (1.2)
# 2001-05-12 fl   Added write support for more tags (from Greg Couch) (1.3)
# 2001-12-18 fl   Added workaround for broken Matrox library
# 2002-01-18 fl   Don't mess up if photometric tag is missing (D. Alan Stewart)
# 2003-05-19 fl   Check FILLORDER tag
# 2003-09-26 fl   Added RGBa support
# 2004-02-24 fl   Added DPI support; fixed rational write support
# 2005-02-07 fl   Added workaround for broken Corel Draw 10 files
# 2006-01-09 fl   Added support for float/double tags (from Russell Nelson)
#
# Copyright (c) 1997-2006 by Secret Labs AB. All rights reserved.
# Copyright (c) 1995-1997 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#

import io
import itertools
import logging
import os
import struct
import warnings
from collections.abc import MutableMapping
from fractions import Fraction
from numbers import Number, Rational

from . import Image, ImageFile, ImageOps, ImagePalette, TiffTags
from ._binary import o8
from .TiffTags import TYPES

logger = logging.getLogger(__name__)

# Set these to true to force use of libtiff for reading or writing.
READ_LIBTIFF = False
WRITE_LIBTIFF = False
IFD_LEGACY_API = True
STRIP_SIZE = 65536

II = b"II"  # little-endian (Intel style)
MM = b"MM"  # big-endian (Motorola style)

#
# --------------------------------------------------------------------
# Read TIFF files

# a few tag names, just to make the code below a bit more readable
IMAGEWIDTH = 256
IMAGELENGTH = 257
BITSPERSAMPLE = 258
COMPRESSION = 259
PHOTOMETRIC_INTERPRETATION = 262
FILLORDER = 266
IMAGEDESCRIPTION = 270
STRIPOFFSETS = 273
SAMPLESPERPIXEL = 277
ROWSPERSTRIP = 278
STRIPBYTECOUNTS = 279
X_RESOLUTION = 282
Y_RESOLUTION = 283
PLANAR_CONFIGURATION = 284
RESOLUTION_UNIT = 296
TRANSFERFUNCTION = 301
SOFTWARE = 305
DATE_TIME = 306
ARTIST = 315
PREDICTOR = 317
COLORMAP = 320
TILEWIDTH = 322
TILELENGTH = 323
TILEOFFSETS = 324
TILEBYTECOUNTS = 325
SUBIFD = 330
EXTRASAMPLES = 338
SAMPLEFORMAT = 339
JPEGTABLES = 347
YCBCRSUBSAMPLING = 530
REFERENCEBLACKWHITE = 532
COPYRIGHT = 33432
IPTC_NAA_CHUNK = 33723  # newsphoto properties
PHOTOSHOP_CHUNK = 34377  # photoshop properties
ICCPROFILE = 34675
EXIFIFD = 34665
XMP = 700
JPEGQUALITY = 65537  # pseudo-tag by libtiff

# https://github.com/imagej/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java
IMAGEJ_META_DATA_BYTE_COUNTS = 50838
IMAGEJ_META_DATA = 50839

COMPRESSION_INFO = {
    # Compression => pil compression name
    1: "raw",
    2: "tiff_ccitt",
    3: "group3",
    4: "group4",
    5: "tiff_lzw",
    6: "tiff_jpeg",  # obsolete
    7: "jpeg",
    8: "tiff_adobe_deflate",
    32771: "tiff_raw_16",  # 16-bit padding
    32773: "packbits",
    32809: "tiff_thunderscan",
    32946: "tiff_deflate",
    34676: "tiff_sgilog",
    34677: "tiff_sgilog24",
    34925: "lzma",
    50000: "zstd",
    50001: "webp",
}

COMPRESSION_INFO_REV = {v: k for k, v in COMPRESSION_INFO.items()}

OPEN_INFO = {
    # (ByteOrder, PhotoInterpretation, SampleFormat, FillOrder, BitsPerSample,
    #  ExtraSamples) => mode, rawmode
    (II, 0, (1,), 1, (1,), ()): ("1", "1;I"),
    (MM, 0, (1,), 1, (1,), ()): ("1", "1;I"),
    (II, 0, (1,), 2, (1,), ()): ("1", "1;IR"),
    (MM, 0, (1,), 2, (1,), ()): ("1", "1;IR"),
    (II, 1, (1,), 1, (1,), ()): ("1", "1"),
    (MM, 1, (1,), 1, (1,), ()): ("1", "1"),
    (II, 1, (1,), 2, (1,), ()): ("1", "1;R"),
    (MM, 1, (1,), 2, (1,), ()): ("1", "1;R"),
    (II, 0, (1,), 1, (2,), ()): ("L", "L;2I"),
    (MM, 0, (1,), 1, (2,), ()): ("L", "L;2I"),
    (II, 0, (1,), 2, (2,), ()): ("L", "L;2IR"),
    (MM, 0, (1,), 2, (2,), ()): ("L", "L;2IR"),
    (II, 1, (1,), 1, (2,), ()): ("L", "L;2"),
    (MM, 1, (1,), 1, (2,), ()): ("L", "L;2"),
    (II, 1, (1,), 2, (2,), ()): ("L", "L;2R"),
    (MM, 1, (1,), 2, (2,), ()): ("L", "L;2R"),
    (II, 0, (1,), 1, (4,), ()): ("L", "L;4I"),
    (MM, 0, (1,), 1, (4,), ()): ("L", "L;4I"),
    (II, 0, (1,), 2, (4,), ()): ("L", "L;4IR"),
    (MM, 0, (1,), 2, (4,), ()): ("L", "L;4IR"),
    (II, 1, (1,), 1, (4,), ()): ("L", "L;4"),
    (MM, 1, (1,), 1, (4,), ()): ("L", "L;4"),
    (II, 1, (1,), 2, (4,), ()): ("L", "L;4R"),
    (MM, 1, (1,), 2, (4,), ()): ("L", "L;4R"),
    (II, 0, (1,), 1, (8,), ()): ("L", "L;I"),
    (MM, 0, (1,), 1, (8,), ()): ("L", "L;I"),
    (II, 0, (1,), 2, (8,), ()): ("L", "L;IR"),
    (MM, 0, (1,), 2, (8,), ()): ("L", "L;IR"),
    (II, 1, (1,), 1, (8,), ()): ("L", "L"),
    (MM, 1, (1,), 1, (8,), ()): ("L", "L"),
    (II, 1, (1,), 2, (8,), ()): ("L", "L;R"),
    (MM, 1, (1,), 2, (8,), ()): ("L", "L;R"),
    (II, 1, (1,), 1, (12,), ()): ("I;16", "I;12"),
    (II, 1, (1,), 1, (16,), ()): ("I;16", "I;16"),
    (MM, 1, (1,), 1, (16,), ()): ("I;16B", "I;16B"),
    (II, 1, (2,), 1, (16,), ()): ("I", "I;16S"),
    (MM, 1, (2,), 1, (16,), ()): ("I", "I;16BS"),
    (II, 0, (3,), 1, (32,), ()): ("F", "F;32F"),
    (MM, 0, (3,), 1, (32,), ()): ("F", "F;32BF"),
    (II, 1, (1,), 1, (32,), ()): ("I", "I;32N"),
    (II, 1, (2,), 1, (32,), ()): ("I", "I;32S"),
    (MM, 1, (2,), 1, (32,), ()): ("I", "I;32BS"),
    (II, 1, (3,), 1, (32,), ()): ("F", "F;32F"),
    (MM, 1, (3,), 1, (32,), ()): ("F", "F;32BF"),
    (II, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"),
    (MM, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"),
    (II, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"),
    (MM, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"),
    (II, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"),
    (MM, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"),
    (II, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"),  # missing ExtraSamples
    (MM, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"),  # missing ExtraSamples
    (II, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"),
    (MM, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"),
    (II, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGBX", "RGBXX"),
    (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGBX", "RGBXX"),
    (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGBX", "RGBXXX"),
    (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGBX", "RGBXXX"),
    (II, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"),
    (MM, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"),
    (II, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"),
    (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"),
    (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"),
    (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"),
    (II, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"),
    (MM, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"),
    (II, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"),
    (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"),
    (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"),
    (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"),
    (II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"),  # Corel Draw 10
    (MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"),  # Corel Draw 10
    (II, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16L"),
    (MM, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16B"),
    (II, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16L"),
    (MM, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16B"),
    (II, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGBX", "RGBX;16L"),
    (MM, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGBX", "RGBX;16B"),
    (II, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16L"),
    (MM, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16B"),
    (II, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16L"),
    (MM, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16B"),
    (II, 3, (1,), 1, (1,), ()): ("P", "P;1"),
    (MM, 3, (1,), 1, (1,), ()): ("P", "P;1"),
    (II, 3, (1,), 2, (1,), ()): ("P", "P;1R"),
    (MM, 3, (1,), 2, (1,), ()): ("P", "P;1R"),
    (II, 3, (1,), 1, (2,), ()): ("P", "P;2"),
    (MM, 3, (1,), 1, (2,), ()): ("P", "P;2"),
    (II, 3, (1,), 2, (2,), ()): ("P", "P;2R"),
    (MM, 3, (1,), 2, (2,), ()): ("P", "P;2R"),
    (II, 3, (1,), 1, (4,), ()): ("P", "P;4"),
    (MM, 3, (1,), 1, (4,), ()): ("P", "P;4"),
    (II, 3, (1,), 2, (4,), ()): ("P", "P;4R"),
    (MM, 3, (1,), 2, (4,), ()): ("P", "P;4R"),
    (II, 3, (1,), 1, (8,), ()): ("P", "P"),
    (MM, 3, (1,), 1, (8,), ()): ("P", "P"),
    (II, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"),
    (MM, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"),
    (II, 3, (1,), 2, (8,), ()): ("P", "P;R"),
    (MM, 3, (1,), 2, (8,), ()): ("P", "P;R"),
    (II, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"),
    (MM, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"),
    (II, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"),
    (MM, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"),
    (II, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"),
    (MM, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"),
    (II, 5, (1,), 1, (16, 16, 16, 16), ()): ("CMYK", "CMYK;16L"),
    # JPEG compressed images handled by LibTiff and auto-converted to RGBX
    # Minimal Baseline TIFF requires YCbCr images to have 3 SamplesPerPixel
    (II, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"),
    (MM, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"),
    (II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"),
    (MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"),
}

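# Illustrative example (not part of the original table): an uncompressed,
# little-endian, 8-bit greyscale strip -- photometric interpretation 1,
# sample format (1,), fill order 1, bits per sample (8,), no extra samples --
# maps through OPEN_INFO[(II, 1, (1,), 1, (8,), ())] to the Pillow
# mode/rawmode pair ("L", "L").
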
PREFIXES = [
    b"MM\x00\x2A",  # Valid TIFF header with big-endian byte order
    b"II\x2A\x00",  # Valid TIFF header with little-endian byte order
    b"MM\x2A\x00",  # Invalid TIFF header, assume big-endian
    b"II\x00\x2A",  # Invalid TIFF header, assume little-endian
]


def _accept(prefix):
    return prefix[:4] in PREFIXES

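# For example (illustrative, not from the original module): a little-endian
# TIFF written by this plugin starts with the four bytes b"II\x2a\x00", so
# _accept(b"II\x2a\x00\x08\x00\x00\x00") returns True, while a PNG signature
# such as b"\x89PNG\r\n\x1a\n" is rejected.

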
def _limit_rational(val, max_val):
    inv = abs(val) > 1
    n_d = IFDRational(1 / val if inv else val).limit_rational(max_val)
    return n_d[::-1] if inv else n_d


def _limit_signed_rational(val, max_val, min_val):
    frac = Fraction(val)
    n_d = frac.numerator, frac.denominator

    if min(n_d) < min_val:
        n_d = _limit_rational(val, abs(min_val))

    if max(n_d) > max_val:
        val = Fraction(*n_d)
        n_d = _limit_rational(val, max_val)

    return n_d

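# Worked example (illustrative, not part of the original source): writing the
# value 0.375 as an unsigned RATIONAL goes through
# _limit_rational(0.375, 2 ** 32 - 1), which reduces it via Fraction to the
# (numerator, denominator) pair (3, 8); values with magnitude greater than 1
# are inverted first and the pair is reversed on the way out.
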

##
# Wrapper for TIFF IFDs.

_load_dispatch = {}
_write_dispatch = {}


class IFDRational(Rational):
    """Implements a rational class where 0/0 is a legal value to match
    the in-the-wild use of EXIF rationals.

    e.g., DigitalZoomRatio - 0.00/0.00 indicates that no digital zoom was used
    """

    """ If the denominator is 0, store this as a float('nan'), otherwise store
    as a fractions.Fraction(). Delegate as appropriate

    """

    __slots__ = ("_numerator", "_denominator", "_val")

    def __init__(self, value, denominator=1):
        """
        :param value: either an integer numerator, a
        float/rational/other number, or an IFDRational
        :param denominator: Optional integer denominator
        """
        if isinstance(value, IFDRational):
            self._numerator = value.numerator
            self._denominator = value.denominator
            self._val = value._val
            return

        if isinstance(value, Fraction):
            self._numerator = value.numerator
            self._denominator = value.denominator
        else:
            self._numerator = value
            self._denominator = denominator

        if denominator == 0:
            self._val = float("nan")
        elif denominator == 1:
            self._val = Fraction(value)
        else:
            self._val = Fraction(value, denominator)

    @property
    def numerator(a):
        return a._numerator

    @property
    def denominator(a):
        return a._denominator

    def limit_rational(self, max_denominator):
        """

        :param max_denominator: Integer, the maximum denominator value
        :returns: Tuple of (numerator, denominator)
        """

        if self.denominator == 0:
            return (self.numerator, self.denominator)

        f = self._val.limit_denominator(max_denominator)
        return (f.numerator, f.denominator)

    def __repr__(self):
        return str(float(self._val))

    def __hash__(self):
        return self._val.__hash__()

    def __eq__(self, other):
        val = self._val
        if isinstance(other, IFDRational):
            other = other._val
        if isinstance(other, float):
            val = float(val)
        return val == other

    def __getstate__(self):
        return [self._val, self._numerator, self._denominator]

    def __setstate__(self, state):
        IFDRational.__init__(self, 0)
        _val, _numerator, _denominator = state
        self._val = _val
        self._numerator = _numerator
        self._denominator = _denominator

    def _delegate(op):
        def delegate(self, *args):
            return getattr(self._val, op)(*args)

        return delegate

    """ a = ['add','radd', 'sub', 'rsub', 'mul', 'rmul',
             'truediv', 'rtruediv', 'floordiv', 'rfloordiv',
             'mod','rmod', 'pow','rpow', 'pos', 'neg',
             'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'bool',
             'ceil', 'floor', 'round']
        print("\n".join("__%s__ = _delegate('__%s__')" % (s,s) for s in a))
        """

    __add__ = _delegate("__add__")
    __radd__ = _delegate("__radd__")
    __sub__ = _delegate("__sub__")
    __rsub__ = _delegate("__rsub__")
    __mul__ = _delegate("__mul__")
    __rmul__ = _delegate("__rmul__")
    __truediv__ = _delegate("__truediv__")
    __rtruediv__ = _delegate("__rtruediv__")
    __floordiv__ = _delegate("__floordiv__")
    __rfloordiv__ = _delegate("__rfloordiv__")
    __mod__ = _delegate("__mod__")
    __rmod__ = _delegate("__rmod__")
    __pow__ = _delegate("__pow__")
    __rpow__ = _delegate("__rpow__")
    __pos__ = _delegate("__pos__")
    __neg__ = _delegate("__neg__")
    __abs__ = _delegate("__abs__")
    __trunc__ = _delegate("__trunc__")
    __lt__ = _delegate("__lt__")
    __gt__ = _delegate("__gt__")
    __le__ = _delegate("__le__")
    __ge__ = _delegate("__ge__")
    __bool__ = _delegate("__bool__")
    __ceil__ = _delegate("__ceil__")
    __floor__ = _delegate("__floor__")
    __round__ = _delegate("__round__")


class ImageFileDirectory_v2(MutableMapping):
    """This class represents a TIFF tag directory. To speed things up, we
    don't decode tags unless they're asked for.

    Exposes a dictionary interface of the tags in the directory::

        ifd = ImageFileDirectory_v2()
        ifd[key] = 'Some Data'
        ifd.tagtype[key] = TiffTags.ASCII
        print(ifd[key])
        'Some Data'

    Individual values are returned as the strings or numbers, sequences are
    returned as tuples of the values.

    The tiff metadata type of each item is stored in a dictionary of
    tag types in
    :attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v2.tagtype`. The types
    are read from a tiff file, guessed from the type added, or added
    manually.

    Data Structures:

        * ``self.tagtype = {}``

          * Key: numerical TIFF tag number
          * Value: integer corresponding to the data type from
            :py:data:`.TiffTags.TYPES`

          .. versionadded:: 3.0.0

    'Internal' data structures:

        * ``self._tags_v2 = {}``

          * Key: numerical TIFF tag number
          * Value: decoded data, as tuple for multiple values

        * ``self._tagdata = {}``

          * Key: numerical TIFF tag number
          * Value: undecoded byte string from file

        * ``self._tags_v1 = {}``

          * Key: numerical TIFF tag number
          * Value: decoded data in the v1 format

    Tags will be found in the private attributes ``self._tagdata``, and in
    ``self._tags_v2`` once decoded.

    ``self.legacy_api`` is a value for internal use, and shouldn't be changed
    from outside code. In cooperation with
    :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`, if ``legacy_api``
    is true, then decoded tags will be populated into both ``_tags_v1`` and
    ``_tags_v2``. ``_tags_v2`` will be used if this IFD is used in the TIFF
    save routine. Tags should be read from ``_tags_v1`` if
    ``legacy_api == true``.

    """

    def __init__(self, ifh=b"II\052\0\0\0\0\0", prefix=None, group=None):
        """Initialize an ImageFileDirectory.

        To construct an ImageFileDirectory from a real file, pass the 8-byte
        magic header to the constructor. To only set the endianness, pass it
        as the 'prefix' keyword argument.

        :param ifh: One of the accepted magic headers (cf. PREFIXES); also sets
              endianness.
        :param prefix: Override the endianness of the file.
        """
        if ifh[:4] not in PREFIXES:
            raise SyntaxError(f"not a TIFF file (header {repr(ifh)} not valid)")
        self._prefix = prefix if prefix is not None else ifh[:2]
        if self._prefix == MM:
            self._endian = ">"
        elif self._prefix == II:
            self._endian = "<"
        else:
            raise SyntaxError("not a TIFF IFD")
        self.group = group
        self.tagtype = {}
        """ Dictionary of tag types """
        self.reset()
        (self.next,) = self._unpack("L", ifh[4:])
        self._legacy_api = False

    prefix = property(lambda self: self._prefix)
    offset = property(lambda self: self._offset)
    legacy_api = property(lambda self: self._legacy_api)

    @legacy_api.setter
    def legacy_api(self, value):
        raise Exception("Not allowing setting of legacy api")

    def reset(self):
        self._tags_v1 = {}  # will remain empty if legacy_api is false
        self._tags_v2 = {}  # main tag storage
        self._tagdata = {}
        self.tagtype = {}  # added 2008-06-05 by Florian Hoech
        self._next = None
        self._offset = None

    def __str__(self):
        return str(dict(self))

    def named(self):
        """
        :returns: dict of name|key: value

        Returns the complete tag dictionary, with named tags where possible.
        """
        return {
            TiffTags.lookup(code, self.group).name: value
            for code, value in self.items()
        }

    def __len__(self):
        return len(set(self._tagdata) | set(self._tags_v2))

    def __getitem__(self, tag):
        if tag not in self._tags_v2:  # unpack on the fly
            data = self._tagdata[tag]
            typ = self.tagtype[tag]
            size, handler = self._load_dispatch[typ]
            self[tag] = handler(self, data, self.legacy_api)  # check type
        val = self._tags_v2[tag]
        if self.legacy_api and not isinstance(val, (tuple, bytes)):
            val = (val,)
        return val

    def __contains__(self, tag):
        return tag in self._tags_v2 or tag in self._tagdata

    def __setitem__(self, tag, value):
        self._setitem(tag, value, self.legacy_api)

    def _setitem(self, tag, value, legacy_api):
        basetypes = (Number, bytes, str)

        info = TiffTags.lookup(tag, self.group)
        values = [value] if isinstance(value, basetypes) else value

        if tag not in self.tagtype:
            if info.type:
                self.tagtype[tag] = info.type
            else:
                self.tagtype[tag] = TiffTags.UNDEFINED
                if all(isinstance(v, IFDRational) for v in values):
                    self.tagtype[tag] = (
                        TiffTags.RATIONAL
                        if all(v >= 0 for v in values)
                        else TiffTags.SIGNED_RATIONAL
                    )
                elif all(isinstance(v, int) for v in values):
                    if all(0 <= v < 2 ** 16 for v in values):
                        self.tagtype[tag] = TiffTags.SHORT
                    elif all(-(2 ** 15) < v < 2 ** 15 for v in values):
                        self.tagtype[tag] = TiffTags.SIGNED_SHORT
                    else:
                        self.tagtype[tag] = (
                            TiffTags.LONG
                            if all(v >= 0 for v in values)
                            else TiffTags.SIGNED_LONG
                        )
                elif all(isinstance(v, float) for v in values):
                    self.tagtype[tag] = TiffTags.DOUBLE
                elif all(isinstance(v, str) for v in values):
                    self.tagtype[tag] = TiffTags.ASCII
                elif all(isinstance(v, bytes) for v in values):
                    self.tagtype[tag] = TiffTags.BYTE

        if self.tagtype[tag] == TiffTags.UNDEFINED:
            values = [
                v.encode("ascii", "replace") if isinstance(v, str) else v
                for v in values
            ]
        elif self.tagtype[tag] == TiffTags.RATIONAL:
            values = [float(v) if isinstance(v, int) else v for v in values]

        is_ifd = self.tagtype[tag] == TiffTags.LONG and isinstance(values, dict)
        if not is_ifd:
            values = tuple(info.cvt_enum(value) for value in values)

        dest = self._tags_v1 if legacy_api else self._tags_v2

        # Three branches:
        # Spec'd length == 1, Actual length 1, store as element
        # Spec'd length == 1, Actual > 1, Warn and truncate. Formerly barfed.
        # No Spec, Actual length 1, Formerly (<4.2) returned a 1 element tuple.
        # Don't mess with the legacy api, since it's frozen.
        if not is_ifd and (
            (info.length == 1)
            or self.tagtype[tag] == TiffTags.BYTE
            or (info.length is None and len(values) == 1 and not legacy_api)
        ):
            # Don't mess with the legacy api, since it's frozen.
            if legacy_api and self.tagtype[tag] in [
                TiffTags.RATIONAL,
                TiffTags.SIGNED_RATIONAL,
            ]:  # rationals
                values = (values,)
            try:
                (dest[tag],) = values
            except ValueError:
                # We've got a builtin tag with 1 expected entry
                warnings.warn(
                    f"Metadata Warning, tag {tag} had too many entries: "
                    f"{len(values)}, expected 1"
                )
                dest[tag] = values[0]

        else:
            # Spec'd length > 1 or undefined
            # Unspec'd, and length > 1
            dest[tag] = values

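    # Illustrative note (not in the original source): for a known tag such as
    # X_RESOLUTION, the type comes from TiffTags.lookup(), so assigning an int
    # like 72 stores it as a RATIONAL and converts it to float(72) above; only
    # tags without a registered type fall back to the guessing logic.
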
    def __delitem__(self, tag):
        self._tags_v2.pop(tag, None)
        self._tags_v1.pop(tag, None)
        self._tagdata.pop(tag, None)

    def __iter__(self):
        return iter(set(self._tagdata) | set(self._tags_v2))

    def _unpack(self, fmt, data):
        return struct.unpack(self._endian + fmt, data)

    def _pack(self, fmt, *values):
        return struct.pack(self._endian + fmt, *values)

    def _register_loader(idx, size):
        def decorator(func):
            from .TiffTags import TYPES

            if func.__name__.startswith("load_"):
                TYPES[idx] = func.__name__[5:].replace("_", " ")
            _load_dispatch[idx] = size, func  # noqa: F821
            return func

        return decorator

    def _register_writer(idx):
        def decorator(func):
            _write_dispatch[idx] = func  # noqa: F821
            return func

        return decorator

    def _register_basic(idx_fmt_name):
        from .TiffTags import TYPES

        idx, fmt, name = idx_fmt_name
        TYPES[idx] = name
        size = struct.calcsize("=" + fmt)
        _load_dispatch[idx] = (  # noqa: F821
            size,
            lambda self, data, legacy_api=True: (
                self._unpack(f"{len(data) // size}{fmt}", data)
            ),
        )
        _write_dispatch[idx] = lambda self, *values: (  # noqa: F821
            b"".join(self._pack(fmt, value) for value in values)
        )

    list(
        map(
            _register_basic,
            [
                (TiffTags.SHORT, "H", "short"),
                (TiffTags.LONG, "L", "long"),
                (TiffTags.SIGNED_BYTE, "b", "signed byte"),
                (TiffTags.SIGNED_SHORT, "h", "signed short"),
                (TiffTags.SIGNED_LONG, "l", "signed long"),
                (TiffTags.FLOAT, "f", "float"),
                (TiffTags.DOUBLE, "d", "double"),
                (TiffTags.IFD, "L", "long"),
            ],
        )
    )

    @_register_loader(1, 1)  # Basic type, except for the legacy API.
    def load_byte(self, data, legacy_api=True):
        return data

    @_register_writer(1)  # Basic type, except for the legacy API.
    def write_byte(self, data):
        return data

    @_register_loader(2, 1)
    def load_string(self, data, legacy_api=True):
        if data.endswith(b"\0"):
            data = data[:-1]
        return data.decode("latin-1", "replace")

    @_register_writer(2)
    def write_string(self, value):
        # remerge of https://github.com/python-pillow/Pillow/pull/1416
        return b"" + value.encode("ascii", "replace") + b"\0"

    @_register_loader(5, 8)
    def load_rational(self, data, legacy_api=True):
        vals = self._unpack(f"{len(data) // 4}L", data)

        def combine(a, b):
            return (a, b) if legacy_api else IFDRational(a, b)

        return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2]))

    @_register_writer(5)
    def write_rational(self, *values):
        return b"".join(
            self._pack("2L", *_limit_rational(frac, 2 ** 32 - 1)) for frac in values
        )

    @_register_loader(7, 1)
    def load_undefined(self, data, legacy_api=True):
        return data

    @_register_writer(7)
    def write_undefined(self, value):
        return value

    @_register_loader(10, 8)
    def load_signed_rational(self, data, legacy_api=True):
        vals = self._unpack(f"{len(data) // 4}l", data)

        def combine(a, b):
            return (a, b) if legacy_api else IFDRational(a, b)

        return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2]))

    @_register_writer(10)
    def write_signed_rational(self, *values):
        return b"".join(
            self._pack("2l", *_limit_signed_rational(frac, 2 ** 31 - 1, -(2 ** 31)))
            for frac in values
        )

    def _ensure_read(self, fp, size):
        ret = fp.read(size)
        if len(ret) != size:
            raise OSError(
                "Corrupt EXIF data. "
                f"Expecting to read {size} bytes but only got {len(ret)}. "
            )
        return ret

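    # Illustrative note (not in the original source): with a little-endian
    # directory, load_rational(b"\x01\x00\x00\x00\x02\x00\x00\x00") yields
    # (IFDRational(1, 2),), or ((1, 2),) under the legacy API, since each
    # RATIONAL is stored on disk as a pair of unsigned 32-bit longs.
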
    def load(self, fp):

        self.reset()
        self._offset = fp.tell()

        try:
            for i in range(self._unpack("H", self._ensure_read(fp, 2))[0]):
                tag, typ, count, data = self._unpack("HHL4s", self._ensure_read(fp, 12))

                tagname = TiffTags.lookup(tag, self.group).name
                typname = TYPES.get(typ, "unknown")
                msg = f"tag: {tagname} ({tag}) - type: {typname} ({typ})"

                try:
                    unit_size, handler = self._load_dispatch[typ]
                except KeyError:
                    logger.debug(msg + f" - unsupported type {typ}")
                    continue  # ignore unsupported type
                size = count * unit_size
                if size > 4:
                    here = fp.tell()
                    (offset,) = self._unpack("L", data)
                    msg += f" Tag Location: {here} - Data Location: {offset}"
                    fp.seek(offset)
                    data = ImageFile._safe_read(fp, size)
                    fp.seek(here)
                else:
                    data = data[:size]

                if len(data) != size:
                    warnings.warn(
                        "Possibly corrupt EXIF data. "
                        f"Expecting to read {size} bytes but only got {len(data)}."
                        f" Skipping tag {tag}"
                    )
                    logger.debug(msg)
                    continue

                if not data:
                    logger.debug(msg)
                    continue

                self._tagdata[tag] = data
                self.tagtype[tag] = typ

                msg += " - value: " + (
                    "<table: %d bytes>" % size if size > 32 else repr(data)
                )
                logger.debug(msg)

            (self.next,) = self._unpack("L", self._ensure_read(fp, 4))
        except OSError as msg:
            warnings.warn(str(msg))
            return

    def tobytes(self, offset=0):
        # FIXME What about tagdata?
        result = self._pack("H", len(self._tags_v2))

        entries = []
        offset = offset + len(result) + len(self._tags_v2) * 12 + 4
        stripoffsets = None

        # pass 1: convert tags to binary format
        # always write tags in ascending order
        for tag, value in sorted(self._tags_v2.items()):
            if tag == STRIPOFFSETS:
                stripoffsets = len(entries)
            typ = self.tagtype.get(tag)
            logger.debug(f"Tag {tag}, Type: {typ}, Value: {repr(value)}")
            is_ifd = typ == TiffTags.LONG and isinstance(value, dict)
            if is_ifd:
                if self._endian == "<":
                    ifh = b"II\x2A\x00\x08\x00\x00\x00"
                else:
                    ifh = b"MM\x00\x2A\x00\x00\x00\x08"
                ifd = ImageFileDirectory_v2(ifh, group=tag)
                values = self._tags_v2[tag]
                for ifd_tag, ifd_value in values.items():
                    ifd[ifd_tag] = ifd_value
                data = ifd.tobytes(offset)
            else:
                values = value if isinstance(value, tuple) else (value,)
                data = self._write_dispatch[typ](self, *values)

            tagname = TiffTags.lookup(tag, self.group).name
            typname = "ifd" if is_ifd else TYPES.get(typ, "unknown")
            msg = f"save: {tagname} ({tag}) - type: {typname} ({typ})"
            msg += " - value: " + (
                "<table: %d bytes>" % len(data) if len(data) >= 16 else str(values)
            )
            logger.debug(msg)

            # count is sum of lengths for string and arbitrary data
            if is_ifd:
                count = 1
            elif typ in [TiffTags.BYTE, TiffTags.ASCII, TiffTags.UNDEFINED]:
                count = len(data)
            else:
                count = len(values)
            # figure out if data fits into the entry
            if len(data) <= 4:
                entries.append((tag, typ, count, data.ljust(4, b"\0"), b""))
            else:
                entries.append((tag, typ, count, self._pack("L", offset), data))
                offset += (len(data) + 1) // 2 * 2  # pad to word

        # update strip offset data to point beyond auxiliary data
        if stripoffsets is not None:
            tag, typ, count, value, data = entries[stripoffsets]
            if data:
                raise NotImplementedError("multistrip support not yet implemented")
            value = self._pack("L", self._unpack("L", value)[0] + offset)
            entries[stripoffsets] = tag, typ, count, value, data

        # pass 2: write entries to file
        for tag, typ, count, value, data in entries:
            logger.debug(f"{tag} {typ} {count} {repr(value)} {repr(data)}")
            result += self._pack("HHL4s", tag, typ, count, value)

        # -- overwrite here for multi-page --
        result += b"\0\0\0\0"  # end of entries

        # pass 3: write auxiliary data to file
        for tag, typ, count, value, data in entries:
            result += data
            if len(data) & 1:
                result += b"\0"

        return result

    def save(self, fp):

        if fp.tell() == 0:  # skip TIFF header on subsequent pages
            # tiff header -- PIL always starts the first IFD at offset 8
            fp.write(self._prefix + self._pack("HL", 42, 8))

        offset = fp.tell()
        result = self.tobytes(offset)
        fp.write(result)
        return offset + len(result)

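# Illustrative note (not in the original source): each directory entry written
# by tobytes() uses the standard 12-byte TIFF layout packed as "HHL4s" -- a
# 16-bit tag, a 16-bit type, a 32-bit count, and 4 bytes holding either the
# value itself or an offset to data stored after the entry table.
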
ImageFileDirectory_v2._load_dispatch = _load_dispatch
ImageFileDirectory_v2._write_dispatch = _write_dispatch
for idx, name in TYPES.items():
    name = name.replace(" ", "_")
    setattr(ImageFileDirectory_v2, "load_" + name, _load_dispatch[idx][1])
    setattr(ImageFileDirectory_v2, "write_" + name, _write_dispatch[idx])
del _load_dispatch, _write_dispatch, idx, name


# Legacy ImageFileDirectory support.
class ImageFileDirectory_v1(ImageFileDirectory_v2):
    """This class represents the **legacy** interface to a TIFF tag directory.

    Exposes a dictionary interface of the tags in the directory::

        ifd = ImageFileDirectory_v1()
        ifd[key] = 'Some Data'
        ifd.tagtype[key] = TiffTags.ASCII
        print(ifd[key])
        ('Some Data',)

    Also contains a dictionary of tag types as read from the tiff image file,
    :attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v1.tagtype`.

    Values are returned as a tuple.

    .. deprecated:: 3.0.0
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._legacy_api = True

    tags = property(lambda self: self._tags_v1)
    tagdata = property(lambda self: self._tagdata)

    # defined in ImageFileDirectory_v2
    tagtype: dict
    """Dictionary of tag types"""

    @classmethod
    def from_v2(cls, original):
        """Returns an
        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`
        instance with the same data as is contained in the original
        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`
        instance.

        :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`

        """

        ifd = cls(prefix=original.prefix)
        ifd._tagdata = original._tagdata
        ifd.tagtype = original.tagtype
        ifd.next = original.next  # an indicator for multipage tiffs
        return ifd

    def to_v2(self):
        """Returns an
        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`
        instance with the same data as is contained in the original
        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`
        instance.

        :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`

        """

        ifd = ImageFileDirectory_v2(prefix=self.prefix)
        ifd._tagdata = dict(self._tagdata)
        ifd.tagtype = dict(self.tagtype)
        ifd._tags_v2 = dict(self._tags_v2)
        return ifd

    def __contains__(self, tag):
        return tag in self._tags_v1 or tag in self._tagdata

    def __len__(self):
        return len(set(self._tagdata) | set(self._tags_v1))

    def __iter__(self):
        return iter(set(self._tagdata) | set(self._tags_v1))

    def __setitem__(self, tag, value):
        for legacy_api in (False, True):
            self._setitem(tag, value, legacy_api)

    def __getitem__(self, tag):
        if tag not in self._tags_v1:  # unpack on the fly
            data = self._tagdata[tag]
            typ = self.tagtype[tag]
            size, handler = self._load_dispatch[typ]
            for legacy in (False, True):
                self._setitem(tag, handler(self, data, legacy), legacy)
        val = self._tags_v1[tag]
        if not isinstance(val, (tuple, bytes)):
            val = (val,)
        return val


# undone -- switch this pointer when IFD_LEGACY_API == False
ImageFileDirectory = ImageFileDirectory_v1


##
# Image plugin for TIFF files.


class TiffImageFile(ImageFile.ImageFile):

    format = "TIFF"
    format_description = "Adobe TIFF"
    _close_exclusive_fp_after_loading = False

    def __init__(self, fp=None, filename=None):
        self.tag_v2 = None
        """ Image file directory (tag dictionary) """

        self.tag = None
        """ Legacy tag entries """

        super().__init__(fp, filename)

    def _open(self):
        """Open the first image in a TIFF file"""

        # Header
        ifh = self.fp.read(8)

        self.tag_v2 = ImageFileDirectory_v2(ifh)

        # legacy IFD entries will be filled in later
        self.ifd = None

        # setup frame pointers
        self.__first = self.__next = self.tag_v2.next
        self.__frame = -1
        self.__fp = self.fp
        self._frame_pos = []
        self._n_frames = None

        logger.debug("*** TiffImageFile._open ***")
        logger.debug(f"- __first: {self.__first}")
        logger.debug(f"- ifh: {repr(ifh)}")  # Use repr to avoid str(bytes)

        # and load the first frame
        self._seek(0)

    @property
    def n_frames(self):
        if self._n_frames is None:
            current = self.tell()
            self._seek(len(self._frame_pos))
            while self._n_frames is None:
                self._seek(self.tell() + 1)
            self.seek(current)
        return self._n_frames

    def seek(self, frame):
        """Select a given frame as current image"""
        if not self._seek_check(frame):
            return
        self._seek(frame)
        # Create a new core image object on second and
        # subsequent frames in the image. Image may be
        # different size/mode.
        Image._decompression_bomb_check(self.size)
        self.im = Image.core.new(self.mode, self.size)

    def _seek(self, frame):
        self.fp = self.__fp

        # reset buffered io handle in case fp
        # was passed to libtiff, invalidating the buffer
        self.fp.tell()

        while len(self._frame_pos) <= frame:
            if not self.__next:
                raise EOFError("no more images in TIFF file")
            logger.debug(
                f"Seeking to frame {frame}, on frame {self.__frame}, "
                f"__next {self.__next}, location: {self.fp.tell()}"
            )
            self.fp.seek(self.__next)
            self._frame_pos.append(self.__next)
            logger.debug("Loading tags, location: %s" % self.fp.tell())
            self.tag_v2.load(self.fp)
            if self.tag_v2.next in self._frame_pos:
                # This IFD has already been processed
                # Declare this to be the end of the image
                self.__next = 0
            else:
                self.__next = self.tag_v2.next
            if self.__next == 0:
                self._n_frames = frame + 1
            if len(self._frame_pos) == 1:
                self.is_animated = self.__next != 0
            self.__frame += 1
        self.fp.seek(self._frame_pos[frame])
        self.tag_v2.load(self.fp)
        # fill the legacy tag/ifd entries
        self.tag = self.ifd = ImageFileDirectory_v1.from_v2(self.tag_v2)
        self.__frame = frame
        self._setup()

    def tell(self):
        """Return the current frame number"""
        return self.__frame

    def getxmp(self):
        """
        Returns a dictionary containing the XMP tags.
        Requires defusedxml to be installed.

        :returns: XMP tags in a dictionary.
        """
        return self._getxmp(self.tag_v2[700]) if 700 in self.tag_v2 else {}

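    # Illustrative sketch (not part of the plugin API): tag 700 holds the raw
    # XMP packet, so a caller could inspect the parsed metadata roughly like:
    #
    #     with Image.open("tagged.tif") as im:   # hypothetical file name
    #         xmp = im.getxmp()                  # {} when the file carries no XMP
    #
    # Parsing is delegated to the shared _getxmp helper, which needs defusedxml.
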
    def load(self):
        if self.tile and self.use_load_libtiff:
            return self._load_libtiff()
        return super().load()

    def load_end(self):
        if self._tile_orientation:
            method = {
                2: Image.FLIP_LEFT_RIGHT,
                3: Image.ROTATE_180,
                4: Image.FLIP_TOP_BOTTOM,
                5: Image.TRANSPOSE,
                6: Image.ROTATE_270,
                7: Image.TRANSVERSE,
                8: Image.ROTATE_90,
            }.get(self._tile_orientation)
            if method is not None:
                self.im = self.im.transpose(method)
                self._size = self.im.size

        # allow closing if we're on the first frame, there's no next
        # This is the ImageFile.load path only, libtiff specific below.
        if not self.is_animated:
            self._close_exclusive_fp_after_loading = True

            # reset buffered io handle in case fp
            # was passed to libtiff, invalidating the buffer
            self.fp.tell()

            # load IFD data from fp before it is closed
            exif = self.getexif()
            for key in TiffTags.TAGS_V2_GROUPS.keys():
                if key not in exif:
                    continue
                exif.get_ifd(key)

    def _load_libtiff(self):
        """Overload method triggered when we detect a compressed tiff

        Calls out to libtiff"""

        Image.Image.load(self)

        self.load_prepare()

        if not len(self.tile) == 1:
            raise OSError("Not exactly one tile")

        # (self._compression, (extents tuple),
        #    0, (rawmode, self._compression, fp))
        extents = self.tile[0][1]
        args = list(self.tile[0][3])

        # To be nice on memory footprint, if there's a
        # file descriptor, use that instead of reading
        # into a string in python.
        # libtiff closes the file descriptor, so pass in a dup.
        try:
            fp = hasattr(self.fp, "fileno") and os.dup(self.fp.fileno())
            # flush the file descriptor, prevents error on pypy 2.4+
            # should also eliminate the need for fp.tell
            # in _seek
            if hasattr(self.fp, "flush"):
                self.fp.flush()
        except OSError:
            # io.BytesIO has a fileno, but it returns an OSError if
            # it doesn't use a file descriptor.
            fp = False

        if fp:
            args[2] = fp

        decoder = Image._getdecoder(
            self.mode, "libtiff", tuple(args), self.decoderconfig
        )
        try:
            decoder.setimage(self.im, extents)
        except ValueError as e:
            raise OSError("Couldn't set the image") from e

        close_self_fp = self._exclusive_fp and not self.is_animated
        if hasattr(self.fp, "getvalue"):
            # We've got a stringio like thing passed in. Yay for all in memory.
            # The decoder needs the entire file in one shot, so there's not
            # a lot we can do here other than give it the entire file,
            # unless we could do something like get the address of the
            # underlying string for stringio.
            #
            # Rearranging for supporting byteio items, since they have a fileno
            # that returns an OSError if there's no underlying fp. Easier to
            # deal with here by reordering.
            logger.debug("have getvalue. just sending in a string from getvalue")
            n, err = decoder.decode(self.fp.getvalue())
        elif fp:
            # we've got an actual file on disk, pass in the fp.
            logger.debug("have fileno, calling fileno version of the decoder.")
            if not close_self_fp:
                self.fp.seek(0)
            # 4 bytes, otherwise the trace might error out
            n, err = decoder.decode(b"fpfp")
        else:
            # we have something else.
            logger.debug("don't have fileno or getvalue. just reading")
            self.fp.seek(0)
            # UNDONE -- so much for that buffer size thing.
            n, err = decoder.decode(self.fp.read())

        self.tile = []
        self.readonly = 0

        self.load_end()

        # libtiff closed the fp it was given, so close self.fp too, if possible
        if close_self_fp:
            self.fp.close()
            self.fp = None  # might be shared

        if err < 0:
            raise OSError(err)

        return Image.Image.load(self)

    def _setup(self):
        """Setup this image object based on current tags"""

        if 0xBC01 in self.tag_v2:
            raise OSError("Windows Media Photo files not yet supported")

        # extract relevant tags
        self._compression = COMPRESSION_INFO[self.tag_v2.get(COMPRESSION, 1)]
        self._planar_configuration = self.tag_v2.get(PLANAR_CONFIGURATION, 1)

        # photometric is a required tag, but not everyone is reading
        # the specification
        photo = self.tag_v2.get(PHOTOMETRIC_INTERPRETATION, 0)

        # old style jpeg compression images most certainly are YCbCr
        if self._compression == "tiff_jpeg":
            photo = 6

        fillorder = self.tag_v2.get(FILLORDER, 1)

        logger.debug("*** Summary ***")
        logger.debug(f"- compression: {self._compression}")
        logger.debug(f"- photometric_interpretation: {photo}")
        logger.debug(f"- planar_configuration: {self._planar_configuration}")
        logger.debug(f"- fill_order: {fillorder}")
        logger.debug(f"- YCbCr subsampling: {self.tag.get(530)}")

        # size
        xsize = int(self.tag_v2.get(IMAGEWIDTH))
        ysize = int(self.tag_v2.get(IMAGELENGTH))
        self._size = xsize, ysize

        logger.debug(f"- size: {self.size}")

        sampleFormat = self.tag_v2.get(SAMPLEFORMAT, (1,))
        if len(sampleFormat) > 1 and max(sampleFormat) == min(sampleFormat) == 1:
            # SAMPLEFORMAT is properly per band, so an RGB image will
            # be (1, 1, 1). But, we don't support per band pixel types,
            # and anything more than one band is a uint8. So, just
            # take the first element. Revisit this if adding support
            # for more exotic images.
            sampleFormat = (1,)

        bps_tuple = self.tag_v2.get(BITSPERSAMPLE, (1,))
        extra_tuple = self.tag_v2.get(EXTRASAMPLES, ())
        if photo in (2, 6, 8):  # RGB, YCbCr, LAB
            bps_count = 3
        elif photo == 5:  # CMYK
            bps_count = 4
        else:
            bps_count = 1
        bps_count += len(extra_tuple)
        # Some files have only one value in bps_tuple,
        # while they should have more. Fix it
        if bps_count > len(bps_tuple) and len(bps_tuple) == 1:
            bps_tuple = bps_tuple * bps_count

        samplesPerPixel = self.tag_v2.get(
            SAMPLESPERPIXEL,
            3 if self._compression == "tiff_jpeg" and photo in (2, 6) else 1,
        )
        if len(bps_tuple) != samplesPerPixel:
            raise SyntaxError("unknown data organization")

        # mode: check photometric interpretation and bits per pixel
        key = (
            self.tag_v2.prefix,
            photo,
            sampleFormat,
            fillorder,
            bps_tuple,
            extra_tuple,
        )
        logger.debug(f"format key: {key}")
        try:
            self.mode, rawmode = OPEN_INFO[key]
        except KeyError as e:
            logger.debug("- unsupported format")
            raise SyntaxError("unknown pixel mode") from e

        logger.debug(f"- raw mode: {rawmode}")
        logger.debug(f"- pil mode: {self.mode}")

        self.info["compression"] = self._compression

        xres = self.tag_v2.get(X_RESOLUTION, 1)
        yres = self.tag_v2.get(Y_RESOLUTION, 1)

        if xres and yres:
            resunit = self.tag_v2.get(RESOLUTION_UNIT)
            if resunit == 2:  # dots per inch
                self.info["dpi"] = (xres, yres)
            elif resunit == 3:  # dots per centimeter. convert to dpi
                self.info["dpi"] = (xres * 2.54, yres * 2.54)
            elif resunit is None:  # used to default to 1, but now defaults to 2
                self.info["dpi"] = (xres, yres)
                # For backward compatibility,
                # we also preserve the old behavior
                self.info["resolution"] = xres, yres
            else:  # No absolute unit of measurement
                self.info["resolution"] = xres, yres

        # build tile descriptors
        x = y = layer = 0
        self.tile = []
        self.use_load_libtiff = READ_LIBTIFF or self._compression != "raw"
        if self.use_load_libtiff:
            # Decoder expects entire file as one tile.
            # There's a buffer size limit in load (64k)
            # so large g4 images will fail if we use that
            # function.
            #
            # Setup the one tile for the whole image, then
            # use the _load_libtiff function.

            # libtiff handles the fillmode for us, so 1;IR should
            # actually be 1;I. Including the R double reverses the
            # bits, so stripes of the image are reversed.  See
            # https://github.com/python-pillow/Pillow/issues/279
            if fillorder == 2:
                # Replace fillorder with fillorder=1
                key = key[:3] + (1,) + key[4:]
                logger.debug(f"format key: {key}")
                # this should always work, since all the
                # fillorder==2 modes have a corresponding
                # fillorder=1 mode
                self.mode, rawmode = OPEN_INFO[key]
            # libtiff always returns the bytes in native order.
            # we're expecting image byte order. So, if the rawmode
            # contains I;16, we need to convert from native to image
            # byte order.
            if rawmode == "I;16":
                rawmode = "I;16N"
            if ";16B" in rawmode:
                rawmode = rawmode.replace(";16B", ";16N")
            if ";16L" in rawmode:
                rawmode = rawmode.replace(";16L", ";16N")

            # YCbCr images with new jpeg compression with pixels in one plane
            # unpacked straight into RGB values
            if (
                photo == 6
                and self._compression == "jpeg"
                and self._planar_configuration == 1
            ):
                rawmode = "RGB"

            # Offset in the tile tuple is 0, we go from 0,0 to
            # w,h, and we only do this once -- eds
            a = (rawmode, self._compression, False, self.tag_v2.offset)
            self.tile.append(("libtiff", (0, 0, xsize, ysize), 0, a))

        elif STRIPOFFSETS in self.tag_v2 or TILEOFFSETS in self.tag_v2:
            # striped image
            if STRIPOFFSETS in self.tag_v2:
                offsets = self.tag_v2[STRIPOFFSETS]
                h = self.tag_v2.get(ROWSPERSTRIP, ysize)
                w = self.size[0]
            else:
                # tiled image
                offsets = self.tag_v2[TILEOFFSETS]
                w = self.tag_v2.get(322)
                h = self.tag_v2.get(323)

            for offset in offsets:
                if x + w > xsize:
                    stride = w * sum(bps_tuple) / 8  # bytes per line
                else:
                    stride = 0

                tile_rawmode = rawmode
                if self._planar_configuration == 2:
                    # each band on its own layer
                    tile_rawmode = rawmode[layer]
                    # adjust stride width accordingly
                    stride /= bps_count

                a = (tile_rawmode, int(stride), 1)
                self.tile.append(
                    (
                        self._compression,
                        (x, y, min(x + w, xsize), min(y + h, ysize)),
                        offset,
                        a,
                    )
                )
                x = x + w
                if x >= self.size[0]:
                    x, y = 0, y + h
                    if y >= self.size[1]:
                        x = y = 0
                        layer += 1
        else:
            logger.debug("- unsupported data organization")
            raise SyntaxError("unknown data organization")

        # Fix up info.
        if ICCPROFILE in self.tag_v2:
            self.info["icc_profile"] = self.tag_v2[ICCPROFILE]

        # fixup palette descriptor

        if self.mode in ["P", "PA"]:
            palette = [o8(b // 256) for b in self.tag_v2[COLORMAP]]
            self.palette = ImagePalette.raw("RGB;L", b"".join(palette))

        self._tile_orientation = self.tag_v2.get(0x0112)

    def _close__fp(self):
        try:
            if self.__fp != self.fp:
                self.__fp.close()
        except AttributeError:
            pass
        finally:
            self.__fp = None


#
# --------------------------------------------------------------------
# Write TIFF files

# little endian is default except for image modes with
# explicit big endian byte-order

SAVE_INFO = {
    # mode => rawmode, byteorder, photometrics,
    #           sampleformat, bitspersample, extra
    "1": ("1", II, 1, 1, (1,), None),
    "L": ("L", II, 1, 1, (8,), None),
    "LA": ("LA", II, 1, 1, (8, 8), 2),
    "P": ("P", II, 3, 1, (8,), None),
    "PA": ("PA", II, 3, 1, (8, 8), 2),
    "I": ("I;32S", II, 1, 2, (32,), None),
    "I;16": ("I;16", II, 1, 1, (16,), None),
    "I;16S": ("I;16S", II, 1, 2, (16,), None),
    "F": ("F;32F", II, 1, 3, (32,), None),
    "RGB": ("RGB", II, 2, 1, (8, 8, 8), None),
    "RGBX": ("RGBX", II, 2, 1, (8, 8, 8, 8), 0),
    "RGBA": ("RGBA", II, 2, 1, (8, 8, 8, 8), 2),
    "CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None),
    "YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None),
    "LAB": ("LAB", II, 8, 1, (8, 8, 8), None),
    "I;32BS": ("I;32BS", MM, 1, 2, (32,), None),
    "I;16B": ("I;16B", MM, 1, 1, (16,), None),
    "I;16BS": ("I;16BS", MM, 1, 2, (16,), None),
    "F;32BF": ("F;32BF", MM, 1, 3, (32,), None),
}

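# Reading one entry of the table above as an illustration (no new behavior):
# SAVE_INFO["RGB"] == ("RGB", II, 2, 1, (8, 8, 8), None) means an "RGB" image
# is written with rawmode "RGB", little-endian byte order, photometric
# interpretation 2 (RGB), sample format 1 (unsigned integer), 8 bits per
# sample on each of the three bands, and no extra samples.
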

def _save(im, fp, filename):

    try:
        rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode]
    except KeyError as e:
        raise OSError(f"cannot write mode {im.mode} as TIFF") from e

    ifd = ImageFileDirectory_v2(prefix=prefix)

    encoderinfo = im.encoderinfo
    encoderconfig = im.encoderconfig
    compression = encoderinfo.get("compression", im.info.get("compression"))
    if compression is None:
        compression = "raw"
    elif compression == "tiff_jpeg":
        # OJPEG is obsolete, so use new-style JPEG compression instead
        compression = "jpeg"
    elif compression == "tiff_deflate":
        compression = "tiff_adobe_deflate"

    libtiff = WRITE_LIBTIFF or compression != "raw"

    # required for color libtiff images
    ifd[PLANAR_CONFIGURATION] = getattr(im, "_planar_configuration", 1)

    ifd[IMAGEWIDTH] = im.size[0]
    ifd[IMAGELENGTH] = im.size[1]

    # write any arbitrary tags passed in as an ImageFileDirectory
    if "tiffinfo" in encoderinfo:
        info = encoderinfo["tiffinfo"]
    elif "exif" in encoderinfo:
        info = encoderinfo["exif"]
        if isinstance(info, bytes):
            exif = Image.Exif()
            exif.load(info)
            info = exif
    else:
        info = {}
    logger.debug("Tiffinfo Keys: %s" % list(info))
    if isinstance(info, ImageFileDirectory_v1):
        info = info.to_v2()
    for key in info:
        if isinstance(info, Image.Exif) and key in TiffTags.TAGS_V2_GROUPS.keys():
            ifd[key] = info.get_ifd(key)
        else:
            ifd[key] = info.get(key)
        try:
            ifd.tagtype[key] = info.tagtype[key]
        except Exception:
            pass  # might not be an IFD. Might not have populated type

    # additions written by Greg Couch, gregc@cgl.ucsf.edu
    # inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com
    if hasattr(im, "tag_v2"):
        # preserve tags from original TIFF image file
        for key in (
            RESOLUTION_UNIT,
            X_RESOLUTION,
            Y_RESOLUTION,
            IPTC_NAA_CHUNK,
            PHOTOSHOP_CHUNK,
            XMP,
        ):
            if key in im.tag_v2:
                ifd[key] = im.tag_v2[key]
                ifd.tagtype[key] = im.tag_v2.tagtype[key]

    # preserve ICC profile (should also work when saving other formats
    # which support profiles as TIFF) -- 2008-06-06 Florian Hoech
    icc = encoderinfo.get("icc_profile", im.info.get("icc_profile"))
    if icc:
        ifd[ICCPROFILE] = icc

    for key, name in [
        (IMAGEDESCRIPTION, "description"),
        (X_RESOLUTION, "resolution"),
        (Y_RESOLUTION, "resolution"),
        (X_RESOLUTION, "x_resolution"),
        (Y_RESOLUTION, "y_resolution"),
        (RESOLUTION_UNIT, "resolution_unit"),
        (SOFTWARE, "software"),
        (DATE_TIME, "date_time"),
        (ARTIST, "artist"),
        (COPYRIGHT, "copyright"),
    ]:
        if name in encoderinfo:
            ifd[key] = encoderinfo[name]

    dpi = encoderinfo.get("dpi")
    if dpi:
        ifd[RESOLUTION_UNIT] = 2
        ifd[X_RESOLUTION] = dpi[0]
        ifd[Y_RESOLUTION] = dpi[1]

    if bits != (1,):
        ifd[BITSPERSAMPLE] = bits
        if len(bits) != 1:
            ifd[SAMPLESPERPIXEL] = len(bits)
    if extra is not None:
        ifd[EXTRASAMPLES] = extra
    if format != 1:
        ifd[SAMPLEFORMAT] = format

    if PHOTOMETRIC_INTERPRETATION not in ifd:
        ifd[PHOTOMETRIC_INTERPRETATION] = photo
    elif im.mode in ("1", "L") and ifd[PHOTOMETRIC_INTERPRETATION] == 0:
        if im.mode == "1":
            inverted_im = im.copy()
            px = inverted_im.load()
            for y in range(inverted_im.height):
                for x in range(inverted_im.width):
                    px[x, y] = 0 if px[x, y] == 255 else 255
            im = inverted_im
        else:
            im = ImageOps.invert(im)

    if im.mode in ["P", "PA"]:
        lut = im.im.getpalette("RGB", "RGB;L")
        ifd[COLORMAP] = tuple(v * 256 for v in lut)
    # data orientation
    stride = len(bits) * ((im.size[0] * bits[0] + 7) // 8)
    # aim for given strip size (64 KB by default) when using libtiff writer
    if libtiff:
        rows_per_strip = 1 if stride == 0 else min(STRIP_SIZE // stride, im.size[1])
        # JPEG encoder expects multiple of 8 rows
        if compression == "jpeg":
            rows_per_strip = min(((rows_per_strip + 7) // 8) * 8, im.size[1])
    else:
        rows_per_strip = im.size[1]
    if rows_per_strip == 0:
        rows_per_strip = 1
    strip_byte_counts = 1 if stride == 0 else stride * rows_per_strip
    strips_per_image = (im.size[1] + rows_per_strip - 1) // rows_per_strip
    ifd[ROWSPERSTRIP] = rows_per_strip
    if strip_byte_counts >= 2 ** 16:
        ifd.tagtype[STRIPBYTECOUNTS] = TiffTags.LONG
    ifd[STRIPBYTECOUNTS] = (strip_byte_counts,) * (strips_per_image - 1) + (
        stride * im.size[1] - strip_byte_counts * (strips_per_image - 1),
    )
    ifd[STRIPOFFSETS] = tuple(
        range(0, strip_byte_counts * strips_per_image, strip_byte_counts)
    )  # this is adjusted by IFD writer
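    # Worked example of the strip math above (illustrative only): for a
    # hypothetical 1024x1024 "RGB" image written through libtiff,
    # stride = 3 * 1024 = 3072 bytes per row, rows_per_strip =
    # STRIP_SIZE // 3072 = 21, strips_per_image = ceil(1024 / 21) = 49,
    # and every strip but the last is 21 * 3072 = 64512 bytes long.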
    # no compression by default:
    ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1)

    if im.mode == "YCbCr":
        for tag, value in {
            YCBCRSUBSAMPLING: (1, 1),
            REFERENCEBLACKWHITE: (0, 255, 128, 255, 128, 255),
        }.items():
            ifd.setdefault(tag, value)

    blocklist = [TILEWIDTH, TILELENGTH, TILEOFFSETS, TILEBYTECOUNTS]
    if libtiff:
        if "quality" in encoderinfo:
            quality = encoderinfo["quality"]
            if not isinstance(quality, int) or quality < 0 or quality > 100:
                raise ValueError("Invalid quality setting")
            if compression != "jpeg":
                raise ValueError(
                    "quality setting only supported for 'jpeg' compression"
                )
            ifd[JPEGQUALITY] = quality

        logger.debug("Saving using libtiff encoder")
        logger.debug("Items: %s" % sorted(ifd.items()))
        _fp = 0
        if hasattr(fp, "fileno"):
            try:
                fp.seek(0)
                _fp = os.dup(fp.fileno())
            except io.UnsupportedOperation:
                pass

        # optional types for non core tags
        types = {}
        # SAMPLEFORMAT is determined by the image format and should not be copied
        # from legacy_ifd.
        # STRIPOFFSETS and STRIPBYTECOUNTS are added by the library
        # based on the data in the strip.
        # The other tags expect arrays with a certain length (fixed or depending on
        # BITSPERSAMPLE, etc), passing arrays with a different length will result in
        # segfaults. Block these tags until we add extra validation.
        # SUBIFD may also cause a segfault.
        blocklist += [
            REFERENCEBLACKWHITE,
            SAMPLEFORMAT,
            STRIPBYTECOUNTS,
            STRIPOFFSETS,
            TRANSFERFUNCTION,
            SUBIFD,
        ]

        atts = {}
        # bits per sample is a single short in the tiff directory, not a list.
        atts[BITSPERSAMPLE] = bits[0]
        # Merge the ones that we have with (optional) more bits from
        # the original file, e.g. x,y resolution so that we can
        # save(load('')) == original file.
        legacy_ifd = {}
        if hasattr(im, "tag"):
            legacy_ifd = im.tag.to_v2()
        for tag, value in itertools.chain(
            ifd.items(), getattr(im, "tag_v2", {}).items(), legacy_ifd.items()
        ):
            # Libtiff can only process certain core items without adding
            # them to the custom dictionary.
            # Custom items are supported for int, float, unicode, string and byte
            # values. Other types and tuples require a tagtype.
            if tag not in TiffTags.LIBTIFF_CORE:
                if not Image.core.libtiff_support_custom_tags:
                    continue

                if tag in ifd.tagtype:
                    types[tag] = ifd.tagtype[tag]
                elif not (isinstance(value, (int, float, str, bytes))):
                    continue
                else:
                    type = TiffTags.lookup(tag).type
                    if type:
                        types[tag] = type
            if tag not in atts and tag not in blocklist:
                if isinstance(value, str):
                    atts[tag] = value.encode("ascii", "replace") + b"\0"
                elif isinstance(value, IFDRational):
                    atts[tag] = float(value)
                else:
                    atts[tag] = value
        logger.debug("Converted items: %s" % sorted(atts.items()))

        # libtiff always expects the bytes in native order.
        # we're storing image byte order. So, if the rawmode
        # contains I;16, we need to convert from native to image
        # byte order.
        if im.mode in ("I;16B", "I;16"):
            rawmode = "I;16N"

        # Pass tags as sorted list so that the tags are set in a fixed order.
        # This is required by libtiff for some tags. For example, the JPEGQUALITY
        # pseudo tag requires that the COMPRESS tag was already set.
        tags = list(atts.items())
        tags.sort()
        a = (rawmode, compression, _fp, filename, tags, types)
        e = Image._getencoder(im.mode, "libtiff", a, encoderconfig)
        e.setimage(im.im, (0, 0) + im.size)
        while True:
            # undone, change to self.decodermaxblock:
            l, s, d = e.encode(16 * 1024)
            if not _fp:
                fp.write(d)
            if s:
                break
        if s < 0:
            raise OSError(f"encoder error {s} when writing image file")

    else:
        for tag in blocklist:
            del ifd[tag]
        offset = ifd.save(fp)

        ImageFile._save(
            im, fp, [("raw", (0, 0) + im.size, offset, (rawmode, stride, 1))]
        )

    # -- helper for multi-page save --
    if "_debug_multipage" in encoderinfo:
        # just to access o32 and o16 (using correct byte order)
        im._debug_multipage = ifd

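
# Illustrative sketch of the save path above (no additional behavior): the
# keyword arguments that _save() reads out of encoderinfo come straight from
# Image.save(), so a caller might write, for a hypothetical "out.tif":
#
#     im.save("out.tif", compression="tiff_lzw", dpi=(300, 300),
#             description="scanned page")
#
# "compression" selects an entry of COMPRESSION_INFO_REV, "dpi" fills the
# X/Y_RESOLUTION and RESOLUTION_UNIT tags, and "description" is mapped to
# IMAGEDESCRIPTION by the key/name table in _save().

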
class AppendingTiffWriter:
    fieldSizes = [
        0,  # None
        1,  # byte
        1,  # ascii
        2,  # short
        4,  # long
        8,  # rational
        1,  # sbyte
        1,  # undefined
        2,  # sshort
        4,  # slong
        8,  # srational
        4,  # float
        8,  # double
    ]

    # StripOffsets = 273
    # FreeOffsets = 288
    # TileOffsets = 324
    # JPEGQTables = 519
    # JPEGDCTables = 520
    # JPEGACTables = 521
    Tags = {273, 288, 324, 519, 520, 521}

    def __init__(self, fn, new=False):
        if hasattr(fn, "read"):
            self.f = fn
            self.close_fp = False
        else:
            self.name = fn
            self.close_fp = True
            try:
                self.f = open(fn, "w+b" if new else "r+b")
            except OSError:
                self.f = open(fn, "w+b")
        self.beginning = self.f.tell()
        self.setup()

    def setup(self):
        # Reset everything.
        self.f.seek(self.beginning, os.SEEK_SET)

        self.whereToWriteNewIFDOffset = None
        self.offsetOfNewPage = 0

        self.IIMM = IIMM = self.f.read(4)
        if not IIMM:
            # empty file - first page
            self.isFirst = True
            return

        self.isFirst = False
        if IIMM == b"II\x2a\x00":
            self.setEndian("<")
        elif IIMM == b"MM\x00\x2a":
            self.setEndian(">")
        else:
            raise RuntimeError("Invalid TIFF file header")

        self.skipIFDs()
        self.goToEnd()

    def finalize(self):
        if self.isFirst:
            return

        # fix offsets
        self.f.seek(self.offsetOfNewPage)

        IIMM = self.f.read(4)
        if not IIMM:
            # raise RuntimeError("nothing written into new page")
            # Make it easy to finish a frame without committing to a new one.
            return

        if IIMM != self.IIMM:
            raise RuntimeError("IIMM of new page doesn't match IIMM of first page")

        IFDoffset = self.readLong()
        IFDoffset += self.offsetOfNewPage
        self.f.seek(self.whereToWriteNewIFDOffset)
        self.writeLong(IFDoffset)
        self.f.seek(IFDoffset)
        self.fixIFD()

    def newFrame(self):
        # Call this to finish a frame.
        self.finalize()
        self.setup()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if self.close_fp:
            self.close()
        return False

    def tell(self):
        return self.f.tell() - self.offsetOfNewPage

    def seek(self, offset, whence=io.SEEK_SET):
        if whence == os.SEEK_SET:
            offset += self.offsetOfNewPage

        self.f.seek(offset, whence)
        return self.tell()

    def goToEnd(self):
        self.f.seek(0, os.SEEK_END)
        pos = self.f.tell()

        # pad to 16 byte boundary
        padBytes = 16 - pos % 16
        if 0 < padBytes < 16:
            self.f.write(bytes(padBytes))
        self.offsetOfNewPage = self.f.tell()

    def setEndian(self, endian):
        self.endian = endian
        self.longFmt = self.endian + "L"
        self.shortFmt = self.endian + "H"
        self.tagFormat = self.endian + "HHL"

    def skipIFDs(self):
        while True:
            IFDoffset = self.readLong()
            if IFDoffset == 0:
                self.whereToWriteNewIFDOffset = self.f.tell() - 4
                break

            self.f.seek(IFDoffset)
            numTags = self.readShort()
            self.f.seek(numTags * 12, os.SEEK_CUR)

    def write(self, data):
        return self.f.write(data)

    def readShort(self):
        (value,) = struct.unpack(self.shortFmt, self.f.read(2))
        return value

    def readLong(self):
        (value,) = struct.unpack(self.longFmt, self.f.read(4))
        return value

    def rewriteLastShortToLong(self, value):
        self.f.seek(-2, os.SEEK_CUR)
        bytesWritten = self.f.write(struct.pack(self.longFmt, value))
        if bytesWritten is not None and bytesWritten != 4:
            raise RuntimeError(f"wrote only {bytesWritten} bytes but wanted 4")

    def rewriteLastShort(self, value):
        self.f.seek(-2, os.SEEK_CUR)
        bytesWritten = self.f.write(struct.pack(self.shortFmt, value))
        if bytesWritten is not None and bytesWritten != 2:
            raise RuntimeError(f"wrote only {bytesWritten} bytes but wanted 2")

    def rewriteLastLong(self, value):
        self.f.seek(-4, os.SEEK_CUR)
        bytesWritten = self.f.write(struct.pack(self.longFmt, value))
        if bytesWritten is not None and bytesWritten != 4:
            raise RuntimeError(f"wrote only {bytesWritten} bytes but wanted 4")

    def writeShort(self, value):
        bytesWritten = self.f.write(struct.pack(self.shortFmt, value))
        if bytesWritten is not None and bytesWritten != 2:
            raise RuntimeError(f"wrote only {bytesWritten} bytes but wanted 2")

    def writeLong(self, value):
        bytesWritten = self.f.write(struct.pack(self.longFmt, value))
        if bytesWritten is not None and bytesWritten != 4:
            raise RuntimeError(f"wrote only {bytesWritten} bytes but wanted 4")

    def close(self):
        self.finalize()
        self.f.close()

    def fixIFD(self):
        numTags = self.readShort()

        for i in range(numTags):
            tag, fieldType, count = struct.unpack(self.tagFormat, self.f.read(8))

            fieldSize = self.fieldSizes[fieldType]
            totalSize = fieldSize * count
            isLocal = totalSize <= 4
            if not isLocal:
                offset = self.readLong()
                offset += self.offsetOfNewPage
                self.rewriteLastLong(offset)

            if tag in self.Tags:
                curPos = self.f.tell()

                if isLocal:
                    self.fixOffsets(
                        count, isShort=(fieldSize == 2), isLong=(fieldSize == 4)
                    )
                    self.f.seek(curPos + 4)
                else:
                    self.f.seek(offset)
                    self.fixOffsets(
                        count, isShort=(fieldSize == 2), isLong=(fieldSize == 4)
                    )
                    self.f.seek(curPos)

                offset = curPos = None

            elif isLocal:
                # skip the locally stored value that is not an offset
                self.f.seek(4, os.SEEK_CUR)

    def fixOffsets(self, count, isShort=False, isLong=False):
        if not isShort and not isLong:
            raise RuntimeError("offset is neither short nor long")

        for i in range(count):
            offset = self.readShort() if isShort else self.readLong()
            offset += self.offsetOfNewPage
            if isShort and offset >= 65536:
                # offset is now too large - we must convert shorts to longs
                if count != 1:
                    raise RuntimeError("not implemented")  # XXX TODO

                # simple case - the offset is just one and therefore it is
                # local (not referenced with another offset)
                self.rewriteLastShortToLong(offset)
                self.f.seek(-10, os.SEEK_CUR)
                self.writeShort(TiffTags.LONG)  # rewrite the type to LONG
                self.f.seek(8, os.SEEK_CUR)
            elif isShort:
                self.rewriteLastShort(offset)
            else:
                self.rewriteLastLong(offset)

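
# Illustrative sketch (an assumed usage, mirroring _save_all below):
# AppendingTiffWriter can also be driven directly to build a multipage file,
# roughly like this for hypothetical frame images frame0 and frame1:
#
#     with AppendingTiffWriter("stack.tif", new=True) as tf:
#         for frame in (frame0, frame1):
#             frame.encoderinfo = {}
#             frame.encoderconfig = ()
#             _save(frame, tf, "stack.tif")
#             tf.newFrame()
#
# Each newFrame() call finalizes the page just written and resets the
# writer's bookkeeping so the next page's offsets can be fixed up in turn.
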

def _save_all(im, fp, filename):
    encoderinfo = im.encoderinfo.copy()
    encoderconfig = im.encoderconfig
    append_images = list(encoderinfo.get("append_images", []))
    if not hasattr(im, "n_frames") and not append_images:
        return _save(im, fp, filename)

    cur_idx = im.tell()
    try:
        with AppendingTiffWriter(fp) as tf:
            for ims in [im] + append_images:
                ims.encoderinfo = encoderinfo
                ims.encoderconfig = encoderconfig
                if not hasattr(ims, "n_frames"):
                    nfr = 1
                else:
                    nfr = ims.n_frames

                for idx in range(nfr):
                    ims.seek(idx)
                    ims.load()
                    _save(ims, tf, filename)
                    tf.newFrame()
    finally:
        im.seek(cur_idx)

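
# Illustrative sketch (no additional behavior): multipage saving is normally
# reached through Image.save() with save_all, e.g. building a small stack
# from separately created frames:
#
#     from PIL import Image
#
#     frames = [Image.new("L", (64, 64), color=v) for v in (0, 128, 255)]
#     frames[0].save("stack.tif", save_all=True, append_images=frames[1:])
#
# For correct results all appended images should share the same
# encoderinfo/encoderconfig settings, since _save_all() applies the first
# image's parameters to every frame.
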

#
# --------------------------------------------------------------------
# Register

Image.register_open(TiffImageFile.format, TiffImageFile, _accept)
Image.register_save(TiffImageFile.format, _save)
Image.register_save_all(TiffImageFile.format, _save_all)

Image.register_extensions(TiffImageFile.format, [".tif", ".tiff"])

Image.register_mime(TiffImageFile.format, "image/tiff")
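
# With the registrations above in place, Image.open() recognizes TIFF data via
# the registered _accept check, and Image.save() infers the TIFF format from a
# ".tif"/".tiff" filename, so callers never import this module directly, e.g.:
#
#     from PIL import Image
#
#     with Image.open("example.tif") as im:   # hypothetical file
#         im.save("copy.tif", compression="tiff_adobe_deflate")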