2010-07-31 06:52:47 +04:00
|
|
|
#
|
|
|
|
# The Python Imaging Library.
|
|
|
|
# $Id$
|
|
|
|
#
|
|
|
|
# TIFF file handling
|
|
|
|
#
|
|
|
|
# TIFF is a flexible, if somewhat aged, image file format originally
|
|
|
|
# defined by Aldus. Although TIFF supports a wide variety of pixel
|
|
|
|
# layouts and compression methods, the name doesn't really stand for
|
|
|
|
# "thousands of incompatible file formats," it just feels that way.
|
|
|
|
#
|
|
|
|
# To read TIFF data from a stream, the stream must be seekable. For
|
|
|
|
# progressive decoding, make sure to use TIFF files where the tag
|
|
|
|
# directory is placed first in the file.
|
|
|
|
#
|
|
|
|
# History:
|
|
|
|
# 1995-09-01 fl Created
|
|
|
|
# 1996-05-04 fl Handle JPEGTABLES tag
|
|
|
|
# 1996-05-18 fl Fixed COLORMAP support
|
|
|
|
# 1997-01-05 fl Fixed PREDICTOR support
|
|
|
|
# 1997-08-27 fl Added support for rational tags (from Perry Stoll)
|
|
|
|
# 1998-01-10 fl Fixed seek/tell (from Jan Blom)
|
|
|
|
# 1998-07-15 fl Use private names for internal variables
|
|
|
|
# 1999-06-13 fl Rewritten for PIL 1.0 (1.0)
|
|
|
|
# 2000-10-11 fl Additional fixes for Python 2.0 (1.1)
|
|
|
|
# 2001-04-17 fl Fixed rewind support (seek to frame 0) (1.2)
|
|
|
|
# 2001-05-12 fl Added write support for more tags (from Greg Couch) (1.3)
|
|
|
|
# 2001-12-18 fl Added workaround for broken Matrox library
|
|
|
|
# 2002-01-18 fl Don't mess up if photometric tag is missing (D. Alan Stewart)
|
|
|
|
# 2003-05-19 fl Check FILLORDER tag
|
|
|
|
# 2003-09-26 fl Added RGBa support
|
|
|
|
# 2004-02-24 fl Added DPI support; fixed rational write support
|
|
|
|
# 2005-02-07 fl Added workaround for broken Corel Draw 10 files
|
|
|
|
# 2006-01-09 fl Added support for float/double tags (from Russell Nelson)
|
|
|
|
#
|
|
|
|
# Copyright (c) 1997-2006 by Secret Labs AB. All rights reserved.
|
|
|
|
# Copyright (c) 1995-1997 by Fredrik Lundh
|
|
|
|
#
|
|
|
|
# See the README file for information on usage and redistribution.
|
|
|
|
#
|
2023-12-21 14:13:31 +03:00
|
|
|
from __future__ import annotations
|
|
|
|
|
2014-12-29 18:48:01 +03:00
|
|
|
import io
|
2012-10-16 01:19:55 +04:00
|
|
|
import itertools
|
2020-04-13 00:16:46 +03:00
|
|
|
import logging
|
2022-02-10 04:00:23 +03:00
|
|
|
import math
|
2013-03-09 07:51:59 +04:00
|
|
|
import os
|
2014-12-29 18:48:01 +03:00
|
|
|
import struct
|
|
|
|
import warnings
|
2024-07-29 16:46:07 +03:00
|
|
|
from collections.abc import Iterator, MutableMapping
|
2019-07-06 23:40:53 +03:00
|
|
|
from fractions import Fraction
|
|
|
|
from numbers import Number, Rational
|
2024-07-29 16:46:07 +03:00
|
|
|
from typing import IO, TYPE_CHECKING, Any, Callable, NoReturn, cast
|
2014-12-29 18:48:01 +03:00
|
|
|
|
2023-04-16 07:04:39 +03:00
|
|
|
from . import ExifTags, Image, ImageFile, ImageOps, ImagePalette, TiffTags
|
2022-02-10 04:00:23 +03:00
|
|
|
from ._binary import i16be as i16
|
|
|
|
from ._binary import i32be as i32
|
2020-05-08 19:48:02 +03:00
|
|
|
from ._binary import o8
|
2024-04-22 11:26:20 +03:00
|
|
|
from ._deprecate import deprecate
|
2024-07-29 16:46:07 +03:00
|
|
|
from ._typing import StrOrBytesPath
|
|
|
|
from ._util import is_path
|
2016-02-05 01:57:13 +03:00
|
|
|
from .TiffTags import TYPES
|
2015-12-30 01:02:11 +03:00
|
|
|
|
2024-08-31 11:48:16 +03:00
|
|
|
if TYPE_CHECKING:
|
2024-09-08 01:30:30 +03:00
|
|
|
from ._typing import Buffer, IntegralLike
|
2024-08-31 11:48:16 +03:00
|
|
|
|
2020-04-13 00:16:46 +03:00
|
|
|
logger = logging.getLogger(__name__)
|
2015-08-25 15:27:18 +03:00
|
|
|
|
2014-06-03 14:02:44 +04:00
|
|
|
# Set these to true to force use of libtiff for reading or writing.
|
2013-11-22 08:33:16 +04:00
|
|
|
READ_LIBTIFF = False
|
2014-07-28 20:00:06 +04:00
|
|
|
WRITE_LIBTIFF = False
|
2021-10-01 14:50:02 +03:00
|
|
|
STRIP_SIZE = 65536
|
2013-11-22 08:33:16 +04:00
|
|
|
|
2014-07-28 20:00:06 +04:00
|
|
|
II = b"II" # little-endian (Intel style)
|
|
|
|
MM = b"MM" # big-endian (Motorola style)
|
2010-07-31 06:52:47 +04:00
|
|
|
|
|
|
|
#
|
|
|
|
# --------------------------------------------------------------------
|
|
|
|
# Read TIFF files
|
|
|
|
|
|
|
|
# a few tag names, just to make the code below a bit more readable
|
2024-03-22 15:43:55 +03:00
|
|
|
OSUBFILETYPE = 255
|
2010-07-31 06:52:47 +04:00
|
|
|
IMAGEWIDTH = 256
|
|
|
|
IMAGELENGTH = 257
|
|
|
|
BITSPERSAMPLE = 258
|
|
|
|
COMPRESSION = 259
|
|
|
|
PHOTOMETRIC_INTERPRETATION = 262
|
|
|
|
FILLORDER = 266
|
|
|
|
IMAGEDESCRIPTION = 270
|
|
|
|
STRIPOFFSETS = 273
|
|
|
|
SAMPLESPERPIXEL = 277
|
|
|
|
ROWSPERSTRIP = 278
|
|
|
|
STRIPBYTECOUNTS = 279
|
|
|
|
X_RESOLUTION = 282
|
|
|
|
Y_RESOLUTION = 283
|
|
|
|
PLANAR_CONFIGURATION = 284
|
|
|
|
RESOLUTION_UNIT = 296
|
2019-05-21 14:18:09 +03:00
|
|
|
TRANSFERFUNCTION = 301
|
2010-07-31 06:52:47 +04:00
|
|
|
SOFTWARE = 305
|
|
|
|
DATE_TIME = 306
|
|
|
|
ARTIST = 315
|
|
|
|
PREDICTOR = 317
|
|
|
|
COLORMAP = 320
|
2021-11-18 14:01:53 +03:00
|
|
|
TILEWIDTH = 322
|
|
|
|
TILELENGTH = 323
|
2010-07-31 06:52:47 +04:00
|
|
|
TILEOFFSETS = 324
|
2021-11-18 14:01:53 +03:00
|
|
|
TILEBYTECOUNTS = 325
|
2020-12-22 03:38:02 +03:00
|
|
|
SUBIFD = 330
|
2010-07-31 06:52:47 +04:00
|
|
|
EXTRASAMPLES = 338
|
|
|
|
SAMPLEFORMAT = 339
|
|
|
|
JPEGTABLES = 347
|
2021-07-09 18:20:36 +03:00
|
|
|
YCBCRSUBSAMPLING = 530
|
2019-05-21 14:18:09 +03:00
|
|
|
REFERENCEBLACKWHITE = 532
|
2010-07-31 06:52:47 +04:00
|
|
|
COPYRIGHT = 33432
|
2014-07-28 20:00:06 +04:00
|
|
|
IPTC_NAA_CHUNK = 33723 # newsphoto properties
|
|
|
|
PHOTOSHOP_CHUNK = 34377 # photoshop properties
|
2010-07-31 06:52:47 +04:00
|
|
|
ICCPROFILE = 34675
|
|
|
|
EXIFIFD = 34665
|
|
|
|
XMP = 700
|
2019-06-04 14:30:13 +03:00
|
|
|
JPEGQUALITY = 65537 # pseudo-tag by libtiff
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2015-09-28 14:53:25 +03:00
|
|
|
# https://github.com/imagej/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java
|
2013-09-27 21:41:27 +04:00
|
|
|
IMAGEJ_META_DATA_BYTE_COUNTS = 50838
|
|
|
|
IMAGEJ_META_DATA = 50839
|
|
|
|
|
2010-07-31 06:52:47 +04:00
|
|
|
COMPRESSION_INFO = {
|
|
|
|
# Compression => pil compression name
|
|
|
|
1: "raw",
|
|
|
|
2: "tiff_ccitt",
|
|
|
|
3: "group3",
|
|
|
|
4: "group4",
|
|
|
|
5: "tiff_lzw",
|
2014-07-28 20:00:06 +04:00
|
|
|
6: "tiff_jpeg", # obsolete
|
2010-07-31 06:52:47 +04:00
|
|
|
7: "jpeg",
|
2013-07-01 18:45:42 +04:00
|
|
|
8: "tiff_adobe_deflate",
|
2014-07-28 20:00:06 +04:00
|
|
|
32771: "tiff_raw_16", # 16-bit padding
|
2013-07-01 18:45:42 +04:00
|
|
|
32773: "packbits",
|
|
|
|
32809: "tiff_thunderscan",
|
|
|
|
32946: "tiff_deflate",
|
|
|
|
34676: "tiff_sgilog",
|
|
|
|
34677: "tiff_sgilog24",
|
2019-01-07 06:49:00 +03:00
|
|
|
34925: "lzma",
|
|
|
|
50000: "zstd",
|
|
|
|
50001: "webp",
|
2010-07-31 06:52:47 +04:00
|
|
|
}
|
|
|
|
|
2016-11-07 15:33:46 +03:00
|
|
|
COMPRESSION_INFO_REV = {v: k for k, v in COMPRESSION_INFO.items()}
|
2013-03-09 07:51:59 +04:00
|
|
|
|
2010-07-31 06:52:47 +04:00
|
|
|
OPEN_INFO = {
|
|
|
|
# (ByteOrder, PhotoInterpretation, SampleFormat, FillOrder, BitsPerSample,
|
|
|
|
# ExtraSamples) => mode, rawmode
|
2014-12-29 18:48:01 +03:00
|
|
|
(II, 0, (1,), 1, (1,), ()): ("1", "1;I"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(MM, 0, (1,), 1, (1,), ()): ("1", "1;I"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(II, 0, (1,), 2, (1,), ()): ("1", "1;IR"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(MM, 0, (1,), 2, (1,), ()): ("1", "1;IR"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(II, 1, (1,), 1, (1,), ()): ("1", "1"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(MM, 1, (1,), 1, (1,), ()): ("1", "1"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(II, 1, (1,), 2, (1,), ()): ("1", "1;R"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(MM, 1, (1,), 2, (1,), ()): ("1", "1;R"),
|
2016-03-29 09:21:42 +03:00
|
|
|
(II, 0, (1,), 1, (2,), ()): ("L", "L;2I"),
|
|
|
|
(MM, 0, (1,), 1, (2,), ()): ("L", "L;2I"),
|
|
|
|
(II, 0, (1,), 2, (2,), ()): ("L", "L;2IR"),
|
|
|
|
(MM, 0, (1,), 2, (2,), ()): ("L", "L;2IR"),
|
|
|
|
(II, 1, (1,), 1, (2,), ()): ("L", "L;2"),
|
|
|
|
(MM, 1, (1,), 1, (2,), ()): ("L", "L;2"),
|
|
|
|
(II, 1, (1,), 2, (2,), ()): ("L", "L;2R"),
|
|
|
|
(MM, 1, (1,), 2, (2,), ()): ("L", "L;2R"),
|
|
|
|
(II, 0, (1,), 1, (4,), ()): ("L", "L;4I"),
|
|
|
|
(MM, 0, (1,), 1, (4,), ()): ("L", "L;4I"),
|
|
|
|
(II, 0, (1,), 2, (4,), ()): ("L", "L;4IR"),
|
|
|
|
(MM, 0, (1,), 2, (4,), ()): ("L", "L;4IR"),
|
2016-03-29 08:57:28 +03:00
|
|
|
(II, 1, (1,), 1, (4,), ()): ("L", "L;4"),
|
2016-03-29 09:21:42 +03:00
|
|
|
(MM, 1, (1,), 1, (4,), ()): ("L", "L;4"),
|
|
|
|
(II, 1, (1,), 2, (4,), ()): ("L", "L;4R"),
|
|
|
|
(MM, 1, (1,), 2, (4,), ()): ("L", "L;4R"),
|
2016-03-29 08:57:28 +03:00
|
|
|
(II, 0, (1,), 1, (8,), ()): ("L", "L;I"),
|
|
|
|
(MM, 0, (1,), 1, (8,), ()): ("L", "L;I"),
|
|
|
|
(II, 0, (1,), 2, (8,), ()): ("L", "L;IR"),
|
|
|
|
(MM, 0, (1,), 2, (8,), ()): ("L", "L;IR"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(II, 1, (1,), 1, (8,), ()): ("L", "L"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(MM, 1, (1,), 1, (8,), ()): ("L", "L"),
|
2023-05-04 00:54:30 +03:00
|
|
|
(II, 1, (2,), 1, (8,), ()): ("L", "L"),
|
|
|
|
(MM, 1, (2,), 1, (8,), ()): ("L", "L"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(II, 1, (1,), 2, (8,), ()): ("L", "L;R"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(MM, 1, (1,), 2, (8,), ()): ("L", "L;R"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(II, 1, (1,), 1, (12,), ()): ("I;16", "I;12"),
|
2022-10-05 13:15:45 +03:00
|
|
|
(II, 0, (1,), 1, (16,), ()): ("I;16", "I;16"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(II, 1, (1,), 1, (16,), ()): ("I;16", "I;16"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(MM, 1, (1,), 1, (16,), ()): ("I;16B", "I;16B"),
|
2022-03-20 05:34:48 +03:00
|
|
|
(II, 1, (1,), 2, (16,), ()): ("I;16", "I;16R"),
|
2017-09-20 12:26:14 +03:00
|
|
|
(II, 1, (2,), 1, (16,), ()): ("I", "I;16S"),
|
|
|
|
(MM, 1, (2,), 1, (16,), ()): ("I", "I;16BS"),
|
2016-03-29 08:57:28 +03:00
|
|
|
(II, 0, (3,), 1, (32,), ()): ("F", "F;32F"),
|
|
|
|
(MM, 0, (3,), 1, (32,), ()): ("F", "F;32BF"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(II, 1, (1,), 1, (32,), ()): ("I", "I;32N"),
|
|
|
|
(II, 1, (2,), 1, (32,), ()): ("I", "I;32S"),
|
2017-09-20 12:26:14 +03:00
|
|
|
(MM, 1, (2,), 1, (32,), ()): ("I", "I;32BS"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(II, 1, (3,), 1, (32,), ()): ("F", "F;32F"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(MM, 1, (3,), 1, (32,), ()): ("F", "F;32BF"),
|
2016-03-29 08:57:28 +03:00
|
|
|
(II, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"),
|
|
|
|
(MM, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(II, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(MM, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(II, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(MM, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(II, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples
|
2015-06-17 00:16:56 +03:00
|
|
|
(MM, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples
|
2024-04-22 01:05:59 +03:00
|
|
|
(II, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGB", "RGBX"),
|
|
|
|
(MM, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGB", "RGBX"),
|
|
|
|
(II, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGB", "RGBXX"),
|
|
|
|
(MM, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGB", "RGBXX"),
|
|
|
|
(II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGB", "RGBXXX"),
|
|
|
|
(MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGB", "RGBXXX"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(II, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(MM, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"),
|
2018-09-05 17:45:03 +03:00
|
|
|
(II, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"),
|
|
|
|
(MM, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"),
|
|
|
|
(II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"),
|
|
|
|
(MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(II, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(MM, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"),
|
2018-09-05 17:36:27 +03:00
|
|
|
(II, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"),
|
|
|
|
(MM, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"),
|
|
|
|
(II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"),
|
|
|
|
(MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10
|
2014-12-29 18:48:01 +03:00
|
|
|
(MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10
|
2017-08-21 16:28:29 +03:00
|
|
|
(II, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16L"),
|
|
|
|
(MM, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16B"),
|
|
|
|
(II, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16L"),
|
|
|
|
(MM, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16B"),
|
2024-04-22 01:05:59 +03:00
|
|
|
(II, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGB", "RGBX;16L"),
|
|
|
|
(MM, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGB", "RGBX;16B"),
|
2017-08-21 16:28:29 +03:00
|
|
|
(II, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16L"),
|
|
|
|
(MM, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16B"),
|
|
|
|
(II, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16L"),
|
|
|
|
(MM, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16B"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(II, 3, (1,), 1, (1,), ()): ("P", "P;1"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(MM, 3, (1,), 1, (1,), ()): ("P", "P;1"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(II, 3, (1,), 2, (1,), ()): ("P", "P;1R"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(MM, 3, (1,), 2, (1,), ()): ("P", "P;1R"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(II, 3, (1,), 1, (2,), ()): ("P", "P;2"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(MM, 3, (1,), 1, (2,), ()): ("P", "P;2"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(II, 3, (1,), 2, (2,), ()): ("P", "P;2R"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(MM, 3, (1,), 2, (2,), ()): ("P", "P;2R"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(II, 3, (1,), 1, (4,), ()): ("P", "P;4"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(MM, 3, (1,), 1, (4,), ()): ("P", "P;4"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(II, 3, (1,), 2, (4,), ()): ("P", "P;4R"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(MM, 3, (1,), 2, (4,), ()): ("P", "P;4R"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(II, 3, (1,), 1, (8,), ()): ("P", "P"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(MM, 3, (1,), 1, (8,), ()): ("P", "P"),
|
2024-04-22 01:11:45 +03:00
|
|
|
(II, 3, (1,), 1, (8, 8), (0,)): ("P", "PX"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(II, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(MM, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(II, 3, (1,), 2, (8,), ()): ("P", "P;R"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(MM, 3, (1,), 2, (8,), ()): ("P", "P;R"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(II, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(MM, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"),
|
2018-01-06 17:55:29 +03:00
|
|
|
(II, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"),
|
|
|
|
(MM, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"),
|
|
|
|
(II, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"),
|
|
|
|
(MM, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"),
|
2019-04-30 17:42:30 +03:00
|
|
|
(II, 5, (1,), 1, (16, 16, 16, 16), ()): ("CMYK", "CMYK;16L"),
|
2024-08-11 14:14:29 +03:00
|
|
|
(MM, 5, (1,), 1, (16, 16, 16, 16), ()): ("CMYK", "CMYK;16B"),
|
2023-09-21 13:53:23 +03:00
|
|
|
(II, 6, (1,), 1, (8,), ()): ("L", "L"),
|
|
|
|
(MM, 6, (1,), 1, (8,), ()): ("L", "L"),
|
2018-12-09 08:04:34 +03:00
|
|
|
# JPEG compressed images handled by LibTiff and auto-converted to RGBX
|
2018-09-11 19:26:25 +03:00
|
|
|
# Minimal Baseline TIFF requires YCbCr images to have 3 SamplesPerPixel
|
2018-12-09 08:04:34 +03:00
|
|
|
(II, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"),
|
|
|
|
(MM, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"),
|
2015-06-17 00:27:02 +03:00
|
|
|
(II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"),
|
2014-12-29 18:48:01 +03:00
|
|
|
(MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"),
|
2010-07-31 06:52:47 +04:00
|
|
|
}
|
|
|
|
|
2023-01-07 21:36:17 +03:00
|
|
|
MAX_SAMPLESPERPIXEL = max(len(key_tp[4]) for key_tp in OPEN_INFO)
|
2022-10-28 15:11:25 +03:00
|
|
|
|
2017-08-21 17:09:35 +03:00
|
|
|
PREFIXES = [
|
|
|
|
b"MM\x00\x2A", # Valid TIFF header with big-endian byte order
|
|
|
|
b"II\x2A\x00", # Valid TIFF header with little-endian byte order
|
2017-08-21 17:14:33 +03:00
|
|
|
b"MM\x2A\x00", # Invalid TIFF header, assume big-endian
|
|
|
|
b"II\x00\x2A", # Invalid TIFF header, assume little-endian
|
2022-03-01 01:23:12 +03:00
|
|
|
b"MM\x00\x2B", # BigTIFF with big-endian byte order
|
|
|
|
b"II\x2B\x00", # BigTIFF with little-endian byte order
|
2017-08-21 17:09:35 +03:00
|
|
|
]
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2024-04-22 11:26:20 +03:00
|
|
|
# NOTE(review): the C core presumably exposes this flag only when built
# against LibTIFF >= 4 (confirm against Image.core build options); older
# LibTIFF support is deprecated with removal targeted at Pillow 12.
if not getattr(Image.core, "libtiff_support_custom_tags", True):
    deprecate("Support for LibTIFF earlier than version 4", 12)
|
2024-04-22 11:26:20 +03:00
|
|
|
|
2014-07-28 20:00:06 +04:00
|
|
|
|
2024-04-06 05:58:53 +03:00
|
|
|
def _accept(prefix: bytes) -> bool:
    """Return True if the first four bytes match a known TIFF/BigTIFF magic."""
    return any(prefix[:4] == magic for magic in PREFIXES)
|
|
|
|
|
2014-07-28 20:00:06 +04:00
|
|
|
|
2024-07-30 13:20:09 +03:00
|
|
|
def _limit_rational(
    val: float | Fraction | IFDRational, max_val: int
) -> tuple[IntegralLike, IntegralLike]:
    """Approximate *val* as a (numerator, denominator) pair bounded by *max_val*.

    Values with magnitude above 1 are inverted first so that the limited
    term is the one that would otherwise overflow, then the pair is
    flipped back before returning.
    """
    needs_flip = abs(val) > 1
    approx = IFDRational(1 / val if needs_flip else val)
    pair = approx.limit_rational(max_val)
    return pair[::-1] if needs_flip else pair
|
|
|
|
|
2017-05-27 23:55:14 +03:00
|
|
|
|
2024-08-31 11:48:16 +03:00
|
|
|
def _limit_signed_rational(
    val: IFDRational, max_val: int, min_val: int
) -> tuple[IntegralLike, IntegralLike]:
    """Fit *val* into a signed rational whose terms lie within [min_val, max_val]."""
    as_fraction = Fraction(val)
    pair: tuple[IntegralLike, IntegralLike] = (
        as_fraction.numerator,
        as_fraction.denominator,
    )

    # A term below the signed lower bound: re-approximate against |min_val|.
    if min(float(term) for term in pair) < min_val:
        pair = _limit_rational(val, abs(min_val))

    # A term above the upper bound: re-approximate the ratio itself.
    pair_as_floats = tuple(float(term) for term in pair)
    if max(pair_as_floats) > max_val:
        pair = _limit_rational(pair_as_floats[0] / pair_as_floats[1], max_val)

    return pair
|
2019-07-20 00:12:16 +03:00
|
|
|
|
|
|
|
|
2010-07-31 06:52:47 +04:00
|
|
|
##
|
|
|
|
# Wrapper for TIFF IFDs.
|
|
|
|
|
2014-12-29 18:48:01 +03:00
|
|
|
_load_dispatch = {}
|
|
|
|
_write_dispatch = {}
|
|
|
|
|
2016-02-05 01:57:13 +03:00
|
|
|
|
2024-08-31 11:48:16 +03:00
|
|
|
def _delegate(op: str) -> Any:
|
|
|
|
def delegate(
|
|
|
|
self: IFDRational, *args: tuple[float, ...]
|
|
|
|
) -> bool | float | Fraction:
|
2024-02-10 11:50:45 +03:00
|
|
|
return getattr(self._val, op)(*args)
|
|
|
|
|
|
|
|
return delegate
|
|
|
|
|
|
|
|
|
2015-11-18 19:51:57 +03:00
|
|
|
class IFDRational(Rational):
    """Implements a rational class where 0/0 is a legal value to match
    the in the wild use of exif rationals.

    e.g., DigitalZoomRatio - 0.00/0.00 indicates that no digital zoom was used
    """

    """ If the denominator is 0, store this as a float('nan'), otherwise store
    as a fractions.Fraction(). Delegate as appropriate

    """

    # __slots__ keeps per-instance memory small; a TIFF file can carry many
    # rational tag values.
    __slots__ = ("_numerator", "_denominator", "_val")

    def __init__(
        self, value: float | Fraction | IFDRational, denominator: int = 1
    ) -> None:
        """
        :param value: either an integer numerator, a
        float/rational/other number, or an IFDRational
        :param denominator: Optional integer denominator
        """
        self._val: Fraction | float
        if isinstance(value, IFDRational):
            # Copy construction: reuse the already-normalized state.
            self._numerator = value.numerator
            self._denominator = value.denominator
            self._val = value._val
            return

        if isinstance(value, Fraction):
            self._numerator = value.numerator
            self._denominator = value.denominator
        else:
            if TYPE_CHECKING:
                self._numerator = cast(IntegralLike, value)
            else:
                self._numerator = value
            self._denominator = denominator

        if denominator == 0:
            # n/0 is legal in EXIF data; represent it as NaN instead of
            # raising ZeroDivisionError.
            self._val = float("nan")
        elif denominator == 1:
            self._val = Fraction(value)
        elif int(value) == value:
            # Integral numerator: build the Fraction exactly from the pair.
            self._val = Fraction(int(value), denominator)
        else:
            # Non-integral numerator: fall back to (float) division.
            self._val = Fraction(value / denominator)

    @property
    def numerator(self) -> IntegralLike:
        return self._numerator

    @property
    def denominator(self) -> int:
        return self._denominator

    def limit_rational(self, max_denominator: int) -> tuple[IntegralLike, int]:
        """

        :param max_denominator: Integer, the maximum denominator value
        :returns: Tuple of (numerator, denominator)
        """

        # The NaN (denominator == 0) case cannot be approximated; return
        # the stored pair unchanged.
        if self.denominator == 0:
            return self.numerator, self.denominator

        assert isinstance(self._val, Fraction)
        f = self._val.limit_denominator(max_denominator)
        return f.numerator, f.denominator

    def __repr__(self) -> str:
        return str(float(self._val))

    def __hash__(self) -> int:
        return self._val.__hash__()

    def __eq__(self, other: object) -> bool:
        val = self._val
        if isinstance(other, IFDRational):
            other = other._val
        if isinstance(other, float):
            # Compare as floats so that e.g. a NaN-backed value behaves
            # consistently with float semantics.
            val = float(val)
        return val == other

    def __getstate__(self) -> list[float | Fraction | IntegralLike]:
        # Explicit pickle support is required because of __slots__.
        return [self._val, self._numerator, self._denominator]

    def __setstate__(self, state: list[float | Fraction | IntegralLike]) -> None:
        # Re-run __init__ to establish the slots before overwriting them
        # with the pickled state.
        IFDRational.__init__(self, 0)
        _val, _numerator, _denominator = state
        assert isinstance(_val, (float, Fraction))
        self._val = _val
        if TYPE_CHECKING:
            self._numerator = cast(IntegralLike, _numerator)
        else:
            self._numerator = _numerator
        assert isinstance(_denominator, int)
        self._denominator = _denominator

    """ a = ['add','radd', 'sub', 'rsub', 'mul', 'rmul',
             'truediv', 'rtruediv', 'floordiv', 'rfloordiv',
             'mod','rmod', 'pow','rpow', 'pos', 'neg',
             'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'bool',
             'ceil', 'floor', 'round']
        print("\n".join("__%s__ = _delegate('__%s__')" % (s,s) for s in a))
        """

    # Arithmetic and comparison operators are forwarded to the wrapped
    # Fraction/float via _delegate (the commented recipe above generated
    # this list).
    __add__ = _delegate("__add__")
    __radd__ = _delegate("__radd__")
    __sub__ = _delegate("__sub__")
    __rsub__ = _delegate("__rsub__")
    __mul__ = _delegate("__mul__")
    __rmul__ = _delegate("__rmul__")
    __truediv__ = _delegate("__truediv__")
    __rtruediv__ = _delegate("__rtruediv__")
    __floordiv__ = _delegate("__floordiv__")
    __rfloordiv__ = _delegate("__rfloordiv__")
    __mod__ = _delegate("__mod__")
    __rmod__ = _delegate("__rmod__")
    __pow__ = _delegate("__pow__")
    __rpow__ = _delegate("__rpow__")
    __pos__ = _delegate("__pos__")
    __neg__ = _delegate("__neg__")
    __abs__ = _delegate("__abs__")
    __trunc__ = _delegate("__trunc__")
    __lt__ = _delegate("__lt__")
    __gt__ = _delegate("__gt__")
    __le__ = _delegate("__le__")
    __ge__ = _delegate("__ge__")
    __bool__ = _delegate("__bool__")
    __ceil__ = _delegate("__ceil__")
    __floor__ = _delegate("__floor__")
    __round__ = _delegate("__round__")
    # Python >= 3.11
    if hasattr(Fraction, "__int__"):
        __int__ = _delegate("__int__")
|
2015-10-25 17:49:52 +03:00
|
|
|
|
2016-03-27 14:18:39 +03:00
|
|
|
|
2024-08-15 01:08:43 +03:00
|
|
|
_LoaderFunc = Callable[["ImageFileDirectory_v2", bytes, bool], Any]
|
|
|
|
|
|
|
|
|
|
|
|
def _register_loader(idx: int, size: int) -> Callable[[_LoaderFunc], _LoaderFunc]:
    """Decorator factory: record the decorated function as the loader for
    TIFF field type *idx* with element size *size*.

    A display name derived from the function name (``load_signed_rational``
    -> ``signed rational``) is also registered in ``TiffTags.TYPES``.
    """

    def decorator(func: _LoaderFunc) -> _LoaderFunc:
        from .TiffTags import TYPES

        loader_name = func.__name__
        if loader_name.startswith("load_"):
            TYPES[idx] = loader_name.removeprefix("load_").replace("_", " ")
        _load_dispatch[idx] = size, func  # noqa: F821
        return func

    return decorator
|
|
|
|
|
|
|
|
|
2024-08-31 11:48:16 +03:00
|
|
|
def _register_writer(idx: int) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
    """Decorator factory: record the decorated function as the writer for
    TIFF field type *idx* in the module-level dispatch table."""

    def register(func: Callable[..., Any]) -> Callable[..., Any]:
        _write_dispatch[idx] = func  # noqa: F821
        return func

    return register
|
|
|
|
|
|
|
|
|
2024-07-20 06:14:18 +03:00
|
|
|
def _register_basic(idx_fmt_name: tuple[int, str, str]) -> None:
    """Register load and write handlers for a fixed-size basic TIFF field type.

    *idx_fmt_name* is ``(type index, struct format character, display name)``.
    """
    from .TiffTags import TYPES

    idx, fmt, name = idx_fmt_name
    TYPES[idx] = name
    # "=" selects standard sizes with no alignment padding; byte order is
    # presumably applied later by self._unpack/self._pack using the
    # directory's endian prefix (confirm in ImageFileDirectory_v2).
    size = struct.calcsize(f"={fmt}")

    def basic_handler(
        self: ImageFileDirectory_v2, data: bytes, legacy_api: bool = True
    ) -> tuple[Any, ...]:
        # Decode the raw field bytes as a tuple of len(data) // size values.
        return self._unpack(f"{len(data) // size}{fmt}", data)

    _load_dispatch[idx] = size, basic_handler  # noqa: F821
    # Writer packs each value individually and concatenates the results.
    _write_dispatch[idx] = lambda self, *values: (  # noqa: F821
        b"".join(self._pack(fmt, value) for value in values)
    )
|
|
|
|
|
|
|
|
|
|
|
|
if TYPE_CHECKING:
|
2024-02-10 13:57:59 +03:00
|
|
|
_IFDv2Base = MutableMapping[int, Any]
|
2024-02-10 11:50:45 +03:00
|
|
|
else:
|
|
|
|
_IFDv2Base = MutableMapping
|
|
|
|
|
|
|
|
|
|
|
|
class ImageFileDirectory_v2(_IFDv2Base):
|
2014-12-29 18:48:01 +03:00
|
|
|
"""This class represents a TIFF tag directory. To speed things up, we
|
|
|
|
don't decode tags unless they're asked for.
|
|
|
|
|
2015-09-14 17:01:57 +03:00
|
|
|
Exposes a dictionary interface of the tags in the directory::
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2015-09-14 17:01:57 +03:00
|
|
|
ifd = ImageFileDirectory_v2()
|
|
|
|
ifd[key] = 'Some Data'
|
2019-02-19 11:45:53 +03:00
|
|
|
ifd.tagtype[key] = TiffTags.ASCII
|
2015-09-14 17:01:57 +03:00
|
|
|
print(ifd[key])
|
|
|
|
'Some Data'
|
2015-09-22 13:31:59 +03:00
|
|
|
|
2015-09-14 17:01:57 +03:00
|
|
|
Individual values are returned as the strings or numbers, sequences are
|
2015-09-22 13:31:59 +03:00
|
|
|
returned as tuples of the values.
|
2013-10-08 03:59:37 +04:00
|
|
|
|
2015-09-14 17:01:57 +03:00
|
|
|
The tiff metadata type of each item is stored in a dictionary of
|
|
|
|
tag types in
|
2020-09-01 20:16:46 +03:00
|
|
|
:attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v2.tagtype`. The types
|
2015-09-14 17:01:57 +03:00
|
|
|
are read from a tiff file, guessed from the type added, or added
|
|
|
|
manually.
|
2013-10-08 03:59:37 +04:00
|
|
|
|
2014-12-29 18:48:01 +03:00
|
|
|
Data Structures:
|
2015-09-14 17:01:57 +03:00
|
|
|
|
2021-09-11 12:24:24 +03:00
|
|
|
* ``self.tagtype = {}``
|
2015-09-22 13:31:59 +03:00
|
|
|
|
2021-09-11 12:48:43 +03:00
|
|
|
* Key: numerical TIFF tag number
|
2018-06-24 15:32:25 +03:00
|
|
|
* Value: integer corresponding to the data type from
|
2021-09-11 12:24:24 +03:00
|
|
|
:py:data:`.TiffTags.TYPES`
|
2015-09-14 17:01:57 +03:00
|
|
|
|
2021-09-11 12:24:24 +03:00
|
|
|
.. versionadded:: 3.0.0
|
2019-03-21 16:28:20 +03:00
|
|
|
|
2021-09-11 12:48:43 +03:00
|
|
|
'Internal' data structures:
|
2021-09-11 12:24:24 +03:00
|
|
|
|
|
|
|
* ``self._tags_v2 = {}``
|
|
|
|
|
2021-09-11 12:48:43 +03:00
|
|
|
* Key: numerical TIFF tag number
|
2021-09-11 12:24:24 +03:00
|
|
|
* Value: decoded data, as tuple for multiple values
|
|
|
|
|
|
|
|
* ``self._tagdata = {}``
|
|
|
|
|
2021-09-11 12:48:43 +03:00
|
|
|
* Key: numerical TIFF tag number
|
2021-09-11 12:24:24 +03:00
|
|
|
* Value: undecoded byte string from file
|
|
|
|
|
|
|
|
* ``self._tags_v1 = {}``
|
|
|
|
|
2021-09-11 12:48:43 +03:00
|
|
|
* Key: numerical TIFF tag number
|
2021-09-11 12:24:24 +03:00
|
|
|
* Value: decoded data in the v1 format
|
|
|
|
|
|
|
|
Tags will be found in the private attributes ``self._tagdata``, and in
|
|
|
|
``self._tags_v2`` once decoded.
|
|
|
|
|
|
|
|
``self.legacy_api`` is a value for internal use, and shouldn't be changed
|
|
|
|
from outside code. In cooperation with
|
|
|
|
:py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`, if ``legacy_api``
|
|
|
|
is true, then decoded tags will be populated into both ``_tags_v1`` and
|
|
|
|
``_tags_v2``. ``_tags_v2`` will be used if this IFD is used in the TIFF
|
|
|
|
save routine. Tags should be read from ``_tags_v1`` if
|
|
|
|
``legacy_api == true``.
|
2015-09-22 13:31:59 +03:00
|
|
|
|
2014-12-29 18:48:01 +03:00
|
|
|
"""
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2024-08-15 01:08:43 +03:00
|
|
|
_load_dispatch: dict[int, tuple[int, _LoaderFunc]] = {}
|
2024-02-10 11:50:45 +03:00
|
|
|
_write_dispatch: dict[int, Callable[..., Any]] = {}
|
|
|
|
|
2024-06-12 14:15:55 +03:00
|
|
|
    def __init__(
        self,
        ifh: bytes = b"II\x2A\x00\x00\x00\x00\x00",
        prefix: bytes | None = None,
        group: int | None = None,
    ) -> None:
        """Initialize an ImageFileDirectory.

        To construct an ImageFileDirectory from a real file, pass the 8-byte
        magic header to the constructor.  To only set the endianness, pass it
        as the 'prefix' keyword argument.

        :param ifh: One of the accepted magic headers (cf. PREFIXES); also sets
               endianness.
        :param prefix: Override the endianness of the file.
        :param group: Tag group this IFD belongs to (e.g. an EXIF sub-IFD tag
               number); used to disambiguate tag name lookups.
        :raises SyntaxError: If *ifh* is not a valid TIFF header, or the
               resulting prefix is neither II nor MM.
        """
        if not _accept(ifh):
            msg = f"not a TIFF file (header {repr(ifh)} not valid)"
            raise SyntaxError(msg)
        # Explicit prefix wins; otherwise take the byte-order mark from ifh.
        self._prefix = prefix if prefix is not None else ifh[:2]
        if self._prefix == MM:
            self._endian = ">"  # big-endian (Motorola)
        elif self._prefix == II:
            self._endian = "<"  # little-endian (Intel)
        else:
            msg = "not a TIFF IFD"
            raise SyntaxError(msg)
        # Version field 43 marks BigTIFF (64-bit offsets); classic TIFF is 42.
        self._bigtiff = ifh[2] == 43
        self.group = group
        self.tagtype: dict[int, int] = {}
        """ Dictionary of tag types """
        self.reset()
        # Offset of the first IFD: 8 bytes into a BigTIFF header, 4 into classic.
        self.next = (
            self._unpack("Q", ifh[8:])[0]
            if self._bigtiff
            else self._unpack("L", ifh[4:])[0]
        )
        self._legacy_api = False
|
2015-09-13 16:01:01 +03:00
|
|
|
|
2014-12-29 18:48:01 +03:00
|
|
|
    # Read-only views: the two-byte endianness prefix and the file offset at
    # which this IFD was loaded (None until load() has been called).
    prefix = property(lambda self: self._prefix)
    offset = property(lambda self: self._offset)
|
2024-02-10 11:50:45 +03:00
|
|
|
|
|
|
|
    @property
    def legacy_api(self) -> bool:
        # True when this directory mimics the v1 (tuple-returning) API.
        return self._legacy_api

    @legacy_api.setter
    def legacy_api(self, value: bool) -> NoReturn:
        # The API mode is fixed at construction time; flipping it afterwards
        # would desynchronize _tags_v1 and _tags_v2.
        msg = "Not allowing setting of legacy api"
        raise Exception(msg)
|
2014-12-29 18:48:01 +03:00
|
|
|
|
2024-06-12 14:15:55 +03:00
|
|
|
def reset(self) -> None:
|
|
|
|
self._tags_v1: dict[int, Any] = {} # will remain empty if legacy_api is false
|
|
|
|
self._tags_v2: dict[int, Any] = {} # main tag storage
|
|
|
|
self._tagdata: dict[int, bytes] = {}
|
2015-09-22 13:31:59 +03:00
|
|
|
self.tagtype = {} # added 2008-06-05 by Florian Hoech
|
2014-12-29 18:48:01 +03:00
|
|
|
self._next = None
|
2024-08-21 01:05:02 +03:00
|
|
|
self._offset: int | None = None
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2024-05-13 11:47:51 +03:00
|
|
|
def __str__(self) -> str:
|
2014-12-29 18:48:01 +03:00
|
|
|
return str(dict(self))
|
2013-03-07 15:59:52 +04:00
|
|
|
|
2024-08-21 01:05:02 +03:00
|
|
|
def named(self) -> dict[str, Any]:
|
2014-07-28 20:00:06 +04:00
|
|
|
"""
|
2015-09-14 17:01:57 +03:00
|
|
|
:returns: dict of name|key: value
|
2015-09-22 13:31:59 +03:00
|
|
|
|
2015-05-29 07:59:54 +03:00
|
|
|
Returns the complete tag dictionary, with named tags where possible.
|
2014-07-28 20:00:06 +04:00
|
|
|
"""
|
2021-06-24 12:56:01 +03:00
|
|
|
return {
|
|
|
|
TiffTags.lookup(code, self.group).name: value
|
|
|
|
for code, value in self.items()
|
|
|
|
}
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2024-05-13 11:47:51 +03:00
|
|
|
def __len__(self) -> int:
|
2015-09-13 16:01:01 +03:00
|
|
|
return len(set(self._tagdata) | set(self._tags_v2))
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2024-08-21 01:05:02 +03:00
|
|
|
    def __getitem__(self, tag: int) -> Any:
        # Decode lazily: raw bytes stay in _tagdata until the tag is first
        # read, at which point the typed loader populates _tags_v2 via self[].
        if tag not in self._tags_v2:  # unpack on the fly
            data = self._tagdata[tag]
            typ = self.tagtype[tag]
            size, handler = self._load_dispatch[typ]
            self[tag] = handler(self, data, self.legacy_api)  # check type
        val = self._tags_v2[tag]
        # The legacy API always exposes tuples (bytes are left untouched).
        if self.legacy_api and not isinstance(val, (tuple, bytes)):
            val = (val,)
        return val
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2024-07-12 14:16:56 +03:00
|
|
|
def __contains__(self, tag: object) -> bool:
|
2015-09-13 16:01:01 +03:00
|
|
|
return tag in self._tags_v2 or tag in self._tagdata
|
2012-10-16 01:19:55 +04:00
|
|
|
|
2024-08-15 01:08:43 +03:00
|
|
|
def __setitem__(self, tag: int, value: Any) -> None:
|
2015-09-13 16:01:01 +03:00
|
|
|
self._setitem(tag, value, self.legacy_api)
|
|
|
|
|
2024-08-15 01:08:43 +03:00
|
|
|
    def _setitem(self, tag: int, value: Any, legacy_api: bool) -> None:
        # Scalars of these types are wrapped into a single-element list so
        # the rest of the method can treat every value as a sequence.
        basetypes = (Number, bytes, str)

        info = TiffTags.lookup(tag, self.group)
        values = [value] if isinstance(value, basetypes) else value

        # First write for this tag: pick a TIFF field type, preferring the
        # spec'd type; otherwise infer the narrowest type from the values.
        if tag not in self.tagtype:
            if info.type:
                self.tagtype[tag] = info.type
            else:
                self.tagtype[tag] = TiffTags.UNDEFINED
                if all(isinstance(v, IFDRational) for v in values):
                    # SIGNED_RATIONAL only if any fraction is negative.
                    for v in values:
                        assert isinstance(v, IFDRational)
                        if v < 0:
                            self.tagtype[tag] = TiffTags.SIGNED_RATIONAL
                            break
                    else:
                        self.tagtype[tag] = TiffTags.RATIONAL
                elif all(isinstance(v, int) for v in values):
                    # Track which integer widths can still hold every value.
                    short = True
                    signed_short = True
                    long = True
                    for v in values:
                        assert isinstance(v, int)
                        if short and not (0 <= v < 2**16):
                            short = False
                        if signed_short and not (-(2**15) < v < 2**15):
                            signed_short = False
                        if long and v < 0:
                            long = False
                    if short:
                        self.tagtype[tag] = TiffTags.SHORT
                    elif signed_short:
                        self.tagtype[tag] = TiffTags.SIGNED_SHORT
                    elif long:
                        self.tagtype[tag] = TiffTags.LONG
                    else:
                        self.tagtype[tag] = TiffTags.SIGNED_LONG
                elif all(isinstance(v, float) for v in values):
                    self.tagtype[tag] = TiffTags.DOUBLE
                elif all(isinstance(v, str) for v in values):
                    self.tagtype[tag] = TiffTags.ASCII
                elif all(isinstance(v, bytes) for v in values):
                    self.tagtype[tag] = TiffTags.BYTE

        # Normalize values to match the chosen field type.
        if self.tagtype[tag] == TiffTags.UNDEFINED:
            values = [
                v.encode("ascii", "replace") if isinstance(v, str) else v
                for v in values
            ]
        elif self.tagtype[tag] == TiffTags.RATIONAL:
            values = [float(v) if isinstance(v, int) else v for v in values]

        # A dict value on a LONG tag denotes a nested sub-IFD, stored as-is.
        is_ifd = self.tagtype[tag] == TiffTags.LONG and isinstance(values, dict)
        if not is_ifd:
            values = tuple(
                info.cvt_enum(value) if isinstance(value, str) else value
                for value in values
            )

        dest = self._tags_v1 if legacy_api else self._tags_v2

        # Three branches:
        # Spec'd length == 1, Actual length 1, store as element
        # Spec'd length == 1, Actual > 1, Warn and truncate. Formerly barfed.
        # No Spec, Actual length 1, Formerly (<4.2) returned a 1 element tuple.
        # Don't mess with the legacy api, since it's frozen.
        if not is_ifd and (
            (info.length == 1)
            or self.tagtype[tag] == TiffTags.BYTE
            or (info.length is None and len(values) == 1 and not legacy_api)
        ):
            # Don't mess with the legacy api, since it's frozen.
            if legacy_api and self.tagtype[tag] in [
                TiffTags.RATIONAL,
                TiffTags.SIGNED_RATIONAL,
            ]:  # rationals
                values = (values,)
            try:
                (dest[tag],) = values
            except ValueError:
                # We've got a builtin tag with 1 expected entry
                warnings.warn(
                    f"Metadata Warning, tag {tag} had too many entries: "
                    f"{len(values)}, expected 1"
                )
                dest[tag] = values[0]

        else:
            # Spec'd length > 1 or undefined
            # Unspec'd, and length > 1
            dest[tag] = values
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2024-06-11 16:26:00 +03:00
|
|
|
def __delitem__(self, tag: int) -> None:
|
2015-09-13 16:01:01 +03:00
|
|
|
self._tags_v2.pop(tag, None)
|
|
|
|
self._tags_v1.pop(tag, None)
|
2014-12-29 18:48:01 +03:00
|
|
|
self._tagdata.pop(tag, None)
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2024-07-29 16:46:07 +03:00
|
|
|
def __iter__(self) -> Iterator[int]:
|
2015-09-13 16:01:01 +03:00
|
|
|
return iter(set(self._tagdata) | set(self._tags_v2))
|
2014-12-29 18:48:01 +03:00
|
|
|
|
2024-08-15 01:08:43 +03:00
|
|
|
def _unpack(self, fmt: str, data: bytes) -> tuple[Any, ...]:
|
2014-12-29 18:48:01 +03:00
|
|
|
return struct.unpack(self._endian + fmt, data)
|
|
|
|
|
2024-08-15 01:08:43 +03:00
|
|
|
def _pack(self, fmt: str, *values: Any) -> bytes:
|
2014-12-29 18:48:01 +03:00
|
|
|
return struct.pack(self._endian + fmt, *values)
|
|
|
|
|
|
|
|
    # Register load/write handlers for the fixed-size numeric field types in
    # one sweep; each entry is (TIFF type id, struct format char, name).
    # Note IFD shares LONG's handlers and LONG8 is the 64-bit BigTIFF type.
    list(
        map(
            _register_basic,
            [
                (TiffTags.SHORT, "H", "short"),
                (TiffTags.LONG, "L", "long"),
                (TiffTags.SIGNED_BYTE, "b", "signed byte"),
                (TiffTags.SIGNED_SHORT, "h", "signed short"),
                (TiffTags.SIGNED_LONG, "l", "signed long"),
                (TiffTags.FLOAT, "f", "float"),
                (TiffTags.DOUBLE, "d", "double"),
                (TiffTags.IFD, "L", "long"),
                (TiffTags.LONG8, "Q", "long8"),
            ],
        )
    )
|
2014-12-29 18:48:01 +03:00
|
|
|
|
2015-09-22 13:31:59 +03:00
|
|
|
    @_register_loader(1, 1)  # Basic type, except for the legacy API.
    def load_byte(self, data: bytes, legacy_api: bool = True) -> bytes:
        # BYTE payloads are returned untouched as raw bytes.
        return data
|
2015-03-01 06:44:38 +03:00
|
|
|
|
2015-09-22 13:31:59 +03:00
|
|
|
@_register_writer(1) # Basic type, except for the legacy API.
|
2024-07-26 09:42:28 +03:00
|
|
|
def write_byte(self, data: bytes | int | IFDRational) -> bytes:
|
2023-01-13 13:02:42 +03:00
|
|
|
if isinstance(data, IFDRational):
|
|
|
|
data = int(data)
|
2022-11-15 01:06:41 +03:00
|
|
|
if isinstance(data, int):
|
|
|
|
data = bytes((data,))
|
2015-03-01 06:44:38 +03:00
|
|
|
return data
|
|
|
|
|
2014-12-29 18:48:01 +03:00
|
|
|
@_register_loader(2, 1)
|
2024-07-20 06:14:18 +03:00
|
|
|
def load_string(self, data: bytes, legacy_api: bool = True) -> str:
|
2014-12-29 18:48:01 +03:00
|
|
|
if data.endswith(b"\0"):
|
2010-07-31 06:52:47 +04:00
|
|
|
data = data[:-1]
|
2014-12-29 18:48:01 +03:00
|
|
|
return data.decode("latin-1", "replace")
|
|
|
|
|
|
|
|
@_register_writer(2)
|
2024-07-26 09:42:28 +03:00
|
|
|
def write_string(self, value: str | bytes | int) -> bytes:
|
2014-12-29 18:48:01 +03:00
|
|
|
# remerge of https://github.com/python-pillow/Pillow/pull/1416
|
2022-12-13 14:40:55 +03:00
|
|
|
if isinstance(value, int):
|
|
|
|
value = str(value)
|
2022-08-11 13:46:58 +03:00
|
|
|
if not isinstance(value, bytes):
|
|
|
|
value = value.encode("ascii", "replace")
|
|
|
|
return value + b"\0"
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2014-12-29 18:48:01 +03:00
|
|
|
    @_register_loader(5, 8)
    def load_rational(
        self, data: bytes, legacy_api: bool = True
    ) -> tuple[tuple[int, int] | IFDRational, ...]:
        # Each RATIONAL is two unsigned 32-bit ints: numerator, denominator.
        vals = self._unpack(f"{len(data) // 4}L", data)

        def combine(a: int, b: int) -> tuple[int, int] | IFDRational:
            # Legacy API exposes plain (num, den) tuples; v2 wraps in IFDRational.
            return (a, b) if legacy_api else IFDRational(a, b)

        return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2]))
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2014-12-29 18:48:01 +03:00
|
|
|
    @_register_writer(5)
    def write_rational(self, *values: IFDRational) -> bytes:
        # Clamp each fraction so numerator and denominator fit unsigned 32 bits.
        return b"".join(
            self._pack("2L", *_limit_rational(frac, 2**32 - 1)) for frac in values
        )
|
|
|
|
|
|
|
|
    @_register_loader(7, 1)
    def load_undefined(self, data: bytes, legacy_api: bool = True) -> bytes:
        # UNDEFINED payloads are opaque; hand back the raw bytes.
        return data
|
|
|
|
|
2014-12-29 18:48:01 +03:00
|
|
|
@_register_writer(7)
|
2024-07-26 09:42:28 +03:00
|
|
|
def write_undefined(self, value: bytes | int | IFDRational) -> bytes:
|
2024-02-28 13:07:15 +03:00
|
|
|
if isinstance(value, IFDRational):
|
|
|
|
value = int(value)
|
2023-02-14 02:52:32 +03:00
|
|
|
if isinstance(value, int):
|
|
|
|
value = str(value).encode("ascii", "replace")
|
2014-12-29 18:48:01 +03:00
|
|
|
return value
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2014-12-29 18:48:01 +03:00
|
|
|
    @_register_loader(10, 8)
    def load_signed_rational(
        self, data: bytes, legacy_api: bool = True
    ) -> tuple[tuple[int, int] | IFDRational, ...]:
        # Each SRATIONAL is two signed 32-bit ints: numerator, denominator.
        vals = self._unpack(f"{len(data) // 4}l", data)

        def combine(a: int, b: int) -> tuple[int, int] | IFDRational:
            # Legacy API exposes plain (num, den) tuples; v2 wraps in IFDRational.
            return (a, b) if legacy_api else IFDRational(a, b)

        return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2]))
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2014-12-29 18:48:01 +03:00
|
|
|
    @_register_writer(10)
    def write_signed_rational(self, *values: IFDRational) -> bytes:
        # Clamp each fraction into the signed 32-bit range before packing.
        return b"".join(
            self._pack("2l", *_limit_signed_rational(frac, 2**31 - 1, -(2**31)))
            for frac in values
        )
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2024-07-12 14:16:56 +03:00
|
|
|
def _ensure_read(self, fp: IO[bytes], size: int) -> bytes:
|
2015-09-11 20:45:10 +03:00
|
|
|
ret = fp.read(size)
|
|
|
|
if len(ret) != size:
|
2022-12-22 00:51:35 +03:00
|
|
|
msg = (
|
2015-09-11 20:45:10 +03:00
|
|
|
"Corrupt EXIF data. "
|
2020-07-16 12:43:29 +03:00
|
|
|
f"Expecting to read {size} bytes but only got {len(ret)}. "
|
2015-09-11 20:45:10 +03:00
|
|
|
)
|
2022-12-22 00:51:35 +03:00
|
|
|
raise OSError(msg)
|
2015-09-11 20:45:10 +03:00
|
|
|
return ret
|
|
|
|
|
2024-08-21 01:05:02 +03:00
|
|
|
    def load(self, fp: IO[bytes]) -> None:
        # Parse one IFD from *fp* into _tagdata/tagtype (values are decoded
        # lazily on access).  On truncated data, warns and returns what it has.
        self.reset()
        self._offset = fp.tell()

        try:
            # Entry count: 8 bytes in BigTIFF, 2 bytes in classic TIFF.
            tag_count = (
                self._unpack("Q", self._ensure_read(fp, 8))
                if self._bigtiff
                else self._unpack("H", self._ensure_read(fp, 2))
            )[0]
            for i in range(tag_count):
                # Each entry: tag, type, count, then inline value/offset field.
                tag, typ, count, data = (
                    self._unpack("HHQ8s", self._ensure_read(fp, 20))
                    if self._bigtiff
                    else self._unpack("HHL4s", self._ensure_read(fp, 12))
                )

                tagname = TiffTags.lookup(tag, self.group).name
                typname = TYPES.get(typ, "unknown")
                msg = f"tag: {tagname} ({tag}) - type: {typname} ({typ})"

                try:
                    unit_size, handler = self._load_dispatch[typ]
                except KeyError:
                    logger.debug("%s - unsupported type %s", msg, typ)
                    continue  # ignore unsupported type
                size = count * unit_size
                # Values larger than the inline field live elsewhere in the
                # file; the inline field then holds their offset.
                if size > (8 if self._bigtiff else 4):
                    here = fp.tell()
                    (offset,) = self._unpack("Q" if self._bigtiff else "L", data)
                    msg += f" Tag Location: {here} - Data Location: {offset}"
                    fp.seek(offset)
                    data = ImageFile._safe_read(fp, size)
                    fp.seek(here)
                else:
                    data = data[:size]

                if len(data) != size:
                    warnings.warn(
                        "Possibly corrupt EXIF data. "
                        f"Expecting to read {size} bytes but only got {len(data)}."
                        f" Skipping tag {tag}"
                    )
                    logger.debug(msg)
                    continue

                if not data:
                    logger.debug(msg)
                    continue

                # Store raw bytes; decoding happens on first __getitem__.
                self._tagdata[tag] = data
                self.tagtype[tag] = typ

                msg += " - value: "
                msg += f"<table: {size} bytes>" if size > 32 else repr(data)

                logger.debug(msg)

            # Trailing field: offset of the next IFD in the chain (0 = none).
            (self.next,) = (
                self._unpack("Q", self._ensure_read(fp, 8))
                if self._bigtiff
                else self._unpack("L", self._ensure_read(fp, 4))
            )
        except OSError as msg:
            # Truncated file: warn and keep whatever was parsed so far.
            warnings.warn(str(msg))
            return
|
2015-09-22 13:31:59 +03:00
|
|
|
|
2024-12-30 17:26:13 +03:00
|
|
|
    def _get_ifh(self) -> bytes:
        # Build the file header: byte-order prefix, version (43 for BigTIFF,
        # 42 for classic), then the offset of the first IFD.
        ifh = self._prefix + self._pack("H", 43 if self._bigtiff else 42)
        if self._bigtiff:
            # BigTIFF adds bytesize-of-offsets (8) and a reserved zero word.
            ifh += self._pack("HH", 8, 0)
        ifh += self._pack("Q", 16) if self._bigtiff else self._pack("L", 8)

        return ifh
|
|
|
|
|
2024-07-29 16:46:07 +03:00
|
|
|
def tobytes(self, offset: int = 0) -> bytes:
|
2014-12-29 18:48:01 +03:00
|
|
|
# FIXME What about tagdata?
|
2024-12-30 17:26:13 +03:00
|
|
|
result = self._pack("Q" if self._bigtiff else "H", len(self._tags_v2))
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2024-07-29 16:46:07 +03:00
|
|
|
entries: list[tuple[int, int, int, bytes, bytes]] = []
|
2024-12-30 17:26:13 +03:00
|
|
|
offset += len(result) + len(self._tags_v2) * (20 if self._bigtiff else 12) + 4
|
2010-07-31 06:52:47 +04:00
|
|
|
stripoffsets = None
|
|
|
|
|
|
|
|
# pass 1: convert tags to binary format
|
2014-12-29 18:48:01 +03:00
|
|
|
# always write tags in ascending order
|
2024-12-30 17:26:13 +03:00
|
|
|
fmt = "Q" if self._bigtiff else "L"
|
|
|
|
fmt_size = 8 if self._bigtiff else 4
|
2015-09-13 16:01:01 +03:00
|
|
|
for tag, value in sorted(self._tags_v2.items()):
|
2014-12-29 18:48:01 +03:00
|
|
|
if tag == STRIPOFFSETS:
|
|
|
|
stripoffsets = len(entries)
|
2024-07-29 16:46:07 +03:00
|
|
|
typ = self.tagtype[tag]
|
2023-09-09 14:03:39 +03:00
|
|
|
logger.debug("Tag %s, Type: %s, Value: %s", tag, typ, repr(value))
|
2020-08-13 14:36:39 +03:00
|
|
|
is_ifd = typ == TiffTags.LONG and isinstance(value, dict)
|
|
|
|
if is_ifd:
|
2024-12-30 17:26:13 +03:00
|
|
|
ifd = ImageFileDirectory_v2(self._get_ifh(), group=tag)
|
2021-06-25 12:46:58 +03:00
|
|
|
values = self._tags_v2[tag]
|
|
|
|
for ifd_tag, ifd_value in values.items():
|
2020-08-13 14:36:39 +03:00
|
|
|
ifd[ifd_tag] = ifd_value
|
|
|
|
data = ifd.tobytes(offset)
|
|
|
|
else:
|
|
|
|
values = value if isinstance(value, tuple) else (value,)
|
|
|
|
data = self._write_dispatch[typ](self, *values)
|
2020-04-13 00:16:46 +03:00
|
|
|
|
2021-06-24 12:56:01 +03:00
|
|
|
tagname = TiffTags.lookup(tag, self.group).name
|
2020-08-13 14:36:39 +03:00
|
|
|
typname = "ifd" if is_ifd else TYPES.get(typ, "unknown")
|
2024-12-03 01:19:26 +03:00
|
|
|
msg = f"save: {tagname} ({tag}) - type: {typname} ({typ}) - value: "
|
|
|
|
msg += f"<table: {len(data)} bytes>" if len(data) >= 16 else str(values)
|
2020-04-13 00:16:46 +03:00
|
|
|
logger.debug(msg)
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2014-12-29 18:48:01 +03:00
|
|
|
# count is sum of lengths for string and arbitrary data
|
2020-08-13 14:36:39 +03:00
|
|
|
if is_ifd:
|
|
|
|
count = 1
|
|
|
|
elif typ in [TiffTags.BYTE, TiffTags.ASCII, TiffTags.UNDEFINED]:
|
2018-12-29 08:14:29 +03:00
|
|
|
count = len(data)
|
|
|
|
else:
|
|
|
|
count = len(values)
|
2014-12-29 18:48:01 +03:00
|
|
|
# figure out if data fits into the entry
|
2024-12-30 17:26:13 +03:00
|
|
|
if len(data) <= fmt_size:
|
|
|
|
entries.append((tag, typ, count, data.ljust(fmt_size, b"\0"), b""))
|
2010-07-31 06:52:47 +04:00
|
|
|
else:
|
2024-12-30 17:26:13 +03:00
|
|
|
entries.append((tag, typ, count, self._pack(fmt, offset), data))
|
2015-09-22 13:31:59 +03:00
|
|
|
offset += (len(data) + 1) // 2 * 2 # pad to word
|
2010-07-31 06:52:47 +04:00
|
|
|
|
|
|
|
# update strip offset data to point beyond auxiliary data
|
|
|
|
if stripoffsets is not None:
|
2014-12-29 18:48:01 +03:00
|
|
|
tag, typ, count, value, data = entries[stripoffsets]
|
|
|
|
if data:
|
2024-08-19 04:39:14 +03:00
|
|
|
size, handler = self._load_dispatch[typ]
|
|
|
|
values = [val + offset for val in handler(self, data, self.legacy_api)]
|
|
|
|
data = self._write_dispatch[typ](self, *values)
|
|
|
|
else:
|
2024-12-30 17:26:13 +03:00
|
|
|
value = self._pack(fmt, self._unpack(fmt, value)[0] + offset)
|
2014-12-29 18:48:01 +03:00
|
|
|
entries[stripoffsets] = tag, typ, count, value, data
|
|
|
|
|
|
|
|
# pass 2: write entries to file
|
|
|
|
for tag, typ, count, value, data in entries:
|
2023-09-09 14:03:39 +03:00
|
|
|
logger.debug("%s %s %s %s %s", tag, typ, count, repr(value), repr(data))
|
2024-12-30 17:26:13 +03:00
|
|
|
result += self._pack(
|
|
|
|
"HHQ8s" if self._bigtiff else "HHL4s", tag, typ, count, value
|
|
|
|
)
|
2010-07-31 06:52:47 +04:00
|
|
|
|
|
|
|
# -- overwrite here for multi-page --
|
2019-03-12 02:27:43 +03:00
|
|
|
result += b"\0\0\0\0" # end of entries
|
2010-07-31 06:52:47 +04:00
|
|
|
|
|
|
|
# pass 3: write auxiliary data to file
|
2014-12-29 18:48:01 +03:00
|
|
|
for tag, typ, count, value, data in entries:
|
2019-03-12 02:27:43 +03:00
|
|
|
result += data
|
2010-07-31 06:52:47 +04:00
|
|
|
if len(data) & 1:
|
2019-03-12 02:27:43 +03:00
|
|
|
result += b"\0"
|
|
|
|
|
|
|
|
return result
|
|
|
|
|
2024-07-12 14:16:56 +03:00
|
|
|
def save(self, fp: IO[bytes]) -> int:
|
2019-03-12 02:27:43 +03:00
|
|
|
if fp.tell() == 0: # skip TIFF header on subsequent pages
|
2024-12-30 17:26:13 +03:00
|
|
|
fp.write(self._get_ifh())
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2019-03-12 02:27:43 +03:00
|
|
|
offset = fp.tell()
|
2019-03-31 00:09:01 +03:00
|
|
|
result = self.tobytes(offset)
|
2019-03-12 02:27:43 +03:00
|
|
|
fp.write(result)
|
|
|
|
return offset + len(result)
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2017-05-27 23:55:14 +03:00
|
|
|
|
2015-09-11 20:09:14 +03:00
|
|
|
# Re-attach the dispatch tables collected during the class body, and expose
# each handler as a conventionally named load_<type>/write_<type> method.
ImageFileDirectory_v2._load_dispatch = _load_dispatch
ImageFileDirectory_v2._write_dispatch = _write_dispatch
for idx, name in TYPES.items():
    name = name.replace(" ", "_")
    setattr(ImageFileDirectory_v2, f"load_{name}", _load_dispatch[idx][1])
    setattr(ImageFileDirectory_v2, f"write_{name}", _write_dispatch[idx])
# Clean up the module namespace; the tables now live on the class.
del _load_dispatch, _write_dispatch, idx, name
|
|
|
|
|
2015-09-22 13:31:59 +03:00
|
|
|
|
|
|
|
# Legacy ImageFileDirectory support.
|
2015-09-11 20:09:14 +03:00
|
|
|
class ImageFileDirectory_v1(ImageFileDirectory_v2):
    """This class represents the **legacy** interface to a TIFF tag directory.

    Exposes a dictionary interface of the tags in the directory::

        ifd = ImageFileDirectory_v1()
        ifd[key] = 'Some Data'
        ifd.tagtype[key] = TiffTags.ASCII
        print(ifd[key])
        ('Some Data',)

    Also contains a dictionary of tag types as read from the tiff image file,
    :attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v1.tagtype`.

    Values are returned as a tuple.

    .. deprecated:: 3.0.0
    """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force legacy mode: values are mirrored into _tags_v1 as well.
        self._legacy_api = True

    # Legacy read-only aliases for the internal stores.
    tags = property(lambda self: self._tags_v1)
    tagdata = property(lambda self: self._tagdata)

    # defined in ImageFileDirectory_v2
    tagtype: dict[int, int]
    """Dictionary of tag types"""

    @classmethod
    def from_v2(cls, original: ImageFileDirectory_v2) -> ImageFileDirectory_v1:
        """Returns an
        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`
        instance with the same data as is contained in the original
        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`
        instance.

        :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`

        """

        ifd = cls(prefix=original.prefix)
        # Shares (does not copy) the raw data and type maps with *original*.
        ifd._tagdata = original._tagdata
        ifd.tagtype = original.tagtype
        ifd.next = original.next  # an indicator for multipage tiffs
        return ifd

    def to_v2(self) -> ImageFileDirectory_v2:
        """Returns an
        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`
        instance with the same data as is contained in the original
        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`
        instance.

        :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`

        """

        ifd = ImageFileDirectory_v2(prefix=self.prefix)
        # Shallow copies: the new directory owns its own dicts.
        ifd._tagdata = dict(self._tagdata)
        ifd.tagtype = dict(self.tagtype)
        ifd._tags_v2 = dict(self._tags_v2)
        return ifd

    def __contains__(self, tag: object) -> bool:
        # Legacy lookup checks the v1 store instead of v2.
        return tag in self._tags_v1 or tag in self._tagdata

    def __len__(self) -> int:
        return len(set(self._tagdata) | set(self._tags_v1))

    def __iter__(self) -> Iterator[int]:
        return iter(set(self._tagdata) | set(self._tags_v1))

    def __setitem__(self, tag: int, value: Any) -> None:
        # Keep both the v2 and v1 stores in sync on every write.
        for legacy_api in (False, True):
            self._setitem(tag, value, legacy_api)

    def __getitem__(self, tag: int) -> Any:
        if tag not in self._tags_v1:  # unpack on the fly
            data = self._tagdata[tag]
            typ = self.tagtype[tag]
            size, handler = self._load_dispatch[typ]
            # Decode once per API mode so both stores get their native format.
            for legacy in (False, True):
                self._setitem(tag, handler(self, data, legacy), legacy)
        val = self._tags_v1[tag]
        # Legacy API always returns tuples (bytes are left untouched).
        if not isinstance(val, (tuple, bytes)):
            val = (val,)
        return val
|
|
|
|
|
2015-09-22 13:31:59 +03:00
|
|
|
|
2024-09-06 12:13:35 +03:00
|
|
|
# undone -- switch this pointer
# Backward-compatible public alias: "ImageFileDirectory" still refers to the
# deprecated v1 implementation.
ImageFileDirectory = ImageFileDirectory_v1
|
2014-07-28 20:00:06 +04:00
|
|
|
|
2015-09-22 13:31:59 +03:00
|
|
|
|
2010-07-31 06:52:47 +04:00
|
|
|
##
|
|
|
|
# Image plugin for TIFF files.
|
|
|
|
|
2019-03-21 16:28:20 +03:00
|
|
|
|
2010-07-31 06:52:47 +04:00
|
|
|
class TiffImageFile(ImageFile.ImageFile):
    # Image plugin identification used by the PIL registry.
    format = "TIFF"
    format_description = "Adobe TIFF"
    # Multi-frame format: keep the exclusive fp open so seek() keeps working.
    _close_exclusive_fp_after_loading = False
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2024-08-21 01:05:02 +03:00
|
|
|
    def __init__(
        self,
        fp: StrOrBytesPath | IO[bytes],
        filename: str | bytes | None = None,
    ) -> None:
        # Declare the tag attributes up front (populated during _open/_seek).
        self.tag_v2: ImageFileDirectory_v2
        """ Image file directory (tag dictionary) """

        self.tag: ImageFileDirectory_v1
        """ Legacy tag entries """

        super().__init__(fp, filename)
|
|
|
|
|
2024-06-11 16:26:00 +03:00
|
|
|
    def _open(self) -> None:
        """Open the first image in a TIFF file"""

        # Header
        ifh = self.fp.read(8)
        # Version 43 means BigTIFF: its header is 16 bytes, not 8.
        if ifh[2] == 43:
            ifh += self.fp.read(8)

        self.tag_v2 = ImageFileDirectory_v2(ifh)

        # setup frame pointers (name-mangled: private to this class)
        self.__first = self.__next = self.tag_v2.next
        self.__frame = -1
        self._fp = self.fp
        self._frame_pos: list[int] = []
        self._n_frames: int | None = None

        logger.debug("*** TiffImageFile._open ***")
        logger.debug("- __first: %s", self.__first)
        logger.debug("- ifh: %s", repr(ifh))  # Use repr to avoid str(bytes)

        # and load the first frame
        self._seek(0)
|
|
|
|
|
2015-04-15 03:43:05 +03:00
|
|
|
@property
|
2024-07-28 05:53:02 +03:00
|
|
|
def n_frames(self) -> int:
|
|
|
|
current_n_frames = self._n_frames
|
|
|
|
if current_n_frames is None:
|
2015-04-15 03:43:05 +03:00
|
|
|
current = self.tell()
|
2019-03-13 11:07:55 +03:00
|
|
|
self._seek(len(self._frame_pos))
|
2019-03-13 10:28:17 +03:00
|
|
|
while self._n_frames is None:
|
|
|
|
self._seek(self.tell() + 1)
|
2015-04-15 03:43:05 +03:00
|
|
|
self.seek(current)
|
2024-07-28 05:53:02 +03:00
|
|
|
assert self._n_frames is not None
|
2015-04-15 03:43:05 +03:00
|
|
|
return self._n_frames
|
|
|
|
|
2024-05-04 13:51:54 +03:00
|
|
|
def seek(self, frame: int) -> None:
|
2019-02-03 07:58:24 +03:00
|
|
|
"""Select a given frame as current image"""
|
2017-09-30 06:32:43 +03:00
|
|
|
if not self._seek_check(frame):
|
|
|
|
return
|
|
|
|
self._seek(frame)
|
2024-10-08 12:48:32 +03:00
|
|
|
if self._im is not None and (
|
|
|
|
self.im.size != self._tile_size or self.im.mode != self.mode
|
|
|
|
):
|
|
|
|
# The core image will no longer be used
|
|
|
|
self._im = None
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2024-05-13 11:47:51 +03:00
|
|
|
    def _seek(self, frame: int) -> None:
        """Walk the IFD chain until ``frame`` is reached, then load its tags.

        Extends ``self._frame_pos`` as new IFDs are discovered, detects the
        end of the chain (and IFD loops), and finally re-reads the target
        frame's directory and calls :meth:`_setup`.

        :raises EOFError: If ``frame`` is beyond the last image.
        :raises ValueError: If an IFD offset is implausibly large.
        """
        self.fp = self._fp

        # Discover IFD offsets up to and including the requested frame.
        while len(self._frame_pos) <= frame:
            if not self.__next:
                msg = "no more images in TIFF file"
                raise EOFError(msg)
            logger.debug(
                "Seeking to frame %s, on frame %s, __next %s, location: %s",
                frame,
                self.__frame,
                self.__next,
                self.fp.tell(),
            )
            # Reject offsets that cannot fit a signed 64-bit file position.
            if self.__next >= 2**63:
                msg = "Unable to seek to frame"
                raise ValueError(msg)
            self.fp.seek(self.__next)
            self._frame_pos.append(self.__next)
            logger.debug("Loading tags, location: %s", self.fp.tell())
            self.tag_v2.load(self.fp)
            if self.tag_v2.next in self._frame_pos:
                # This IFD has already been processed
                # Declare this to be the end of the image
                self.__next = 0
            else:
                self.__next = self.tag_v2.next
            if self.__next == 0:
                # Reached the end of the chain: the frame count is now known.
                self._n_frames = frame + 1
            if len(self._frame_pos) == 1:
                self.is_animated = self.__next != 0
            self.__frame += 1
        # Re-read the directory of the frame we actually want.
        self.fp.seek(self._frame_pos[frame])
        self.tag_v2.load(self.fp)
        if XMP in self.tag_v2:
            self.info["xmp"] = self.tag_v2[XMP]
        elif "xmp" in self.info:
            # Clear stale XMP carried over from a previous frame.
            del self.info["xmp"]
        self._reload_exif()
        # fill the legacy tag/ifd entries
        self.tag = self.ifd = ImageFileDirectory_v1.from_v2(self.tag_v2)
        self.__frame = frame
        self._setup()
|
2014-10-02 11:43:22 +04:00
|
|
|
|
2024-05-04 13:51:54 +03:00
|
|
|
def tell(self) -> int:
|
2019-02-03 07:58:24 +03:00
|
|
|
"""Return the current frame number"""
|
2010-07-31 06:52:47 +04:00
|
|
|
return self.__frame
|
|
|
|
|
2024-07-20 06:14:18 +03:00
|
|
|
def get_photoshop_blocks(self) -> dict[int, dict[str, bytes]]:
|
2022-02-10 04:00:23 +03:00
|
|
|
"""
|
|
|
|
Returns a dictionary of Photoshop "Image Resource Blocks".
|
|
|
|
The keys are the image resource ID. For more information, see
|
|
|
|
https://www.adobe.com/devnet-apps/photoshop/fileformatashtml/#50577409_pgfId-1037727
|
|
|
|
|
|
|
|
:returns: Photoshop "Image Resource Blocks" in a dictionary.
|
|
|
|
"""
|
|
|
|
blocks = {}
|
2023-04-16 07:04:39 +03:00
|
|
|
val = self.tag_v2.get(ExifTags.Base.ImageResources)
|
2022-02-10 04:00:23 +03:00
|
|
|
if val:
|
|
|
|
while val[:4] == b"8BIM":
|
|
|
|
id = i16(val[4:6])
|
|
|
|
n = math.ceil((val[6] + 1) / 2) * 2
|
|
|
|
size = i32(val[6 + n : 10 + n])
|
|
|
|
data = val[10 + n : 10 + n + size]
|
|
|
|
blocks[id] = {"data": data}
|
|
|
|
|
|
|
|
val = val[math.ceil((10 + n + size) / 2) * 2 :]
|
|
|
|
return blocks
|
|
|
|
|
2024-07-05 20:55:23 +03:00
|
|
|
def load(self) -> Image.core.PixelAccess | None:
|
2020-04-17 15:13:14 +03:00
|
|
|
if self.tile and self.use_load_libtiff:
|
2016-03-27 14:18:39 +03:00
|
|
|
return self._load_libtiff()
|
2019-09-30 17:56:31 +03:00
|
|
|
return super().load()
|
2016-03-27 14:18:39 +03:00
|
|
|
|
2024-09-18 13:26:06 +03:00
|
|
|
def load_prepare(self) -> None:
|
2024-09-18 15:56:29 +03:00
|
|
|
if self._im is None:
|
2024-10-08 12:48:32 +03:00
|
|
|
Image._decompression_bomb_check(self._tile_size)
|
2024-09-18 15:56:29 +03:00
|
|
|
self.im = Image.core.new(self.mode, self._tile_size)
|
2024-09-18 13:26:06 +03:00
|
|
|
ImageFile.ImageFile.load_prepare(self)
|
|
|
|
|
2024-05-04 13:51:54 +03:00
|
|
|
def load_end(self) -> None:
|
2017-01-02 02:17:39 +03:00
|
|
|
# allow closing if we're on the first frame, there's no next
|
|
|
|
# This is the ImageFile.load path only, libtiff specific below.
|
2020-04-13 07:37:49 +03:00
|
|
|
if not self.is_animated:
|
2017-03-15 02:16:38 +03:00
|
|
|
self._close_exclusive_fp_after_loading = True
|
2017-01-02 02:17:39 +03:00
|
|
|
|
2021-07-04 05:33:55 +03:00
|
|
|
# load IFD data from fp before it is closed
|
|
|
|
exif = self.getexif()
|
2023-01-07 21:36:17 +03:00
|
|
|
for key in TiffTags.TAGS_V2_GROUPS:
|
2021-07-04 05:33:55 +03:00
|
|
|
if key not in exif:
|
|
|
|
continue
|
|
|
|
exif.get_ifd(key)
|
|
|
|
|
2023-09-08 08:05:36 +03:00
|
|
|
ImageOps.exif_transpose(self, in_place=True)
|
2023-09-11 12:02:17 +03:00
|
|
|
if ExifTags.Base.Orientation in self.tag_v2:
|
|
|
|
del self.tag_v2[ExifTags.Base.Orientation]
|
2023-09-08 08:05:36 +03:00
|
|
|
|
2024-07-20 06:14:18 +03:00
|
|
|
    def _load_libtiff(self) -> Image.core.PixelAccess | None:
        """Decode the whole image in one shot via the libtiff decoder.

        Used instead of the tile-by-tile ImageFile path whenever
        ``use_load_libtiff`` was set in :meth:`_setup`.

        :returns: A pixel access object, as from :meth:`Image.Image.load`.
        :raises OSError: If there is not exactly one tile, the decoder
            cannot be attached to the image, or libtiff reports an error.
        """

        Image.Image.load(self)

        self.load_prepare()

        if not len(self.tile) == 1:
            msg = "Not exactly one tile"
            raise OSError(msg)

        # (self._compression, (extents tuple),
        #    0, (rawmode, self._compression, fp))
        extents = self.tile[0][1]
        args = self.tile[0][3]

        # To be nice on memory footprint, if there's a
        # file descriptor, use that instead of reading
        # into a string in python.
        try:
            fp = hasattr(self.fp, "fileno") and self.fp.fileno()
            # flush the file descriptor, prevents error on pypy 2.4+
            # should also eliminate the need for fp.tell
            # in _seek
            if hasattr(self.fp, "flush"):
                self.fp.flush()
        except OSError:
            # io.BytesIO have a fileno, but returns an OSError if
            # it doesn't use a file descriptor.
            fp = False

        if fp:
            # Patch the real OS-level descriptor into the decoder args.
            assert isinstance(args, tuple)
            args_list = list(args)
            args_list[2] = fp
            args = tuple(args_list)

        decoder = Image._getdecoder(self.mode, "libtiff", args, self.decoderconfig)
        try:
            decoder.setimage(self.im, extents)
        except ValueError as e:
            msg = "Couldn't set the image"
            raise OSError(msg) from e

        close_self_fp = self._exclusive_fp and not self.is_animated
        if hasattr(self.fp, "getvalue"):
            # We've got a stringio like thing passed in. Yay for all in memory.
            # The decoder needs the entire file in one shot, so there's not
            # a lot we can do here other than give it the entire file.
            # unless we could do something like get the address of the
            # underlying string for stringio.
            #
            # Rearranging for supporting byteio items, since they have a fileno
            # that returns an OSError if there's no underlying fp. Easier to
            # deal with here by reordering.
            logger.debug("have getvalue. just sending in a string from getvalue")
            n, err = decoder.decode(self.fp.getvalue())
        elif fp:
            # we've got a actual file on disk, pass in the fp.
            logger.debug("have fileno, calling fileno version of the decoder.")
            if not close_self_fp:
                self.fp.seek(0)
            # Save and restore the file position, because libtiff will move it
            # outside of the Python runtime, and that will confuse
            # io.BufferedReader and possible others.
            # NOTE: This must use os.lseek(), and not fp.tell()/fp.seek(),
            # because the buffer read head already may not equal the actual
            # file position, and fp.seek() may just adjust it's internal
            # pointer and not actually seek the OS file handle.
            pos = os.lseek(fp, 0, os.SEEK_CUR)
            # 4 bytes, otherwise the trace might error out
            n, err = decoder.decode(b"fpfp")
            os.lseek(fp, pos, os.SEEK_SET)
        else:
            # we have something else.
            logger.debug("don't have fileno or getvalue. just reading")
            self.fp.seek(0)
            # UNDONE -- so much for that buffer size thing.
            n, err = decoder.decode(self.fp.read())

        self.tile = []
        self.readonly = 0

        self.load_end()

        if close_self_fp:
            self.fp.close()
            self.fp = None  # might be shared

        if err < 0:
            raise OSError(err)

        return Image.Image.load(self)
|
|
|
|
|
2024-07-12 14:16:56 +03:00
|
|
|
    def _setup(self) -> None:
        """Setup this image object based on current tags.

        Interprets the current IFD (``self.tag_v2``) to derive the mode,
        size, resolution info, and the tile descriptors used for decoding.

        :raises OSError: For (unsupported) Windows Media Photo files.
        :raises TypeError: If the dimension tags are missing.
        :raises ValueError: If dimension tags hold non-integer values.
        :raises SyntaxError: If the pixel layout cannot be determined.
        """

        # 0xBC01: tag used by Windows Media Photo (HD Photo) files.
        if 0xBC01 in self.tag_v2:
            msg = "Windows Media Photo files not yet supported"
            raise OSError(msg)

        # extract relevant tags
        self._compression = COMPRESSION_INFO[self.tag_v2.get(COMPRESSION, 1)]
        self._planar_configuration = self.tag_v2.get(PLANAR_CONFIGURATION, 1)

        # photometric is a required tag, but not everyone is reading
        # the specification
        photo = self.tag_v2.get(PHOTOMETRIC_INTERPRETATION, 0)

        # old style jpeg compression images most certainly are YCbCr
        if self._compression == "tiff_jpeg":
            photo = 6

        fillorder = self.tag_v2.get(FILLORDER, 1)

        logger.debug("*** Summary ***")
        logger.debug("- compression: %s", self._compression)
        logger.debug("- photometric_interpretation: %s", photo)
        logger.debug("- planar_configuration: %s", self._planar_configuration)
        logger.debug("- fill_order: %s", fillorder)
        logger.debug("- YCbCr subsampling: %s", self.tag_v2.get(YCBCRSUBSAMPLING))

        # size
        try:
            xsize = self.tag_v2[IMAGEWIDTH]
            ysize = self.tag_v2[IMAGELENGTH]
        except KeyError as e:
            msg = "Missing dimensions"
            raise TypeError(msg) from e
        if not isinstance(xsize, int) or not isinstance(ysize, int):
            msg = "Invalid dimensions"
            raise ValueError(msg)
        self._tile_size = xsize, ysize
        orientation = self.tag_v2.get(ExifTags.Base.Orientation)
        # Orientations 5-8 involve a 90-degree rotation, swapping the
        # reported size relative to the stored tile size.
        if orientation in (5, 6, 7, 8):
            self._size = ysize, xsize
        else:
            self._size = xsize, ysize

        logger.debug("- size: %s", self.size)

        sample_format = self.tag_v2.get(SAMPLEFORMAT, (1,))
        if len(sample_format) > 1 and max(sample_format) == min(sample_format) == 1:
            # SAMPLEFORMAT is properly per band, so an RGB image will
            # be (1,1,1).  But, we don't support per band pixel types,
            # and anything more than one band is a uint8. So, just
            # take the first element. Revisit this if adding support
            # for more exotic images.
            sample_format = (1,)

        bps_tuple = self.tag_v2.get(BITSPERSAMPLE, (1,))
        extra_tuple = self.tag_v2.get(EXTRASAMPLES, ())
        if photo in (2, 6, 8):  # RGB, YCbCr, LAB
            bps_count = 3
        elif photo == 5:  # CMYK
            bps_count = 4
        else:
            bps_count = 1
        bps_count += len(extra_tuple)
        bps_actual_count = len(bps_tuple)
        samples_per_pixel = self.tag_v2.get(
            SAMPLESPERPIXEL,
            3 if self._compression == "tiff_jpeg" and photo in (2, 6) else 1,
        )

        if samples_per_pixel > MAX_SAMPLESPERPIXEL:
            # DOS check, samples_per_pixel can be a Long, and we extend the tuple below
            logger.error(
                "More samples per pixel than can be decoded: %s", samples_per_pixel
            )
            msg = "Invalid value for samples per pixel"
            raise SyntaxError(msg)

        if samples_per_pixel < bps_actual_count:
            # If a file has more values in bps_tuple than expected,
            # remove the excess.
            bps_tuple = bps_tuple[:samples_per_pixel]
        elif samples_per_pixel > bps_actual_count and bps_actual_count == 1:
            # If a file has only one value in bps_tuple, when it should have more,
            # presume it is the same number of bits for all of the samples.
            bps_tuple = bps_tuple * samples_per_pixel

        if len(bps_tuple) != samples_per_pixel:
            msg = "unknown data organization"
            raise SyntaxError(msg)

        # mode: check photometric interpretation and bits per pixel
        key = (
            self.tag_v2.prefix,
            photo,
            sample_format,
            fillorder,
            bps_tuple,
            extra_tuple,
        )
        logger.debug("format key: %s", key)
        try:
            self._mode, rawmode = OPEN_INFO[key]
        except KeyError as e:
            logger.debug("- unsupported format")
            msg = "unknown pixel mode"
            raise SyntaxError(msg) from e

        logger.debug("- raw mode: %s", rawmode)
        logger.debug("- pil mode: %s", self.mode)

        self.info["compression"] = self._compression

        xres = self.tag_v2.get(X_RESOLUTION, 1)
        yres = self.tag_v2.get(Y_RESOLUTION, 1)

        if xres and yres:
            resunit = self.tag_v2.get(RESOLUTION_UNIT)
            if resunit == 2:  # dots per inch
                self.info["dpi"] = (xres, yres)
            elif resunit == 3:  # dots per centimeter. convert to dpi
                self.info["dpi"] = (xres * 2.54, yres * 2.54)
            elif resunit is None:  # used to default to 1, but now 2)
                self.info["dpi"] = (xres, yres)
                # For backward compatibility,
                # we also preserve the old behavior
                self.info["resolution"] = xres, yres
            else:  # No absolute unit of measurement
                self.info["resolution"] = xres, yres

        # build tile descriptors
        x = y = layer = 0
        self.tile = []
        self.use_load_libtiff = READ_LIBTIFF or self._compression != "raw"
        if self.use_load_libtiff:
            # Decoder expects entire file as one tile.
            # There's a buffer size limit in load (64k)
            # so large g4 images will fail if we use that
            # function.
            #
            # Setup the one tile for the whole image, then
            # use the _load_libtiff function.

            # libtiff handles the fillmode for us, so 1;IR should
            # actually be 1;I. Including the R double reverses the
            # bits, so stripes of the image are reversed.  See
            # https://github.com/python-pillow/Pillow/issues/279
            if fillorder == 2:
                # Replace fillorder with fillorder=1
                key = key[:3] + (1,) + key[4:]
                logger.debug("format key: %s", key)
                # this should always work, since all the
                # fillorder==2 modes have a corresponding
                # fillorder=1 mode
                self._mode, rawmode = OPEN_INFO[key]
            # YCbCr images with new jpeg compression with pixels in one plane
            # unpacked straight into RGB values
            if (
                photo == 6
                and self._compression == "jpeg"
                and self._planar_configuration == 1
            ):
                rawmode = "RGB"
            # libtiff always returns the bytes in native order.
            # we're expecting image byte order. So, if the rawmode
            # contains I;16, we need to convert from native to image
            # byte order.
            elif rawmode == "I;16":
                rawmode = "I;16N"
            elif rawmode.endswith(";16B") or rawmode.endswith(";16L"):
                rawmode = rawmode[:-1] + "N"

            # Offset in the tile tuple is 0, we go from 0,0 to
            # w,h, and we only do this once -- eds
            a = (rawmode, self._compression, False, self.tag_v2.offset)
            self.tile.append(ImageFile._Tile("libtiff", (0, 0, xsize, ysize), 0, a))

        elif STRIPOFFSETS in self.tag_v2 or TILEOFFSETS in self.tag_v2:
            # striped image
            if STRIPOFFSETS in self.tag_v2:
                offsets = self.tag_v2[STRIPOFFSETS]
                h = self.tag_v2.get(ROWSPERSTRIP, ysize)
                w = xsize
            else:
                # tiled image
                offsets = self.tag_v2[TILEOFFSETS]
                tilewidth = self.tag_v2.get(TILEWIDTH)
                h = self.tag_v2.get(TILELENGTH)
                if not isinstance(tilewidth, int) or not isinstance(h, int):
                    msg = "Invalid tile dimensions"
                    raise ValueError(msg)
                w = tilewidth

            for offset in offsets:
                if x + w > xsize:
                    stride = w * sum(bps_tuple) / 8  # bytes per line
                else:
                    stride = 0

                tile_rawmode = rawmode
                if self._planar_configuration == 2:
                    # each band on it's own layer
                    tile_rawmode = rawmode[layer]
                    # adjust stride width accordingly
                    stride /= bps_count

                args = (tile_rawmode, int(stride), 1)
                self.tile.append(
                    ImageFile._Tile(
                        self._compression,
                        (x, y, min(x + w, xsize), min(y + h, ysize)),
                        offset,
                        args,
                    )
                )
                x = x + w
                if x >= xsize:
                    x, y = 0, y + h
                    if y >= ysize:
                        x = y = 0
                        layer += 1
        else:
            logger.debug("- unsupported data organization")
            msg = "unknown data organization"
            raise SyntaxError(msg)

        # Fix up info.
        if ICCPROFILE in self.tag_v2:
            self.info["icc_profile"] = self.tag_v2[ICCPROFILE]

        # fixup palette descriptor

        if self.mode in ["P", "PA"]:
            # COLORMAP stores 16-bit values; scale down to 8-bit entries.
            palette = [o8(b // 256) for b in self.tag_v2[COLORMAP]]
            self.palette = ImagePalette.raw("RGB;L", b"".join(palette))
|
2017-05-27 23:55:14 +03:00
|
|
|
|
|
|
|
|
2010-07-31 06:52:47 +04:00
|
|
|
#
|
|
|
|
# --------------------------------------------------------------------
|
|
|
|
# Write TIFF files
|
|
|
|
|
2014-07-28 20:00:06 +04:00
|
|
|
# little endian is default except for image modes with
|
2015-05-29 07:59:54 +03:00
|
|
|
# explicit big endian byte-order
|
2010-07-31 06:52:47 +04:00
|
|
|
|
|
|
|
# Per-mode save parameters. II (little endian) is the default byte order;
# MM (big endian) is used only for the explicitly big-endian raw modes.
SAVE_INFO = {
    # mode => rawmode, byteorder, photometrics,
    #           sampleformat, bitspersample, extra
    "1": ("1", II, 1, 1, (1,), None),
    "L": ("L", II, 1, 1, (8,), None),
    "LA": ("LA", II, 1, 1, (8, 8), 2),
    "P": ("P", II, 3, 1, (8,), None),
    "PA": ("PA", II, 3, 1, (8, 8), 2),
    "I": ("I;32S", II, 1, 2, (32,), None),
    "I;16": ("I;16", II, 1, 1, (16,), None),
    "I;16S": ("I;16S", II, 1, 2, (16,), None),
    "F": ("F;32F", II, 1, 3, (32,), None),
    "RGB": ("RGB", II, 2, 1, (8, 8, 8), None),
    "RGBX": ("RGBX", II, 2, 1, (8, 8, 8, 8), 0),
    "RGBA": ("RGBA", II, 2, 1, (8, 8, 8, 8), 2),
    "CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None),
    "YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None),
    "LAB": ("LAB", II, 8, 1, (8, 8, 8), None),
    "I;32BS": ("I;32BS", MM, 1, 2, (32,), None),
    "I;16B": ("I;16B", MM, 1, 1, (16,), None),
    "I;16BS": ("I;16BS", MM, 1, 2, (16,), None),
    "F;32BF": ("F;32BF", MM, 1, 3, (32,), None),
}
|
|
|
|
|
2014-07-28 20:00:06 +04:00
|
|
|
|
2024-09-06 14:33:30 +03:00
|
|
|
def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
    """Write a single frame of *im* to *fp* as TIFF.

    Builds an ImageFileDirectory_v2 from the image's mode, encoderinfo,
    and any tags carried over from an original TIFF, then writes either
    through the libtiff encoder (when compression is requested or
    WRITE_LIBTIFF is set) or through the pure-Python raw writer.

    :param im: Image to save; its mode must be a key of SAVE_INFO.
    :param fp: Writable (and for libtiff, ideally fileno-capable) stream.
    :param filename: Target name, passed through to the libtiff encoder.
    :raises OSError: if the mode is unsupported or the encoder fails.
    :raises ValueError: for an invalid ``quality`` setting.
    """
    try:
        rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode]
    except KeyError as e:
        msg = f"cannot write mode {im.mode} as TIFF"
        raise OSError(msg) from e

    encoderinfo = im.encoderinfo
    encoderconfig = im.encoderconfig

    # prefix (II/MM) selects the byte order of the written directory
    ifd = ImageFileDirectory_v2(prefix=prefix)
    if encoderinfo.get("big_tiff"):
        ifd._bigtiff = True

    try:
        compression = encoderinfo["compression"]
    except KeyError:
        compression = im.info.get("compression")
    if isinstance(compression, int):
        # compression value may be from BMP. Ignore it
        compression = None
    if compression is None:
        compression = "raw"
    elif compression == "tiff_jpeg":
        # OJPEG is obsolete, so use new-style JPEG compression instead
        compression = "jpeg"
    elif compression == "tiff_deflate":
        compression = "tiff_adobe_deflate"

    # anything other than raw output requires the libtiff encoder
    libtiff = WRITE_LIBTIFF or compression != "raw"

    # required for color libtiff images
    ifd[PLANAR_CONFIGURATION] = 1

    ifd[IMAGEWIDTH] = im.size[0]
    ifd[IMAGELENGTH] = im.size[1]

    # write any arbitrary tags passed in as an ImageFileDirectory
    if "tiffinfo" in encoderinfo:
        info = encoderinfo["tiffinfo"]
    elif "exif" in encoderinfo:
        info = encoderinfo["exif"]
        if isinstance(info, bytes):
            # raw EXIF blob: parse it into an Image.Exif mapping first
            exif = Image.Exif()
            exif.load(info)
            info = exif
    else:
        info = {}
    logger.debug("Tiffinfo Keys: %s", list(info))
    if isinstance(info, ImageFileDirectory_v1):
        info = info.to_v2()
    for key in info:
        if isinstance(info, Image.Exif) and key in TiffTags.TAGS_V2_GROUPS:
            # nested IFDs (e.g. EXIF sub-IFD) are copied as whole groups
            ifd[key] = info.get_ifd(key)
        else:
            ifd[key] = info.get(key)
        try:
            ifd.tagtype[key] = info.tagtype[key]
        except Exception:
            pass  # might not be an IFD. Might not have populated type

    legacy_ifd = {}
    if hasattr(im, "tag"):
        legacy_ifd = im.tag.to_v2()

    # tag_v2 values win over legacy v1 tags when both are present
    supplied_tags = {**legacy_ifd, **getattr(im, "tag_v2", {})}
    for tag in (
        # IFD offset that may not be correct in the saved image
        EXIFIFD,
        # Determined by the image format and should not be copied from legacy_ifd.
        SAMPLEFORMAT,
    ):
        if tag in supplied_tags:
            del supplied_tags[tag]

    # additions written by Greg Couch, gregc@cgl.ucsf.edu
    # inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com
    if hasattr(im, "tag_v2"):
        # preserve tags from original TIFF image file
        for key in (
            RESOLUTION_UNIT,
            X_RESOLUTION,
            Y_RESOLUTION,
            IPTC_NAA_CHUNK,
            PHOTOSHOP_CHUNK,
            XMP,
        ):
            if key in im.tag_v2:
                if key == IPTC_NAA_CHUNK and im.tag_v2.tagtype[key] not in (
                    TiffTags.BYTE,
                    TiffTags.UNDEFINED,
                ):
                    # IPTC data with an unexpected tag type is dropped rather
                    # than copied through with a bogus type
                    del supplied_tags[key]
                else:
                    ifd[key] = im.tag_v2[key]
                    ifd.tagtype[key] = im.tag_v2.tagtype[key]

    # preserve ICC profile (should also work when saving other formats
    # which support profiles as TIFF) -- 2008-06-06 Florian Hoech
    icc = encoderinfo.get("icc_profile", im.info.get("icc_profile"))
    if icc:
        ifd[ICCPROFILE] = icc

    # well-known keyword arguments mapped onto their TIFF tags
    for key, name in [
        (IMAGEDESCRIPTION, "description"),
        (X_RESOLUTION, "resolution"),
        (Y_RESOLUTION, "resolution"),
        (X_RESOLUTION, "x_resolution"),
        (Y_RESOLUTION, "y_resolution"),
        (RESOLUTION_UNIT, "resolution_unit"),
        (SOFTWARE, "software"),
        (DATE_TIME, "date_time"),
        (ARTIST, "artist"),
        (COPYRIGHT, "copyright"),
    ]:
        if name in encoderinfo:
            ifd[key] = encoderinfo[name]

    dpi = encoderinfo.get("dpi")
    if dpi:
        ifd[RESOLUTION_UNIT] = 2  # 2 = inches
        ifd[X_RESOLUTION] = dpi[0]
        ifd[Y_RESOLUTION] = dpi[1]

    if bits != (1,):
        ifd[BITSPERSAMPLE] = bits
        if len(bits) != 1:
            ifd[SAMPLESPERPIXEL] = len(bits)
    if extra is not None:
        ifd[EXTRASAMPLES] = extra
    if format != 1:
        ifd[SAMPLEFORMAT] = format

    if PHOTOMETRIC_INTERPRETATION not in ifd:
        ifd[PHOTOMETRIC_INTERPRETATION] = photo
    elif im.mode in ("1", "L") and ifd[PHOTOMETRIC_INTERPRETATION] == 0:
        # caller asked for WhiteIsZero: invert pixel data to match
        if im.mode == "1":
            inverted_im = im.copy()
            px = inverted_im.load()
            if px is not None:
                for y in range(inverted_im.height):
                    for x in range(inverted_im.width):
                        px[x, y] = 0 if px[x, y] == 255 else 255
                im = inverted_im
        else:
            im = ImageOps.invert(im)

    if im.mode in ["P", "PA"]:
        # COLORMAP holds 16-bit R, G, B planes of 256 entries each;
        # 8-bit palette values are scaled up by 256 and zero-padded
        lut = im.im.getpalette("RGB", "RGB;L")
        colormap = []
        colors = len(lut) // 3
        for i in range(3):
            colormap += [v * 256 for v in lut[colors * i : colors * (i + 1)]]
            colormap += [0] * (256 - colors)
        ifd[COLORMAP] = colormap
    # data orientation
    w, h = ifd[IMAGEWIDTH], ifd[IMAGELENGTH]
    stride = len(bits) * ((w * bits[0] + 7) // 8)
    if ROWSPERSTRIP not in ifd:
        # aim for given strip size (64 KB by default) when using libtiff writer
        if libtiff:
            im_strip_size = encoderinfo.get("strip_size", STRIP_SIZE)
            rows_per_strip = 1 if stride == 0 else min(im_strip_size // stride, h)
            # JPEG encoder expects multiple of 8 rows
            if compression == "jpeg":
                rows_per_strip = min(((rows_per_strip + 7) // 8) * 8, h)
        else:
            rows_per_strip = h
        if rows_per_strip == 0:
            rows_per_strip = 1
        ifd[ROWSPERSTRIP] = rows_per_strip
    strip_byte_counts = 1 if stride == 0 else stride * ifd[ROWSPERSTRIP]
    strips_per_image = (h + ifd[ROWSPERSTRIP] - 1) // ifd[ROWSPERSTRIP]
    if strip_byte_counts >= 2**16:
        ifd.tagtype[STRIPBYTECOUNTS] = TiffTags.LONG
    # last strip may be shorter than the rest
    ifd[STRIPBYTECOUNTS] = (strip_byte_counts,) * (strips_per_image - 1) + (
        stride * h - strip_byte_counts * (strips_per_image - 1),
    )
    ifd[STRIPOFFSETS] = tuple(
        range(0, strip_byte_counts * strips_per_image, strip_byte_counts)
    )  # this is adjusted by IFD writer
    # no compression by default:
    ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1)

    if im.mode == "YCbCr":
        for tag, default_value in {
            YCBCRSUBSAMPLING: (1, 1),
            REFERENCEBLACKWHITE: (0, 255, 128, 255, 128, 255),
        }.items():
            ifd.setdefault(tag, default_value)

    # tile tags never apply to the strip-based output written here
    blocklist = [TILEWIDTH, TILELENGTH, TILEOFFSETS, TILEBYTECOUNTS]
    if libtiff:
        if "quality" in encoderinfo:
            quality = encoderinfo["quality"]
            if not isinstance(quality, int) or quality < 0 or quality > 100:
                msg = "Invalid quality setting"
                raise ValueError(msg)
            if compression != "jpeg":
                msg = "quality setting only supported for 'jpeg' compression"
                raise ValueError(msg)
            ifd[JPEGQUALITY] = quality

        logger.debug("Saving using libtiff encoder")
        logger.debug("Items: %s", sorted(ifd.items()))
        # pass a real file descriptor to libtiff when available;
        # _fp == 0 means libtiff returns the bytes and we write them here
        _fp = 0
        if hasattr(fp, "fileno"):
            try:
                fp.seek(0)
                _fp = fp.fileno()
            except io.UnsupportedOperation:
                pass

        # optional types for non core tags
        types = {}
        # STRIPOFFSETS and STRIPBYTECOUNTS are added by the library
        # based on the data in the strip.
        # OSUBFILETYPE is deprecated.
        # The other tags expect arrays with a certain length (fixed or depending on
        # BITSPERSAMPLE, etc), passing arrays with a different length will result in
        # segfaults. Block these tags until we add extra validation.
        # SUBIFD may also cause a segfault.
        blocklist += [
            OSUBFILETYPE,
            REFERENCEBLACKWHITE,
            STRIPBYTECOUNTS,
            STRIPOFFSETS,
            TRANSFERFUNCTION,
            SUBIFD,
        ]

        # bits per sample is a single short in the tiff directory, not a list.
        atts: dict[int, Any] = {BITSPERSAMPLE: bits[0]}
        # Merge the ones that we have with (optional) more bits from
        # the original file, e.g x,y resolution so that we can
        # save(load('')) == original file.
        for tag, value in itertools.chain(ifd.items(), supplied_tags.items()):
            # Libtiff can only process certain core items without adding
            # them to the custom dictionary.
            # Custom items are supported for int, float, unicode, string and byte
            # values. Other types and tuples require a tagtype.
            if tag not in TiffTags.LIBTIFF_CORE:
                if not getattr(Image.core, "libtiff_support_custom_tags", False):
                    continue

                if tag in TiffTags.TAGS_V2_GROUPS:
                    types[tag] = TiffTags.LONG8
                elif tag in ifd.tagtype:
                    types[tag] = ifd.tagtype[tag]
                elif not (isinstance(value, (int, float, str, bytes))):
                    continue
                else:
                    type = TiffTags.lookup(tag).type
                    if type:
                        types[tag] = type
            if tag not in atts and tag not in blocklist:
                if isinstance(value, str):
                    # libtiff wants NUL-terminated ASCII strings
                    atts[tag] = value.encode("ascii", "replace") + b"\0"
                elif isinstance(value, IFDRational):
                    atts[tag] = float(value)
                else:
                    atts[tag] = value

        if SAMPLEFORMAT in atts and len(atts[SAMPLEFORMAT]) == 1:
            atts[SAMPLEFORMAT] = atts[SAMPLEFORMAT][0]

        logger.debug("Converted items: %s", sorted(atts.items()))

        # libtiff always expects the bytes in native order.
        # we're storing image byte order. So, if the rawmode
        # contains I;16, we need to convert from native to image
        # byte order.
        if im.mode in ("I;16B", "I;16"):
            rawmode = "I;16N"

        # Pass tags as sorted list so that the tags are set in a fixed order.
        # This is required by libtiff for some tags. For example, the JPEGQUALITY
        # pseudo tag requires that the COMPRESS tag was already set.
        tags = list(atts.items())
        tags.sort()
        a = (rawmode, compression, _fp, filename, tags, types)
        encoder = Image._getencoder(im.mode, "libtiff", a, encoderconfig)
        encoder.setimage(im.im, (0, 0) + im.size)
        while True:
            # errcode > 0 means finished; < 0 is an error
            errcode, data = encoder.encode(ImageFile.MAXBLOCK)[1:]
            if not _fp:
                fp.write(data)
            if errcode:
                break
        if errcode < 0:
            msg = f"encoder error {errcode} when writing image file"
            raise OSError(msg)

    else:
        for tag in blocklist:
            del ifd[tag]
        offset = ifd.save(fp)

        ImageFile._save(
            im,
            fp,
            [ImageFile._Tile("raw", (0, 0) + im.size, offset, (rawmode, stride, 1))],
        )

    # -- helper for multi-page save --
    if "_debug_multipage" in encoderinfo:
        # just to access o32 and o16 (using correct byte order)
        setattr(im, "_debug_multipage", ifd)
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2017-04-20 14:14:23 +03:00
|
|
|
|
2024-09-06 14:33:30 +03:00
|
|
|
class AppendingTiffWriter(io.BytesIO):
    """File-like wrapper that appends TIFF frames to an existing file.

    Each frame written through this object becomes a new page: on
    finalize() the previously written page's IFD is walked and every
    file offset in it is shifted by the page's start position, and the
    previous page's "next IFD" pointer is patched to link the pages
    together.
    """

    # Byte size of each TIFF field type, indexed by the on-disk type code.
    fieldSizes = [
        0,  # None
        1,  # byte
        1,  # ascii
        2,  # short
        4,  # long
        8,  # rational
        1,  # sbyte
        1,  # undefined
        2,  # sshort
        4,  # slong
        8,  # srational
        4,  # float
        8,  # double
        4,  # ifd
        2,  # unicode
        4,  # complex
        8,  # long8
    ]

    # Tags whose values are file offsets and therefore must be rebased
    # when a page is appended at a new position.
    Tags = {
        273,  # StripOffsets
        288,  # FreeOffsets
        324,  # TileOffsets
        519,  # JPEGQTables
        520,  # JPEGDCTables
        521,  # JPEGACTables
    }

    def __init__(self, fn: StrOrBytesPath | IO[bytes], new: bool = False) -> None:
        """Open *fn* (path or binary stream) for appending TIFF pages.

        :param fn: Path to open, or an already-open binary stream.
        :param new: When *fn* is a path, truncate instead of updating.
        """
        self.f: IO[bytes]
        if is_path(fn):
            self.name = fn
            self.close_fp = True
            try:
                self.f = open(fn, "w+b" if new else "r+b")
            except OSError:
                # fall back to creating the file if it can't be updated
                self.f = open(fn, "w+b")
        else:
            self.f = cast(IO[bytes], fn)
            self.close_fp = False
        # remember where this TIFF stream starts within the file
        self.beginning = self.f.tell()
        self.setup()

    def setup(self) -> None:
        """Read the header and position the file for writing a new page."""
        # Reset everything.
        self.f.seek(self.beginning, os.SEEK_SET)

        self.whereToWriteNewIFDOffset: int | None = None
        self.offsetOfNewPage = 0

        self.IIMM = iimm = self.f.read(4)
        if not iimm:
            # empty file - first page
            self.isFirst = True
            return

        self.isFirst = False
        if iimm == b"II\x2a\x00":
            self.setEndian("<")
        elif iimm == b"MM\x00\x2a":
            self.setEndian(">")
        else:
            msg = "Invalid TIFF file header"
            raise RuntimeError(msg)

        self.skipIFDs()
        self.goToEnd()

    def finalize(self) -> None:
        """Link the newly written page into the file and fix its offsets."""
        if self.isFirst:
            return

        # fix offsets
        self.f.seek(self.offsetOfNewPage)

        iimm = self.f.read(4)
        if not iimm:
            # Make it easy to finish a frame without committing to a new one.
            return

        if iimm != self.IIMM:
            msg = "IIMM of new page doesn't match IIMM of first page"
            raise RuntimeError(msg)

        # rebase the new page's IFD offset and patch the previous page's
        # next-IFD pointer to reach it
        ifd_offset = self.readLong()
        ifd_offset += self.offsetOfNewPage
        assert self.whereToWriteNewIFDOffset is not None
        self.f.seek(self.whereToWriteNewIFDOffset)
        self.writeLong(ifd_offset)
        self.f.seek(ifd_offset)
        self.fixIFD()

    def newFrame(self) -> None:
        # Call this to finish a frame.
        self.finalize()
        self.setup()

    def __enter__(self) -> AppendingTiffWriter:
        return self

    def __exit__(self, *args: object) -> None:
        if self.close_fp:
            self.close()

    def tell(self) -> int:
        """Return the position relative to the start of the current page."""
        return self.f.tell() - self.offsetOfNewPage

    def seek(self, offset: int, whence: int = io.SEEK_SET) -> int:
        """
        :param offset: Distance to seek.
        :param whence: Whether the distance is relative to the start,
            end or current position.
        :returns: The resulting position, relative to the start.
        """
        if whence == os.SEEK_SET:
            # absolute seeks are relative to the current page, not the file
            offset += self.offsetOfNewPage

        self.f.seek(offset, whence)
        return self.tell()

    def goToEnd(self) -> None:
        """Seek to end of file and record the start of the next page."""
        self.f.seek(0, os.SEEK_END)
        pos = self.f.tell()

        # pad to 16 byte boundary
        pad_bytes = 16 - pos % 16
        if 0 < pad_bytes < 16:
            self.f.write(bytes(pad_bytes))
        self.offsetOfNewPage = self.f.tell()

    def setEndian(self, endian: str) -> None:
        """Set struct format strings from the endian marker ('<' or '>')."""
        self.endian = endian
        self.longFmt = f"{self.endian}L"
        self.shortFmt = f"{self.endian}H"
        self.tagFormat = f"{self.endian}HHL"

    def skipIFDs(self) -> None:
        """Walk the IFD chain to find the terminating next-IFD pointer."""
        while True:
            ifd_offset = self.readLong()
            if ifd_offset == 0:
                # remember where the zero next-IFD pointer lives so it can
                # be patched in finalize()
                self.whereToWriteNewIFDOffset = self.f.tell() - 4
                break

            self.f.seek(ifd_offset)
            num_tags = self.readShort()
            # each IFD entry is 12 bytes; skip to the next-IFD pointer
            self.f.seek(num_tags * 12, os.SEEK_CUR)

    def write(self, data: Buffer, /) -> int:
        return self.f.write(data)

    def _fmt(self, field_size: int) -> str:
        """Return the struct format character for a 2/4/8-byte field."""
        try:
            return {2: "H", 4: "L", 8: "Q"}[field_size]
        except KeyError:
            msg = "offset is not supported"
            raise RuntimeError(msg)

    def _read(self, field_size: int) -> int:
        """Read one unsigned integer of *field_size* bytes at the cursor."""
        (value,) = struct.unpack(
            self.endian + self._fmt(field_size), self.f.read(field_size)
        )
        return value

    def readShort(self) -> int:
        return self._read(2)

    def readLong(self) -> int:
        return self._read(4)

    @staticmethod
    def _verify_bytes_written(bytes_written: int | None, expected: int) -> None:
        # some streams return None from write(); only check a real count
        if bytes_written is not None and bytes_written != expected:
            msg = f"wrote only {bytes_written} bytes but wanted {expected}"
            raise RuntimeError(msg)

    def rewriteLastShortToLong(self, value: int) -> None:
        """Overwrite the short just read with *value* written as a long."""
        self.f.seek(-2, os.SEEK_CUR)
        bytes_written = self.f.write(struct.pack(self.longFmt, value))
        self._verify_bytes_written(bytes_written, 4)

    def _rewriteLast(self, value: int, field_size: int) -> None:
        """Overwrite the *field_size*-byte value just read with *value*."""
        self.f.seek(-field_size, os.SEEK_CUR)
        bytes_written = self.f.write(
            struct.pack(self.endian + self._fmt(field_size), value)
        )
        self._verify_bytes_written(bytes_written, field_size)

    def rewriteLastShort(self, value: int) -> None:
        return self._rewriteLast(value, 2)

    def rewriteLastLong(self, value: int) -> None:
        return self._rewriteLast(value, 4)

    def writeShort(self, value: int) -> None:
        bytes_written = self.f.write(struct.pack(self.shortFmt, value))
        self._verify_bytes_written(bytes_written, 2)

    def writeLong(self, value: int) -> None:
        bytes_written = self.f.write(struct.pack(self.longFmt, value))
        self._verify_bytes_written(bytes_written, 4)

    def close(self) -> None:
        self.finalize()
        if self.close_fp:
            self.f.close()

    def fixIFD(self) -> None:
        """Rebase every offset in the IFD at the cursor by offsetOfNewPage."""
        num_tags = self.readShort()

        for i in range(num_tags):
            tag, field_type, count = struct.unpack(self.tagFormat, self.f.read(8))

            field_size = self.fieldSizes[field_type]
            total_size = field_size * count
            # values of 4 bytes or fewer are stored inline in the entry
            is_local = total_size <= 4
            if not is_local:
                offset = self.readLong() + self.offsetOfNewPage
                self.rewriteLastLong(offset)

            if tag in self.Tags:
                cur_pos = self.f.tell()

                if is_local:
                    # the offsets themselves live inline in the entry
                    self._fixOffsets(count, field_size)
                    self.f.seek(cur_pos + 4)
                else:
                    # the offsets live in the out-of-line value area
                    self.f.seek(offset)
                    self._fixOffsets(count, field_size)
                    self.f.seek(cur_pos)

            elif is_local:
                # skip the locally stored value that is not an offset
                self.f.seek(4, os.SEEK_CUR)

    def _fixOffsets(self, count: int, field_size: int) -> None:
        """Shift *count* offsets of *field_size* bytes at the cursor."""
        for i in range(count):
            offset = self._read(field_size)
            offset += self.offsetOfNewPage
            if field_size == 2 and offset >= 65536:
                # offset is now too large - we must convert shorts to longs
                if count != 1:
                    msg = "not implemented"
                    raise RuntimeError(msg)  # XXX TODO

                # simple case - the offset is just one and therefore it is
                # local (not referenced with another offset)
                self.rewriteLastShortToLong(offset)
                self.f.seek(-10, os.SEEK_CUR)
                self.writeShort(TiffTags.LONG)  # rewrite the type to LONG
                self.f.seek(8, os.SEEK_CUR)
            else:
                self._rewriteLast(offset, field_size)

    def fixOffsets(
        self, count: int, isShort: bool = False, isLong: bool = False
    ) -> None:
        """Legacy wrapper for _fixOffsets keyed on boolean size flags."""
        if isShort:
            field_size = 2
        elif isLong:
            field_size = 4
        else:
            field_size = 0
        return self._fixOffsets(count, field_size)
|
2016-09-29 04:16:04 +03:00
|
|
|
|
2017-04-20 14:14:23 +03:00
|
|
|
|
2024-06-10 07:15:28 +03:00
|
|
|
def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
    """Write *im* (and any ``append_images``) as a multipage TIFF.

    Falls back to a plain single-frame _save() when the image has no
    frames to iterate and nothing was appended. Otherwise each frame of
    each image is written through an AppendingTiffWriter, which links
    the pages together. All frames share *im*'s encoderinfo/config.

    NOTE: the source image's frame position is restored on exit.
    """
    encoderinfo = im.encoderinfo.copy()
    encoderconfig = im.encoderconfig
    append_images = list(encoderinfo.get("append_images", []))
    if not hasattr(im, "n_frames") and not append_images:
        return _save(im, fp, filename)

    cur_idx = im.tell()
    try:
        with AppendingTiffWriter(fp) as tf:
            for ims in [im] + append_images:
                # every image saved with the lead image's settings
                ims.encoderinfo = encoderinfo
                ims.encoderconfig = encoderconfig
                if not hasattr(ims, "n_frames"):
                    nfr = 1
                else:
                    nfr = ims.n_frames

                for idx in range(nfr):
                    ims.seek(idx)
                    ims.load()
                    _save(ims, tf, filename)
                    # commit the page and prepare for the next one
                    tf.newFrame()
    finally:
        im.seek(cur_idx)
|
|
|
|
|
2017-05-27 23:55:14 +03:00
|
|
|
|
2010-07-31 06:52:47 +04:00
|
|
|
#
|
|
|
|
# --------------------------------------------------------------------
|
|
|
|
# Register
|
|
|
|
|
2015-07-04 16:29:58 +03:00
|
|
|
# Register the TIFF reader/writer, extensions and MIME type with PIL.
Image.register_open(TiffImageFile.format, TiffImageFile, _accept)
Image.register_save(TiffImageFile.format, _save)
Image.register_save_all(TiffImageFile.format, _save_all)

Image.register_extensions(TiffImageFile.format, [".tif", ".tiff"])

Image.register_mime(TiffImageFile.format, "image/tiff")
|