#
# The Python Imaging Library.
# $Id$
#
# TIFF file handling
#
# TIFF is a flexible, if somewhat aged, image file format originally
# defined by Aldus. Although TIFF supports a wide variety of pixel
# layouts and compression methods, the name doesn't really stand for
# "thousands of incompatible file formats," it just feels that way.
#
# To read TIFF data from a stream, the stream must be seekable. For
# progressive decoding, make sure to use TIFF files where the tag
# directory is placed first in the file.
#
# History:
# 1995-09-01 fl Created
# 1996-05-04 fl Handle JPEGTABLES tag
# 1996-05-18 fl Fixed COLORMAP support
# 1997-01-05 fl Fixed PREDICTOR support
# 1997-08-27 fl Added support for rational tags (from Perry Stoll)
# 1998-01-10 fl Fixed seek/tell (from Jan Blom)
# 1998-07-15 fl Use private names for internal variables
# 1999-06-13 fl Rewritten for PIL 1.0 (1.0)
# 2000-10-11 fl Additional fixes for Python 2.0 (1.1)
# 2001-04-17 fl Fixed rewind support (seek to frame 0) (1.2)
# 2001-05-12 fl Added write support for more tags (from Greg Couch) (1.3)
# 2001-12-18 fl Added workaround for broken Matrox library
# 2002-01-18 fl Don't mess up if photometric tag is missing (D. Alan Stewart)
# 2003-05-19 fl Check FILLORDER tag
# 2003-09-26 fl Added RGBa support
# 2004-02-24 fl Added DPI support; fixed rational write support
# 2005-02-07 fl Added workaround for broken Corel Draw 10 files
# 2006-01-09 fl Added support for float/double tags (from Russell Nelson)
#
# Copyright (c) 1997-2006 by Secret Labs AB. All rights reserved.
# Copyright (c) 1995-1997 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
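# A minimal usage sketch (illustrative only; the filename below is a
# placeholder, not part of this module):
#
#     from PIL import Image
#     with Image.open("multipage.tif") as im:  # the stream must be seekable
#         im.seek(1)   # move to the second frame (IFD)
#         im.load()
#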
from __future__ import division, print_function

from . import Image, ImageFile, ImagePalette, TiffTags
from ._binary import i8, o8
from ._util import py3

from fractions import Fraction
from numbers import Number, Rational

import io
import itertools
import os
import struct
import sys
import warnings

import distutils.version

from .TiffTags import TYPES

try:
    # Python 3
    from collections.abc import MutableMapping
except ImportError:
    # Python 2.7
    from collections import MutableMapping


# __version__ is deprecated and will be removed in a future version. Use
# PIL.__version__ instead.
__version__ = "1.3.5"

DEBUG = False  # Needs to be merged with the new logging approach.

# Set these to true to force use of libtiff for reading or writing.
READ_LIBTIFF = False
WRITE_LIBTIFF = False
IFD_LEGACY_API = True

II = b"II"  # little-endian (Intel style)
MM = b"MM"  # big-endian (Motorola style)

#
# --------------------------------------------------------------------
# Read TIFF files

# a few tag names, just to make the code below a bit more readable
IMAGEWIDTH = 256
IMAGELENGTH = 257
BITSPERSAMPLE = 258
COMPRESSION = 259
PHOTOMETRIC_INTERPRETATION = 262
FILLORDER = 266
IMAGEDESCRIPTION = 270
STRIPOFFSETS = 273
SAMPLESPERPIXEL = 277
ROWSPERSTRIP = 278
STRIPBYTECOUNTS = 279
X_RESOLUTION = 282
Y_RESOLUTION = 283
PLANAR_CONFIGURATION = 284
RESOLUTION_UNIT = 296
SOFTWARE = 305
DATE_TIME = 306
ARTIST = 315
PREDICTOR = 317
COLORMAP = 320
TILEOFFSETS = 324
EXTRASAMPLES = 338
SAMPLEFORMAT = 339
JPEGTABLES = 347
COPYRIGHT = 33432
IPTC_NAA_CHUNK = 33723  # newsphoto properties
PHOTOSHOP_CHUNK = 34377  # photoshop properties
ICCPROFILE = 34675
EXIFIFD = 34665
XMP = 700

# https://github.com/imagej/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java
IMAGEJ_META_DATA_BYTE_COUNTS = 50838
IMAGEJ_META_DATA = 50839

COMPRESSION_INFO = {
    # Compression => pil compression name
    1: "raw",
    2: "tiff_ccitt",
    3: "group3",
    4: "group4",
    5: "tiff_lzw",
    6: "tiff_jpeg",  # obsolete
    7: "jpeg",
    8: "tiff_adobe_deflate",
    32771: "tiff_raw_16",  # 16-bit padding
    32773: "packbits",
    32809: "tiff_thunderscan",
    32946: "tiff_deflate",
    34676: "tiff_sgilog",
    34677: "tiff_sgilog24",
    34925: "lzma",
    50000: "zstd",
    50001: "webp",
}

COMPRESSION_INFO_REV = {v: k for k, v in COMPRESSION_INFO.items()}

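# Each OPEN_INFO key packs the byte order together with the decisive TIFF tags;
# for example, an uncompressed little-endian 8-bit greyscale image maps via
# (II, 1, (1,), 1, (8,), ()) -> ("L", "L"), i.e. (mode, rawmode).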
OPEN_INFO = {
    # (ByteOrder, PhotoInterpretation, SampleFormat, FillOrder, BitsPerSample,
    #  ExtraSamples) => mode, rawmode
    (II, 0, (1,), 1, (1,), ()): ("1", "1;I"),
    (MM, 0, (1,), 1, (1,), ()): ("1", "1;I"),
    (II, 0, (1,), 2, (1,), ()): ("1", "1;IR"),
    (MM, 0, (1,), 2, (1,), ()): ("1", "1;IR"),
    (II, 1, (1,), 1, (1,), ()): ("1", "1"),
    (MM, 1, (1,), 1, (1,), ()): ("1", "1"),
    (II, 1, (1,), 2, (1,), ()): ("1", "1;R"),
    (MM, 1, (1,), 2, (1,), ()): ("1", "1;R"),
    (II, 0, (1,), 1, (2,), ()): ("L", "L;2I"),
    (MM, 0, (1,), 1, (2,), ()): ("L", "L;2I"),
    (II, 0, (1,), 2, (2,), ()): ("L", "L;2IR"),
    (MM, 0, (1,), 2, (2,), ()): ("L", "L;2IR"),
    (II, 1, (1,), 1, (2,), ()): ("L", "L;2"),
    (MM, 1, (1,), 1, (2,), ()): ("L", "L;2"),
    (II, 1, (1,), 2, (2,), ()): ("L", "L;2R"),
    (MM, 1, (1,), 2, (2,), ()): ("L", "L;2R"),
    (II, 0, (1,), 1, (4,), ()): ("L", "L;4I"),
    (MM, 0, (1,), 1, (4,), ()): ("L", "L;4I"),
    (II, 0, (1,), 2, (4,), ()): ("L", "L;4IR"),
    (MM, 0, (1,), 2, (4,), ()): ("L", "L;4IR"),
    (II, 1, (1,), 1, (4,), ()): ("L", "L;4"),
    (MM, 1, (1,), 1, (4,), ()): ("L", "L;4"),
    (II, 1, (1,), 2, (4,), ()): ("L", "L;4R"),
    (MM, 1, (1,), 2, (4,), ()): ("L", "L;4R"),
    (II, 0, (1,), 1, (8,), ()): ("L", "L;I"),
    (MM, 0, (1,), 1, (8,), ()): ("L", "L;I"),
    (II, 0, (1,), 2, (8,), ()): ("L", "L;IR"),
    (MM, 0, (1,), 2, (8,), ()): ("L", "L;IR"),
    (II, 1, (1,), 1, (8,), ()): ("L", "L"),
    (MM, 1, (1,), 1, (8,), ()): ("L", "L"),
    (II, 1, (1,), 2, (8,), ()): ("L", "L;R"),
    (MM, 1, (1,), 2, (8,), ()): ("L", "L;R"),
    (II, 1, (1,), 1, (12,), ()): ("I;16", "I;12"),
    (II, 1, (1,), 1, (16,), ()): ("I;16", "I;16"),
    (MM, 1, (1,), 1, (16,), ()): ("I;16B", "I;16B"),
    (II, 1, (2,), 1, (16,), ()): ("I", "I;16S"),
    (MM, 1, (2,), 1, (16,), ()): ("I", "I;16BS"),
    (II, 0, (3,), 1, (32,), ()): ("F", "F;32F"),
    (MM, 0, (3,), 1, (32,), ()): ("F", "F;32BF"),
    (II, 1, (1,), 1, (32,), ()): ("I", "I;32N"),
    (II, 1, (2,), 1, (32,), ()): ("I", "I;32S"),
    (MM, 1, (2,), 1, (32,), ()): ("I", "I;32BS"),
    (II, 1, (3,), 1, (32,), ()): ("F", "F;32F"),
    (MM, 1, (3,), 1, (32,), ()): ("F", "F;32BF"),
    (II, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"),
    (MM, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"),
    (II, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"),
    (MM, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"),
    (II, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"),
    (MM, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"),
    (II, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"),  # missing ExtraSamples
    (MM, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"),  # missing ExtraSamples
    (II, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"),
    (MM, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"),
    (II, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGBX", "RGBXX"),
    (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGBX", "RGBXX"),
    (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGBX", "RGBXXX"),
    (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGBX", "RGBXXX"),
    (II, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"),
    (MM, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"),
    (II, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"),
    (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"),
    (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"),
    (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"),
    (II, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"),
    (MM, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"),
    (II, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"),
    (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"),
    (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"),
    (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"),
    (II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"),  # Corel Draw 10
    (MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"),  # Corel Draw 10
    (II, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16L"),
    (MM, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16B"),
    (II, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16L"),
    (MM, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16B"),
    (II, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGBX", "RGBX;16L"),
    (MM, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGBX", "RGBX;16B"),
    (II, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16L"),
    (MM, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16B"),
    (II, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16L"),
    (MM, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16B"),
    (II, 3, (1,), 1, (1,), ()): ("P", "P;1"),
    (MM, 3, (1,), 1, (1,), ()): ("P", "P;1"),
    (II, 3, (1,), 2, (1,), ()): ("P", "P;1R"),
    (MM, 3, (1,), 2, (1,), ()): ("P", "P;1R"),
    (II, 3, (1,), 1, (2,), ()): ("P", "P;2"),
    (MM, 3, (1,), 1, (2,), ()): ("P", "P;2"),
    (II, 3, (1,), 2, (2,), ()): ("P", "P;2R"),
    (MM, 3, (1,), 2, (2,), ()): ("P", "P;2R"),
    (II, 3, (1,), 1, (4,), ()): ("P", "P;4"),
    (MM, 3, (1,), 1, (4,), ()): ("P", "P;4"),
    (II, 3, (1,), 2, (4,), ()): ("P", "P;4R"),
    (MM, 3, (1,), 2, (4,), ()): ("P", "P;4R"),
    (II, 3, (1,), 1, (8,), ()): ("P", "P"),
    (MM, 3, (1,), 1, (8,), ()): ("P", "P"),
    (II, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"),
    (MM, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"),
    (II, 3, (1,), 2, (8,), ()): ("P", "P;R"),
    (MM, 3, (1,), 2, (8,), ()): ("P", "P;R"),
    (II, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"),
    (MM, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"),
    (II, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"),
    (MM, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"),
    (II, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"),
    (MM, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"),
    (II, 5, (1,), 1, (16, 16, 16, 16), ()): ("CMYK", "CMYK;16L"),
    # JPEG compressed images handled by LibTiff and auto-converted to RGBX
    # Minimal Baseline TIFF requires YCbCr images to have 3 SamplesPerPixel
    (II, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"),
    (MM, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"),
    (II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"),
    (MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"),
}

PREFIXES = [
    b"MM\x00\x2A",  # Valid TIFF header with big-endian byte order
    b"II\x2A\x00",  # Valid TIFF header with little-endian byte order
    b"MM\x2A\x00",  # Invalid TIFF header, assume big-endian
    b"II\x00\x2A",  # Invalid TIFF header, assume little-endian
]

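# _accept() is used by Image.open() to sniff the first bytes of a file; for
# example, a little-endian TIFF starts b"II\x2a\x00" followed by the 4-byte
# offset of the first IFD, so _accept(b"II\x2a\x00\x08\x00\x00\x00") is True.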
def _accept(prefix):
    return prefix[:4] in PREFIXES

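# Helper for the rational writers below: it limits the denominator (or the
# numerator, for values whose magnitude exceeds 1) to max_val. A hand-checked
# example: _limit_rational(1.5, 100) returns the (numerator, denominator)
# pair (3, 2).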
def _limit_rational(val, max_val):
    inv = abs(val) > 1
    n_d = IFDRational(1 / val if inv else val).limit_rational(max_val)
    return n_d[::-1] if inv else n_d

def _libtiff_version():
    return Image.core.libtiff_version.split("\n")[0].split("Version ")[1]

##
# Wrapper for TIFF IFDs.

_load_dispatch = {}
_write_dispatch = {}

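# A short behavioural sketch of IFDRational (hand-checked, not executed here):
#
#     IFDRational(3, 4) + 1                 -> Fraction(7, 4)  (arithmetic delegates to _val)
#     repr(IFDRational(0, 0))               -> 'nan'           (0/0 is legal in EXIF data)
#     IFDRational(3, 4).limit_rational(10)  -> (3, 4)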
class IFDRational(Rational):
    """ Implements a rational class where 0/0 is a legal value to match
    the in-the-wild use of EXIF rationals.

    e.g., DigitalZoomRatio - 0.00/0.00 indicates that no digital zoom was used
    """

    """ If the denominator is 0, store this as a float('nan'), otherwise store
    as a fractions.Fraction(). Delegate as appropriate.

    """

    __slots__ = ("_numerator", "_denominator", "_val")

    def __init__(self, value, denominator=1):
        """
        :param value: either an integer numerator, a
        float/rational/other number, or an IFDRational
        :param denominator: Optional integer denominator
        """
        self._denominator = denominator
        self._numerator = value
        self._val = float(1)

        if isinstance(value, Fraction):
            self._numerator = value.numerator
            self._denominator = value.denominator
            self._val = value

        if isinstance(value, IFDRational):
            self._denominator = value.denominator
            self._numerator = value.numerator
            self._val = value._val
            return

        if denominator == 0:
            self._val = float("nan")
            return

        elif denominator == 1:
            self._val = Fraction(value)
        else:
            self._val = Fraction(value, denominator)

    @property
    def numerator(a):
        return a._numerator

    @property
    def denominator(a):
        return a._denominator

    def limit_rational(self, max_denominator):
        """

        :param max_denominator: Integer, the maximum denominator value
        :returns: Tuple of (numerator, denominator)
        """

        if self.denominator == 0:
            return (self.numerator, self.denominator)

        f = self._val.limit_denominator(max_denominator)
        return (f.numerator, f.denominator)

    def __repr__(self):
        return str(float(self._val))

    def __hash__(self):
        return self._val.__hash__()

    def __eq__(self, other):
        return self._val == other

    def _delegate(op):
        def delegate(self, *args):
            return getattr(self._val, op)(*args)

        return delegate

    """ a = ['add','radd', 'sub', 'rsub','div', 'rdiv', 'mul', 'rmul',
             'truediv', 'rtruediv', 'floordiv',
             'rfloordiv','mod','rmod', 'pow','rpow', 'pos', 'neg',
             'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'nonzero',
             'ceil', 'floor', 'round']
        print("\n".join("__%s__ = _delegate('__%s__')" % (s,s) for s in a))
        """

    __add__ = _delegate("__add__")
    __radd__ = _delegate("__radd__")
    __sub__ = _delegate("__sub__")
    __rsub__ = _delegate("__rsub__")
    __div__ = _delegate("__div__")
    __rdiv__ = _delegate("__rdiv__")
    __mul__ = _delegate("__mul__")
    __rmul__ = _delegate("__rmul__")
    __truediv__ = _delegate("__truediv__")
    __rtruediv__ = _delegate("__rtruediv__")
    __floordiv__ = _delegate("__floordiv__")
    __rfloordiv__ = _delegate("__rfloordiv__")
    __mod__ = _delegate("__mod__")
    __rmod__ = _delegate("__rmod__")
    __pow__ = _delegate("__pow__")
    __rpow__ = _delegate("__rpow__")
    __pos__ = _delegate("__pos__")
    __neg__ = _delegate("__neg__")
    __abs__ = _delegate("__abs__")
    __trunc__ = _delegate("__trunc__")
    __lt__ = _delegate("__lt__")
    __gt__ = _delegate("__gt__")
    __le__ = _delegate("__le__")
    __ge__ = _delegate("__ge__")
    __nonzero__ = _delegate("__nonzero__")
    __ceil__ = _delegate("__ceil__")
    __floor__ = _delegate("__floor__")
    __round__ = _delegate("__round__")

class ImageFileDirectory_v2(MutableMapping):
    """This class represents a TIFF tag directory. To speed things up, we
    don't decode tags unless they're asked for.

    Exposes a dictionary interface of the tags in the directory::

        ifd = ImageFileDirectory_v2()
        ifd[key] = 'Some Data'
        ifd.tagtype[key] = TiffTags.ASCII
        print(ifd[key])
        'Some Data'

    Individual values are returned as strings or numbers; sequences are
    returned as tuples of the values.

    The TIFF metadata type of each item is stored in a dictionary of
    tag types in
    `~PIL.TiffImagePlugin.ImageFileDirectory_v2.tagtype`. The types
    are read from a tiff file, guessed from the type added, or added
    manually.

    Data Structures:

        * self.tagtype = {}

          * Key: numerical tiff tag number
          * Value: integer corresponding to the data type from
            `~PIL.TiffTags.TYPES`

    .. versionadded:: 3.0.0
    """

    """
    Documentation:

    'internal' data structures:

        * self._tags_v2 = {}  Key: numerical tiff tag number
                              Value: decoded data, as tuple for multiple values
        * self._tagdata = {}  Key: numerical tiff tag number
                              Value: undecoded byte string from file
        * self._tags_v1 = {}  Key: numerical tiff tag number
                              Value: decoded data in the v1 format

    Tags will be found in the private attributes self._tagdata, and in
    self._tags_v2 once decoded.

    self.legacy_api is a value for internal use, and shouldn't be
    changed from outside code. In cooperation with the
    ImageFileDirectory_v1 class, if legacy_api is true, then decoded
    tags will be populated into both _tags_v1 and _tags_v2. _tags_v2
    will be used if this IFD is used in the TIFF save routine. Tags
    should be read from _tags_v1 if legacy_api == true.

    """

    def __init__(self, ifh=b"II\052\0\0\0\0\0", prefix=None):
        """Initialize an ImageFileDirectory.

        To construct an ImageFileDirectory from a real file, pass the 8-byte
        magic header to the constructor. To only set the endianness, pass it
        as the 'prefix' keyword argument.

        :param ifh: One of the accepted magic headers (cf. PREFIXES); also sets
              endianness.
        :param prefix: Override the endianness of the file.
        """
        if ifh[:4] not in PREFIXES:
            raise SyntaxError("not a TIFF file (header %r not valid)" % ifh)
        self._prefix = prefix if prefix is not None else ifh[:2]
        if self._prefix == MM:
            self._endian = ">"
        elif self._prefix == II:
            self._endian = "<"
        else:
            raise SyntaxError("not a TIFF IFD")
        self.reset()
        self.next, = self._unpack("L", ifh[4:])
        self._legacy_api = False

    prefix = property(lambda self: self._prefix)
    offset = property(lambda self: self._offset)
    legacy_api = property(lambda self: self._legacy_api)

    @legacy_api.setter
    def legacy_api(self, value):
        raise Exception("Not allowing setting of legacy api")

    def reset(self):
        self._tags_v1 = {}  # will remain empty if legacy_api is false
        self._tags_v2 = {}  # main tag storage
        self._tagdata = {}
        self.tagtype = {}  # added 2008-06-05 by Florian Hoech
        self._next = None
        self._offset = None

    def __str__(self):
        return str(dict(self))

    def named(self):
        """
        :returns: dict of name|key: value

        Returns the complete tag dictionary, with named tags where possible.
        """
        return {TiffTags.lookup(code).name: value for code, value in self.items()}

    def __len__(self):
        return len(set(self._tagdata) | set(self._tags_v2))

    def __getitem__(self, tag):
        if tag not in self._tags_v2:  # unpack on the fly
            data = self._tagdata[tag]
            typ = self.tagtype[tag]
            size, handler = self._load_dispatch[typ]
            self[tag] = handler(self, data, self.legacy_api)  # check type
        val = self._tags_v2[tag]
        if self.legacy_api and not isinstance(val, (tuple, bytes)):
            val = (val,)
        return val

    def __contains__(self, tag):
        return tag in self._tags_v2 or tag in self._tagdata

    if not py3:

        def has_key(self, tag):
            return tag in self

    def __setitem__(self, tag, value):
        self._setitem(tag, value, self.legacy_api)

    def _setitem(self, tag, value, legacy_api):
        basetypes = (Number, bytes, str)
        if not py3:
            basetypes += (unicode,)  # noqa: F821

        info = TiffTags.lookup(tag)
        values = [value] if isinstance(value, basetypes) else value

        if tag not in self.tagtype:
            if info.type:
                self.tagtype[tag] = info.type
            else:
                self.tagtype[tag] = TiffTags.UNDEFINED
                if all(isinstance(v, IFDRational) for v in values):
                    self.tagtype[tag] = TiffTags.RATIONAL
                elif all(isinstance(v, int) for v in values):
                    if all(v < 2 ** 16 for v in values):
                        self.tagtype[tag] = TiffTags.SHORT
                    else:
                        self.tagtype[tag] = TiffTags.LONG
                elif all(isinstance(v, float) for v in values):
                    self.tagtype[tag] = TiffTags.DOUBLE
                else:
                    if py3:
                        if all(isinstance(v, str) for v in values):
                            self.tagtype[tag] = TiffTags.ASCII
                    else:
                        # Never treat data as binary by default on Python 2.
                        self.tagtype[tag] = TiffTags.ASCII

        if self.tagtype[tag] == TiffTags.UNDEFINED and py3:
            values = [
                value.encode("ascii", "replace") if isinstance(value, str) else value
            ]
        elif self.tagtype[tag] == TiffTags.RATIONAL:
            values = [float(v) if isinstance(v, int) else v for v in values]

        values = tuple(info.cvt_enum(value) for value in values)

        dest = self._tags_v1 if legacy_api else self._tags_v2

        # Three branches:
        # Spec'd length == 1, Actual length 1, store as element
        # Spec'd length == 1, Actual > 1, Warn and truncate. Formerly barfed.
        # No Spec, Actual length 1, Formerly (<4.2) returned a 1 element tuple.
        # Don't mess with the legacy api, since it's frozen.
        if (info.length == 1) or (
            info.length is None and len(values) == 1 and not legacy_api
        ):
            # Don't mess with the legacy api, since it's frozen.
            if legacy_api and self.tagtype[tag] in [
                TiffTags.RATIONAL,
                TiffTags.SIGNED_RATIONAL,
            ]:  # rationals
                values = (values,)
            try:
                dest[tag], = values
            except ValueError:
                # We've got a builtin tag with 1 expected entry
                warnings.warn(
                    "Metadata Warning, tag %s had too many entries: %s, expected 1"
                    % (tag, len(values))
                )
                dest[tag] = values[0]

        else:
            # Spec'd length > 1 or undefined
            # Unspec'd, and length > 1
            dest[tag] = values

    def __delitem__(self, tag):
        self._tags_v2.pop(tag, None)
        self._tags_v1.pop(tag, None)
        self._tagdata.pop(tag, None)

    def __iter__(self):
        return iter(set(self._tagdata) | set(self._tags_v2))

    def _unpack(self, fmt, data):
        return struct.unpack(self._endian + fmt, data)

    def _pack(self, fmt, *values):
        return struct.pack(self._endian + fmt, *values)

    def _register_loader(idx, size):
        def decorator(func):
            from .TiffTags import TYPES

            if func.__name__.startswith("load_"):
                TYPES[idx] = func.__name__[5:].replace("_", " ")
            _load_dispatch[idx] = size, func  # noqa: F821
            return func

        return decorator

    def _register_writer(idx):
        def decorator(func):
            _write_dispatch[idx] = func  # noqa: F821
            return func

        return decorator

    def _register_basic(idx_fmt_name):
        from .TiffTags import TYPES

        idx, fmt, name = idx_fmt_name
        TYPES[idx] = name
        size = struct.calcsize("=" + fmt)
        _load_dispatch[idx] = (  # noqa: F821
            size,
            lambda self, data, legacy_api=True: (
                self._unpack("{}{}".format(len(data) // size, fmt), data)
            ),
        )
        _write_dispatch[idx] = lambda self, *values: (  # noqa: F821
            b"".join(self._pack(fmt, value) for value in values)
        )

    list(
        map(
            _register_basic,
            [
                (TiffTags.SHORT, "H", "short"),
                (TiffTags.LONG, "L", "long"),
                (TiffTags.SIGNED_BYTE, "b", "signed byte"),
                (TiffTags.SIGNED_SHORT, "h", "signed short"),
                (TiffTags.SIGNED_LONG, "l", "signed long"),
                (TiffTags.FLOAT, "f", "float"),
                (TiffTags.DOUBLE, "d", "double"),
            ],
        )
    )

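    # At this point the dispatch tables map the basic TIFF field types to
    # helpers, e.g. _load_dispatch[TiffTags.SHORT] is (2, <"H" unpacker>) and
    # _write_dispatch[TiffTags.SHORT] packs values with "H". The decorated
    # load_*/write_* methods below register the remaining, non-basic types.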
    @_register_loader(1, 1)  # Basic type, except for the legacy API.
    def load_byte(self, data, legacy_api=True):
        return data

    @_register_writer(1)  # Basic type, except for the legacy API.
    def write_byte(self, data):
        return data

    @_register_loader(2, 1)
    def load_string(self, data, legacy_api=True):
        if data.endswith(b"\0"):
            data = data[:-1]
        return data.decode("latin-1", "replace")

    @_register_writer(2)
    def write_string(self, value):
        # remerge of https://github.com/python-pillow/Pillow/pull/1416
        if sys.version_info.major == 2:
            value = value.decode("ascii", "replace")
        return b"" + value.encode("ascii", "replace") + b"\0"

    @_register_loader(5, 8)
    def load_rational(self, data, legacy_api=True):
        vals = self._unpack("{}L".format(len(data) // 4), data)

        def combine(a, b):
            return (a, b) if legacy_api else IFDRational(a, b)

        return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2]))

    @_register_writer(5)
    def write_rational(self, *values):
        return b"".join(
            self._pack("2L", *_limit_rational(frac, 2 ** 31)) for frac in values
        )

    @_register_loader(7, 1)
    def load_undefined(self, data, legacy_api=True):
        return data

    @_register_writer(7)
    def write_undefined(self, value):
        return value

    @_register_loader(10, 8)
    def load_signed_rational(self, data, legacy_api=True):
        vals = self._unpack("{}l".format(len(data) // 4), data)

        def combine(a, b):
            return (a, b) if legacy_api else IFDRational(a, b)

        return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2]))

    @_register_writer(10)
    def write_signed_rational(self, *values):
        return b"".join(
            self._pack("2L", *_limit_rational(frac, 2 ** 30)) for frac in values
        )

    def _ensure_read(self, fp, size):
        ret = fp.read(size)
        if len(ret) != size:
            raise IOError(
                "Corrupt EXIF data. "
                + "Expecting to read %d bytes but only got %d. " % (size, len(ret))
            )
        return ret

    def load(self, fp):

        self.reset()
        self._offset = fp.tell()

        try:
            for i in range(self._unpack("H", self._ensure_read(fp, 2))[0]):
                tag, typ, count, data = self._unpack("HHL4s", self._ensure_read(fp, 12))
                if DEBUG:
                    tagname = TiffTags.lookup(tag).name
                    typname = TYPES.get(typ, "unknown")
                    print(
                        "tag: %s (%d) - type: %s (%d)" % (tagname, tag, typname, typ),
                        end=" ",
                    )

                try:
                    unit_size, handler = self._load_dispatch[typ]
                except KeyError:
                    if DEBUG:
                        print("- unsupported type", typ)
                    continue  # ignore unsupported type
                size = count * unit_size
                if size > 4:
                    here = fp.tell()
                    offset, = self._unpack("L", data)
                    if DEBUG:
                        print(
                            "Tag Location: %s - Data Location: %s" % (here, offset),
                            end=" ",
                        )
                    fp.seek(offset)
                    data = ImageFile._safe_read(fp, size)
                    fp.seek(here)
                else:
                    data = data[:size]

                if len(data) != size:
                    warnings.warn(
                        "Possibly corrupt EXIF data. "
                        "Expecting to read %d bytes but only got %d."
                        " Skipping tag %s" % (size, len(data), tag)
                    )
                    continue

                if not data:
                    continue

                self._tagdata[tag] = data
                self.tagtype[tag] = typ

                if DEBUG:
                    if size > 32:
                        print("- value: <table: %d bytes>" % size)
                    else:
                        print("- value:", self[tag])

            self.next, = self._unpack("L", self._ensure_read(fp, 4))
        except IOError as msg:
            warnings.warn(str(msg))
            return

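    # tobytes() serialises this directory in the standard TIFF IFD layout: a
    # 2-byte entry count, one 12-byte record per tag, a 4-byte next-IFD offset,
    # then any values that do not fit in a record's 4 inline bytes (hence the
    # "offset + len(result) + len(self._tags_v2) * 12 + 4" bookkeeping below).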
    def tobytes(self, offset=0):
        # FIXME What about tagdata?
        result = self._pack("H", len(self._tags_v2))

        entries = []
        offset = offset + len(result) + len(self._tags_v2) * 12 + 4
        stripoffsets = None

        # pass 1: convert tags to binary format
        # always write tags in ascending order
        for tag, value in sorted(self._tags_v2.items()):
            if tag == STRIPOFFSETS:
                stripoffsets = len(entries)
            typ = self.tagtype.get(tag)
            if DEBUG:
                print("Tag %s, Type: %s, Value: %s" % (tag, typ, value))
            values = value if isinstance(value, tuple) else (value,)
            data = self._write_dispatch[typ](self, *values)
            if DEBUG:
                tagname = TiffTags.lookup(tag).name
                typname = TYPES.get(typ, "unknown")
                print(
                    "save: %s (%d) - type: %s (%d)" % (tagname, tag, typname, typ),
                    end=" ",
                )
                if len(data) >= 16:
                    print("- value: <table: %d bytes>" % len(data))
                else:
                    print("- value:", values)

            # count is sum of lengths for string and arbitrary data
            if typ in [TiffTags.BYTE, TiffTags.ASCII, TiffTags.UNDEFINED]:
                count = len(data)
            else:
                count = len(values)
            # figure out if data fits into the entry
            if len(data) <= 4:
                entries.append((tag, typ, count, data.ljust(4, b"\0"), b""))
            else:
                entries.append((tag, typ, count, self._pack("L", offset), data))
                offset += (len(data) + 1) // 2 * 2  # pad to word

        # update strip offset data to point beyond auxiliary data
        if stripoffsets is not None:
            tag, typ, count, value, data = entries[stripoffsets]
            if data:
                raise NotImplementedError("multistrip support not yet implemented")
            value = self._pack("L", self._unpack("L", value)[0] + offset)
            entries[stripoffsets] = tag, typ, count, value, data

        # pass 2: write entries to file
        for tag, typ, count, value, data in entries:
            if DEBUG > 1:
                print(tag, typ, count, repr(value), repr(data))
            result += self._pack("HHL4s", tag, typ, count, value)

        # -- overwrite here for multi-page --
        result += b"\0\0\0\0"  # end of entries

        # pass 3: write auxiliary data to file
        for tag, typ, count, value, data in entries:
            result += data
            if len(data) & 1:
                result += b"\0"

        return result

    def save(self, fp):

        if fp.tell() == 0:  # skip TIFF header on subsequent pages
            # tiff header -- PIL always starts the first IFD at offset 8
            fp.write(self._prefix + self._pack("HL", 42, 8))

        offset = fp.tell()
        result = self.tobytes(offset)
        fp.write(result)
        return offset + len(result)


ImageFileDirectory_v2._load_dispatch = _load_dispatch
ImageFileDirectory_v2._write_dispatch = _write_dispatch
for idx, name in TYPES.items():
    name = name.replace(" ", "_")
    setattr(ImageFileDirectory_v2, "load_" + name, _load_dispatch[idx][1])
    setattr(ImageFileDirectory_v2, "write_" + name, _write_dispatch[idx])
del _load_dispatch, _write_dispatch, idx, name

# Legacy ImageFileDirectory support.
class ImageFileDirectory_v1(ImageFileDirectory_v2):
    """This class represents the **legacy** interface to a TIFF tag directory.

    Exposes a dictionary interface of the tags in the directory::

        ifd = ImageFileDirectory_v1()
        ifd[key] = 'Some Data'
        ifd.tagtype[key] = TiffTags.ASCII
        print(ifd[key])
        ('Some Data',)

    Also contains a dictionary of tag types as read from the tiff image file,
    `~PIL.TiffImagePlugin.ImageFileDirectory_v1.tagtype`.

    Values are returned as a tuple.

    .. deprecated:: 3.0.0
    """

    def __init__(self, *args, **kwargs):
        ImageFileDirectory_v2.__init__(self, *args, **kwargs)
        self._legacy_api = True

    tags = property(lambda self: self._tags_v1)
    tagdata = property(lambda self: self._tagdata)

    @classmethod
    def from_v2(cls, original):
        """Returns an
        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`
        instance with the same data as is contained in the original
        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`
        instance.

        :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`

        """

        ifd = cls(prefix=original.prefix)
        ifd._tagdata = original._tagdata
        ifd.tagtype = original.tagtype
        ifd.next = original.next  # an indicator for multipage tiffs
        return ifd

    def to_v2(self):
        """Returns an
        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`
        instance with the same data as is contained in the original
        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`
        instance.

        :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`

        """

        ifd = ImageFileDirectory_v2(prefix=self.prefix)
        ifd._tagdata = dict(self._tagdata)
        ifd.tagtype = dict(self.tagtype)
        ifd._tags_v2 = dict(self._tags_v2)
        return ifd

    def __contains__(self, tag):
        return tag in self._tags_v1 or tag in self._tagdata

    def __len__(self):
        return len(set(self._tagdata) | set(self._tags_v1))

    def __iter__(self):
        return iter(set(self._tagdata) | set(self._tags_v1))

    def __setitem__(self, tag, value):
        for legacy_api in (False, True):
            self._setitem(tag, value, legacy_api)

    def __getitem__(self, tag):
        if tag not in self._tags_v1:  # unpack on the fly
            data = self._tagdata[tag]
            typ = self.tagtype[tag]
            size, handler = self._load_dispatch[typ]
            for legacy in (False, True):
                self._setitem(tag, handler(self, data, legacy), legacy)
        val = self._tags_v1[tag]
        if not isinstance(val, (tuple, bytes)):
            val = (val,)
        return val


# undone -- switch this pointer when IFD_LEGACY_API == False
ImageFileDirectory = ImageFileDirectory_v1


##
# Image plugin for TIFF files.

class TiffImageFile(ImageFile.ImageFile):

    format = "TIFF"
    format_description = "Adobe TIFF"
    _close_exclusive_fp_after_loading = False

    def _open(self):
        """Open the first image in a TIFF file"""

        # Header
        ifh = self.fp.read(8)

        # image file directory (tag dictionary)
        self.tag_v2 = ImageFileDirectory_v2(ifh)

        # legacy tag/ifd entries will be filled in later
        self.tag = self.ifd = None

        # setup frame pointers
        self.__first = self.__next = self.tag_v2.next
        self.__frame = -1
        self.__fp = self.fp
        self._frame_pos = []
        self._n_frames = None

        if DEBUG:
            print("*** TiffImageFile._open ***")
            print("- __first:", self.__first)
            print("- ifh: ", ifh)

        # and load the first frame
        self._seek(0)

    @property
    def n_frames(self):
        if self._n_frames is None:
            current = self.tell()
            self._seek(len(self._frame_pos))
            while self._n_frames is None:
                self._seek(self.tell() + 1)
            self.seek(current)
        return self._n_frames

    @property
    def is_animated(self):
        return self._is_animated

    def seek(self, frame):
        """Select a given frame as current image"""
        if not self._seek_check(frame):
            return
        self._seek(frame)
        # Create a new core image object on second and
        # subsequent frames in the image. Image may be
        # different size/mode.
        Image._decompression_bomb_check(self.size)
        self.im = Image.core.new(self.mode, self.size)

    def _seek(self, frame):
        self.fp = self.__fp
        while len(self._frame_pos) <= frame:
            if not self.__next:
                raise EOFError("no more images in TIFF file")
            if DEBUG:
                print(
                    "Seeking to frame %s, on frame %s, __next %s, location: %s"
                    % (frame, self.__frame, self.__next, self.fp.tell())
                )
            # reset python3 buffered io handle in case fp
            # was passed to libtiff, invalidating the buffer
            self.fp.tell()
            self.fp.seek(self.__next)
            self._frame_pos.append(self.__next)
            if DEBUG:
                print("Loading tags, location: %s" % self.fp.tell())
            self.tag_v2.load(self.fp)
            self.__next = self.tag_v2.next
            if self.__next == 0:
                self._n_frames = frame + 1
            if len(self._frame_pos) == 1:
                self._is_animated = self.__next != 0
            self.__frame += 1
        self.fp.seek(self._frame_pos[frame])
        self.tag_v2.load(self.fp)
        # fill the legacy tag/ifd entries
        self.tag = self.ifd = ImageFileDirectory_v1.from_v2(self.tag_v2)
        self.__frame = frame
        self._setup()

    def tell(self):
        """Return the current frame number"""
        return self.__frame

    @property
    def size(self):
        return self._size

    @size.setter
    def size(self, value):
        warnings.warn(
            "Setting the size of a TIFF image directly is deprecated, and will"
            " be removed in a future version. Use the resize method instead.",
            DeprecationWarning,
        )
        self._size = value

    def load(self):
        if self.use_load_libtiff:
            return self._load_libtiff()
        return super(TiffImageFile, self).load()

    def load_end(self):
        # allow closing if we're on the first frame, there's no next
        # This is the ImageFile.load path only, libtiff specific below.
        if not self._is_animated:
            self._close_exclusive_fp_after_loading = True

def _load_libtiff(self):
|
2013-11-22 08:57:48 +04:00
|
|
|
""" Overload method triggered when we detect a compressed tiff
|
|
|
|
Calls out to libtiff """
|
2013-03-09 07:51:59 +04:00
|
|
|
|
|
|
|
pixel = Image.Image.load(self)
|
|
|
|
|
|
|
|
if self.tile is None:
|
|
|
|
raise IOError("cannot load this image")
|
|
|
|
if not self.tile:
|
|
|
|
return pixel
|
|
|
|
|
|
|
|
self.load_prepare()
|
2013-05-08 00:23:51 +04:00
|
|
|
|
2013-03-09 07:51:59 +04:00
|
|
|
if not len(self.tile) == 1:
|
|
|
|
raise IOError("Not exactly one tile")
|
|
|
|
|
2014-07-28 20:00:06 +04:00
|
|
|
# (self._compression, (extents tuple),
|
|
|
|
# 0, (rawmode, self._compression, fp))
|
2015-04-08 14:12:37 +03:00
|
|
|
extents = self.tile[0][1]
|
2016-09-30 00:14:51 +03:00
|
|
|
args = list(self.tile[0][3]) + [self.tag_v2.offset]
|
|
|
|
|
|
|
|
# To be nice on memory footprint, if there's a
|
|
|
|
# file descriptor, use that instead of reading
|
|
|
|
# into a string in python.
|
|
|
|
# libtiff closes the file descriptor, so pass in a dup.
|
|
|
|
try:
|
|
|
|
fp = hasattr(self.fp, "fileno") and os.dup(self.fp.fileno())
|
|
|
|
# flush the file descriptor, prevents error on pypy 2.4+
|
|
|
|
# should also eliminate the need for fp.tell for py3
|
|
|
|
# in _seek
|
|
|
|
if hasattr(self.fp, "flush"):
|
|
|
|
self.fp.flush()
|
|
|
|
except IOError:
|
|
|
|
# io.BytesIO have a fileno, but returns an IOError if
|
|
|
|
# it doesn't use a file descriptor.
|
|
|
|
fp = False
|
2016-10-31 03:43:32 +03:00
|
|
|
|
2016-09-30 00:14:51 +03:00
|
|
|
if fp:
|
|
|
|
args[2] = fp
|
|
|
|
|
2019-03-21 16:28:20 +03:00
|
|
|
decoder = Image._getdecoder(
|
|
|
|
self.mode, "libtiff", tuple(args), self.decoderconfig
|
|
|
|
)
|
2013-03-09 07:51:59 +04:00
|
|
|
try:
|
2014-03-28 09:19:39 +04:00
|
|
|
decoder.setimage(self.im, extents)
|
2013-03-09 07:51:59 +04:00
|
|
|
except ValueError:
|
|
|
|
raise IOError("Couldn't set the image")
|
|
|
|
|
2013-03-14 21:36:15 +04:00
|
|
|
if hasattr(self.fp, "getvalue"):
|
2013-03-09 07:51:59 +04:00
|
|
|
# We've got a StringIO-like thing passed in. Yay for all in memory.
|
|
|
|
# The decoder needs the entire file in one shot, so there's not
|
|
|
|
# a lot we can do here other than give it the entire file.
|
2014-07-28 20:00:06 +04:00
|
|
|
# unless we could do something like get the address of the
|
|
|
|
# underlying string for stringio.
|
2013-03-14 21:36:15 +04:00
|
|
|
#
|
|
|
|
# Rearranging for supporting byteio items, since they have a fileno
|
2014-07-28 20:00:06 +04:00
|
|
|
# that raises an IOError if there's no underlying fp. Easier to
|
2015-05-29 07:59:54 +03:00
|
|
|
# deal with here by reordering.
|
2015-02-06 21:58:07 +03:00
|
|
|
if DEBUG:
|
2015-08-25 15:27:18 +03:00
|
|
|
print("have getvalue. just sending in a string from getvalue")
|
2014-07-28 20:00:06 +04:00
|
|
|
n, err = decoder.decode(self.fp.getvalue())
|
2013-03-14 21:36:15 +04:00
|
|
|
elif hasattr(self.fp, "fileno"):
|
|
|
|
# we've got an actual file on disk, so pass in the fp.
|
2015-02-06 21:58:07 +03:00
|
|
|
if DEBUG:
|
2015-08-25 15:27:18 +03:00
|
|
|
print("have fileno, calling fileno version of the decoder.")
|
2013-03-14 21:36:15 +04:00
|
|
|
self.fp.seek(0)
|
2014-07-28 20:00:06 +04:00
|
|
|
# 4 bytes, otherwise the trace might error out
|
|
|
|
n, err = decoder.decode(b"fpfp")
|
2013-03-09 07:51:59 +04:00
|
|
|
else:
|
|
|
|
# we have something else.
|
2015-02-06 21:58:07 +03:00
|
|
|
if DEBUG:
|
2015-08-25 15:27:18 +03:00
|
|
|
print("don't have fileno or getvalue. just reading")
|
2013-05-08 00:23:51 +04:00
|
|
|
# UNDONE -- so much for that buffer size thing.
|
2014-07-28 20:00:06 +04:00
|
|
|
n, err = decoder.decode(self.fp.read())
|
2013-05-08 00:23:51 +04:00
|
|
|
|
2013-03-09 07:51:59 +04:00
|
|
|
self.tile = []
|
|
|
|
self.readonly = 0
|
2014-03-28 09:18:40 +04:00
|
|
|
# libtiff closed the duplicated fp; we need to close self.fp, if possible
|
2019-03-13 12:44:58 +03:00
|
|
|
if self._exclusive_fp and not self._is_animated:
|
|
|
|
self.fp.close()
|
|
|
|
self.fp = None # might be shared
|
2013-03-09 07:51:59 +04:00
|
|
|
|
2014-03-28 09:19:39 +04:00
|
|
|
if err < 0:
|
|
|
|
raise IOError(err)
|
2013-03-09 07:51:59 +04:00
|
|
|
|
|
|
|
return Image.Image.load(self)
|
|
|
|
|
2010-07-31 06:52:47 +04:00
|
|
|
def _setup(self):
|
2019-02-03 07:58:24 +03:00
|
|
|
"""Setup this image object based on current tags"""
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2015-09-11 20:09:14 +03:00
|
|
|
if 0xBC01 in self.tag_v2:
|
2012-10-11 07:52:53 +04:00
|
|
|
raise IOError("Windows Media Photo files not yet supported")
|
2010-07-31 06:52:47 +04:00
|
|
|
|
|
|
|
# extract relevant tags
|
2015-09-11 20:09:14 +03:00
|
|
|
self._compression = COMPRESSION_INFO[self.tag_v2.get(COMPRESSION, 1)]
|
|
|
|
self._planar_configuration = self.tag_v2.get(PLANAR_CONFIGURATION, 1)
|
2010-07-31 06:52:47 +04:00
|
|
|
|
|
|
|
# photometric is a required tag, but not everyone is reading
|
|
|
|
# the specification
|
2015-09-11 20:09:14 +03:00
|
|
|
photo = self.tag_v2.get(PHOTOMETRIC_INTERPRETATION, 0)
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2018-12-09 08:04:34 +03:00
|
|
|
# old-style JPEG-compressed images are most certainly YCbCr
|
|
|
|
if self._compression == "tiff_jpeg":
|
|
|
|
photo = 6
|
|
|
|
|
2015-09-11 20:09:14 +03:00
|
|
|
fillorder = self.tag_v2.get(FILLORDER, 1)
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2015-02-06 21:58:07 +03:00
|
|
|
if DEBUG:
|
2012-10-16 06:27:35 +04:00
|
|
|
print("*** Summary ***")
|
|
|
|
print("- compression:", self._compression)
|
|
|
|
print("- photometric_interpretation:", photo)
|
|
|
|
print("- planar_configuration:", self._planar_configuration)
|
|
|
|
print("- fill_order:", fillorder)
|
2018-07-03 03:15:24 +03:00
|
|
|
print("- YCbCr subsampling:", self.tag.get(530))
|
2010-07-31 06:52:47 +04:00
|
|
|
|
|
|
|
# size
|
2015-09-11 20:09:14 +03:00
|
|
|
xsize = self.tag_v2.get(IMAGEWIDTH)
|
|
|
|
ysize = self.tag_v2.get(IMAGELENGTH)
|
2018-09-30 05:58:02 +03:00
|
|
|
self._size = xsize, ysize
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2015-02-06 21:58:07 +03:00
|
|
|
if DEBUG:
|
2012-10-16 06:27:35 +04:00
|
|
|
print("- size:", self.size)
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2016-05-10 15:31:36 +03:00
|
|
|
sampleFormat = self.tag_v2.get(SAMPLEFORMAT, (1,))
|
2019-03-21 16:28:20 +03:00
|
|
|
if len(sampleFormat) > 1 and max(sampleFormat) == min(sampleFormat) == 1:
|
2015-10-04 00:46:01 +03:00
|
|
|
# SAMPLEFORMAT is properly per band, so an RGB image will
|
|
|
|
# be (1,1,1). But, we don't support per band pixel types,
|
|
|
|
# and anything more than one band is a uint8. So, just
|
|
|
|
# take the first element. Revisit this if adding support
|
|
|
|
# for more exotic images.
|
2016-05-10 15:31:36 +03:00
|
|
|
sampleFormat = (1,)
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2017-12-07 18:43:22 +03:00
|
|
|
bps_tuple = self.tag_v2.get(BITSPERSAMPLE, (1,))
|
|
|
|
extra_tuple = self.tag_v2.get(EXTRASAMPLES, ())
|
2017-12-08 15:37:21 +03:00
|
|
|
if photo in (2, 6, 8): # RGB, YCbCr, LAB
|
2017-12-07 18:43:22 +03:00
|
|
|
bps_count = 3
|
2017-12-08 15:37:21 +03:00
|
|
|
elif photo == 5: # CMYK
|
2017-12-07 18:43:22 +03:00
|
|
|
bps_count = 4
|
|
|
|
else:
|
|
|
|
bps_count = 1
|
|
|
|
bps_count += len(extra_tuple)
|
|
|
|
# Some files have only one value in bps_tuple,
|
|
|
|
# while they should have more. Fix it
|
|
|
|
if bps_count > len(bps_tuple) and len(bps_tuple) == 1:
|
|
|
|
bps_tuple = bps_tuple * bps_count
|
|
|
|
|
2010-07-31 06:52:47 +04:00
|
|
|
# mode: check photometric interpretation and bits per pixel
|
2019-03-21 16:28:20 +03:00
|
|
|
key = (
|
|
|
|
self.tag_v2.prefix,
|
|
|
|
photo,
|
|
|
|
sampleFormat,
|
|
|
|
fillorder,
|
|
|
|
bps_tuple,
|
|
|
|
extra_tuple,
|
|
|
|
)
|
2015-02-06 21:58:07 +03:00
|
|
|
if DEBUG:
|
2012-10-16 06:27:35 +04:00
|
|
|
print("format key:", key)
|
2010-07-31 06:52:47 +04:00
|
|
|
try:
|
|
|
|
self.mode, rawmode = OPEN_INFO[key]
|
|
|
|
except KeyError:
|
2015-02-06 21:58:07 +03:00
|
|
|
if DEBUG:
|
2012-10-16 06:27:35 +04:00
|
|
|
print("- unsupported format")
|
2012-10-11 07:52:53 +04:00
|
|
|
raise SyntaxError("unknown pixel mode")
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2015-02-06 21:58:07 +03:00
|
|
|
if DEBUG:
|
2012-10-16 06:27:35 +04:00
|
|
|
print("- raw mode:", rawmode)
|
|
|
|
print("- pil mode:", self.mode)
|
2010-07-31 06:52:47 +04:00
|
|
|
|
|
|
|
self.info["compression"] = self._compression
|
|
|
|
|
2016-02-05 01:57:13 +03:00
|
|
|
xres = self.tag_v2.get(X_RESOLUTION, 1)
|
|
|
|
yres = self.tag_v2.get(Y_RESOLUTION, 1)
|
2010-07-31 06:52:47 +04:00
|
|
|
|
|
|
|
if xres and yres:
|
2017-01-19 19:24:28 +03:00
|
|
|
resunit = self.tag_v2.get(RESOLUTION_UNIT)
|
2014-07-28 20:00:06 +04:00
|
|
|
if resunit == 2: # dots per inch
|
2019-03-30 07:03:57 +03:00
|
|
|
self.info["dpi"] = int(xres + 0.5), int(yres + 0.5)
|
2014-07-28 20:00:06 +04:00
|
|
|
elif resunit == 3: # dots per centimeter. convert to dpi
|
2019-03-30 07:03:57 +03:00
|
|
|
self.info["dpi"] = int(xres * 2.54 + 0.5), int(yres * 2.54 + 0.5)
|
2017-04-20 14:14:23 +03:00
|
|
|
elif resunit is None: # used to default to 1, but now 2
|
2019-03-30 07:03:57 +03:00
|
|
|
self.info["dpi"] = int(xres + 0.5), int(yres + 0.5)
|
2017-05-27 23:55:14 +03:00
|
|
|
# For backward compatibility,
|
|
|
|
# we also preserve the old behavior
|
2017-01-19 19:24:28 +03:00
|
|
|
self.info["resolution"] = xres, yres
|
2014-07-28 20:00:06 +04:00
|
|
|
else: # No absolute unit of measurement
|
2010-07-31 06:52:47 +04:00
|
|
|
self.info["resolution"] = xres, yres
|
|
|
|
|
|
|
|
# build tile descriptors
|
2018-06-15 16:44:22 +03:00
|
|
|
x = y = layer = 0
|
2010-07-31 06:52:47 +04:00
|
|
|
self.tile = []
|
2019-03-21 16:28:20 +03:00
|
|
|
self.use_load_libtiff = READ_LIBTIFF or self._compression != "raw"
|
2018-07-03 03:15:24 +03:00
|
|
|
if self.use_load_libtiff:
|
|
|
|
# Decoder expects entire file as one tile.
|
|
|
|
# There's a buffer size limit in load (64k)
|
|
|
|
# so large g4 images will fail if we use that
|
|
|
|
# function.
|
|
|
|
#
|
|
|
|
# Setup the one tile for the whole image, then
|
|
|
|
# use the _load_libtiff function.
|
|
|
|
|
|
|
|
# libtiff handles the fillorder for us, so 1;IR should
|
|
|
|
# actually be 1;I. Including the R double reverses the
|
|
|
|
# bits, so stripes of the image are reversed. See
|
|
|
|
# https://github.com/python-pillow/Pillow/issues/279
|
|
|
|
if fillorder == 2:
|
2018-09-26 19:44:45 +03:00
|
|
|
# Replace fillorder with fillorder=1
|
|
|
|
key = key[:3] + (1,) + key[4:]
|
2018-07-03 03:15:24 +03:00
|
|
|
if DEBUG:
|
|
|
|
print("format key:", key)
|
|
|
|
# this should always work, since all the
|
|
|
|
# fillorder==2 modes have a corresponding
|
|
|
|
# fillorder=1 mode
|
|
|
|
self.mode, rawmode = OPEN_INFO[key]
|
|
|
|
# libtiff always returns the bytes in native order.
|
|
|
|
# we're expecting image byte order. So, if the rawmode
|
|
|
|
# contains I;16, we need to convert from native to image
|
|
|
|
# byte order.
|
2019-03-21 16:28:20 +03:00
|
|
|
if rawmode == "I;16":
|
|
|
|
rawmode = "I;16N"
|
|
|
|
if ";16B" in rawmode:
|
|
|
|
rawmode = rawmode.replace(";16B", ";16N")
|
|
|
|
if ";16L" in rawmode:
|
|
|
|
rawmode = rawmode.replace(";16L", ";16N")
|
2018-07-03 03:15:24 +03:00
|
|
|
|
|
|
|
# Offset in the tile tuple is 0, we go from 0,0 to
|
|
|
|
# w,h, and we only do this once -- eds
|
|
|
|
a = (rawmode, self._compression, False)
|
2019-03-21 16:28:20 +03:00
|
|
|
self.tile.append((self._compression, (0, 0, xsize, ysize), 0, a))
|
2018-07-03 03:15:24 +03:00
|
|
|
|
|
|
|
elif STRIPOFFSETS in self.tag_v2 or TILEOFFSETS in self.tag_v2:
|
2010-07-31 06:52:47 +04:00
|
|
|
# striped image
|
2018-07-03 03:15:24 +03:00
|
|
|
if STRIPOFFSETS in self.tag_v2:
|
|
|
|
offsets = self.tag_v2[STRIPOFFSETS]
|
|
|
|
h = self.tag_v2.get(ROWSPERSTRIP, ysize)
|
|
|
|
w = self.size[0]
|
2013-03-09 07:51:59 +04:00
|
|
|
else:
|
2018-07-03 03:15:24 +03:00
|
|
|
# tiled image
|
|
|
|
offsets = self.tag_v2[TILEOFFSETS]
|
|
|
|
w = self.tag_v2.get(322)
|
|
|
|
h = self.tag_v2.get(323)
|
|
|
|
|
|
|
|
for offset in offsets:
|
2018-07-17 07:10:57 +03:00
|
|
|
if x + w > xsize:
|
|
|
|
stride = w * sum(bps_tuple) / 8 # bytes per line
|
|
|
|
else:
|
|
|
|
stride = 0
|
|
|
|
|
2018-07-17 08:39:52 +03:00
|
|
|
tile_rawmode = rawmode
|
|
|
|
if self._planar_configuration == 2:
|
|
|
|
# each band on its own layer
|
|
|
|
tile_rawmode = rawmode[layer]
|
|
|
|
# adjust stride width accordingly
|
|
|
|
stride /= bps_count
|
|
|
|
|
|
|
|
a = (tile_rawmode, int(stride), 1)
|
2010-07-31 06:52:47 +04:00
|
|
|
self.tile.append(
|
2019-03-21 16:28:20 +03:00
|
|
|
(
|
|
|
|
self._compression,
|
|
|
|
(x, y, min(x + w, xsize), min(y + h, ysize)),
|
|
|
|
offset,
|
|
|
|
a,
|
|
|
|
)
|
|
|
|
)
|
2010-07-31 06:52:47 +04:00
|
|
|
x = x + w
|
|
|
|
if x >= self.size[0]:
|
|
|
|
x, y = 0, y + h
|
|
|
|
if y >= self.size[1]:
|
|
|
|
x = y = 0
|
2018-06-15 16:44:22 +03:00
|
|
|
layer += 1
|
2010-07-31 06:52:47 +04:00
|
|
|
else:
|
2015-02-06 21:58:07 +03:00
|
|
|
if DEBUG:
|
2012-10-16 06:27:35 +04:00
|
|
|
print("- unsupported data organization")
|
2010-07-31 06:52:47 +04:00
|
|
|
raise SyntaxError("unknown data organization")
|
|
|
|
|
2016-11-04 18:37:49 +03:00
|
|
|
# Fix up info.
|
|
|
|
if ICCPROFILE in self.tag_v2:
|
2019-03-21 16:28:20 +03:00
|
|
|
self.info["icc_profile"] = self.tag_v2[ICCPROFILE]
|
2016-11-04 18:37:49 +03:00
|
|
|
|
2010-07-31 06:52:47 +04:00
|
|
|
# fixup palette descriptor
|
|
|
|
|
2019-05-11 07:43:48 +03:00
|
|
|
if self.mode in ["P", "PA"]:
|
2015-09-11 20:09:14 +03:00
|
|
|
palette = [o8(b // 256) for b in self.tag_v2[COLORMAP]]
|
2012-10-21 01:01:53 +04:00
|
|
|
self.palette = ImagePalette.raw("RGB;L", b"".join(palette))
|
2017-05-27 23:55:14 +03:00
|
|
|
|
2018-11-17 13:56:06 +03:00
|
|
|
def _close__fp(self):
|
|
|
|
try:
|
2019-01-04 04:29:23 +03:00
|
|
|
if self.__fp != self.fp:
|
|
|
|
self.__fp.close()
|
2018-11-17 13:56:06 +03:00
|
|
|
except AttributeError:
|
|
|
|
pass
|
|
|
|
finally:
|
|
|
|
self.__fp = None
|
|
|
|
|
2017-05-27 23:55:14 +03:00
|
|
|
|
2010-07-31 06:52:47 +04:00
|
|
|
#
|
|
|
|
# --------------------------------------------------------------------
|
|
|
|
# Write TIFF files
|
|
|
|
|
2014-07-28 20:00:06 +04:00
|
|
|
# little endian is default except for image modes with
|
2015-05-29 07:59:54 +03:00
|
|
|
# explicit big endian byte-order
|
2010-07-31 06:52:47 +04:00
|
|
|
|
|
|
|
SAVE_INFO = {
|
2014-07-28 20:00:06 +04:00
|
|
|
# mode => rawmode, byteorder, photometrics,
|
|
|
|
# sampleformat, bitspersample, extra
|
2010-07-31 06:52:47 +04:00
|
|
|
"1": ("1", II, 1, 1, (1,), None),
|
|
|
|
"L": ("L", II, 1, 1, (8,), None),
|
2014-07-28 20:00:06 +04:00
|
|
|
"LA": ("LA", II, 1, 1, (8, 8), 2),
|
2010-07-31 06:52:47 +04:00
|
|
|
"P": ("P", II, 3, 1, (8,), None),
|
2014-07-28 20:00:06 +04:00
|
|
|
"PA": ("PA", II, 3, 1, (8, 8), 2),
|
2010-07-31 06:52:47 +04:00
|
|
|
"I": ("I;32S", II, 1, 2, (32,), None),
|
|
|
|
"I;16": ("I;16", II, 1, 1, (16,), None),
|
|
|
|
"I;16S": ("I;16S", II, 1, 2, (16,), None),
|
|
|
|
"F": ("F;32F", II, 1, 3, (32,), None),
|
2014-07-28 20:00:06 +04:00
|
|
|
"RGB": ("RGB", II, 2, 1, (8, 8, 8), None),
|
|
|
|
"RGBX": ("RGBX", II, 2, 1, (8, 8, 8, 8), 0),
|
|
|
|
"RGBA": ("RGBA", II, 2, 1, (8, 8, 8, 8), 2),
|
|
|
|
"CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None),
|
|
|
|
"YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None),
|
|
|
|
"LAB": ("LAB", II, 8, 1, (8, 8, 8), None),
|
2010-07-31 06:52:47 +04:00
|
|
|
"I;32BS": ("I;32BS", MM, 1, 2, (32,), None),
|
|
|
|
"I;16B": ("I;16B", MM, 1, 1, (16,), None),
|
|
|
|
"I;16BS": ("I;16BS", MM, 1, 2, (16,), None),
|
|
|
|
"F;32BF": ("F;32BF", MM, 1, 3, (32,), None),
|
|
|
|
}
|
|
|
|
|
2014-07-28 20:00:06 +04:00
|
|
|
|
2010-07-31 06:52:47 +04:00
|
|
|
def _save(im, fp, filename):
|
|
|
|
|
|
|
|
try:
|
|
|
|
rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode]
|
|
|
|
except KeyError:
|
2012-10-11 07:52:53 +04:00
|
|
|
raise IOError("cannot write mode %s as TIFF" % im.mode)
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2015-09-11 20:09:14 +03:00
|
|
|
ifd = ImageFileDirectory_v2(prefix=prefix)
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2019-03-21 16:28:20 +03:00
|
|
|
compression = im.encoderinfo.get("compression", im.info.get("compression"))
|
2018-08-25 01:21:43 +03:00
|
|
|
if compression is None:
|
2019-03-21 16:28:20 +03:00
|
|
|
compression = "raw"
|
2013-11-22 09:41:54 +04:00
|
|
|
|
2019-03-21 16:28:20 +03:00
|
|
|
libtiff = WRITE_LIBTIFF or compression != "raw"
|
2013-03-09 07:51:59 +04:00
|
|
|
|
2013-11-06 08:49:09 +04:00
|
|
|
# required for color libtiff images
|
2019-03-21 16:28:20 +03:00
|
|
|
ifd[PLANAR_CONFIGURATION] = getattr(im, "_planar_configuration", 1)
|
2014-06-03 14:02:44 +04:00
|
|
|
|
2010-07-31 06:52:47 +04:00
|
|
|
ifd[IMAGEWIDTH] = im.size[0]
|
|
|
|
ifd[IMAGELENGTH] = im.size[1]
|
|
|
|
|
2013-10-03 09:06:17 +04:00
|
|
|
# write any arbitrary tags passed in as an ImageFileDirectory
|
2014-07-28 20:00:06 +04:00
|
|
|
info = im.encoderinfo.get("tiffinfo", {})
|
2015-02-06 21:58:07 +03:00
|
|
|
if DEBUG:
|
2014-12-29 18:48:01 +03:00
|
|
|
print("Tiffinfo Keys: %s" % list(info))
|
2015-09-13 16:01:01 +03:00
|
|
|
if isinstance(info, ImageFileDirectory_v1):
|
|
|
|
info = info.to_v2()
|
2014-12-29 18:48:01 +03:00
|
|
|
for key in info:
|
2013-10-03 09:06:17 +04:00
|
|
|
ifd[key] = info.get(key)
|
|
|
|
try:
|
|
|
|
ifd.tagtype[key] = info.tagtype[key]
|
2018-11-17 00:51:52 +03:00
|
|
|
except Exception:
|
2019-02-19 11:49:50 +03:00
|
|
|
pass # might not be an IFD; the tag type might not be populated
|
2013-10-03 09:06:17 +04:00
|
|
|
|
2010-07-31 06:52:47 +04:00
|
|
|
# additions written by Greg Couch, gregc@cgl.ucsf.edu
|
|
|
|
# inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com
|
2019-03-21 16:28:20 +03:00
|
|
|
if hasattr(im, "tag_v2"):
|
2010-07-31 06:52:47 +04:00
|
|
|
# preserve tags from original TIFF image file
|
2019-03-21 16:28:20 +03:00
|
|
|
for key in (
|
|
|
|
RESOLUTION_UNIT,
|
|
|
|
X_RESOLUTION,
|
|
|
|
Y_RESOLUTION,
|
|
|
|
IPTC_NAA_CHUNK,
|
|
|
|
PHOTOSHOP_CHUNK,
|
|
|
|
XMP,
|
|
|
|
):
|
2015-09-13 12:53:47 +03:00
|
|
|
if key in im.tag_v2:
|
|
|
|
ifd[key] = im.tag_v2[key]
|
2016-09-16 21:07:25 +03:00
|
|
|
ifd.tagtype[key] = im.tag_v2.tagtype[key]
|
2013-10-08 04:00:20 +04:00
|
|
|
|
2016-08-22 13:47:49 +03:00
|
|
|
# preserve ICC profile (should also work when saving other formats
|
|
|
|
# which support profiles as TIFF) -- 2008-06-06 Florian Hoech
|
|
|
|
if "icc_profile" in im.info:
|
|
|
|
ifd[ICCPROFILE] = im.info["icc_profile"]
|
2014-06-03 14:02:44 +04:00
|
|
|
|
2019-03-21 16:28:20 +03:00
|
|
|
for key, name in [
|
|
|
|
(IMAGEDESCRIPTION, "description"),
|
|
|
|
(X_RESOLUTION, "resolution"),
|
|
|
|
(Y_RESOLUTION, "resolution"),
|
|
|
|
(X_RESOLUTION, "x_resolution"),
|
|
|
|
(Y_RESOLUTION, "y_resolution"),
|
|
|
|
(RESOLUTION_UNIT, "resolution_unit"),
|
|
|
|
(SOFTWARE, "software"),
|
|
|
|
(DATE_TIME, "date_time"),
|
|
|
|
(ARTIST, "artist"),
|
|
|
|
(COPYRIGHT, "copyright"),
|
|
|
|
]:
|
2014-12-10 02:17:33 +03:00
|
|
|
if name in im.encoderinfo:
|
2014-12-29 18:48:01 +03:00
|
|
|
ifd[key] = im.encoderinfo[name]
|
2010-07-31 06:52:47 +04:00
|
|
|
|
|
|
|
dpi = im.encoderinfo.get("dpi")
|
|
|
|
if dpi:
|
|
|
|
ifd[RESOLUTION_UNIT] = 2
|
2019-03-30 07:03:57 +03:00
|
|
|
ifd[X_RESOLUTION] = int(dpi[0] + 0.5)
|
|
|
|
ifd[Y_RESOLUTION] = int(dpi[1] + 0.5)
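# Illustrative only (hypothetical file name): passing dpi at save time fills
# the resolution tags set above, e.g.
#   im.save("out.tif", dpi=(300, 300))
# which records RESOLUTION_UNIT=2 (inches) and X/Y_RESOLUTION of 300.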
|
2010-07-31 06:52:47 +04:00
|
|
|
|
|
|
|
if bits != (1,):
|
|
|
|
ifd[BITSPERSAMPLE] = bits
|
|
|
|
if len(bits) != 1:
|
|
|
|
ifd[SAMPLESPERPIXEL] = len(bits)
|
|
|
|
if extra is not None:
|
|
|
|
ifd[EXTRASAMPLES] = extra
|
|
|
|
if format != 1:
|
|
|
|
ifd[SAMPLEFORMAT] = format
|
|
|
|
|
|
|
|
ifd[PHOTOMETRIC_INTERPRETATION] = photo
|
|
|
|
|
2019-05-11 07:43:48 +03:00
|
|
|
if im.mode in ["P", "PA"]:
|
2010-07-31 06:52:47 +04:00
|
|
|
lut = im.im.getpalette("RGB", "RGB;L")
|
2012-10-21 01:01:53 +04:00
|
|
|
ifd[COLORMAP] = tuple(i8(v) * 256 for v in lut)
|
2010-07-31 06:52:47 +04:00
|
|
|
# data orientation
|
2019-03-21 16:28:20 +03:00
|
|
|
stride = len(bits) * ((im.size[0] * bits[0] + 7) // 8)
|
2010-07-31 06:52:47 +04:00
|
|
|
ifd[ROWSPERSTRIP] = im.size[1]
|
|
|
|
ifd[STRIPBYTECOUNTS] = stride * im.size[1]
|
2014-07-28 20:00:06 +04:00
|
|
|
ifd[STRIPOFFSETS] = 0 # this is adjusted by IFD writer
|
|
|
|
# no compression by default:
|
|
|
|
ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1)
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2013-03-09 07:51:59 +04:00
|
|
|
if libtiff:
|
2015-02-06 21:58:07 +03:00
|
|
|
if DEBUG:
|
2015-08-25 15:27:18 +03:00
|
|
|
print("Saving using libtiff encoder")
|
2014-12-29 18:48:01 +03:00
|
|
|
print("Items: %s" % sorted(ifd.items()))
|
2013-03-09 07:51:59 +04:00
|
|
|
_fp = 0
|
|
|
|
if hasattr(fp, "fileno"):
|
2014-11-14 15:29:10 +03:00
|
|
|
try:
|
|
|
|
fp.seek(0)
|
|
|
|
_fp = os.dup(fp.fileno())
|
|
|
|
except io.UnsupportedOperation:
|
|
|
|
pass
|
2013-03-09 07:51:59 +04:00
|
|
|
|
2015-12-09 23:39:49 +03:00
|
|
|
# STRIPOFFSETS and STRIPBYTECOUNTS are added by the library
|
|
|
|
# based on the data in the strip.
|
2015-12-30 18:54:14 +03:00
|
|
|
blocklist = [STRIPOFFSETS, STRIPBYTECOUNTS]
|
2014-07-28 20:00:06 +04:00
|
|
|
atts = {}
|
2014-06-03 14:02:44 +04:00
|
|
|
# bits per sample is a single short in the tiff directory, not a list.
|
2013-11-06 08:49:09 +04:00
|
|
|
atts[BITSPERSAMPLE] = bits[0]
|
2013-10-22 20:18:41 +04:00
|
|
|
# Merge the ones that we have with (optional) more bits from
|
|
|
|
# the original file, e.g. x,y resolution, so that we can
|
|
|
|
# save(load('')) == original file.
|
2015-09-14 13:03:24 +03:00
|
|
|
legacy_ifd = {}
|
2019-03-21 16:28:20 +03:00
|
|
|
if hasattr(im, "tag"):
|
2015-09-14 13:03:24 +03:00
|
|
|
legacy_ifd = im.tag.to_v2()
|
2019-03-21 16:28:20 +03:00
|
|
|
for tag, value in itertools.chain(
|
|
|
|
ifd.items(), getattr(im, "tag_v2", {}).items(), legacy_ifd.items()
|
|
|
|
):
|
2015-12-30 18:54:14 +03:00
|
|
|
# Libtiff can only process certain core items without adding
|
2018-12-29 07:57:49 +03:00
|
|
|
# them to the custom dictionary.
|
|
|
|
# Support for custom items has only been added
|
|
|
|
# for int, float, unicode, string and byte values
|
2016-02-05 01:57:13 +03:00
|
|
|
if tag not in TiffTags.LIBTIFF_CORE:
|
2018-12-29 07:57:49 +03:00
|
|
|
if TiffTags.lookup(tag).type == TiffTags.UNDEFINED:
|
|
|
|
continue
|
2019-03-21 16:28:20 +03:00
|
|
|
if (
|
|
|
|
distutils.version.StrictVersion(_libtiff_version())
|
|
|
|
< distutils.version.StrictVersion("4.0")
|
|
|
|
) or not (
|
|
|
|
isinstance(value, (int, float, str, bytes))
|
|
|
|
or (not py3 and isinstance(value, unicode)) # noqa: F821
|
|
|
|
):
|
2018-10-25 11:36:49 +03:00
|
|
|
continue
|
2015-12-30 18:54:14 +03:00
|
|
|
if tag not in atts and tag not in blocklist:
|
2018-11-13 15:01:09 +03:00
|
|
|
if isinstance(value, str if py3 else unicode): # noqa: F821
|
2019-03-21 16:28:20 +03:00
|
|
|
atts[tag] = value.encode("ascii", "replace") + b"\0"
|
2015-12-30 18:54:14 +03:00
|
|
|
elif isinstance(value, IFDRational):
|
|
|
|
atts[tag] = float(value)
|
2013-10-22 21:10:37 +04:00
|
|
|
else:
|
2015-12-30 18:54:14 +03:00
|
|
|
atts[tag] = value
|
2013-10-21 20:30:06 +04:00
|
|
|
|
2015-02-06 21:58:07 +03:00
|
|
|
if DEBUG:
|
2014-12-29 18:48:01 +03:00
|
|
|
print("Converted items: %s" % sorted(atts.items()))
|
2013-10-22 02:37:20 +04:00
|
|
|
|
2013-11-22 08:57:48 +04:00
|
|
|
# libtiff always expects the bytes in native order.
|
|
|
|
# we're storing image byte order. So, if the rawmode
|
2013-10-22 02:37:20 +04:00
|
|
|
# contains I;16, we need to convert from native to image
|
|
|
|
# byte order.
|
2019-03-21 16:28:20 +03:00
|
|
|
if im.mode in ("I;16B", "I;16"):
|
|
|
|
rawmode = "I;16N"
|
2013-10-22 02:37:20 +04:00
|
|
|
|
2013-03-09 07:51:59 +04:00
|
|
|
a = (rawmode, compression, _fp, filename, atts)
|
2019-03-21 16:28:20 +03:00
|
|
|
e = Image._getencoder(im.mode, "libtiff", a, im.encoderconfig)
|
|
|
|
e.setimage(im.im, (0, 0) + im.size)
|
2014-01-08 06:21:24 +04:00
|
|
|
while True:
|
2014-07-28 20:00:06 +04:00
|
|
|
# undone, change to self.decodermaxblock:
|
2019-03-21 16:28:20 +03:00
|
|
|
l, s, d = e.encode(16 * 1024)
|
2013-03-09 07:51:59 +04:00
|
|
|
if not _fp:
|
|
|
|
fp.write(d)
|
|
|
|
if s:
|
|
|
|
break
|
|
|
|
if s < 0:
|
|
|
|
raise IOError("encoder error %d when writing image file" % s)
|
2013-05-08 00:23:51 +04:00
|
|
|
|
2013-03-09 07:51:59 +04:00
|
|
|
else:
|
|
|
|
offset = ifd.save(fp)
|
|
|
|
|
2019-03-21 16:28:20 +03:00
|
|
|
ImageFile._save(
|
|
|
|
im, fp, [("raw", (0, 0) + im.size, offset, (rawmode, stride, 1))]
|
|
|
|
)
|
2010-07-31 06:52:47 +04:00
|
|
|
|
|
|
|
# -- helper for multi-page save --
|
2012-10-16 01:18:27 +04:00
|
|
|
if "_debug_multipage" in im.encoderinfo:
|
2014-07-28 20:00:06 +04:00
|
|
|
# just to access o32 and o16 (using correct byte order)
|
2010-07-31 06:52:47 +04:00
|
|
|
im._debug_multipage = ifd
|
|
|
|
|
2017-04-20 14:14:23 +03:00
|
|
|
|
2016-09-29 04:16:04 +03:00
|
|
|
class AppendingTiffWriter:
|
|
|
|
fieldSizes = [
|
|
|
|
0, # None
|
|
|
|
1, # byte
|
|
|
|
1, # ascii
|
|
|
|
2, # short
|
|
|
|
4, # long
|
|
|
|
8, # rational
|
|
|
|
1, # sbyte
|
|
|
|
1, # undefined
|
|
|
|
2, # sshort
|
|
|
|
4, # slong
|
|
|
|
8, # srational
|
|
|
|
4, # float
|
|
|
|
8, # double
|
|
|
|
]
|
|
|
|
|
|
|
|
# StripOffsets = 273
|
|
|
|
# FreeOffsets = 288
|
|
|
|
# TileOffsets = 324
|
|
|
|
# JPEGQTables = 519
|
|
|
|
# JPEGDCTables = 520
|
|
|
|
# JPEGACTables = 521
|
2016-11-07 15:33:46 +03:00
|
|
|
Tags = {273, 288, 324, 519, 520, 521}
|
2016-09-29 04:16:04 +03:00
|
|
|
|
|
|
|
def __init__(self, fn, new=False):
|
2019-03-21 16:28:20 +03:00
|
|
|
if hasattr(fn, "read"):
|
2016-09-29 04:16:04 +03:00
|
|
|
self.f = fn
|
|
|
|
self.close_fp = False
|
|
|
|
else:
|
|
|
|
self.name = fn
|
|
|
|
self.close_fp = True
|
|
|
|
try:
|
|
|
|
self.f = io.open(fn, "w+b" if new else "r+b")
|
|
|
|
except IOError:
|
|
|
|
self.f = io.open(fn, "w+b")
|
|
|
|
self.beginning = self.f.tell()
|
|
|
|
self.setup()
|
|
|
|
|
|
|
|
def setup(self):
|
|
|
|
# Reset everything.
|
|
|
|
self.f.seek(self.beginning, os.SEEK_SET)
|
|
|
|
|
|
|
|
self.whereToWriteNewIFDOffset = None
|
|
|
|
self.offsetOfNewPage = 0
|
|
|
|
|
|
|
|
self.IIMM = IIMM = self.f.read(4)
|
|
|
|
if not IIMM:
|
|
|
|
# empty file - first page
|
|
|
|
self.isFirst = True
|
|
|
|
return
|
|
|
|
|
|
|
|
self.isFirst = False
|
|
|
|
if IIMM == b"II\x2a\x00":
|
|
|
|
self.setEndian("<")
|
|
|
|
elif IIMM == b"MM\x00\x2a":
|
|
|
|
self.setEndian(">")
|
|
|
|
else:
|
|
|
|
raise RuntimeError("Invalid TIFF file header")
|
|
|
|
|
|
|
|
self.skipIFDs()
|
|
|
|
self.goToEnd()
|
|
|
|
|
|
|
|
def finalize(self):
|
|
|
|
if self.isFirst:
|
|
|
|
return
|
|
|
|
|
|
|
|
# fix offsets
|
|
|
|
self.f.seek(self.offsetOfNewPage)
|
|
|
|
|
|
|
|
IIMM = self.f.read(4)
|
|
|
|
if not IIMM:
|
|
|
|
# raise RuntimeError("nothing written into new page")
|
|
|
|
# Make it easy to finish a frame without committing to a new one.
|
|
|
|
return
|
|
|
|
|
|
|
|
if IIMM != self.IIMM:
|
2019-03-21 16:28:20 +03:00
|
|
|
raise RuntimeError("IIMM of new page doesn't match IIMM of first page")
|
2016-09-29 04:16:04 +03:00
|
|
|
|
|
|
|
IFDoffset = self.readLong()
|
|
|
|
IFDoffset += self.offsetOfNewPage
|
|
|
|
self.f.seek(self.whereToWriteNewIFDOffset)
|
|
|
|
self.writeLong(IFDoffset)
|
|
|
|
self.f.seek(IFDoffset)
|
|
|
|
self.fixIFD()
|
|
|
|
|
|
|
|
def newFrame(self):
|
|
|
|
# Call this to finish a frame.
|
|
|
|
self.finalize()
|
|
|
|
self.setup()
|
|
|
|
|
|
|
|
def __enter__(self):
|
|
|
|
return self
|
|
|
|
|
|
|
|
def __exit__(self, exc_type, exc_value, traceback):
|
|
|
|
if self.close_fp:
|
|
|
|
self.close()
|
|
|
|
return False
|
|
|
|
|
|
|
|
def tell(self):
|
|
|
|
return self.f.tell() - self.offsetOfNewPage
|
|
|
|
|
2019-01-13 05:05:46 +03:00
|
|
|
def seek(self, offset, whence=io.SEEK_SET):
|
2016-09-29 04:16:04 +03:00
|
|
|
if whence == os.SEEK_SET:
|
|
|
|
offset += self.offsetOfNewPage
|
|
|
|
|
|
|
|
self.f.seek(offset, whence)
|
|
|
|
return self.tell()
|
|
|
|
|
|
|
|
def goToEnd(self):
|
|
|
|
self.f.seek(0, os.SEEK_END)
|
|
|
|
pos = self.f.tell()
|
|
|
|
|
|
|
|
# pad to 16 byte boundary
|
|
|
|
padBytes = 16 - pos % 16
|
|
|
|
if 0 < padBytes < 16:
|
|
|
|
self.f.write(bytes(bytearray(padBytes)))
|
|
|
|
self.offsetOfNewPage = self.f.tell()
|
|
|
|
|
|
|
|
def setEndian(self, endian):
|
|
|
|
self.endian = endian
|
|
|
|
self.longFmt = self.endian + "L"
|
|
|
|
self.shortFmt = self.endian + "H"
|
|
|
|
self.tagFormat = self.endian + "HHL"
|
|
|
|
|
|
|
|
def skipIFDs(self):
|
|
|
|
while True:
|
|
|
|
IFDoffset = self.readLong()
|
|
|
|
if IFDoffset == 0:
|
|
|
|
self.whereToWriteNewIFDOffset = self.f.tell() - 4
|
|
|
|
break
|
|
|
|
|
|
|
|
self.f.seek(IFDoffset)
|
|
|
|
numTags = self.readShort()
|
|
|
|
self.f.seek(numTags * 12, os.SEEK_CUR)
|
|
|
|
|
|
|
|
def write(self, data):
|
|
|
|
return self.f.write(data)
|
|
|
|
|
|
|
|
def readShort(self):
|
|
|
|
value, = struct.unpack(self.shortFmt, self.f.read(2))
|
|
|
|
return value
|
|
|
|
|
|
|
|
def readLong(self):
|
|
|
|
value, = struct.unpack(self.longFmt, self.f.read(4))
|
|
|
|
return value
|
|
|
|
|
|
|
|
def rewriteLastShortToLong(self, value):
|
|
|
|
self.f.seek(-2, os.SEEK_CUR)
|
|
|
|
bytesWritten = self.f.write(struct.pack(self.longFmt, value))
|
|
|
|
if bytesWritten is not None and bytesWritten != 4:
|
2019-03-21 16:28:20 +03:00
|
|
|
raise RuntimeError("wrote only %u bytes but wanted 4" % bytesWritten)
|
2016-09-29 04:16:04 +03:00
|
|
|
|
|
|
|
def rewriteLastShort(self, value):
|
|
|
|
self.f.seek(-2, os.SEEK_CUR)
|
|
|
|
bytesWritten = self.f.write(struct.pack(self.shortFmt, value))
|
|
|
|
if bytesWritten is not None and bytesWritten != 2:
|
2019-03-21 16:28:20 +03:00
|
|
|
raise RuntimeError("wrote only %u bytes but wanted 2" % bytesWritten)
|
2016-09-29 04:16:04 +03:00
|
|
|
|
|
|
|
def rewriteLastLong(self, value):
|
|
|
|
self.f.seek(-4, os.SEEK_CUR)
|
|
|
|
bytesWritten = self.f.write(struct.pack(self.longFmt, value))
|
|
|
|
if bytesWritten is not None and bytesWritten != 4:
|
2019-03-21 16:28:20 +03:00
|
|
|
raise RuntimeError("wrote only %u bytes but wanted 4" % bytesWritten)
|
2016-09-29 04:16:04 +03:00
|
|
|
|
|
|
|
def writeShort(self, value):
|
|
|
|
bytesWritten = self.f.write(struct.pack(self.shortFmt, value))
|
|
|
|
if bytesWritten is not None and bytesWritten != 2:
|
2019-03-21 16:28:20 +03:00
|
|
|
raise RuntimeError("wrote only %u bytes but wanted 2" % bytesWritten)
|
2016-09-29 04:16:04 +03:00
|
|
|
|
|
|
|
def writeLong(self, value):
|
|
|
|
bytesWritten = self.f.write(struct.pack(self.longFmt, value))
|
|
|
|
if bytesWritten is not None and bytesWritten != 4:
|
2019-03-21 16:28:20 +03:00
|
|
|
raise RuntimeError("wrote only %u bytes but wanted 4" % bytesWritten)
|
2016-09-29 04:16:04 +03:00
|
|
|
|
|
|
|
def close(self):
|
|
|
|
self.finalize()
|
|
|
|
self.f.close()
|
|
|
|
|
|
|
|
def fixIFD(self):
|
|
|
|
numTags = self.readShort()
|
|
|
|
|
|
|
|
for i in range(numTags):
|
2019-03-21 16:28:20 +03:00
|
|
|
tag, fieldType, count = struct.unpack(self.tagFormat, self.f.read(8))
|
2016-09-29 04:16:04 +03:00
|
|
|
|
|
|
|
fieldSize = self.fieldSizes[fieldType]
|
|
|
|
totalSize = fieldSize * count
|
2019-03-21 16:28:20 +03:00
|
|
|
isLocal = totalSize <= 4
|
2016-09-29 04:16:04 +03:00
|
|
|
if not isLocal:
|
|
|
|
offset = self.readLong()
|
|
|
|
offset += self.offsetOfNewPage
|
|
|
|
self.rewriteLastLong(offset)
|
|
|
|
|
|
|
|
if tag in self.Tags:
|
|
|
|
curPos = self.f.tell()
|
|
|
|
|
|
|
|
if isLocal:
|
2019-03-21 16:28:20 +03:00
|
|
|
self.fixOffsets(
|
|
|
|
count, isShort=(fieldSize == 2), isLong=(fieldSize == 4)
|
|
|
|
)
|
2016-09-29 04:16:04 +03:00
|
|
|
self.f.seek(curPos + 4)
|
|
|
|
else:
|
|
|
|
self.f.seek(offset)
|
2019-03-21 16:28:20 +03:00
|
|
|
self.fixOffsets(
|
|
|
|
count, isShort=(fieldSize == 2), isLong=(fieldSize == 4)
|
|
|
|
)
|
2016-09-29 04:16:04 +03:00
|
|
|
self.f.seek(curPos)
|
|
|
|
|
|
|
|
offset = curPos = None
|
|
|
|
|
|
|
|
elif isLocal:
|
|
|
|
# skip the locally stored value that is not an offset
|
|
|
|
self.f.seek(4, os.SEEK_CUR)
|
|
|
|
|
|
|
|
def fixOffsets(self, count, isShort=False, isLong=False):
|
|
|
|
if not isShort and not isLong:
|
|
|
|
raise RuntimeError("offset is neither short nor long")
|
|
|
|
|
|
|
|
for i in range(count):
|
|
|
|
offset = self.readShort() if isShort else self.readLong()
|
|
|
|
offset += self.offsetOfNewPage
|
|
|
|
if isShort and offset >= 65536:
|
|
|
|
# offset is now too large - we must convert shorts to longs
|
|
|
|
if count != 1:
|
|
|
|
raise RuntimeError("not implemented") # XXX TODO
|
|
|
|
|
|
|
|
# simple case - the offset is just one and therefore it is
|
|
|
|
# local (not referenced with another offset)
|
|
|
|
self.rewriteLastShortToLong(offset)
|
|
|
|
self.f.seek(-10, os.SEEK_CUR)
|
2018-12-29 08:14:29 +03:00
|
|
|
self.writeShort(TiffTags.LONG) # rewrite the type to LONG
|
2016-09-29 04:16:04 +03:00
|
|
|
self.f.seek(8, os.SEEK_CUR)
|
|
|
|
elif isShort:
|
|
|
|
self.rewriteLastShort(offset)
|
|
|
|
else:
|
|
|
|
self.rewriteLastLong(offset)
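# Minimal usage sketch for AppendingTiffWriter (hypothetical file name and
# frames), mirroring how _save_all drives it below:
#   with AppendingTiffWriter("stack.tif", new=True) as tf:
#       for frame in frames:
#           _save(frame, tf, "stack.tif")
#           tf.newFrame()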
|
|
|
|
|
2017-04-20 14:14:23 +03:00
|
|
|
|
2016-09-29 04:16:04 +03:00
|
|
|
def _save_all(im, fp, filename):
|
2017-02-16 03:54:43 +03:00
|
|
|
encoderinfo = im.encoderinfo.copy()
|
|
|
|
encoderconfig = im.encoderconfig
|
2017-11-06 12:11:29 +03:00
|
|
|
append_images = list(encoderinfo.get("append_images", []))
|
2017-11-04 02:46:15 +03:00
|
|
|
if not hasattr(im, "n_frames") and not append_images:
|
2016-09-29 04:16:04 +03:00
|
|
|
return _save(im, fp, filename)
|
|
|
|
|
|
|
|
cur_idx = im.tell()
|
|
|
|
try:
|
|
|
|
with AppendingTiffWriter(fp) as tf:
|
2019-03-21 16:28:20 +03:00
|
|
|
for ims in [im] + append_images:
|
2017-02-16 03:54:43 +03:00
|
|
|
ims.encoderinfo = encoderinfo
|
|
|
|
ims.encoderconfig = encoderconfig
|
|
|
|
if not hasattr(ims, "n_frames"):
|
|
|
|
nfr = 1
|
|
|
|
else:
|
|
|
|
nfr = ims.n_frames
|
|
|
|
|
|
|
|
for idx in range(nfr):
|
|
|
|
ims.seek(idx)
|
|
|
|
ims.load()
|
|
|
|
_save(ims, tf, filename)
|
|
|
|
tf.newFrame()
|
2016-09-29 04:16:04 +03:00
|
|
|
finally:
|
|
|
|
im.seek(cur_idx)
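# Illustrative sketch of the multi-frame save path above (assumes numpy is
# available; data and file name are hypothetical):
#   import numpy as np
#   from PIL import Image
#   frames = [Image.fromarray(m) for m in np.ones((100, 100, 100), dtype=np.uint8)]
#   frames[0].save("test.tif", compression="tiff_deflate", save_all=True,
#                  append_images=frames[1:])
# For consistent results, all appended images should share the same
# encoderinfo/encoderconfig settings.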
|
|
|
|
|
2017-05-27 23:55:14 +03:00
|
|
|
|
2010-07-31 06:52:47 +04:00
|
|
|
#
|
|
|
|
# --------------------------------------------------------------------
|
|
|
|
# Register
|
|
|
|
|
2015-07-04 16:29:58 +03:00
|
|
|
Image.register_open(TiffImageFile.format, TiffImageFile, _accept)
|
|
|
|
Image.register_save(TiffImageFile.format, _save)
|
2016-09-29 04:16:04 +03:00
|
|
|
Image.register_save_all(TiffImageFile.format, _save_all)
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2016-04-25 07:59:02 +03:00
|
|
|
Image.register_extensions(TiffImageFile.format, [".tif", ".tiff"])
|
2010-07-31 06:52:47 +04:00
|
|
|
|
2015-07-04 16:29:58 +03:00
|
|
|
Image.register_mime(TiffImageFile.format, "image/tiff")
|