[pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
pre-commit-ci[bot] 2025-02-03 17:35:58 +00:00
parent 90d2506074
commit 955d678ca2
23 changed files with 91 additions and 92 deletions
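
The changes below are style-only rewrites applied automatically by the pre-commit.com hooks: hexadecimal escape sequences are lowercased, and trailing commas are added to parameter lists that are already split across lines. Neither rewrite changes the bytes or strings the code produces. As a quick sanity check (plain Python, illustrative only, not part of this commit):

    # Escape-sequence case is purely cosmetic: both spellings denote the same values.
    assert b"\xFF\xD8\xFF" == b"\xff\xd8\xff"   # JPEG magic bytes
    assert "\u030C" == "\u030c"                 # combining caron
    assert b"II\x2A\x00" == b"II\x2a\x00"       # little-endian TIFF magic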

@@ -934,7 +934,7 @@ class TestFileJpeg:
def test_jpeg_magic_number(self, monkeypatch: pytest.MonkeyPatch) -> None:
size = 4097
buffer = BytesIO(b"\xFF" * size) # Many xFF bytes
buffer = BytesIO(b"\xff" * size) # Many xFF bytes
max_pos = 0
orig_read = buffer.read

@@ -309,7 +309,7 @@ class TestFileLibTiff(LibTiffTestCase):
}
def check_tags(
-tiffinfo: TiffImagePlugin.ImageFileDirectory_v2 | dict[int, str]
+tiffinfo: TiffImagePlugin.ImageFileDirectory_v2 | dict[int, str],
) -> None:
im = hopper()

@@ -264,7 +264,7 @@ def test_pdf_append(tmp_path: Path) -> None:
# append some info
pdf.info.Title = "abc"
pdf.info.Author = "def"
pdf.info.Subject = "ghi\uABCD"
pdf.info.Subject = "ghi\uabcd"
pdf.info.Keywords = "qw)e\\r(ty"
pdf.info.Creator = "hopper()"
pdf.start_writing()
@@ -292,7 +292,7 @@ def test_pdf_append(tmp_path: Path) -> None:
assert pdf.info.Title == "abc"
assert pdf.info.Producer == "PdfParser"
assert pdf.info.Keywords == "qw)e\\r(ty"
-assert pdf.info.Subject == "ghi\uABCD"
+assert pdf.info.Subject == "ghi\uabcd"
assert b"CreationDate" in pdf.info
assert b"ModDate" in pdf.info
check_pdf_pages_consistency(pdf)

@@ -49,7 +49,7 @@ def test_sanity() -> None:
(b"P5 3 1 257 \x00\x00\x00\x80\x01\x01", "I", (0, 32640, 65535)),
# P6 with maxval < 255
(
b"P6 3 1 17 \x00\x01\x02\x08\x09\x0A\x0F\x10\x11",
b"P6 3 1 17 \x00\x01\x02\x08\x09\x0a\x0f\x10\x11",
"RGB",
(
(0, 15, 30),
@@ -60,7 +60,7 @@ def test_sanity() -> None:
# P6 with maxval > 255
(
b"P6 3 1 257 \x00\x00\x00\x01\x00\x02"
b"\x00\x80\x00\x81\x00\x82\x01\x00\x01\x01\xFF\xFF",
b"\x00\x80\x00\x81\x00\x82\x01\x00\x01\x01\xff\xff",
"RGB",
(
(0, 1, 2),

@@ -746,7 +746,7 @@ class TestFileTiff:
assert reread.n_frames == 3
def test_fixoffsets(self) -> None:
b = BytesIO(b"II\x2A\x00\x00\x00\x00\x00")
b = BytesIO(b"II\x2a\x00\x00\x00\x00\x00")
with TiffImagePlugin.AppendingTiffWriter(b) as a:
b.seek(0)
a.fixOffsets(1, isShort=True)
@@ -759,14 +759,14 @@ class TestFileTiff:
with pytest.raises(RuntimeError):
a.fixOffsets(1)
b = BytesIO(b"II\x2A\x00\x00\x00\x00\x00")
b = BytesIO(b"II\x2a\x00\x00\x00\x00\x00")
with TiffImagePlugin.AppendingTiffWriter(b) as a:
a.offsetOfNewPage = 2**16
b.seek(0)
a.fixOffsets(1, isShort=True)
b = BytesIO(b"II\x2B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00")
b = BytesIO(b"II\x2b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00")
with TiffImagePlugin.AppendingTiffWriter(b) as a:
a.offsetOfNewPage = 2**32
@@ -777,7 +777,7 @@ class TestFileTiff:
a.fixOffsets(1, isLong=True)
def test_appending_tiff_writer_writelong(self) -> None:
data = b"II\x2A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
data = b"II\x2a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
b = BytesIO(data)
with TiffImagePlugin.AppendingTiffWriter(b) as a:
a.seek(-4, os.SEEK_CUR)
@@ -785,7 +785,7 @@ class TestFileTiff:
assert b.getvalue() == data[:-4] + b"\xff\xff\xff\xff"
def test_appending_tiff_writer_rewritelastshorttolong(self) -> None:
data = b"II\x2A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
data = b"II\x2a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
b = BytesIO(data)
with TiffImagePlugin.AppendingTiffWriter(b) as a:
a.seek(-2, os.SEEK_CUR)

@@ -71,7 +71,7 @@ def test_load_float_dpi() -> None:
with open("Tests/images/drawing.emf", "rb") as fp:
data = fp.read()
-b = BytesIO(data[:8] + b"\x06\xFA" + data[10:])
+b = BytesIO(data[:8] + b"\x06\xfa" + data[10:])
with Image.open(b) as im:
assert im.info["dpi"][0] == 2540

@@ -812,7 +812,7 @@ def test_rounded_rectangle(
tuple[int, int, int, int]
| tuple[list[int]]
| tuple[tuple[int, int], tuple[int, int]]
-)
+),
) -> None:
# Arrange
im = Image.new("RGB", (200, 200))

@@ -557,7 +557,7 @@ def test_render_empty(font: ImageFont.FreeTypeFont) -> None:
def test_unicode_extended(layout_engine: ImageFont.Layout) -> None:
# issue #3777
text = "A\u278A\U0001F12B"
text = "A\u278a\U0001f12b"
target = "Tests/images/unicode_extended.png"
ttf = ImageFont.truetype(
@@ -1026,7 +1026,7 @@ def test_sbix(layout_engine: ImageFont.Layout) -> None:
im = Image.new("RGB", (400, 400), "white")
d = ImageDraw.Draw(im)
d.text((50, 50), "\uE901", font=font, embedded_color=True)
d.text((50, 50), "\ue901", font=font, embedded_color=True)
assert_image_similar_tofile(im, "Tests/images/chromacheck-sbix.png", 1)
except OSError as e: # pragma: no cover
@@ -1043,7 +1043,7 @@ def test_sbix_mask(layout_engine: ImageFont.Layout) -> None:
im = Image.new("RGB", (400, 400), "white")
d = ImageDraw.Draw(im)
d.text((50, 50), "\uE901", (100, 0, 0), font=font)
d.text((50, 50), "\ue901", (100, 0, 0), font=font)
assert_image_similar_tofile(im, "Tests/images/chromacheck-sbix_mask.png", 1)
except OSError as e: # pragma: no cover

@@ -229,7 +229,7 @@ def test_getlength(
@pytest.mark.parametrize("direction", ("ltr", "ttb"))
@pytest.mark.parametrize(
"text",
("i" + ("\u030C" * 15) + "i", "i" + "\u032C" * 15 + "i", "\u035Cii", "i\u0305i"),
("i" + ("\u030c" * 15) + "i", "i" + "\u032c" * 15 + "i", "\u035cii", "i\u0305i"),
ids=("caron-above", "caron-below", "double-breve", "overline"),
)
def test_getlength_combine(mode: str, direction: str, text: str) -> None:
@@ -272,27 +272,27 @@ def test_anchor_ttb(anchor: str) -> None:
combine_tests = (
# extends above (e.g. issue #4553)
("caron", "a\u030C\u030C\u030C\u030C\u030Cb", None, None, 0.08),
("caron_la", "a\u030C\u030C\u030C\u030C\u030Cb", "la", None, 0.08),
("caron_lt", "a\u030C\u030C\u030C\u030C\u030Cb", "lt", None, 0.08),
("caron_ls", "a\u030C\u030C\u030C\u030C\u030Cb", "ls", None, 0.08),
("caron_ttb", "ca" + ("\u030C" * 15) + "b", None, "ttb", 0.3),
("caron_ttb_lt", "ca" + ("\u030C" * 15) + "b", "lt", "ttb", 0.3),
("caron", "a\u030c\u030c\u030c\u030c\u030cb", None, None, 0.08),
("caron_la", "a\u030c\u030c\u030c\u030c\u030cb", "la", None, 0.08),
("caron_lt", "a\u030c\u030c\u030c\u030c\u030cb", "lt", None, 0.08),
("caron_ls", "a\u030c\u030c\u030c\u030c\u030cb", "ls", None, 0.08),
("caron_ttb", "ca" + ("\u030c" * 15) + "b", None, "ttb", 0.3),
("caron_ttb_lt", "ca" + ("\u030c" * 15) + "b", "lt", "ttb", 0.3),
# extends below
("caron_below", "a\u032C\u032C\u032C\u032C\u032Cb", None, None, 0.02),
("caron_below_ld", "a\u032C\u032C\u032C\u032C\u032Cb", "ld", None, 0.02),
("caron_below_lb", "a\u032C\u032C\u032C\u032C\u032Cb", "lb", None, 0.02),
("caron_below_ls", "a\u032C\u032C\u032C\u032C\u032Cb", "ls", None, 0.02),
("caron_below_ttb", "a" + ("\u032C" * 15) + "b", None, "ttb", 0.03),
("caron_below_ttb_lb", "a" + ("\u032C" * 15) + "b", "lb", "ttb", 0.03),
("caron_below", "a\u032c\u032c\u032c\u032c\u032cb", None, None, 0.02),
("caron_below_ld", "a\u032c\u032c\u032c\u032c\u032cb", "ld", None, 0.02),
("caron_below_lb", "a\u032c\u032c\u032c\u032c\u032cb", "lb", None, 0.02),
("caron_below_ls", "a\u032c\u032c\u032c\u032c\u032cb", "ls", None, 0.02),
("caron_below_ttb", "a" + ("\u032c" * 15) + "b", None, "ttb", 0.03),
("caron_below_ttb_lb", "a" + ("\u032c" * 15) + "b", "lb", "ttb", 0.03),
# extends to the right (e.g. issue #3745)
("double_breve_below", "a\u035Ci", None, None, 0.02),
("double_breve_below_ma", "a\u035Ci", "ma", None, 0.02),
("double_breve_below_ra", "a\u035Ci", "ra", None, 0.02),
("double_breve_below_ttb", "a\u035Cb", None, "ttb", 0.02),
("double_breve_below_ttb_rt", "a\u035Cb", "rt", "ttb", 0.02),
("double_breve_below_ttb_mt", "a\u035Cb", "mt", "ttb", 0.02),
("double_breve_below_ttb_st", "a\u035Cb", "st", "ttb", 0.02),
("double_breve_below", "a\u035ci", None, None, 0.02),
("double_breve_below_ma", "a\u035ci", "ma", None, 0.02),
("double_breve_below_ra", "a\u035ci", "ra", None, 0.02),
("double_breve_below_ttb", "a\u035cb", None, "ttb", 0.02),
("double_breve_below_ttb_rt", "a\u035cb", "rt", "ttb", 0.02),
("double_breve_below_ttb_mt", "a\u035cb", "mt", "ttb", 0.02),
("double_breve_below_ttb_st", "a\u035cb", "st", "ttb", 0.02),
# extends to the left (fail=0.064)
("overline", "i\u0305", None, None, 0.02),
("overline_la", "i\u0305", "la", None, 0.02),
@@ -346,7 +346,7 @@ def test_combine_multiline(anchor: str, align: str) -> None:
path = f"Tests/images/test_combine_multiline_{anchor}_{align}.png"
f = ImageFont.truetype("Tests/fonts/NotoSans-Regular.ttf", 48)
text = "i\u0305\u035C\ntext" # i with overline and double breve, and a word
text = "i\u0305\u035c\ntext" # i with overline and double breve, and a word
im = Image.new("RGB", (400, 400), "white")
d = ImageDraw.Draw(im)

@@ -189,7 +189,7 @@ def test_2bit_palette(tmp_path: Path) -> None:
rgb = b"\x00" * 2 + b"\x01" * 2 + b"\x02" * 2
img = Image.frombytes("P", (6, 1), rgb)
img.putpalette(b"\xFF\x00\x00\x00\xFF\x00\x00\x00\xFF") # RGB
img.putpalette(b"\xff\x00\x00\x00\xff\x00\x00\x00\xff") # RGB
img.save(outfile, format="PNG")
assert_image_equal_tofile(img, outfile)

@@ -79,7 +79,7 @@ def test_path_constructors(
),
)
def test_invalid_path_constructors(
-coords: tuple[str, str] | Sequence[Sequence[int]]
+coords: tuple[str, str] | Sequence[Sequence[int]],
) -> None:
# Act
with pytest.raises(ValueError) as e:

@@ -20,10 +20,10 @@ from PIL.PdfParser import (
def test_text_encode_decode() -> None:
assert encode_text("abc") == b"\xFE\xFF\x00a\x00b\x00c"
assert decode_text(b"\xFE\xFF\x00a\x00b\x00c") == "abc"
assert encode_text("abc") == b"\xfe\xff\x00a\x00b\x00c"
assert decode_text(b"\xfe\xff\x00a\x00b\x00c") == "abc"
assert decode_text(b"abc") == "abc"
assert decode_text(b"\x1B a \x1C") == "\u02D9 a \u02DD"
assert decode_text(b"\x1b a \x1c") == "\u02d9 a \u02dd"
def test_indirect_refs() -> None:
@@ -45,8 +45,8 @@ def test_parsing() -> None:
assert PdfParser.get_value(b"false%", 0) == (False, 5)
assert PdfParser.get_value(b"null<", 0) == (None, 4)
assert PdfParser.get_value(b"%cmt\n %cmt\n 123\n", 0) == (123, 15)
assert PdfParser.get_value(b"<901FA3>", 0) == (b"\x90\x1F\xA3", 8)
assert PdfParser.get_value(b"asd < 9 0 1 f A > qwe", 3) == (b"\x90\x1F\xA0", 17)
assert PdfParser.get_value(b"<901FA3>", 0) == (b"\x90\x1f\xa3", 8)
assert PdfParser.get_value(b"asd < 9 0 1 f A > qwe", 3) == (b"\x90\x1f\xa0", 17)
assert PdfParser.get_value(b"(asd)", 0) == (b"asd", 5)
assert PdfParser.get_value(b"(asd(qwe)zxc)zzz(aaa)", 0) == (b"asd(qwe)zxc", 13)
assert PdfParser.get_value(b"(Two \\\nwords.)", 0) == (b"Two words.", 14)
@@ -56,9 +56,9 @@ def test_parsing() -> None:
assert PdfParser.get_value(b"(One\\(paren).", 0) == (b"One(paren", 12)
assert PdfParser.get_value(b"(One\\)paren).", 0) == (b"One)paren", 12)
assert PdfParser.get_value(b"(\\0053)", 0) == (b"\x053", 7)
assert PdfParser.get_value(b"(\\053)", 0) == (b"\x2B", 6)
assert PdfParser.get_value(b"(\\53)", 0) == (b"\x2B", 5)
assert PdfParser.get_value(b"(\\53a)", 0) == (b"\x2Ba", 6)
assert PdfParser.get_value(b"(\\053)", 0) == (b"\x2b", 6)
assert PdfParser.get_value(b"(\\53)", 0) == (b"\x2b", 5)
assert PdfParser.get_value(b"(\\53a)", 0) == (b"\x2ba", 6)
assert PdfParser.get_value(b"(\\1111)", 0) == (b"\x491", 7)
assert PdfParser.get_value(b" 123 (", 0) == (123, 4)
assert round(abs(PdfParser.get_value(b" 123.4 %", 0)[0] - 123.4), 7) == 0
@@ -118,7 +118,7 @@ def test_pdf_repr() -> None:
assert pdf_repr(None) == b"null"
assert pdf_repr(b"a)/b\\(c") == rb"(a\)/b\\\(c)"
assert pdf_repr([123, True, {"a": PdfName(b"b")}]) == b"[ 123 true <<\n/a /b\n>> ]"
assert pdf_repr(PdfBinary(b"\x90\x1F\xA0")) == b"<901FA0>"
assert pdf_repr(PdfBinary(b"\x90\x1f\xa0")) == b"<901FA0>"
def test_duplicate_xref_entry() -> None:

@@ -145,7 +145,7 @@ class ImImageFile(ImageFile.ImageFile):
if s == b"\r":
continue
if not s or s == b"\0" or s == b"\x1A":
if not s or s == b"\0" or s == b"\x1a":
break
# FIXME: this may read whole file if not a text file
@@ -209,7 +209,7 @@ class ImImageFile(ImageFile.ImageFile):
self._mode = self.info[MODE]
# Skip forward to start of image data
-while s and s[:1] != b"\x1A":
+while s and s[:1] != b"\x1a":
s = self.fp.read(1)
if not s:
msg = "File truncated"

@@ -514,7 +514,7 @@ class ImagePointTransform:
def _getscaleoffset(
-expr: Callable[[ImagePointTransform], ImagePointTransform | float]
+expr: Callable[[ImagePointTransform], ImagePointTransform | float],
) -> tuple[float, float]:
a = expr(ImagePointTransform(1, 0))
return (a.scale, a.offset) if isinstance(a, ImagePointTransform) else (0, a)
@@ -3884,7 +3884,7 @@ class Exif(_ExifBase):
return self._fixup_dict(dict(info))
def _get_head(self) -> bytes:
version = b"\x2B" if self.bigtiff else b"\x2A"
version = b"\x2b" if self.bigtiff else b"\x2a"
if self.endian == "<":
head = b"II" + version + b"\x00" + o32le(8)
else:

@@ -55,7 +55,7 @@ class ImtImageFile(ImageFile.ImageFile):
if not s:
break
if s == b"\x0C":
if s == b"\x0c":
# image data begins
self.tile = [
ImageFile._Tile(

@@ -325,7 +325,7 @@ MARKER = {
def _accept(prefix: bytes) -> bool:
# Magic number was taken from https://en.wikipedia.org/wiki/JPEG
-return prefix[:3] == b"\xFF\xD8\xFF"
+return prefix[:3] == b"\xff\xd8\xff"
##
@@ -342,7 +342,7 @@ class JpegImageFile(ImageFile.ImageFile):
if not _accept(s):
msg = "not a JPEG file"
raise SyntaxError(msg)
s = b"\xFF"
s = b"\xff"
# Create attributes
self.bits = self.layers = 0
@@ -417,7 +417,7 @@ class JpegImageFile(ImageFile.ImageFile):
# Premature EOF.
# Pretend file is finished adding EOI marker
self._ended = True
return b"\xFF\xD9"
return b"\xff\xd9"
return s
@@ -712,7 +712,7 @@ def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
def validate_qtables(
qtables: (
str | tuple[list[int], ...] | list[list[int]] | dict[int, list[int]] | None
-)
+),
) -> list[list[int]] | None:
if qtables is None:
return qtables
@@ -769,7 +769,7 @@ def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
msg = "XMP data is too long"
raise ValueError(msg)
size = o16(2 + overhead_len + len(xmp))
extra += b"\xFF\xE1" + size + b"http://ns.adobe.com/xap/1.0/\x00" + xmp
extra += b"\xff\xe1" + size + b"http://ns.adobe.com/xap/1.0/\x00" + xmp
icc_profile = info.get("icc_profile")
if icc_profile:
@@ -783,7 +783,7 @@ def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
for marker in markers:
size = o16(2 + overhead_len + len(marker))
extra += (
b"\xFF\xE2"
b"\xff\xe2"
+ size
+ b"ICC_PROFILE\0"
+ o8(i)

@@ -51,7 +51,7 @@ def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
if not offsets:
# APP2 marker
im_frame.encoderinfo["extra"] = (
b"\xFF\xE2" + struct.pack(">H", 6 + 82) + b"MPF\0" + b" " * 82
b"\xff\xe2" + struct.pack(">H", 6 + 82) + b"MPF\0" + b" " * 82
)
exif = im_frame.encoderinfo.get("exif")
if isinstance(exif, Image.Exif):
@@ -84,7 +84,7 @@ def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
ifd[0xB002] = mpentries
fp.seek(mpf_offset)
fp.write(b"II\x2A\x00" + o32le(8) + ifd.tobytes(8))
fp.write(b"II\x2a\x00" + o32le(8) + ifd.tobytes(8))
fp.seek(0, os.SEEK_END)

@@ -188,7 +188,7 @@ def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
+ o16(dpi[0])
+ o16(dpi[1])
+ b"\0" * 24
+ b"\xFF" * 24
+ b"\xff" * 24
+ b"\0"
+ o8(planes)
+ o16(stride)

@@ -19,14 +19,14 @@ def encode_text(s: str) -> bytes:
PDFDocEncoding = {
0x16: "\u0017",
0x18: "\u02D8",
0x19: "\u02C7",
0x1A: "\u02C6",
0x1B: "\u02D9",
0x1C: "\u02DD",
0x1D: "\u02DB",
0x1E: "\u02DA",
0x1F: "\u02DC",
0x18: "\u02d8",
0x19: "\u02c7",
0x1A: "\u02c6",
0x1B: "\u02d9",
0x1C: "\u02dd",
0x1D: "\u02db",
0x1E: "\u02da",
0x1F: "\u02dc",
0x80: "\u2022",
0x81: "\u2020",
0x82: "\u2021",
@@ -36,29 +36,29 @@ PDFDocEncoding = {
0x86: "\u0192",
0x87: "\u2044",
0x88: "\u2039",
0x89: "\u203A",
0x89: "\u203a",
0x8A: "\u2212",
0x8B: "\u2030",
0x8C: "\u201E",
0x8D: "\u201C",
0x8E: "\u201D",
0x8C: "\u201e",
0x8D: "\u201c",
0x8E: "\u201d",
0x8F: "\u2018",
0x90: "\u2019",
0x91: "\u201A",
0x91: "\u201a",
0x92: "\u2122",
0x93: "\uFB01",
0x94: "\uFB02",
0x93: "\ufb01",
0x94: "\ufb02",
0x95: "\u0141",
0x96: "\u0152",
0x97: "\u0160",
0x98: "\u0178",
0x99: "\u017D",
0x99: "\u017d",
0x9A: "\u0131",
0x9B: "\u0142",
0x9C: "\u0153",
0x9D: "\u0161",
0x9E: "\u017E",
0xA0: "\u20AC",
0x9E: "\u017e",
0xA0: "\u20ac",
}

@@ -1433,7 +1433,7 @@ def _save(
chunk(fp, b"tRNS", transparency[:alpha_bytes])
else:
transparency = max(0, min(255, transparency))
alpha = b"\xFF" * transparency + b"\0"
alpha = b"\xff" * transparency + b"\0"
chunk(fp, b"tRNS", alpha[:alpha_bytes])
elif im.mode in ("1", "L", "I", "I;16"):
transparency = max(0, min(65535, transparency))

@@ -230,7 +230,7 @@ class PpmPlainDecoder(ImageFile.PyDecoder):
msg = b"Invalid token for this mode: %s" % bytes([token])
raise ValueError(msg)
data = (data + tokens)[:total_bytes]
-invert = bytes.maketrans(b"01", b"\xFF\x00")
+invert = bytes.maketrans(b"01", b"\xff\x00")
return data.translate(invert)
def _decode_blocks(self, maxval: int) -> bytearray:

@@ -275,12 +275,12 @@ OPEN_INFO = {
MAX_SAMPLESPERPIXEL = max(len(key_tp[4]) for key_tp in OPEN_INFO)
PREFIXES = [
b"MM\x00\x2A", # Valid TIFF header with big-endian byte order
b"II\x2A\x00", # Valid TIFF header with little-endian byte order
b"MM\x2A\x00", # Invalid TIFF header, assume big-endian
b"II\x00\x2A", # Invalid TIFF header, assume little-endian
b"MM\x00\x2B", # BigTIFF with big-endian byte order
b"II\x2B\x00", # BigTIFF with little-endian byte order
b"MM\x00\x2a", # Valid TIFF header with big-endian byte order
b"II\x2a\x00", # Valid TIFF header with little-endian byte order
b"MM\x2a\x00", # Invalid TIFF header, assume big-endian
b"II\x00\x2a", # Invalid TIFF header, assume little-endian
b"MM\x00\x2b", # BigTIFF with big-endian byte order
b"II\x2b\x00", # BigTIFF with little-endian byte order
]
if not getattr(Image.core, "libtiff_support_custom_tags", True):
@@ -582,7 +582,7 @@ class ImageFileDirectory_v2(_IFDv2Base):
def __init__(
self,
ifh: bytes = b"II\x2A\x00\x00\x00\x00\x00",
ifh: bytes = b"II\x2a\x00\x00\x00\x00\x00",
prefix: bytes | None = None,
group: int | None = None,
) -> None:
@@ -2047,7 +2047,7 @@ class AppendingTiffWriter(io.BytesIO):
self.offsetOfNewPage = 0
self.IIMM = iimm = self.f.read(4)
self._bigtiff = b"\x2B" in iimm
self._bigtiff = b"\x2b" in iimm
if not iimm:
# empty file - first page
self.isFirst = True

@@ -1,5 +1,4 @@
-""" Find compiled module linking to Tcl / Tk libraries
-"""
+"""Find compiled module linking to Tcl / Tk libraries"""
from __future__ import annotations