Merge branch 'main' into plainPPM
Commit d20f39d02e

.github/workflows/cifuzz.yml | 4

@@ -31,13 +31,13 @@ jobs:
           language: python
           dry-run: false
       - name: Upload New Crash
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         if: failure() && steps.build.outcome == 'success'
         with:
           name: artifacts
           path: ./out/artifacts
       - name: Upload Legacy Crash
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         if: steps.run.outcome == 'success'
         with:
           name: crash

.github/workflows/lint.yml | 4

@@ -10,7 +10,7 @@ jobs:
     name: Lint

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3

       - name: pre-commit cache
         uses: actions/cache@v2
@@ -21,7 +21,7 @@ jobs:
             lint-pre-commit-

       - name: Set up Python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v3
         with:
           python-version: "3.10"
           cache: pip

.github/workflows/test-docker.yml | 2

@@ -41,7 +41,7 @@ jobs:
     name: ${{ matrix.docker }}

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3

      - name: Build system information
        run: python3 .github/workflows/system-info.py

.github/workflows/test-mingw.yml | 2

@@ -29,7 +29,7 @@ jobs:

     steps:
       - name: Checkout Pillow
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3

       - name: Set up shell
         run: echo "C:\msys64\usr\bin\" >> $env:GITHUB_PATH

.github/workflows/test-valgrind.yml | 2

@@ -28,7 +28,7 @@ jobs:
     name: ${{ matrix.docker }}

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3

       - name: Build system information
         run: python3 .github/workflows/system-info.py

.github/workflows/test-windows.yml | 10

@@ -23,17 +23,17 @@ jobs:

     steps:
       - name: Checkout Pillow
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3

       - name: Checkout cached dependencies
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
         with:
           repository: python-pillow/pillow-depends
           path: winbuild\depends

       # sets env: pythonLocation
       - name: Set up Python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v3
         with:
           python-version: ${{ matrix.python-version }}
           architecture: ${{ matrix.architecture }}
@@ -156,7 +156,7 @@
         shell: bash

       - name: Upload errors
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         if: failure()
         with:
           name: errors
@@ -182,7 +182,7 @@
           winbuild\\build\\build_pillow.cmd --disable-imagequant bdist_wheel
         shell: cmd

-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v3
         if: "github.event_name != 'pull_request'"
         with:
           name: ${{ steps.wheel.outputs.dist }}

.github/workflows/test.yml | 6

@@ -36,10 +36,10 @@ jobs:
     name: ${{ matrix.os }} Python ${{ matrix.python-version }}

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3

       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v3
         with:
           python-version: ${{ matrix.python-version }}
           cache: pip
@@ -84,7 +84,7 @@
           mkdir -p Tests/errors

       - name: Upload errors
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         if: failure()
         with:
           name: errors

.github/workflows/tidelift.yml | 2

@@ -17,7 +17,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
       - name: Scan
         uses: tidelift/alignment-action@main
         env:

.pre-commit-config.yaml

@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/psf/black
-    rev: f1d4e742c91dd5179d742b0db9293c4472b765f8 # frozen: 21.12b0
+    rev: fc0be6eb1e2a96091e6f64009ee5e9081bf8b6c6 # frozen: 22.1.0
     hooks:
       - id: black
         args: ["--target-version", "py37"]
@@ -19,7 +19,7 @@ repos:
       - id: yesqa

   - repo: https://github.com/Lucas-C/pre-commit-hooks
-    rev: 3592548bbd98528887eeed63486cf6c9bae00b98 # frozen: v1.1.10
+    rev: ca52c4245639abd55c970e6bbbca95cab3de22d8 # frozen: v1.1.13
     hooks:
       - id: remove-tabs
         exclude: (Makefile$|\.bat$|\.cmake$|\.eps$|\.fits$|\.opt$)

Tests/test_file_dds.py

@@ -196,6 +196,13 @@ def test__accept_false():
     assert not output


+def test_invalid_file():
+    invalid_file = "Tests/images/flower.jpg"
+
+    with pytest.raises(SyntaxError):
+        DdsImagePlugin.DdsImageFile(invalid_file)
+
+
 def test_short_header():
     """Check a short header"""
     with open(TEST_FILE_DXT5, "rb") as f:

Tests/test_file_ftex.py

@@ -16,6 +16,13 @@ def test_load_dxt1():
     assert_image_similar(im, target.convert("RGBA"), 15)


+def test_invalid_file():
+    invalid_file = "Tests/images/flower.jpg"
+
+    with pytest.raises(SyntaxError):
+        FtexImagePlugin.FtexImageFile(invalid_file)
+
+
 def test_constants_deprecation():
     for enum, prefix in {
         FtexImagePlugin.Format: "FORMAT_",

Tests/test_file_xbm.py

@@ -2,7 +2,7 @@ from io import BytesIO

 import pytest

-from PIL import Image
+from PIL import Image, XbmImagePlugin

 from .helper import hopper

@@ -63,6 +63,13 @@ def test_open_filename_with_underscore():
     assert im.size == (128, 128)


+def test_invalid_file():
+    invalid_file = "Tests/images/flower.jpg"
+
+    with pytest.raises(SyntaxError):
+        XbmImagePlugin.XbmImageFile(invalid_file)
+
+
 def test_save_wrong_mode(tmp_path):
     im = hopper()
     out = str(tmp_path / "temp.xbm")

Tests/test_pdfparser.py

@@ -115,6 +115,6 @@ def test_pdf_repr():
     assert pdf_repr(True) == b"true"
     assert pdf_repr(False) == b"false"
     assert pdf_repr(None) == b"null"
-    assert pdf_repr(b"a)/b\\(c") == br"(a\)/b\\\(c)"
+    assert pdf_repr(b"a)/b\\(c") == rb"(a\)/b\\\(c)"
     assert pdf_repr([123, True, {"a": PdfName(b"b")}]) == b"[ 123 true <<\n/a /b\n>> ]"
     assert pdf_repr(PdfBinary(b"\x90\x1F\xA0")) == b"<901FA0>"
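
Note (not part of the diff): br"..." and rb"..." spell the same raw bytes literal in Python 3, as do fr"..." and rf"...", so this change and the many similar prefix rewrites further down (PdfParser.py, setup.py, winbuild) are purely stylistic. A standalone sanity check:

    # The prefix letters may appear in either order; the resulting objects are equal.
    assert br"(a\)" == rb"(a\)" == b"(a\\)"
    assert fr"{2 + 2}" == rf"{2 + 2}" == "4"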

docs/example/DdsImagePlugin.py

@@ -210,7 +210,9 @@ class DdsImageFile(ImageFile.ImageFile):
     format_description = "DirectDraw Surface"

     def _open(self):
-        magic, header_size = struct.unpack("<II", self.fp.read(8))
+        if not _accept(self.fp.read(4)):
+            raise SyntaxError("not a DDS file")
+        (header_size,) = struct.unpack("<I", self.fp.read(4))
         if header_size != 124:
             raise OSError(f"Unsupported header size {repr(header_size)}")
         header_bytes = self.fp.read(header_size - 4)

docs/reference/ImageStat.rst

@@ -14,6 +14,16 @@ for a region of an image.
 statistics. You can also pass in a previously calculated histogram.

 :param image: A PIL image, or a precalculated histogram.
+
+    .. note::
+
+        For a PIL image, calculations rely on the
+        :py:meth:`~PIL.Image.Image.histogram` method. The pixel counts are
+        grouped into 256 bins, even if the image has more than 8 bits per
+        channel. So ``I`` and ``F`` mode images have a maximum ``mean``,
+        ``median`` and ``rms`` of 255, and cannot have an ``extrema`` maximum
+        of more than 255.
+
 :param mask: An optional mask.

 .. py:attribute:: extrema
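
The capping described in the note can be sketched as follows (a minimal standalone example, not part of the diff; the pixel values are made up for illustration):

    from PIL import Image, ImageStat

    # 32-bit integer image whose pixel values go well above 255.
    im = Image.new("I", (8, 8))
    im.putdata(list(range(0, 6400, 100)))  # 64 values: 0, 100, ..., 6300

    stat = ImageStat.Stat(im)
    # Stat works from the 256-bin histogram, so none of these figures can
    # exceed 255, even though the underlying pixel values reach 6300.
    print(stat.extrema, stat.mean, stat.median, stat.rms)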

setup.py | 2

@@ -167,7 +167,7 @@ def _find_library_dirs_ldconfig():
         # Assuming GLIBC's ldconfig (with option -p)
         # Alpine Linux uses musl that can't print cache
         args = ["/sbin/ldconfig", "-p"]
-        expr = fr".*\({abi_type}.*\) => (.*)"
+        expr = rf".*\({abi_type}.*\) => (.*)"
         env = dict(os.environ)
         env["LC_ALL"] = "C"
         env["LANG"] = "C"

src/PIL/DdsImagePlugin.py

@@ -111,7 +111,9 @@ class DdsImageFile(ImageFile.ImageFile):
     format_description = "DirectDraw Surface"

     def _open(self):
-        magic, header_size = struct.unpack("<II", self.fp.read(8))
+        if not _accept(self.fp.read(4)):
+            raise SyntaxError("not a DDS file")
+        (header_size,) = struct.unpack("<I", self.fp.read(4))
         if header_size != 124:
             raise OSError(f"Unsupported header size {repr(header_size)}")
         header_bytes = self.fp.read(header_size - 4)

src/PIL/FliImagePlugin.py

@@ -26,7 +26,11 @@ from ._binary import o8


 def _accept(prefix):
-    return len(prefix) >= 6 and i16(prefix, 4) in [0xAF11, 0xAF12]
+    return (
+        len(prefix) >= 6
+        and i16(prefix, 4) in [0xAF11, 0xAF12]
+        and i16(prefix, 14) in [0, 3]  # flags
+    )


 ##
@@ -44,11 +48,7 @@ class FliImageFile(ImageFile.ImageFile):

         # HEAD
         s = self.fp.read(128)
-        if not (
-            _accept(s)
-            and i16(s, 14) in [0, 3]  # flags
-            and s[20:22] == b"\x00\x00"  # reserved
-        ):
+        if not (_accept(s) and s[20:22] == b"\x00\x00"):
             raise SyntaxError("not an FLI/FLC file")

         # frames
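
Because _accept() is the prefix check Image.open() uses to decide whether to try a plugin at all, folding the flags test into it means files with a bogus flags field are rejected before FliImageFile._open() ever runs. A small standalone sketch of the new check (not part of the diff; the byte strings are hand-built for illustration):

    from PIL import FliImagePlugin

    # 16-byte prefixes: magic 0xAF11 at offset 4, flags field at offset 14.
    good = b"\x00\x00\x00\x00\x11\xaf" + b"\x00" * 8 + b"\x03\x00"
    bad = b"\x00\x00\x00\x00\x11\xaf" + b"\x00" * 8 + b"\xff\x00"

    print(FliImagePlugin._accept(good))  # True: magic and flags both plausible
    print(FliImagePlugin._accept(bad))   # False: flags field is not 0 or 3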

src/PIL/FtexImagePlugin.py

@@ -94,7 +94,8 @@ class FtexImageFile(ImageFile.ImageFile):
     format_description = "Texture File Format (IW2:EOC)"

     def _open(self):
-        struct.unpack("<I", self.fp.read(4))  # magic
+        if not _accept(self.fp.read(4)):
+            raise SyntaxError("not an FTEX file")
         struct.unpack("<i", self.fp.read(4))  # version
         self._size = struct.unpack("<2i", self.fp.read(8))
         mipmap_count, format_count = struct.unpack("<2i", self.fp.read(8))

src/PIL/GbrImagePlugin.py

@@ -43,9 +43,9 @@ class GbrImageFile(ImageFile.ImageFile):

     def _open(self):
         header_size = i32(self.fp.read(4))
-        version = i32(self.fp.read(4))
         if header_size < 20:
             raise SyntaxError("not a GIMP brush")
+        version = i32(self.fp.read(4))
         if version not in (1, 2):
             raise SyntaxError(f"Unsupported GIMP brush version: {version}")


src/PIL/GimpPaletteFile.py

@@ -38,7 +38,7 @@ class GimpPaletteFile:
                 break

             # skip fields and comment lines
-            if re.match(br"\w+:|#", s):
+            if re.match(rb"\w+:|#", s):
                 continue
             if len(s) > 100:
                 raise SyntaxError("bad palette file")

src/PIL/IcnsImagePlugin.py

@@ -167,7 +167,7 @@ class IcnsFile:
        self.dct = dct = {}
        self.fobj = fobj
        sig, filesize = nextheader(fobj)
-       if sig != MAGIC:
+       if not _accept(sig):
            raise SyntaxError("not an icns file")
        i = HEADERSIZE
        while i < filesize:

src/PIL/ImImagePlugin.py

@@ -100,7 +100,7 @@ for i in range(2, 33):
 # --------------------------------------------------------------------
 # Read IM directory

-split = re.compile(br"^([A-Za-z][^:]*):[ \t]*(.*)[ \t]*$")
+split = re.compile(rb"^([A-Za-z][^:]*):[ \t]*(.*)[ \t]*$")


 def number(s):

src/PIL/Image.py

@@ -1492,11 +1492,12 @@ class Image:

     def histogram(self, mask=None, extrema=None):
         """
-        Returns a histogram for the image. The histogram is returned as
-        a list of pixel counts, one for each pixel value in the source
-        image. If the image has more than one band, the histograms for
-        all bands are concatenated (for example, the histogram for an
-        "RGB" image contains 768 values).
+        Returns a histogram for the image. The histogram is returned as a
+        list of pixel counts, one for each pixel value in the source
+        image. Counts are grouped into 256 bins for each band, even if
+        the image has more than 8 bits per band. If the image has more
+        than one band, the histograms for all bands are concatenated (for
+        example, the histogram for an "RGB" image contains 768 values).

         A bilevel image (mode "1") is treated as a greyscale ("L") image
         by this method.
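
A minimal sketch of what the reworded docstring describes (not part of the diff):

    from PIL import Image

    rgb = Image.new("RGB", (16, 16), (10, 200, 30))
    print(len(rgb.histogram()))  # 768: three bands x 256 bins each

    # A 32-bit "I" image is also reduced to 256 bins, regardless of bit depth.
    deep = Image.new("I", (16, 16), 70000)
    print(len(deep.histogram()))  # 256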

src/PIL/ImtImagePlugin.py

@@ -22,7 +22,7 @@ from . import Image, ImageFile
 #
 # --------------------------------------------------------------------

-field = re.compile(br"([a-z]*) ([^ \r\n]*)")
+field = re.compile(rb"([a-z]*) ([^ \r\n]*)")


 ##

src/PIL/PdfParser.py

@@ -576,42 +576,42 @@ class PdfParser:
             self.xref_table[reference.object_id] = (offset, 0)
         return reference

-    delimiter = br"[][()<>{}/%]"
-    delimiter_or_ws = br"[][()<>{}/%\000\011\012\014\015\040]"
-    whitespace = br"[\000\011\012\014\015\040]"
-    whitespace_or_hex = br"[\000\011\012\014\015\0400-9a-fA-F]"
+    delimiter = rb"[][()<>{}/%]"
+    delimiter_or_ws = rb"[][()<>{}/%\000\011\012\014\015\040]"
+    whitespace = rb"[\000\011\012\014\015\040]"
+    whitespace_or_hex = rb"[\000\011\012\014\015\0400-9a-fA-F]"
     whitespace_optional = whitespace + b"*"
     whitespace_mandatory = whitespace + b"+"
     # No "\012" aka "\n" or "\015" aka "\r":
-    whitespace_optional_no_nl = br"[\000\011\014\040]*"
-    newline_only = br"[\r\n]+"
+    whitespace_optional_no_nl = rb"[\000\011\014\040]*"
+    newline_only = rb"[\r\n]+"
     newline = whitespace_optional_no_nl + newline_only + whitespace_optional_no_nl
     re_trailer_end = re.compile(
         whitespace_mandatory
-        + br"trailer"
+        + rb"trailer"
         + whitespace_optional
-        + br"\<\<(.*\>\>)"
+        + rb"\<\<(.*\>\>)"
         + newline
-        + br"startxref"
+        + rb"startxref"
         + newline
-        + br"([0-9]+)"
+        + rb"([0-9]+)"
         + newline
-        + br"%%EOF"
+        + rb"%%EOF"
         + whitespace_optional
-        + br"$",
+        + rb"$",
         re.DOTALL,
     )
     re_trailer_prev = re.compile(
         whitespace_optional
-        + br"trailer"
+        + rb"trailer"
         + whitespace_optional
-        + br"\<\<(.*?\>\>)"
+        + rb"\<\<(.*?\>\>)"
         + newline
-        + br"startxref"
+        + rb"startxref"
         + newline
-        + br"([0-9]+)"
+        + rb"([0-9]+)"
         + newline
-        + br"%%EOF"
+        + rb"%%EOF"
         + whitespace_optional,
         re.DOTALL,
     )
@@ -655,12 +655,12 @@ class PdfParser:
     re_whitespace_optional = re.compile(whitespace_optional)
     re_name = re.compile(
         whitespace_optional
-        + br"/([!-$&'*-.0-;=?-Z\\^-z|~]+)(?="
+        + rb"/([!-$&'*-.0-;=?-Z\\^-z|~]+)(?="
         + delimiter_or_ws
-        + br")"
+        + rb")"
     )
-    re_dict_start = re.compile(whitespace_optional + br"\<\<")
-    re_dict_end = re.compile(whitespace_optional + br"\>\>" + whitespace_optional)
+    re_dict_start = re.compile(whitespace_optional + rb"\<\<")
+    re_dict_end = re.compile(whitespace_optional + rb"\>\>" + whitespace_optional)

     @classmethod
     def interpret_trailer(cls, trailer_data):
@@ -689,7 +689,7 @@ class PdfParser:
         )
         return trailer

-    re_hashes_in_name = re.compile(br"([^#]*)(#([0-9a-fA-F]{2}))?")
+    re_hashes_in_name = re.compile(rb"([^#]*)(#([0-9a-fA-F]{2}))?")

     @classmethod
     def interpret_name(cls, raw, as_text=False):
@@ -704,53 +704,53 @@ class PdfParser:
         else:
             return bytes(name)

-    re_null = re.compile(whitespace_optional + br"null(?=" + delimiter_or_ws + br")")
-    re_true = re.compile(whitespace_optional + br"true(?=" + delimiter_or_ws + br")")
-    re_false = re.compile(whitespace_optional + br"false(?=" + delimiter_or_ws + br")")
+    re_null = re.compile(whitespace_optional + rb"null(?=" + delimiter_or_ws + rb")")
+    re_true = re.compile(whitespace_optional + rb"true(?=" + delimiter_or_ws + rb")")
+    re_false = re.compile(whitespace_optional + rb"false(?=" + delimiter_or_ws + rb")")
     re_int = re.compile(
-        whitespace_optional + br"([-+]?[0-9]+)(?=" + delimiter_or_ws + br")"
+        whitespace_optional + rb"([-+]?[0-9]+)(?=" + delimiter_or_ws + rb")"
     )
     re_real = re.compile(
         whitespace_optional
-        + br"([-+]?([0-9]+\.[0-9]*|[0-9]*\.[0-9]+))(?="
+        + rb"([-+]?([0-9]+\.[0-9]*|[0-9]*\.[0-9]+))(?="
         + delimiter_or_ws
-        + br")"
+        + rb")"
     )
-    re_array_start = re.compile(whitespace_optional + br"\[")
-    re_array_end = re.compile(whitespace_optional + br"]")
+    re_array_start = re.compile(whitespace_optional + rb"\[")
+    re_array_end = re.compile(whitespace_optional + rb"]")
     re_string_hex = re.compile(
-        whitespace_optional + br"\<(" + whitespace_or_hex + br"*)\>"
+        whitespace_optional + rb"\<(" + whitespace_or_hex + rb"*)\>"
     )
-    re_string_lit = re.compile(whitespace_optional + br"\(")
+    re_string_lit = re.compile(whitespace_optional + rb"\(")
     re_indirect_reference = re.compile(
         whitespace_optional
-        + br"([-+]?[0-9]+)"
+        + rb"([-+]?[0-9]+)"
         + whitespace_mandatory
-        + br"([-+]?[0-9]+)"
+        + rb"([-+]?[0-9]+)"
         + whitespace_mandatory
-        + br"R(?="
+        + rb"R(?="
         + delimiter_or_ws
-        + br")"
+        + rb")"
     )
     re_indirect_def_start = re.compile(
         whitespace_optional
-        + br"([-+]?[0-9]+)"
+        + rb"([-+]?[0-9]+)"
         + whitespace_mandatory
-        + br"([-+]?[0-9]+)"
+        + rb"([-+]?[0-9]+)"
         + whitespace_mandatory
-        + br"obj(?="
+        + rb"obj(?="
         + delimiter_or_ws
-        + br")"
+        + rb")"
     )
     re_indirect_def_end = re.compile(
-        whitespace_optional + br"endobj(?=" + delimiter_or_ws + br")"
+        whitespace_optional + rb"endobj(?=" + delimiter_or_ws + rb")"
     )
     re_comment = re.compile(
-        br"(" + whitespace_optional + br"%[^\r\n]*" + newline + br")*"
+        rb"(" + whitespace_optional + rb"%[^\r\n]*" + newline + rb")*"
     )
-    re_stream_start = re.compile(whitespace_optional + br"stream\r?\n")
+    re_stream_start = re.compile(whitespace_optional + rb"stream\r?\n")
     re_stream_end = re.compile(
-        whitespace_optional + br"endstream(?=" + delimiter_or_ws + br")"
+        whitespace_optional + rb"endstream(?=" + delimiter_or_ws + rb")"
     )

     @classmethod
@@ -876,7 +876,7 @@ class PdfParser:
         raise PdfFormatError("unrecognized object: " + repr(data[offset : offset + 32]))

     re_lit_str_token = re.compile(
-        br"(\\[nrtbf()\\])|(\\[0-9]{1,3})|(\\(\r\n|\r|\n))|(\r\n|\r|\n)|(\()|(\))"
+        rb"(\\[nrtbf()\\])|(\\[0-9]{1,3})|(\\(\r\n|\r|\n))|(\r\n|\r|\n)|(\()|(\))"
     )
     escaped_chars = {
         b"n": b"\n",
@@ -922,16 +922,16 @@ class PdfParser:
             offset = m.end()
         raise PdfFormatError("unfinished literal string")

-    re_xref_section_start = re.compile(whitespace_optional + br"xref" + newline)
+    re_xref_section_start = re.compile(whitespace_optional + rb"xref" + newline)
     re_xref_subsection_start = re.compile(
         whitespace_optional
-        + br"([0-9]+)"
+        + rb"([0-9]+)"
         + whitespace_mandatory
-        + br"([0-9]+)"
+        + rb"([0-9]+)"
         + whitespace_optional
        + newline_only
     )
-    re_xref_entry = re.compile(br"([0-9]{10}) ([0-9]{5}) ([fn])( \r| \n|\r\n)")
+    re_xref_entry = re.compile(rb"([0-9]{10}) ([0-9]{5}) ([fn])( \r| \n|\r\n)")

     def read_xref_table(self, xref_section_offset):
         subsection_found = False

src/PIL/PngImagePlugin.py

@@ -48,7 +48,7 @@ from ._binary import o32be as o32

 logger = logging.getLogger(__name__)

-is_cid = re.compile(br"\w\w\w\w").match
+is_cid = re.compile(rb"\w\w\w\w").match


 _MAGIC = b"\211PNG\r\n\032\n"

src/PIL/TiffImagePlugin.py

@@ -493,7 +493,7 @@ class ImageFileDirectory_v2(MutableMapping):
            endianness.
        :param prefix: Override the endianness of the file.
        """
-       if ifh[:4] not in PREFIXES:
+       if not _accept(ifh):
            raise SyntaxError(f"not a TIFF file (header {repr(ifh)} not valid)")
        self._prefix = prefix if prefix is not None else ifh[:2]
        if self._prefix == MM:

src/PIL/WmfImagePlugin.py

@@ -21,7 +21,6 @@

 from . import Image, ImageFile
 from ._binary import i16le as word
-from ._binary import i32le as dword
 from ._binary import si16le as short
 from ._binary import si32le as _long

@@ -112,7 +111,7 @@ class WmfStubImageFile(ImageFile.StubImageFile):
             if s[22:26] != b"\x01\x00\t\x00":
                 raise SyntaxError("Unsupported WMF file format")

-        elif dword(s) == 1 and s[40:44] == b" EMF":
+        elif s[:4] == b"\x01\x00\x00\x00" and s[40:44] == b" EMF":
             # enhanced metafile

             # get bounding box
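
The new test is equivalent to the old one: dword() read a little-endian 32-bit integer, and the value 1 corresponds exactly to the byte sequence 01 00 00 00, so comparing the raw prefix bytes gives the same result without the helper. A one-line check (standalone, for illustration only):

    import struct

    # Old check: i32le(s) == 1; new check: s[:4] == b"\x01\x00\x00\x00".
    assert struct.unpack("<I", b"\x01\x00\x00\x00")[0] == 1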

src/PIL/XbmImagePlugin.py

@@ -25,7 +25,7 @@ from . import Image, ImageFile

 # XBM header
 xbm_head = re.compile(
-    br"\s*#define[ \t]+.*_width[ \t]+(?P<width>[0-9]+)[\r\n]+"
+    rb"\s*#define[ \t]+.*_width[ \t]+(?P<width>[0-9]+)[\r\n]+"
     b"#define[ \t]+.*_height[ \t]+(?P<height>[0-9]+)[\r\n]+"
     b"(?P<hotspot>"
     b"#define[ \t]+[^_]*_x_hot[ \t]+(?P<xhot>[0-9]+)[\r\n]+"
@@ -52,7 +52,8 @@ class XbmImageFile(ImageFile.ImageFile):

         m = xbm_head.match(self.fp.read(512))

-        if m:
+        if not m:
+            raise SyntaxError("not a XBM file")

         xsize = int(m.group("width"))
         ysize = int(m.group("height"))

winbuild/build_prepare.py

@@ -464,7 +464,7 @@ def build_dep_all():
         if dep_name in disabled:
             continue
         script = build_dep(dep_name)
-        lines.append(fr'cmd.exe /c "{{build_dir}}\{script}"')
+        lines.append(rf'cmd.exe /c "{{build_dir}}\{script}"')
         lines.append("if errorlevel 1 echo Build failed! && exit /B 1")
     lines.append("@echo All Pillow dependencies built successfully!")
     write_script("build_dep_all.cmd", lines)