Merge branch 'main' into register_handler

Andrew Murray 2024-12-18 19:03:43 +11:00 committed by GitHub
commit 1c26083076
77 changed files with 951 additions and 331 deletions

View File

@@ -1 +1 @@
-cibuildwheel==2.21.3
+cibuildwheel==2.22.0

View File

@@ -19,7 +19,6 @@ Please send a pull request to the `main` branch. Please include [documentation](
 - Follow PEP 8.
 - When committing only documentation changes please include `[ci skip]` in the commit message to avoid running tests on AppVeyor.
 - Include [release notes](https://github.com/python-pillow/Pillow/tree/main/docs/releasenotes) as needed or appropriate with your bug fixes, feature additions and tests.
-- Do not add to the [changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst) for proposed changes, as that is updated after changes are merged.
 ## Reporting Issues

View File

@@ -3,18 +3,19 @@ tag-template: "$NEXT_MINOR_VERSION"
 change-template: '- $TITLE #$NUMBER [@$AUTHOR]'
 categories:
-  - title: "Dependencies"
-    label: "Dependency"
+  - title: "Removals"
+    label: "Removal"
   - title: "Deprecations"
     label: "Deprecation"
   - title: "Documentation"
     label: "Documentation"
-  - title: "Removals"
-    label: "Removal"
+  - title: "Dependencies"
+    label: "Dependency"
   - title: "Testing"
     label: "Testing"
   - title: "Type hints"
     label: "Type hints"
+  - title: "Other changes"
 exclude-labels:
   - "changelog: skip"
@@ -23,6 +24,4 @@ template: |
   https://pillow.readthedocs.io/en/stable/releasenotes/$NEXT_MINOR_VERSION.html
-  ## Changes
   $CHANGES

.github/renovate.json
View File

@@ -1,7 +1,7 @@
 {
   "$schema": "https://docs.renovatebot.com/renovate-schema.json",
   "extends": [
-    "config:base"
+    "config:recommended"
   ],
   "labels": [
     "Dependency"
@@ -9,9 +9,13 @@
   "packageRules": [
     {
       "groupName": "github-actions",
-      "matchManagers": ["github-actions"],
-      "separateMajorMinor": "false"
+      "matchManagers": [
+        "github-actions"
+      ],
+      "separateMajorMinor": false
     }
   ],
-  "schedule": ["on the 3rd day of the month"]
+  "schedule": [
+    "on the 3rd day of the month"
+  ]
 }

View File

@@ -33,6 +33,8 @@ jobs:
     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false
     - name: Set up Python
       uses: actions/setup-python@v5

View File

@@ -21,6 +21,8 @@ jobs:
     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false
     - name: pre-commit cache
       uses: actions/cache@v4

View File

@@ -8,8 +8,8 @@ fi
 brew install \
     freetype \
     ghostscript \
+    jpeg-turbo \
     libimagequant \
-    libjpeg \
     libtiff \
     little-cms2 \
     openjpeg \

View File

@@ -6,7 +6,7 @@ on:
   workflow_dispatch:
 permissions:
-  issues: write
+  contents: read
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
@@ -15,6 +15,8 @@ concurrency:
 jobs:
   stale:
     if: github.repository_owner == 'python-pillow'
+    permissions:
+      issues: write
     runs-on: ubuntu-latest

View File

@@ -48,6 +48,8 @@ jobs:
     - name: Checkout Pillow
       uses: actions/checkout@v4
+      with:
+        persist-credentials: false
     - name: Install Cygwin
       uses: cygwin/cygwin-install-action@v4
@@ -131,11 +133,12 @@ jobs:
     - name: After success
       run: |
        bash.exe .ci/after_success.sh
+       rm C:\cygwin\bin\bash.EXE
     - name: Upload coverage
-      uses: codecov/codecov-action@v4
+      uses: codecov/codecov-action@v5
       with:
-        file: ./coverage.xml
+        files: ./coverage.xml
         flags: GHA_Cygwin
         name: Cygwin Python 3.${{ matrix.python-minor-version }}
         token: ${{ secrets.CODECOV_ORG_TOKEN }}

View File

@@ -47,6 +47,7 @@ jobs:
           debian-12-bookworm-x86,
           debian-12-bookworm-amd64,
           fedora-40-amd64,
+          fedora-41-amd64,
           gentoo,
           ubuntu-22.04-jammy-amd64,
           ubuntu-24.04-noble-amd64,
@@ -64,6 +65,8 @@ jobs:
     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false
     - name: Build system information
       run: python3 .github/workflows/system-info.py
@@ -97,11 +100,10 @@ jobs:
         MATRIX_DOCKER: ${{ matrix.docker }}
     - name: Upload coverage
-      uses: codecov/codecov-action@v4
+      uses: codecov/codecov-action@v5
       with:
         flags: GHA_Docker
         name: ${{ matrix.docker }}
-        gcov: true
         token: ${{ secrets.CODECOV_ORG_TOKEN }}
   success:

View File

@@ -46,6 +46,8 @@ jobs:
     steps:
     - name: Checkout Pillow
       uses: actions/checkout@v4
+      with:
+        persist-credentials: false
     - name: Set up shell
       run: echo "C:\msys64\usr\bin\" >> $env:GITHUB_PATH
@@ -66,16 +68,16 @@ jobs:
          mingw-w64-x86_64-openjpeg2 \
          mingw-w64-x86_64-python3-numpy \
          mingw-w64-x86_64-python3-olefile \
-         mingw-w64-x86_64-python3-setuptools \
+         mingw-w64-x86_64-python3-pip \
+         mingw-w64-x86_64-python-pytest \
+         mingw-w64-x86_64-python-pytest-cov \
+         mingw-w64-x86_64-python-pytest-timeout \
          mingw-w64-x86_64-python-pyqt6
-        python3 -m ensurepip
-        python3 -m pip install pyroma pytest pytest-cov pytest-timeout
        pushd depends && ./install_extra_test_images.sh && popd
     - name: Build Pillow
-      run: SETUPTOOLS_USE_DISTUTILS="stdlib" CFLAGS="-coverage" python3 -m pip install .
+      run: CFLAGS="-coverage" python3 -m pip install .
     - name: Test Pillow
       run: |
@@ -83,9 +85,9 @@ jobs:
        .ci/test.sh
     - name: Upload coverage
-      uses: codecov/codecov-action@v4
+      uses: codecov/codecov-action@v5
       with:
-        file: ./coverage.xml
+        files: ./coverage.xml
         flags: GHA_Windows
         name: "MSYS2 MinGW"
         token: ${{ secrets.CODECOV_ORG_TOKEN }}

View File

@@ -40,6 +40,8 @@ jobs:
     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false
     - name: Build system information
       run: python3 .github/workflows/system-info.py

View File

@@ -44,16 +44,20 @@ jobs:
     steps:
     - name: Checkout Pillow
       uses: actions/checkout@v4
+      with:
+        persist-credentials: false
     - name: Checkout cached dependencies
       uses: actions/checkout@v4
       with:
+        persist-credentials: false
        repository: python-pillow/pillow-depends
        path: winbuild\depends
     - name: Checkout extra test images
       uses: actions/checkout@v4
       with:
+        persist-credentials: false
        repository: python-pillow/test-images
        path: Tests\test-images
@@ -69,16 +73,14 @@ jobs:
     - name: Print build system information
       run: python3 .github/workflows/system-info.py
-    - name: Install Python dependencies
-      run: >
-        python3 -m pip install
-        coverage>=7.4.2
-        defusedxml
-        olefile
-        pyroma
-        pytest
-        pytest-cov
-        pytest-timeout
+    - name: Upgrade pip
+      run: |
+        python3 -m pip install --upgrade pip
+    - name: Install CPython dependencies
+      if: "!contains(matrix.python-version, 'pypy')"
+      run: |
+        python3 -m pip install PyQt6
     - name: Install dependencies
       id: install
@@ -178,7 +180,7 @@ jobs:
     - name: Build Pillow
       run: |
        $FLAGS="-C raqm=vendor -C fribidi=vendor"
-       cmd /c "winbuild\build\build_env.cmd && $env:pythonLocation\python.exe -m pip install -v $FLAGS ."
+       cmd /c "winbuild\build\build_env.cmd && $env:pythonLocation\python.exe -m pip install -v $FLAGS .[tests]"
        & $env:pythonLocation\python.exe selftest.py --installed
       shell: pwsh
@@ -213,9 +215,9 @@ jobs:
       shell: pwsh
     - name: Upload coverage
-      uses: codecov/codecov-action@v4
+      uses: codecov/codecov-action@v5
       with:
-        file: ./coverage.xml
+        files: ./coverage.xml
        flags: GHA_Windows
        name: ${{ runner.os }} Python ${{ matrix.python-version }}
        token: ${{ secrets.CODECOV_ORG_TOKEN }}

View File

@@ -42,6 +42,7 @@ jobs:
        ]
        python-version: [
          "pypy3.10",
+         "3.13t",
          "3.13",
          "3.12",
          "3.11",
@@ -52,21 +53,22 @@
        - { python-version: "3.11", PYTHONOPTIMIZE: 1, REVERSE: "--reverse" }
        - { python-version: "3.10", PYTHONOPTIMIZE: 2 }
        # Free-threaded
-       - { os: "ubuntu-latest", python-version: "3.13-dev", disable-gil: true }
+       - { python-version: "3.13t", disable-gil: true }
        # M1 only available for 3.10+
        - { os: "macos-13", python-version: "3.9" }
        exclude:
        - { os: "macos-latest", python-version: "3.9" }
     runs-on: ${{ matrix.os }}
-    name: ${{ matrix.os }} Python ${{ matrix.python-version }} ${{ matrix.disable-gil && 'free-threaded' || '' }}
+    name: ${{ matrix.os }} Python ${{ matrix.python-version }}
     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v5
-      if: "${{ !matrix.disable-gil }}"
+      uses: Quansight-Labs/setup-python@v5
       with:
        python-version: ${{ matrix.python-version }}
        allow-prereleases: true
@@ -75,13 +77,6 @@
          ".ci/*.sh"
          "pyproject.toml"
-    - name: Set up Python ${{ matrix.python-version }} (free-threaded)
-      uses: deadsnakes/action@v3.2.0
-      if: "${{ matrix.disable-gil }}"
-      with:
-        python-version: ${{ matrix.python-version }}
-        nogil: ${{ matrix.disable-gil }}
     - name: Set PYTHON_GIL
       if: "${{ matrix.disable-gil }}"
       run: |
@@ -114,7 +109,7 @@
        GHA_PYTHON_VERSION: ${{ matrix.python-version }}
     - name: Register gcc problem matcher
-      if: "matrix.os == 'ubuntu-latest' && matrix.python-version == '3.12'"
+      if: "matrix.os == 'ubuntu-latest' && matrix.python-version == '3.13'"
       run: echo "::add-matcher::.github/problem-matchers/gcc.json"
     - name: Build
@@ -154,11 +149,10 @@
        .ci/after_success.sh
     - name: Upload coverage
-      uses: codecov/codecov-action@v4
+      uses: codecov/codecov-action@v5
       with:
        flags: ${{ matrix.os == 'ubuntu-latest' && 'GHA_Ubuntu' || 'GHA_macOS' }}
        name: ${{ matrix.os }} Python ${{ matrix.python-version }}
-       gcov: true
        token: ${{ secrets.CODECOV_ORG_TOKEN }}
   success:

View File

@@ -1,11 +1,33 @@
 #!/bin/bash
-# Define custom utilities
-# Test for macOS with [ -n "$IS_MACOS" ]
-if [ -z "$IS_MACOS" ]; then
-    export MB_ML_LIBC=${AUDITWHEEL_POLICY::9}
-    export MB_ML_VER=${AUDITWHEEL_POLICY:9}
+# Setup that needs to be done before multibuild utils are invoked
+PROJECTDIR=$(pwd)
+if [[ "$(uname -s)" == "Darwin" ]]; then
+    # Safety check - macOS builds require that CIBW_ARCHS is set, and that it
+    # only contains a single value (even though cibuildwheel allows multiple
+    # values in CIBW_ARCHS).
+    if [[ -z "$CIBW_ARCHS" ]]; then
+        echo "ERROR: Pillow macOS builds require CIBW_ARCHS be defined."
+        exit 1
+    fi
+    if [[ "$CIBW_ARCHS" == *" "* ]]; then
+        echo "ERROR: Pillow macOS builds only support a single architecture in CIBW_ARCHS."
+        exit 1
+    fi
+    # Build macOS dependencies in `build/darwin`
+    # Install them into `build/deps/darwin`
+    WORKDIR=$(pwd)/build/darwin
+    BUILD_PREFIX=$(pwd)/build/deps/darwin
+else
+    # Build prefix will default to /usr/local
+    WORKDIR=$(pwd)/build
+    MB_ML_LIBC=${AUDITWHEEL_POLICY::9}
+    MB_ML_VER=${AUDITWHEEL_POLICY:9}
 fi
-export PLAT=$CIBW_ARCHS
+PLAT=$CIBW_ARCHS
+# Define custom utilities
 source wheels/multibuild/common_utils.sh
 source wheels/multibuild/library_builders.sh
 if [ -z "$IS_MACOS" ]; then
@@ -16,10 +38,10 @@ ARCHIVE_SDIR=pillow-depends-main
 # Package versions for fresh source builds
 FREETYPE_VERSION=2.13.2
-HARFBUZZ_VERSION=10.0.1
+HARFBUZZ_VERSION=10.1.0
 LIBPNG_VERSION=1.6.44
-JPEGTURBO_VERSION=3.0.4
-OPENJPEG_VERSION=2.5.2
+JPEGTURBO_VERSION=3.1.0
+OPENJPEG_VERSION=2.5.3
 XZ_VERSION=5.6.3
 TIFF_VERSION=4.6.0
 LCMS2_VERSION=2.16
@@ -28,72 +50,90 @@ if [[ -n "$IS_MACOS" ]]; then
 else
     GIFLIB_VERSION=5.2.1
 fi
-if [[ -n "$IS_MACOS" ]] || [[ "$MB_ML_VER" != 2014 ]]; then
-    ZLIB_VERSION=1.3.1
-else
-    ZLIB_VERSION=1.2.8
-fi
+ZLIB_NG_VERSION=2.2.2
 LIBWEBP_VERSION=1.4.0
 BZIP2_VERSION=1.0.8
 LIBXCB_VERSION=1.17.0
 BROTLI_VERSION=1.1.0
+function build_pkg_config {
+    if [ -e pkg-config-stamp ]; then return; fi
+    # This essentially duplicates the Homebrew recipe
+    ORIGINAL_CFLAGS=$CFLAGS
+    CFLAGS="$CFLAGS -Wno-int-conversion"
+    build_simple pkg-config 0.29.2 https://pkg-config.freedesktop.org/releases tar.gz \
+        --disable-debug --disable-host-tool --with-internal-glib \
+        --with-pc-path=$BUILD_PREFIX/share/pkgconfig:$BUILD_PREFIX/lib/pkgconfig \
+        --with-system-include-path=$(xcrun --show-sdk-path --sdk macosx)/usr/include
+    CFLAGS=$ORIGINAL_CFLAGS
+    export PKG_CONFIG=$BUILD_PREFIX/bin/pkg-config
+    touch pkg-config-stamp
+}
+function build_zlib_ng {
+    if [ -e zlib-stamp ]; then return; fi
+    fetch_unpack https://github.com/zlib-ng/zlib-ng/archive/$ZLIB_NG_VERSION.tar.gz zlib-ng-$ZLIB_NG_VERSION.tar.gz
+    (cd zlib-ng-$ZLIB_NG_VERSION \
+        && ./configure --prefix=$BUILD_PREFIX --zlib-compat \
+        && make -j4 \
+        && make install)
+    touch zlib-stamp
+}
 function build_brotli {
-    local cmake=$(get_modern_cmake)
+    if [ -e brotli-stamp ]; then return; fi
     local out_dir=$(fetch_unpack https://github.com/google/brotli/archive/v$BROTLI_VERSION.tar.gz brotli-$BROTLI_VERSION.tar.gz)
     (cd $out_dir \
-        && $cmake -DCMAKE_INSTALL_PREFIX=$BUILD_PREFIX -DCMAKE_INSTALL_NAME_DIR=$BUILD_PREFIX/lib . \
+        && cmake -DCMAKE_INSTALL_PREFIX=$BUILD_PREFIX -DCMAKE_INSTALL_LIBDIR=$BUILD_PREFIX/lib -DCMAKE_INSTALL_NAME_DIR=$BUILD_PREFIX/lib . \
         && make install)
-    if [[ "$MB_ML_LIBC" == "manylinux" ]]; then
-        cp /usr/local/lib64/libbrotli* /usr/local/lib
-        cp /usr/local/lib64/pkgconfig/libbrotli* /usr/local/lib/pkgconfig
-    fi
+    touch brotli-stamp
 }
 function build_harfbuzz {
+    if [ -e harfbuzz-stamp ]; then return; fi
     python3 -m pip install meson ninja
-    local out_dir=$(fetch_unpack https://github.com/harfbuzz/harfbuzz/releases/download/$HARFBUZZ_VERSION/$HARFBUZZ_VERSION.tar.xz harfbuzz-$HARFBUZZ_VERSION.tar.xz)
+    local out_dir=$(fetch_unpack https://github.com/harfbuzz/harfbuzz/releases/download/$HARFBUZZ_VERSION/harfbuzz-$HARFBUZZ_VERSION.tar.xz harfbuzz-$HARFBUZZ_VERSION.tar.xz)
     (cd $out_dir \
-        && meson setup build --buildtype=release -Dfreetype=enabled -Dglib=disabled)
+        && meson setup build --prefix=$BUILD_PREFIX --libdir=$BUILD_PREFIX/lib --buildtype=release -Dfreetype=enabled -Dglib=disabled)
     (cd $out_dir/build \
         && meson install)
-    if [[ "$MB_ML_LIBC" == "manylinux" ]]; then
-        cp /usr/local/lib64/libharfbuzz* /usr/local/lib
-    fi
+    touch harfbuzz-stamp
 }
 function build {
-    if [[ -n "$IS_MACOS" ]] && [[ "$CIBW_ARCHS" == "arm64" ]]; then
-        sudo chown -R runner /usr/local
-    fi
     build_xz
     if [ -z "$IS_ALPINE" ] && [ -z "$IS_MACOS" ]; then
        yum remove -y zlib-devel
     fi
-    build_new_zlib
+    build_zlib_ng
     build_simple xcb-proto 1.17.0 https://xorg.freedesktop.org/archive/individual/proto
     if [ -n "$IS_MACOS" ]; then
        build_simple xorgproto 2024.1 https://www.x.org/pub/individual/proto
-       build_simple libXau 1.0.11 https://www.x.org/pub/individual/lib
+       build_simple libXau 1.0.12 https://www.x.org/pub/individual/lib
        build_simple libpthread-stubs 0.5 https://xcb.freedesktop.org/dist
-       if [[ "$CIBW_ARCHS" == "arm64" ]]; then
-           cp /usr/local/share/pkgconfig/xcb-proto.pc /usr/local/lib/pkgconfig
-       fi
     else
-       sed s/\${pc_sysrootdir\}// /usr/local/share/pkgconfig/xcb-proto.pc > /usr/local/lib/pkgconfig/xcb-proto.pc
+       sed s/\${pc_sysrootdir\}// $BUILD_PREFIX/share/pkgconfig/xcb-proto.pc > $BUILD_PREFIX/lib/pkgconfig/xcb-proto.pc
     fi
     build_simple libxcb $LIBXCB_VERSION https://www.x.org/releases/individual/lib
     build_libjpeg_turbo
-    build_tiff
+    if [ -n "$IS_MACOS" ]; then
+       # Custom tiff build to include jpeg; by default, configure won't include
+       # headers/libs in the custom macOS prefix. Explicitly disable webp,
+       # libdeflate and zstd, because on x86_64 macs, it will pick up the
+       # Homebrew versions of those libraries from /usr/local.
+       build_simple tiff $TIFF_VERSION https://download.osgeo.org/libtiff tar.gz \
+           --with-jpeg-include-dir=$BUILD_PREFIX/include --with-jpeg-lib-dir=$BUILD_PREFIX/lib \
+           --disable-webp --disable-libdeflate --disable-zstd
+    else
+       build_tiff
+    fi
     build_libpng
     build_lcms2
     build_openjpeg
-    if [ -f /usr/local/lib64/libopenjp2.so ]; then
-        cp /usr/local/lib64/libopenjp2.so /usr/local/lib
-    fi
     ORIGINAL_CFLAGS=$CFLAGS
     CFLAGS="$CFLAGS -O3 -DNDEBUG"
@@ -115,31 +155,47 @@ function build {
     build_harfbuzz
 }
+# Perform all dependency builds in the build subfolder.
+mkdir -p $WORKDIR
+pushd $WORKDIR > /dev/null
 # Any stuff that you need to do before you start building the wheels
 # Runs in the root directory of this repository.
-curl -fsSL -o pillow-depends-main.zip https://github.com/python-pillow/pillow-depends/archive/main.zip
-untar pillow-depends-main.zip
+if [[ ! -d $WORKDIR/pillow-depends-main ]]; then
+    if [[ ! -f $PROJECTDIR/pillow-depends-main.zip ]]; then
+        echo "Download pillow dependency sources..."
+        curl -fSL -o $PROJECTDIR/pillow-depends-main.zip https://github.com/python-pillow/pillow-depends/archive/main.zip
+    fi
+    echo "Unpacking pillow dependency sources..."
+    untar $PROJECTDIR/pillow-depends-main.zip
+fi
 if [[ -n "$IS_MACOS" ]]; then
-    # libtiff and libxcb cause a conflict with building libtiff and libxcb
-    # libxau and libxdmcp cause an issue on macOS < 11
-    # remove cairo to fix building harfbuzz on arm64
-    # remove lcms2 and libpng to fix building openjpeg on arm64
-    # remove jpeg-turbo to avoid inclusion on arm64
-    # remove webp and zstd to avoid inclusion on x86_64
-    # curl from brew requires zstd, use system curl
-    brew remove --ignore-dependencies libpng libtiff libxcb libxau libxdmcp curl cairo lcms2 zstd
-    if [[ "$CIBW_ARCHS" == "arm64" ]]; then
-        brew remove --ignore-dependencies jpeg-turbo
-    else
-        brew remove --ignore-dependencies webp
-    fi
-    brew install pkg-config
+    # Homebrew (or similar packaging environments) install can contain some of
+    # the libraries that we're going to build. However, they may be compiled
+    # with a MACOSX_DEPLOYMENT_TARGET that doesn't match what we want to use,
+    # and they may bring in other dependencies that we don't want. The same will
+    # be true of any other locations on the path. To avoid conflicts, strip the
+    # path down to the bare minimum (which, on macOS, won't include any
+    # development dependencies).
+    export PATH="$BUILD_PREFIX/bin:$(dirname $(which python3)):/usr/bin:/bin:/usr/sbin:/sbin:/Library/Apple/usr/bin"
+    export CMAKE_PREFIX_PATH=$BUILD_PREFIX
+    # Ensure the basic structure of the build prefix directory exists.
+    mkdir -p "$BUILD_PREFIX/bin"
+    mkdir -p "$BUILD_PREFIX/lib"
+    # Ensure pkg-config is available
+    build_pkg_config
+    # Ensure cmake is available
+    python3 -m pip install cmake
 fi
 wrap_wheel_builder build
+# Return to the project root to finish the build
+popd > /dev/null
 # Append licenses
 for filename in wheels/dependency_licenses/*; do
     echo -e "\n\n----\n\n$(basename $filename | cut -f 1 -d '.')\n" | cat >> LICENSE

View File

@@ -1,12 +1,24 @@
 #!/bin/bash
 set -e
+# Ensure fribidi is installed by the system.
 if [[ "$OSTYPE" == "darwin"* ]]; then
-    brew install fribidi
-    export PKG_CONFIG_PATH="/usr/local/opt/openblas/lib/pkgconfig"
-    if [ -f /opt/homebrew/lib/libfribidi.dylib ]; then
-        sudo cp /opt/homebrew/lib/libfribidi.dylib /usr/local/lib
+    # If Homebrew is on the path during the build, it may leak into the wheels.
+    # However, we *do* need Homebrew to provide a copy of fribidi for
+    # testing purposes so that we can verify the fribidi shim works as expected.
+    if [[ "$(uname -m)" == "x86_64" ]]; then
+        HOMEBREW_PREFIX=/usr/local
+    else
+        HOMEBREW_PREFIX=/opt/homebrew
     fi
+    $HOMEBREW_PREFIX/bin/brew install fribidi
+    # Add the lib folder for fribidi so that the vendored library can be found.
+    # Don't use $HOMEWBREW_PREFIX/lib directly - use the lib folder where the
+    # installed copy of fribidi is cellared. This ensures we don't pick up the
+    # Homebrew version of any other library that we're dependent on (most notably,
+    # freetype).
+    export DYLD_LIBRARY_PATH=$(dirname $(realpath $HOMEBREW_PREFIX/lib/libfribidi.dylib))
 elif [ "${AUDITWHEEL_POLICY::9}" == "musllinux" ]; then
     apk add curl fribidi
 else

View File

@@ -61,6 +61,7 @@ jobs:
     steps:
     - uses: actions/checkout@v4
       with:
+        persist-credentials: false
        submodules: true
     - uses: actions/setup-python@v5
@@ -84,7 +85,7 @@ jobs:
        CIBW_ARCHS: "aarch64"
        # Likewise, select only one Python version per job to speed this up.
        CIBW_BUILD: "${{ matrix.python-version }}-${{ matrix.spec == 'musllinux' && 'musllinux' || 'manylinux' }}*"
-       CIBW_PRERELEASE_PYTHONS: True
+       CIBW_ENABLE: cpython-prerelease
        # Extra options for manylinux.
        CIBW_MANYLINUX_AARCH64_IMAGE: ${{ matrix.spec }}
        CIBW_MANYLINUX_PYPY_AARCH64_IMAGE: ${{ matrix.spec }}
@@ -132,6 +133,7 @@ jobs:
     steps:
     - uses: actions/checkout@v4
       with:
+        persist-credentials: false
        submodules: true
     - uses: actions/setup-python@v5
@@ -148,10 +150,9 @@ jobs:
      env:
        CIBW_ARCHS: ${{ matrix.cibw_arch }}
        CIBW_BUILD: ${{ matrix.build }}
-       CIBW_FREE_THREADED_SUPPORT: True
+       CIBW_ENABLE: cpython-prerelease cpython-freethreading
        CIBW_MANYLINUX_PYPY_X86_64_IMAGE: ${{ matrix.manylinux }}
        CIBW_MANYLINUX_X86_64_IMAGE: ${{ matrix.manylinux }}
-       CIBW_PRERELEASE_PYTHONS: True
        CIBW_SKIP: pp39-*
        MACOSX_DEPLOYMENT_TARGET: ${{ matrix.macosx_deployment_target }}
@@ -173,10 +174,13 @@ jobs:
        - cibw_arch: ARM64
     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false
     - name: Checkout extra test images
       uses: actions/checkout@v4
       with:
+        persist-credentials: false
        repository: python-pillow/test-images
        path: Tests\test-images
@@ -223,8 +227,7 @@ jobs:
        CIBW_ARCHS: ${{ matrix.cibw_arch }}
        CIBW_BEFORE_ALL: "{package}\\winbuild\\build\\build_dep_all.cmd"
        CIBW_CACHE_PATH: "C:\\cibw"
-       CIBW_FREE_THREADED_SUPPORT: True
+       CIBW_ENABLE: cpython-prerelease cpython-freethreading
-       CIBW_PRERELEASE_PYTHONS: True
        CIBW_SKIP: pp39-*
        CIBW_TEST_SKIP: "*-win_arm64"
        CIBW_TEST_COMMAND: 'docker run --rm
@@ -253,6 +256,8 @@ jobs:
     runs-on: ubuntu-latest
     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false
     - name: Set up Python
       uses: actions/setup-python@v5

.gitignore
View File

@@ -19,6 +19,7 @@ lib64/
 parts/
 sdist/
 var/
+wheelhouse/
 *.egg-info/
 .installed.cfg
 *.egg
@@ -90,5 +91,9 @@ Tests/images/msp
 Tests/images/picins
 Tests/images/sunraster
+# Test and dependency downloads
+pillow-depends-main.zip
+pillow-test-images.zip
 # pyinstaller
 *.spec

View File

@@ -1,17 +1,17 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.6.9
+    rev: v0.8.1
     hooks:
       - id: ruff
        args: [--exit-non-zero-on-fix]
   - repo: https://github.com/psf/black-pre-commit-mirror
-    rev: 24.8.0
+    rev: 24.10.0
     hooks:
       - id: black
   - repo: https://github.com/PyCQA/bandit
-    rev: 1.7.10
+    rev: 1.8.0
     hooks:
       - id: bandit
        args: [--severity-level=high]
@@ -24,7 +24,7 @@ repos:
        exclude: (Makefile$|\.bat$|\.cmake$|\.eps$|\.fits$|\.gd$|\.opt$)
   - repo: https://github.com/pre-commit/mirrors-clang-format
-    rev: v19.1.1
+    rev: v19.1.4
     hooks:
       - id: clang-format
        types: [c]
@@ -50,7 +50,7 @@ repos:
        exclude: ^.github/.*TEMPLATE|^Tests/(fonts|images)/
   - repo: https://github.com/python-jsonschema/check-jsonschema
-    rev: 0.29.3
+    rev: 0.30.0
     hooks:
       - id: check-github-workflows
       - id: check-readthedocs
@@ -62,12 +62,12 @@ repos:
       - id: sphinx-lint
   - repo: https://github.com/tox-dev/pyproject-fmt
-    rev: 2.2.4
+    rev: v2.5.0
     hooks:
       - id: pyproject-fmt
   - repo: https://github.com/abravalheri/validate-pyproject
-    rev: v0.20.2
+    rev: v0.23
     hooks:
       - id: validate-pyproject
        additional_dependencies: [trove-classifiers>=2024.10.12]

View File

@@ -2,14 +2,12 @@
 Changelog (Pillow)
 ==================
-11.1.0 (unreleased)
--------------------
+11.1.0 and newer
+----------------
-- Fix IFDRational with a zero denominator #8474
-  [radarhere]
+See GitHub Releases:
-- Fixed disabling a feature during install #8469
-  [radarhere]
+- https://github.com/python-pillow/Pillow/releases
 11.0.0 (2024-10-15)
 -------------------

View File

@@ -5,7 +5,7 @@ The Python Imaging Library (PIL) is
 Pillow is the friendly PIL fork. It is
-Copyright © 2010-2024 by Jeffrey A. Clark and contributors
+Copyright © 2010 by Jeffrey A. Clark and contributors
 Like PIL, Pillow is licensed under the open source MIT-CMU License:

View File

@@ -107,7 +107,7 @@ The core image library is designed for fast access to data stored in a few basic
 - [Issues](https://github.com/python-pillow/Pillow/issues)
 - [Pull requests](https://github.com/python-pillow/Pillow/pulls)
 - [Release notes](https://pillow.readthedocs.io/en/stable/releasenotes/index.html)
-- [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst)
+- [Changelog](https://github.com/python-pillow/Pillow/releases)
   - [Pre-fork](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst#pre-fork)
 ## Report a Vulnerability

View File

@@ -12,7 +12,6 @@ Released quarterly on January 2nd, April 1st, July 1st and October 15th.
 * [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) and [AppVeyor](https://ci.appveyor.com/project/python-pillow/Pillow) to confirm passing tests in `main` branch.
 * [ ] Check that all the wheel builds pass the tests in the [GitHub Actions "Wheels" workflow](https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml) jobs by manually triggering them.
 * [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/), update version identifier in `src/PIL/_version.py`
-* [ ] Update `CHANGES.rst`.
 * [ ] Run pre-release check via `make release-test` in a freshly cloned repo.
 * [ ] Create branch and tag for release e.g.:
   ```bash
@@ -34,7 +33,6 @@ Released quarterly on January 2nd, April 1st, July 1st and October 15th.
 Released as needed for security, installation or critical bug fixes.
 * [ ] Make necessary changes in `main` branch.
-* [ ] Update `CHANGES.rst`.
 * [ ] Check out release branch e.g.:
   ```bash
   git checkout -t remotes/origin/5.2.x

View File

@@ -34,6 +34,7 @@ def test_wheel_features() -> None:
        "fribidi",
        "harfbuzz",
        "libjpeg_turbo",
+       "zlib_ng",
        "xcb",
    }

View File

@@ -22,6 +22,8 @@ def test_bad() -> None:
    for f in get_files("b"):
        # Assert that there is no unclosed file warning
        with warnings.catch_warnings():
+           warnings.simplefilter("error")
            try:
                with Image.open(f) as im:
                    im.load()
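
This hunk, and the similar test hunks below, add `warnings.simplefilter("error")` inside `warnings.catch_warnings()` so that a stray warning fails the test rather than passing silently. A minimal standalone sketch of that pattern; the `open_and_close` helper and the temporary file are hypothetical stand-ins for the Pillow calls under test, not Pillow code:

```python
from __future__ import annotations

import warnings
from pathlib import Path


def open_and_close(path: str) -> None:
    # Hypothetical stand-in for the code under test, e.g. Image.open(...).load()
    with open(path, "rb"):
        pass


def test_no_unclosed_file_warning(tmp_path: Path) -> None:
    target = tmp_path / "dummy.bin"
    target.write_bytes(b"\x00")
    with warnings.catch_warnings():
        # Escalate every warning raised in this block to an exception, so an
        # unexpected ResourceWarning (an unclosed file) fails the test instead
        # of being silently ignored.
        warnings.simplefilter("error")
        open_and_close(str(target))
```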

View File

@@ -36,10 +36,11 @@ def test_version() -> None:
        else:
            assert function(name) == version
        if name != "PIL":
-           if name == "zlib" and version is not None:
-               version = re.sub(".zlib-ng$", "", version)
-           elif name == "libtiff" and version is not None:
-               version = re.sub("t$", "", version)
+           if version is not None:
+               if name == "zlib" and features.check_feature("zlib_ng"):
+                   version = re.sub(".zlib-ng$", "", version)
+               elif name == "libtiff":
+                   version = re.sub("t$", "", version)
            assert version is None or re.search(r"\d+(\.\d+)*$", version)
    for module in features.modules:

View File

@@ -36,6 +36,8 @@ def test_unclosed_file() -> None:
 def test_closed_file() -> None:
    with warnings.catch_warnings():
+       warnings.simplefilter("error")
        im = Image.open(TEST_FILE)
        im.load()
        im.close()
@@ -43,6 +45,8 @@ def test_closed_file() -> None:
 def test_context_manager() -> None:
    with warnings.catch_warnings():
+       warnings.simplefilter("error")
        with Image.open(TEST_FILE) as im:
            im.load()

View File

@@ -65,6 +65,8 @@ def test_unclosed_file() -> None:
 def test_closed_file() -> None:
    with warnings.catch_warnings():
+       warnings.simplefilter("error")
        im = Image.open(static_test_file)
        im.load()
        im.close()
@@ -81,6 +83,8 @@ def test_seek_after_close() -> None:
 def test_context_manager() -> None:
    with warnings.catch_warnings():
+       warnings.simplefilter("error")
        with Image.open(static_test_file) as im:
            im.load()

View File

@@ -4,6 +4,7 @@ import warnings
 from collections.abc import Generator
 from io import BytesIO
 from pathlib import Path
+from typing import Any
 import pytest
@@ -46,6 +47,8 @@ def test_unclosed_file() -> None:
 def test_closed_file() -> None:
    with warnings.catch_warnings():
+       warnings.simplefilter("error")
        im = Image.open(TEST_GIF)
        im.load()
        im.close()
@@ -67,6 +70,8 @@ def test_seek_after_close() -> None:
 def test_context_manager() -> None:
    with warnings.catch_warnings():
+       warnings.simplefilter("error")
        with Image.open(TEST_GIF) as im:
            im.load()
@@ -1431,7 +1436,8 @@ def test_saving_rgba(tmp_path: Path) -> None:
        assert reloaded_rgba.load()[0, 0][3] == 0
-def test_optimizing_p_rgba(tmp_path: Path) -> None:
+@pytest.mark.parametrize("params", ({}, {"disposal": 2, "optimize": False}))
+def test_p_rgba(tmp_path: Path, params: dict[str, Any]) -> None:
    out = str(tmp_path / "temp.gif")
    im1 = Image.new("P", (100, 100))
@@ -1443,7 +1449,7 @@ def test_optimizing_p_rgba(tmp_path: Path) -> None:
    im2 = Image.new("P", (100, 100))
    im2.putpalette(data, "RGBA")
-   im1.save(out, save_all=True, append_images=[im2])
+   im1.save(out, save_all=True, append_images=[im2], **params)
    with Image.open(out) as reloaded:
        assert reloaded.n_frames == 2

View File

@@ -21,6 +21,8 @@ def test_sanity() -> None:
    with Image.open(TEST_FILE) as im:
        # Assert that there is no unclosed file warning
        with warnings.catch_warnings():
+           warnings.simplefilter("error")
            im.load()
        assert im.mode == "RGBA"

View File

@@ -41,6 +41,8 @@ def test_unclosed_file() -> None:
 def test_closed_file() -> None:
    with warnings.catch_warnings():
+       warnings.simplefilter("error")
        im = Image.open(TEST_IM)
        im.load()
        im.close()
@@ -48,6 +50,8 @@ def test_closed_file() -> None:
 def test_context_manager() -> None:
    with warnings.catch_warnings():
+       warnings.simplefilter("error")
        with Image.open(TEST_IM) as im:
            im.load()

View File

@@ -541,12 +541,12 @@ class TestFileJpeg:
    @mark_if_feature_version(
        pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
    )
-   def test_qtables(self, tmp_path: Path) -> None:
+   def test_qtables(self) -> None:
        def _n_qtables_helper(n: int, test_file: str) -> None:
+           b = BytesIO()
            with Image.open(test_file) as im:
-               f = str(tmp_path / "temp.jpg")
-               im.save(f, qtables=[[n] * 64] * n)
-           with Image.open(f) as im:
+               im.save(b, "JPEG", qtables=[[n] * 64] * n)
+           with Image.open(b) as im:
                assert len(im.quantization) == n
                reloaded = self.roundtrip(im, qtables="keep")
                assert im.quantization == reloaded.quantization
@@ -850,6 +850,8 @@ class TestFileJpeg:
        out = str(tmp_path / "out.jpg")
        with warnings.catch_warnings():
+           warnings.simplefilter("error")
            im.save(out, exif=exif)
        with Image.open(out) as reloaded:

View File

@@ -2,6 +2,7 @@ from __future__ import annotations
 import os
 import re
+from collections.abc import Generator
 from io import BytesIO
 from pathlib import Path
 from typing import Any
@@ -29,8 +30,16 @@ EXTRA_DIR = "Tests/images/jpeg2000"
 pytestmark = skip_unless_feature("jpg_2000")
-test_card = Image.open("Tests/images/test-card.png")
-test_card.load()
+@pytest.fixture
+def card() -> Generator[ImageFile.ImageFile, None, None]:
+    with Image.open("Tests/images/test-card.png") as im:
+        im.load()
+    try:
+        yield im
+    finally:
+        im.close()
 # OpenJPEG 2.0.0 outputs this debugging message sometimes; we should
 # ignore it---it doesn't represent a test failure.
@@ -74,76 +83,76 @@ def test_invalid_file() -> None:
        Jpeg2KImagePlugin.Jpeg2KImageFile(invalid_file)
-def test_bytesio() -> None:
+def test_bytesio(card: ImageFile.ImageFile) -> None:
    with open("Tests/images/test-card-lossless.jp2", "rb") as f:
        data = BytesIO(f.read())
    with Image.open(data) as im:
        im.load()
-       assert_image_similar(im, test_card, 1.0e-3)
+       assert_image_similar(im, card, 1.0e-3)
 # These two test pre-written JPEG 2000 files that were not written with
 # PIL (they were made using Adobe Photoshop)
-def test_lossless(tmp_path: Path) -> None:
+def test_lossless(card: ImageFile.ImageFile, tmp_path: Path) -> None:
    with Image.open("Tests/images/test-card-lossless.jp2") as im:
        im.load()
        outfile = str(tmp_path / "temp_test-card.png")
        im.save(outfile)
-       assert_image_similar(im, test_card, 1.0e-3)
+       assert_image_similar(im, card, 1.0e-3)
-def test_lossy_tiled() -> None:
-   assert_image_similar_tofile(
-       test_card, "Tests/images/test-card-lossy-tiled.jp2", 2.0
-   )
+def test_lossy_tiled(card: ImageFile.ImageFile) -> None:
+   assert_image_similar_tofile(card, "Tests/images/test-card-lossy-tiled.jp2", 2.0)
-def test_lossless_rt() -> None:
-   im = roundtrip(test_card)
-   assert_image_equal(im, test_card)
+def test_lossless_rt(card: ImageFile.ImageFile) -> None:
+   im = roundtrip(card)
+   assert_image_equal(im, card)
-def test_lossy_rt() -> None:
-   im = roundtrip(test_card, quality_layers=[20])
-   assert_image_similar(im, test_card, 2.0)
+def test_lossy_rt(card: ImageFile.ImageFile) -> None:
+   im = roundtrip(card, quality_layers=[20])
+   assert_image_similar(im, card, 2.0)
-def test_tiled_rt() -> None:
-   im = roundtrip(test_card, tile_size=(128, 128))
-   assert_image_equal(im, test_card)
+def test_tiled_rt(card: ImageFile.ImageFile) -> None:
+   im = roundtrip(card, tile_size=(128, 128))
+   assert_image_equal(im, card)
-def test_tiled_offset_rt() -> None:
-   im = roundtrip(test_card, tile_size=(128, 128), tile_offset=(0, 0), offset=(32, 32))
-   assert_image_equal(im, test_card)
+def test_tiled_offset_rt(card: ImageFile.ImageFile) -> None:
+   im = roundtrip(card, tile_size=(128, 128), tile_offset=(0, 0), offset=(32, 32))
+   assert_image_equal(im, card)
-def test_tiled_offset_too_small() -> None:
+def test_tiled_offset_too_small(card: ImageFile.ImageFile) -> None:
    with pytest.raises(ValueError):
-       roundtrip(test_card, tile_size=(128, 128), tile_offset=(0, 0), offset=(128, 32))
+       roundtrip(card, tile_size=(128, 128), tile_offset=(0, 0), offset=(128, 32))
-def test_irreversible_rt() -> None:
-   im = roundtrip(test_card, irreversible=True, quality_layers=[20])
-   assert_image_similar(im, test_card, 2.0)
+def test_irreversible_rt(card: ImageFile.ImageFile) -> None:
+   im = roundtrip(card, irreversible=True, quality_layers=[20])
+   assert_image_similar(im, card, 2.0)
-def test_prog_qual_rt() -> None:
-   im = roundtrip(test_card, quality_layers=[60, 40, 20], progression="LRCP")
-   assert_image_similar(im, test_card, 2.0)
+def test_prog_qual_rt(card: ImageFile.ImageFile) -> None:
+   im = roundtrip(card, quality_layers=[60, 40, 20], progression="LRCP")
+   assert_image_similar(im, card, 2.0)
-def test_prog_res_rt() -> None:
-   im = roundtrip(test_card, num_resolutions=8, progression="RLCP")
-   assert_image_equal(im, test_card)
+def test_prog_res_rt(card: ImageFile.ImageFile) -> None:
+   im = roundtrip(card, num_resolutions=8, progression="RLCP")
+   assert_image_equal(im, card)
 @pytest.mark.parametrize("num_resolutions", range(2, 6))
-def test_default_num_resolutions(num_resolutions: int) -> None:
+def test_default_num_resolutions(
+   card: ImageFile.ImageFile, num_resolutions: int
+) -> None:
    d = 1 << (num_resolutions - 1)
-   im = test_card.resize((d - 1, d - 1))
+   im = card.resize((d - 1, d - 1))
    with pytest.raises(OSError):
        roundtrip(im, num_resolutions=num_resolutions)
    reloaded = roundtrip(im)
@@ -205,31 +214,31 @@ def test_header_errors() -> None:
        pass
-def test_layers_type(tmp_path: Path) -> None:
+def test_layers_type(card: ImageFile.ImageFile, tmp_path: Path) -> None:
    outfile = str(tmp_path / "temp_layers.jp2")
    for quality_layers in [[100, 50, 10], (100, 50, 10), None]:
-       test_card.save(outfile, quality_layers=quality_layers)
+       card.save(outfile, quality_layers=quality_layers)
    for quality_layers_str in ["quality_layers", ("100", "50", "10")]:
        with pytest.raises(ValueError):
-           test_card.save(outfile, quality_layers=quality_layers_str)
+           card.save(outfile, quality_layers=quality_layers_str)
-def test_layers() -> None:
+def test_layers(card: ImageFile.ImageFile) -> None:
    out = BytesIO()
-   test_card.save(out, "JPEG2000", quality_layers=[100, 50, 10], progression="LRCP")
+   card.save(out, "JPEG2000", quality_layers=[100, 50, 10], progression="LRCP")
    out.seek(0)
    with Image.open(out) as im:
        im.layers = 1
        im.load()
-       assert_image_similar(im, test_card, 13)
+       assert_image_similar(im, card, 13)
    out.seek(0)
    with Image.open(out) as im:
        im.layers = 3
        im.load()
-       assert_image_similar(im, test_card, 0.4)
+       assert_image_similar(im, card, 0.4)
 @pytest.mark.parametrize(
@@ -245,24 +254,30 @@ def test_layers() -> None:
        (None, {"no_jp2": False}, 4, b"jP"),
    ),
 )
-def test_no_jp2(name: str, args: dict[str, bool], offset: int, data: bytes) -> None:
+def test_no_jp2(
+   card: ImageFile.ImageFile,
+   name: str,
+   args: dict[str, bool],
+   offset: int,
+   data: bytes,
+) -> None:
    out = BytesIO()
    if name:
        out.name = name
-   test_card.save(out, "JPEG2000", **args)
+   card.save(out, "JPEG2000", **args)
    out.seek(offset)
    assert out.read(2) == data
-def test_mct() -> None:
+def test_mct(card: ImageFile.ImageFile) -> None:
    # Three component
    for val in (0, 1):
        out = BytesIO()
-       test_card.save(out, "JPEG2000", mct=val, no_jp2=True)
+       card.save(out, "JPEG2000", mct=val, no_jp2=True)
        assert out.getvalue()[59] == val
        with Image.open(out) as im:
-           assert_image_similar(im, test_card, 1.0e-3)
+           assert_image_similar(im, card, 1.0e-3)
    # Single component should have MCT disabled
    for val in (0, 1):
@@ -419,22 +434,22 @@ def test_comment() -> None:
            pass
-def test_save_comment() -> None:
+def test_save_comment(card: ImageFile.ImageFile) -> None:
    for comment in ("Created by Pillow", b"Created by Pillow"):
        out = BytesIO()
-       test_card.save(out, "JPEG2000", comment=comment)
+       card.save(out, "JPEG2000", comment=comment)
        with Image.open(out) as im:
            assert im.info["comment"] == b"Created by Pillow"
    out = BytesIO()
    long_comment = b" " * 65531
-   test_card.save(out, "JPEG2000", comment=long_comment)
+   card.save(out, "JPEG2000", comment=long_comment)
    with Image.open(out) as im:
        assert im.info["comment"] == long_comment
    with pytest.raises(ValueError):
-       test_card.save(out, "JPEG2000", comment=long_comment + b" ")
+       card.save(out, "JPEG2000", comment=long_comment + b" ")
 @pytest.mark.parametrize(
@@ -457,10 +472,10 @@ def test_crashes(test_file: str) -> None:
 @skip_unless_feature_version("jpg_2000", "2.4.0")
-def test_plt_marker() -> None:
+def test_plt_marker(card: ImageFile.ImageFile) -> None:
    # Search the start of the codesteam for PLT
    out = BytesIO()
-   test_card.save(out, "JPEG2000", no_jp2=True, plt=True)
+   card.save(out, "JPEG2000", no_jp2=True, plt=True)
    out.seek(0)
    while True:
        marker = out.read(2)

View File

@@ -1098,6 +1098,25 @@ class TestFileLibTiff(LibTiffTestCase):
            assert_image_similar(base_im, im, 0.7)
+   @pytest.mark.parametrize(
+       "test_file",
+       [
+           "Tests/images/old-style-jpeg-compression-no-samplesperpixel.tif",
+           "Tests/images/old-style-jpeg-compression.tif",
+       ],
+   )
+   def test_buffering(self, test_file: str) -> None:
+       # load exif first
+       with Image.open(open(test_file, "rb", buffering=1048576)) as im:
+           exif = dict(im.getexif())
+       # load image before exif
+       with Image.open(open(test_file, "rb", buffering=1048576)) as im2:
+           im2.load()
+           exif_after_load = dict(im2.getexif())
+       assert exif == exif_after_load
    @pytest.mark.valgrind_known_error(reason="Backtrace in Python Core")
    def test_sampleformat_not_corrupted(self) -> None:
        # Assert that a TIFF image with SampleFormat=UINT tag is not corrupted

View File

@@ -48,6 +48,8 @@ def test_unclosed_file() -> None:
 def test_closed_file() -> None:
    with warnings.catch_warnings():
+       warnings.simplefilter("error")
        im = Image.open(test_files[0])
        im.load()
        im.close()
@@ -63,6 +65,8 @@ def test_seek_after_close() -> None:
 def test_context_manager() -> None:
    with warnings.catch_warnings():
+       warnings.simplefilter("error")
        with Image.open(test_files[0]) as im:
            im.load()

View File

@@ -338,6 +338,8 @@ class TestFilePng:
        with Image.open(TEST_PNG_FILE) as im:
            # Assert that there is no unclosed file warning
            with warnings.catch_warnings():
+               warnings.simplefilter("error")
                im.verify()
        with Image.open(TEST_PNG_FILE) as im:

View File

@@ -35,6 +35,8 @@ def test_unclosed_file() -> None:
 def test_closed_file() -> None:
    with warnings.catch_warnings():
+       warnings.simplefilter("error")
        im = Image.open(test_file)
        im.load()
        im.close()
@@ -42,6 +44,8 @@ def test_closed_file() -> None:
 def test_context_manager() -> None:
    with warnings.catch_warnings():
+       warnings.simplefilter("error")
        with Image.open(test_file) as im:
            im.load()

View File

@@ -34,6 +34,8 @@ def test_unclosed_file() -> None:
 def test_closed_file() -> None:
    with warnings.catch_warnings():
+       warnings.simplefilter("error")
        im = Image.open(TEST_FILE)
        im.load()
        im.close()
@@ -41,6 +43,8 @@ def test_closed_file() -> None:
 def test_context_manager() -> None:
    with warnings.catch_warnings():
+       warnings.simplefilter("error")
        with Image.open(TEST_FILE) as im:
            im.load()

View File

@@ -37,11 +37,15 @@ def test_unclosed_file() -> None:
 def test_close() -> None:
    with warnings.catch_warnings():
+       warnings.simplefilter("error")
        tar = TarIO.TarIO(TEST_TAR_FILE, "hopper.jpg")
        tar.close()
 def test_contextmanager() -> None:
    with warnings.catch_warnings():
+       warnings.simplefilter("error")
        with TarIO.TarIO(TEST_TAR_FILE, "hopper.jpg"):
            pass

View File

@@ -72,6 +72,8 @@ class TestFileTiff:
    def test_closed_file(self) -> None:
        with warnings.catch_warnings():
+           warnings.simplefilter("error")
            im = Image.open("Tests/images/multipage.tiff")
            im.load()
            im.close()
@@ -88,6 +90,8 @@ class TestFileTiff:
    def test_context_manager(self) -> None:
        with warnings.catch_warnings():
+           warnings.simplefilter("error")
            with Image.open("Tests/images/multipage.tiff") as im:
                im.load()

View File

@@ -191,6 +191,8 @@ class TestFileWebp:
        file_path = "Tests/images/hopper.webp"
        with Image.open(file_path) as image:
            with warnings.catch_warnings():
+               warnings.simplefilter("error")
                image.save(tmp_path / "temp.webp")
    def test_file_pointer_could_be_reused(self) -> None:

View File

@@ -1,5 +1,6 @@
 from __future__ import annotations
+from io import BytesIO
 from pathlib import Path
 from typing import IO
@@ -34,6 +35,13 @@ def test_load() -> None:
        assert im.load()[0, 0] == (255, 255, 255)
+def test_load_zero_inch() -> None:
+   b = BytesIO(b"\xd7\xcd\xc6\x9a\x00\x00" + b"\x00" * 10)
+   with pytest.raises(ValueError):
+       with Image.open(b):
+           pass
 class TestHandler(ImageFile.StubHandler):
    methodCalled = False
@@ -65,6 +73,12 @@ def test_load_float_dpi() -> None:
    with Image.open("Tests/images/drawing.emf") as im:
        assert im.info["dpi"] == 1423.7668161434979
+   with open("Tests/images/drawing.emf", "rb") as fp:
+       data = fp.read()
+   b = BytesIO(data[:8] + b"\x06\xFA" + data[10:])
+   with Image.open(b) as im:
+       assert im.info["dpi"][0] == 2540
 def test_load_set_dpi() -> None:
    with Image.open("Tests/images/drawing.wmf") as im:

View File

@@ -737,6 +737,8 @@ class TestImage:
        # Act/Assert
        with Image.open(test_file) as im:
            with warnings.catch_warnings():
+               warnings.simplefilter("error")
                im.save(temp_file)
    def test_no_new_file_on_error(self, tmp_path: Path) -> None:

View File

@@ -52,4 +52,6 @@ def test_image(mode: str) -> None:
 def test_closed_file() -> None:
    with warnings.catch_warnings():
+       warnings.simplefilter("error")
        ImageQt.ImageQt("Tests/images/hopper.gif")

View File

@ -264,4 +264,6 @@ def test_no_resource_warning_for_numpy_array() -> None:
with Image.open(test_file) as im: with Image.open(test_file) as im:
# Act/Assert # Act/Assert
with warnings.catch_warnings(): with warnings.catch_warnings():
warnings.simplefilter("error")
array(im) array(im)

View File

@ -1,7 +1,7 @@
# Documentation: https://docs.codecov.com/docs/codecov-yaml # Documentation: https://docs.codecov.com/docs/codecov-yaml
codecov: codecov:
# Avoid "Missing base report" due to committing CHANGES.rst with "[CI skip]" # Avoid "Missing base report" due to committing with "[CI skip]"
# https://github.com/codecov/support/issues/363 # https://github.com/codecov/support/issues/363
# https://docs.codecov.com/docs/comparing-commits # https://docs.codecov.com/docs/comparing-commits
allow_coverage_offsets: true allow_coverage_offsets: true

View File

@ -1,7 +1,7 @@
#!/bin/bash #!/bin/bash
# install openjpeg # install openjpeg
archive=openjpeg-2.5.2 archive=openjpeg-2.5.3
./download-and-extract.sh $archive https://raw.githubusercontent.com/python-pillow/pillow-depends/main/$archive.tar.gz ./download-and-extract.sh $archive https://raw.githubusercontent.com/python-pillow/pillow-depends/main/$archive.tar.gz

View File

@ -5,7 +5,7 @@ The Python Imaging Library (PIL) is
Pillow is the friendly PIL fork. It is Pillow is the friendly PIL fork. It is
Copyright © 2010-2024 by Jeffrey A. Clark and contributors Copyright © 2010 by Jeffrey A. Clark and contributors
Like PIL, Pillow is licensed under the open source PIL Like PIL, Pillow is licensed under the open source PIL
Software License: Software License:

View File

@ -55,7 +55,7 @@ master_doc = "index"
project = "Pillow (PIL Fork)" project = "Pillow (PIL Fork)"
copyright = ( copyright = (
"1995-2011 Fredrik Lundh and contributors, " "1995-2011 Fredrik Lundh and contributors, "
"2010-2024 Jeffrey A. Clark and contributors." "2010 Jeffrey A. Clark and contributors."
) )
author = "Fredrik Lundh (PIL), Jeffrey A. Clark (Pillow)" author = "Fredrik Lundh (PIL), Jeffrey A. Clark (Pillow)"

View File

@ -692,6 +692,30 @@ The :py:meth:`~PIL.Image.Image.save` method supports the following options:
you fail to do this, you will get errors about not being able to load the you fail to do this, you will get errors about not being able to load the
``_imaging`` DLL). ``_imaging`` DLL).
MPO
^^^
Pillow reads and writes Multi Picture Object (MPO) files. When first opened, it loads
the primary image. The :py:meth:`~PIL.Image.Image.seek` and
:py:meth:`~PIL.Image.Image.tell` methods may be used to read other pictures from the
file. The pictures are zero-indexed and random access is supported.
.. _mpo-saving:
Saving
~~~~~~
When calling :py:meth:`~PIL.Image.Image.save` to write an MPO file, by default
only the first frame of a multiframe image will be saved. If the ``save_all``
argument is present and true, then all frames will be saved, and the following
option will also be available.
**append_images**
A list of images to append as additional pictures. Each of the
images in the list can be single or multiframe images.
.. versionadded:: 9.3.0
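
For illustration, a minimal sketch of writing and re-reading an MPO using the options described above (the JPEG filenames are placeholders):

    from PIL import Image

    with Image.open("first.jpg") as primary, Image.open("second.jpg") as extra:
        primary.save("out.mpo", save_all=True, append_images=[extra])

    with Image.open("out.mpo") as im:
        im.seek(1)  # pictures are zero-indexed; this is the appended picture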
MSP MSP
^^^ ^^^
@ -1435,30 +1459,6 @@ Note that there may be an embedded gamma of 2.2 in MIC files.
To enable MIC support, you must install :pypi:`olefile`. To enable MIC support, you must install :pypi:`olefile`.
MPO
^^^
Pillow identifies and reads Multi Picture Object (MPO) files, loading the primary
image when first opened. The :py:meth:`~PIL.Image.Image.seek` and :py:meth:`~PIL.Image.Image.tell`
methods may be used to read other pictures from the file. The pictures are
zero-indexed and random access is supported.
.. _mpo-saving:
Saving
~~~~~~
When calling :py:meth:`~PIL.Image.Image.save` to write an MPO file, by default
only the first frame of a multiframe image will be saved. If the ``save_all``
argument is present and true, then all frames will be saved, and the following
option will also be available.
**append_images**
A list of images to append as additional pictures. Each of the
images in the list can be single or multiframe images.
.. versionadded:: 9.3.0
PCD PCD
^^^ ^^^

View File

@ -678,7 +678,7 @@ Reading from URL
from PIL import Image from PIL import Image
from urllib.request import urlopen from urllib.request import urlopen
url = "https://python-pillow.org/assets/images/pillow-logo.png" url = "https://python-pillow.github.io/assets/images/pillow-logo.png"
img = Image.open(urlopen(url)) img = Image.open(urlopen(url))

View File

@ -58,7 +58,7 @@ Many of Pillow's features require external libraries:
* **openjpeg** provides JPEG 2000 functionality. * **openjpeg** provides JPEG 2000 functionality.
* Pillow has been tested with openjpeg **2.0.0**, **2.1.0**, **2.3.1**, * Pillow has been tested with openjpeg **2.0.0**, **2.1.0**, **2.3.1**,
**2.4.0**, **2.5.0** and **2.5.2**. **2.4.0**, **2.5.0**, **2.5.2** and **2.5.3**.
* Pillow does **not** support the earlier **1.5** series which ships * Pillow does **not** support the earlier **1.5** series which ships
with Debian Jessie. with Debian Jessie.
@ -148,13 +148,7 @@ Many of Pillow's features require external libraries:
The easiest way to install external libraries is via `Homebrew The easiest way to install external libraries is via `Homebrew
<https://brew.sh/>`_. After you install Homebrew, run:: <https://brew.sh/>`_. After you install Homebrew, run::
brew install libjpeg libtiff little-cms2 openjpeg webp brew install libjpeg libraqm libtiff little-cms2 openjpeg webp
To install libraqm on macOS use Homebrew to install its dependencies::
brew install freetype harfbuzz fribidi
Then see ``depends/install_raqm_cmake.sh`` to install libraqm.
.. tab:: Windows .. tab:: Windows
@ -195,11 +189,6 @@ Many of Pillow's features require external libraries:
mingw-w64-x86_64-libimagequant \ mingw-w64-x86_64-libimagequant \
mingw-w64-x86_64-libraqm mingw-w64-x86_64-libraqm
https://www.msys2.org/docs/python/ states that setuptools >= 60 does not work with
MSYS2. To workaround this, before installing Pillow you must run::
export SETUPTOOLS_USE_DISTUTILS=stdlib
.. tab:: FreeBSD .. tab:: FreeBSD
.. Note:: Only FreeBSD 10 and 11 tested .. Note:: Only FreeBSD 10 and 11 tested

View File

@ -31,6 +31,8 @@ These platforms are built and tested for every change.
+----------------------------------+----------------------------+---------------------+ +----------------------------------+----------------------------+---------------------+
| Fedora 40 | 3.12 | x86-64 | | Fedora 40 | 3.12 | x86-64 |
+----------------------------------+----------------------------+---------------------+ +----------------------------------+----------------------------+---------------------+
| Fedora 41 | 3.13 | x86-64 |
+----------------------------------+----------------------------+---------------------+
| Gentoo | 3.12 | x86-64 | | Gentoo | 3.12 | x86-64 |
+----------------------------------+----------------------------+---------------------+ +----------------------------------+----------------------------+---------------------+
| macOS 13 Ventura | 3.9 | x86-64 | | macOS 13 Ventura | 3.9 | x86-64 |
@ -53,7 +55,7 @@ These platforms are built and tested for every change.
| +----------------------------+---------------------+ | +----------------------------+---------------------+
| | 3.13 | x86 | | | 3.13 | x86 |
| +----------------------------+---------------------+ | +----------------------------+---------------------+
| | 3.9 (MinGW) | x86-64 | | | 3.12 (MinGW) | x86-64 |
| +----------------------------+---------------------+ | +----------------------------+---------------------+
| | 3.9 (Cygwin) | x86-64 | | | 3.9 (Cygwin) | x86-64 |
+----------------------------------+----------------------------+---------------------+ +----------------------------------+----------------------------+---------------------+
@ -73,7 +75,9 @@ These platforms have been reported to work at the versions mentioned.
| Operating system | | Tested Python | | Latest tested | | Tested | | Operating system | | Tested Python | | Latest tested | | Tested |
| | | versions | | Pillow version | | processors | | | | versions | | Pillow version | | processors |
+==================================+============================+==================+==============+ +==================================+============================+==================+==============+
| macOS 15 Sequoia | 3.8, 3.9, 3.10, 3.11, 3.12 | 10.4.0 |arm | | macOS 15 Sequoia | 3.9, 3.10, 3.11, 3.12, 3.13| 11.0.0 |arm |
| +----------------------------+------------------+ |
| | 3.8 | 10.4.0 | |
+----------------------------------+----------------------------+------------------+--------------+ +----------------------------------+----------------------------+------------------+--------------+
| macOS 14 Sonoma | 3.8, 3.9, 3.10, 3.11, 3.12 | 10.4.0 |arm | | macOS 14 Sonoma | 3.8, 3.9, 3.10, 3.11, 3.12 | 10.4.0 |arm |
+----------------------------------+----------------------------+------------------+--------------+ +----------------------------------+----------------------------+------------------+--------------+
@ -146,7 +150,7 @@ These platforms have been reported to work at the versions mentioned.
+----------------------------------+----------------------------+------------------+--------------+ +----------------------------------+----------------------------+------------------+--------------+
| FreeBSD 10.2 | 2.7, 3.4 | 3.1.0 |x86-64 | | FreeBSD 10.2 | 2.7, 3.4 | 3.1.0 |x86-64 |
+----------------------------------+----------------------------+------------------+--------------+ +----------------------------------+----------------------------+------------------+--------------+
| Windows 11 | 3.9, 3.10, 3.11, 3.12 | 10.2.0 |arm64 | | Windows 11 23H2 | 3.9, 3.10, 3.11, 3.12, 3.13| 11.0.0 |arm64 |
+----------------------------------+----------------------------+------------------+--------------+ +----------------------------------+----------------------------+------------------+--------------+
| Windows 11 Pro | 3.11, 3.12 | 10.2.0 |x86-64 | | Windows 11 Pro | 3.11, 3.12 | 10.2.0 |x86-64 |
+----------------------------------+----------------------------+------------------+--------------+ +----------------------------------+----------------------------+------------------+--------------+

View File

@ -19,7 +19,7 @@ Example: Parse an image
from PIL import ImageFile from PIL import ImageFile
fp = open("hopper.pgm", "rb") fp = open("hopper.ppm", "rb")
p = ImageFile.Parser() p = ImageFile.Parser()

View File

@ -54,6 +54,7 @@ Feature version numbers are available only where stated.
Support for the following features can be checked: Support for the following features can be checked:
* ``libjpeg_turbo``: (compile time) Whether Pillow was compiled against the libjpeg-turbo version of libjpeg. Compile-time version number is available. * ``libjpeg_turbo``: (compile time) Whether Pillow was compiled against the libjpeg-turbo version of libjpeg. Compile-time version number is available.
* ``zlib_ng``: (compile time) Whether Pillow was compiled against the zlib-ng version of zlib. Compile-time version number is available.
* ``raqm``: Raqm library, required for ``ImageFont.Layout.RAQM`` in :py:func:`PIL.ImageFont.truetype`. Run-time version number is available for Raqm 0.7.0 or newer. * ``raqm``: Raqm library, required for ``ImageFont.Layout.RAQM`` in :py:func:`PIL.ImageFont.truetype`. Run-time version number is available for Raqm 0.7.0 or newer.
* ``libimagequant``: (compile time) ImageQuant quantization support in :py:func:`PIL.Image.Image.quantize`. Run-time version number is available. * ``libimagequant``: (compile time) ImageQuant quantization support in :py:func:`PIL.Image.Image.quantize`. Run-time version number is available.
* ``xcb``: (compile time) Support for X11 in :py:func:`PIL.ImageGrab.grab` via the XCB library. * ``xcb``: (compile time) Support for X11 in :py:func:`PIL.ImageGrab.grab` via the XCB library.
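
As a minimal sketch of querying the feature flags listed above at run time, including the new ``zlib_ng`` entry:

    from PIL import features

    for name in ("libjpeg_turbo", "zlib_ng"):
        if features.check_feature(name):
            print(name, features.version_feature(name))
        else:
            print(name, "not built in")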

View File

@ -0,0 +1,59 @@
11.1.0
------
Security
========
TODO
^^^^
TODO
:cve:`YYYY-XXXXX`: TODO
^^^^^^^^^^^^^^^^^^^^^^^
TODO
Backwards Incompatible Changes
==============================
TODO
^^^^
Deprecations
============
TODO
^^^^
TODO
API Changes
===========
TODO
^^^^
TODO
API Additions
=============
Check for zlib-ng
^^^^^^^^^^^^^^^^^
You can check if Pillow has been built against the zlib-ng version of the
zlib library, and what version of zlib-ng is being used::
from PIL import features
features.check_feature("zlib_ng") # True or False
features.version_feature("zlib_ng") # "2.2.2" for example, or None
Other Changes
=============
zlib-ng in wheels
^^^^^^^^^^^^^^^^^
Wheels are now built against zlib-ng for improved speed. In tests, saving a PNG
was found to be more than twice as fast at higher compression levels.
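
A rough, illustrative way to gauge the effect locally (timings vary by machine and image content; this is a sketch, not a benchmark from the release notes):

    import os
    import time
    from PIL import Image

    # incompressible noise makes the compression cost visible
    im = Image.frombytes("RGB", (1024, 1024), os.urandom(1024 * 1024 * 3))
    start = time.perf_counter()
    im.save("noise.png", compress_level=9)  # high PNG compression level
    print(f"saved in {time.perf_counter() - start:.3f}s")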

View File

@ -14,6 +14,7 @@ expected to be backported to earlier versions.
.. toctree:: .. toctree::
:maxdepth: 2 :maxdepth: 2
11.1.0
11.0.0 11.0.0
10.4.0 10.4.0
10.3.0 10.3.0

View File

@ -56,7 +56,7 @@ optional-dependencies.mic = [
] ]
optional-dependencies.tests = [ optional-dependencies.tests = [
"check-manifest", "check-manifest",
"coverage", "coverage>=7.4.2",
"defusedxml", "defusedxml",
"markdown2", "markdown2",
"olefile", "olefile",
@ -65,6 +65,7 @@ optional-dependencies.tests = [
"pytest", "pytest",
"pytest-cov", "pytest-cov",
"pytest-timeout", "pytest-timeout",
"trove-classifiers>=2024.10.12",
] ]
optional-dependencies.typing = [ optional-dependencies.typing = [
"typing-extensions; python_version<'3.10'", "typing-extensions; python_version<'3.10'",
@ -72,10 +73,10 @@ optional-dependencies.typing = [
optional-dependencies.xmp = [ optional-dependencies.xmp = [
"defusedxml", "defusedxml",
] ]
urls.Changelog = "https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst" urls.Changelog = "https://github.com/python-pillow/Pillow/releases"
urls.Documentation = "https://pillow.readthedocs.io" urls.Documentation = "https://pillow.readthedocs.io"
urls.Funding = "https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=pypi" urls.Funding = "https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=pypi"
urls.Homepage = "https://python-pillow.org" urls.Homepage = "https://python-pillow.github.io"
urls.Mastodon = "https://fosstodon.org/@pillow" urls.Mastodon = "https://fosstodon.org/@pillow"
urls."Release notes" = "https://pillow.readthedocs.io/en/stable/releasenotes/index.html" urls."Release notes" = "https://pillow.readthedocs.io/en/stable/releasenotes/index.html"
urls.Source = "https://github.com/python-pillow/Pillow" urls.Source = "https://github.com/python-pillow/Pillow"
@ -93,10 +94,18 @@ version = { attr = "PIL.__version__" }
[tool.cibuildwheel] [tool.cibuildwheel]
before-all = ".github/workflows/wheels-dependencies.sh" before-all = ".github/workflows/wheels-dependencies.sh"
build-verbosity = 1 build-verbosity = 1
config-settings = "raqm=enable raqm=vendor fribidi=vendor imagequant=disable" config-settings = "raqm=enable raqm=vendor fribidi=vendor imagequant=disable"
# Disable platform guessing on macOS
macos.config-settings = "raqm=enable raqm=vendor fribidi=vendor imagequant=disable platform-guessing=disable"
test-command = "cd {project} && .github/workflows/wheels-test.sh" test-command = "cd {project} && .github/workflows/wheels-test.sh"
test-extras = "tests" test-extras = "tests"
[tool.cibuildwheel.macos.environment]
PATH = "$(pwd)/build/deps/darwin/bin:$(dirname $(which python3)):/usr/bin:/bin:/usr/sbin:/sbin:/Library/Apple/usr/bin"
DYLD_LIBRARY_PATH = "$(pwd)/build/deps/darwin/lib"
[tool.black] [tool.black]
exclude = "wheels/multibuild" exclude = "wheels/multibuild"

View File

@ -393,13 +393,14 @@ class pil_build_ext(build_ext):
self.feature.required.discard(x) self.feature.required.discard(x)
_dbg("Disabling %s", x) _dbg("Disabling %s", x)
if getattr(self, f"enable_{x}"): if getattr(self, f"enable_{x}"):
msg = f"Conflicting options: --enable-{x} and --disable-{x}" msg = f"Conflicting options: '-C {x}=enable' and '-C {x}=disable'"
raise ValueError(msg) raise ValueError(msg)
if x == "freetype": if x == "freetype":
_dbg("--disable-freetype implies --disable-raqm") _dbg("'-C freetype=disable' implies '-C raqm=disable'")
if getattr(self, "enable_raqm"): if getattr(self, "enable_raqm"):
msg = ( msg = (
"Conflicting options: --enable-raqm and --disable-freetype" "Conflicting options: "
"'-C raqm=enable' and '-C freetype=disable'"
) )
raise ValueError(msg) raise ValueError(msg)
setattr(self, "disable_raqm", True) setattr(self, "disable_raqm", True)
@ -407,15 +408,17 @@ class pil_build_ext(build_ext):
_dbg("Requiring %s", x) _dbg("Requiring %s", x)
self.feature.required.add(x) self.feature.required.add(x)
if x == "raqm": if x == "raqm":
_dbg("--enable-raqm implies --enable-freetype") _dbg("'-C raqm=enable' implies '-C freetype=enable'")
self.feature.required.add("freetype") self.feature.required.add("freetype")
for x in ("raqm", "fribidi"): for x in ("raqm", "fribidi"):
if getattr(self, f"vendor_{x}"): if getattr(self, f"vendor_{x}"):
if getattr(self, "disable_raqm"): if getattr(self, "disable_raqm"):
msg = f"Conflicting options: --vendor-{x} and --disable-raqm" msg = f"Conflicting options: '-C {x}=vendor' and '-C raqm=disable'"
raise ValueError(msg) raise ValueError(msg)
if x == "fribidi" and not getattr(self, "vendor_raqm"): if x == "fribidi" and not getattr(self, "vendor_raqm"):
msg = f"Conflicting options: --vendor-{x} and not --vendor-raqm" msg = (
f"Conflicting options: '-C {x}=vendor' and not '-C raqm=vendor'"
)
raise ValueError(msg) raise ValueError(msg)
_dbg("Using vendored version of %s", x) _dbg("Using vendored version of %s", x)
self.feature.vendor.add(x) self.feature.vendor.add(x)
@ -448,7 +451,7 @@ class pil_build_ext(build_ext):
def get_macos_sdk_path(self) -> str | None: def get_macos_sdk_path(self) -> str | None:
try: try:
sdk_path = ( sdk_path = (
subprocess.check_output(["xcrun", "--show-sdk-path"]) subprocess.check_output(["xcrun", "--show-sdk-path", "--sdk", "macosx"])
.strip() .strip()
.decode("latin1") .decode("latin1")
) )
@ -606,6 +609,7 @@ class pil_build_ext(build_ext):
_add_directory(library_dirs, "/usr/X11/lib") _add_directory(library_dirs, "/usr/X11/lib")
_add_directory(include_dirs, "/usr/X11/include") _add_directory(include_dirs, "/usr/X11/include")
# Add the macOS SDK path.
sdk_path = self.get_macos_sdk_path() sdk_path = self.get_macos_sdk_path()
if sdk_path: if sdk_path:
_add_directory(library_dirs, os.path.join(sdk_path, "usr", "lib")) _add_directory(library_dirs, os.path.join(sdk_path, "usr", "lib"))
@ -690,6 +694,8 @@ class pil_build_ext(build_ext):
feature.set("zlib", "z") feature.set("zlib", "z")
elif sys.platform == "win32" and _find_library_file(self, "zlib"): elif sys.platform == "win32" and _find_library_file(self, "zlib"):
feature.set("zlib", "zlib") # alternative name feature.set("zlib", "zlib") # alternative name
elif sys.platform == "win32" and _find_library_file(self, "zdll"):
feature.set("zlib", "zdll") # dll import library
if feature.want("jpeg"): if feature.want("jpeg"):
_dbg("Looking for jpeg") _dbg("Looking for jpeg")
@ -1001,7 +1007,7 @@ def debug_build() -> bool:
return hasattr(sys, "gettotalrefcount") or FUZZING_BUILD return hasattr(sys, "gettotalrefcount") or FUZZING_BUILD
files = ["src/_imaging.c"] files: list[str | os.PathLike[str]] = ["src/_imaging.c"]
for src_file in _IMAGING: for src_file in _IMAGING:
files.append("src/" + src_file + ".c") files.append("src/" + src_file + ".c")
for src_file in _LIB_IMAGING: for src_file in _LIB_IMAGING:
@ -1044,7 +1050,7 @@ except DependencyException as err:
msg = f""" msg = f"""
The headers or library files could not be found for {str(err)}, The headers or library files could not be found for {str(err)},
which was requested by the option flag --enable-{str(err)} which was requested by the option flag '-C {str(err)}=enable'
""" """
sys.stderr.write(msg) sys.stderr.write(msg)

View File

@ -695,8 +695,9 @@ def _write_multiple_frames(
) )
background = _get_background(im_frame, color) background = _get_background(im_frame, color)
background_im = Image.new("P", im_frame.size, background) background_im = Image.new("P", im_frame.size, background)
assert im_frames[0].im.palette is not None first_palette = im_frames[0].im.palette
background_im.putpalette(im_frames[0].im.palette) assert first_palette is not None
background_im.putpalette(first_palette, first_palette.mode)
bbox = _getbbox(background_im, im_frame)[1] bbox = _getbbox(background_im, im_frame)[1]
elif encoderinfo.get("optimize") and im_frame.mode != "1": elif encoderinfo.get("optimize") and im_frame.mode != "1":
if "transparency" not in encoderinfo: if "transparency" not in encoderinfo:

View File

@ -357,7 +357,7 @@ def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
name = "".join([name[: 92 - len(ext)], ext]) name = "".join([name[: 92 - len(ext)], ext])
fp.write(f"Name: {name}\r\n".encode("ascii")) fp.write(f"Name: {name}\r\n".encode("ascii"))
fp.write(("Image size (x*y): %d*%d\r\n" % im.size).encode("ascii")) fp.write(f"Image size (x*y): {im.size[0]}*{im.size[1]}\r\n".encode("ascii"))
fp.write(f"File size (no of images): {frames}\r\n".encode("ascii")) fp.write(f"File size (no of images): {frames}\r\n".encode("ascii"))
if im.mode in ["P", "PA"]: if im.mode in ["P", "PA"]:
fp.write(b"Lut: 1\r\n") fp.write(b"Lut: 1\r\n")

View File

@ -692,13 +692,10 @@ class Image:
) )
def __repr__(self) -> str: def __repr__(self) -> str:
return "<%s.%s image mode=%s size=%dx%d at 0x%X>" % ( return (
self.__class__.__module__, f"<{self.__class__.__module__}.{self.__class__.__name__} "
self.__class__.__name__, f"image mode={self.mode} size={self.size[0]}x{self.size[1]} "
self.mode, f"at 0x{id(self):X}>"
self.size[0],
self.size[1],
id(self),
) )
def _repr_pretty_(self, p: PrettyPrinter, cycle: bool) -> None: def _repr_pretty_(self, p: PrettyPrinter, cycle: bool) -> None:
@ -707,14 +704,8 @@ class Image:
# Same as __repr__ but without unpredictable id(self), # Same as __repr__ but without unpredictable id(self),
# to keep Jupyter notebook `text/plain` output stable. # to keep Jupyter notebook `text/plain` output stable.
p.text( p.text(
"<%s.%s image mode=%s size=%dx%d>" f"<{self.__class__.__module__}.{self.__class__.__name__} "
% ( f"image mode={self.mode} size={self.size[0]}x{self.size[1]}>"
self.__class__.__module__,
self.__class__.__name__,
self.mode,
self.size[0],
self.size[1],
)
) )
def _repr_image(self, image_format: str, **kwargs: Any) -> bytes | None: def _repr_image(self, image_format: str, **kwargs: Any) -> bytes | None:
@ -2550,7 +2541,7 @@ class Image:
filename: str | bytes = "" filename: str | bytes = ""
open_fp = False open_fp = False
if is_path(fp): if is_path(fp):
filename = os.path.realpath(os.fspath(fp)) filename = os.fspath(fp)
open_fp = True open_fp = True
elif fp == sys.stdout: elif fp == sys.stdout:
try: try:
@ -2559,7 +2550,7 @@ class Image:
pass pass
if not filename and hasattr(fp, "name") and is_path(fp.name): if not filename and hasattr(fp, "name") and is_path(fp.name):
# only set the name for metadata purposes # only set the name for metadata purposes
filename = os.path.realpath(os.fspath(fp.name)) filename = os.fspath(fp.name)
# may mutate self! # may mutate self!
self._ensure_mutable() self._ensure_mutable()
@ -3463,7 +3454,7 @@ def open(
exclusive_fp = False exclusive_fp = False
filename: str | bytes = "" filename: str | bytes = ""
if is_path(fp): if is_path(fp):
filename = os.path.realpath(os.fspath(fp)) filename = os.fspath(fp)
if filename: if filename:
fp = builtins.open(filename, "rb") fp = builtins.open(filename, "rb")

View File

@ -120,7 +120,7 @@ class ImageFile(Image.Image):
self.custom_mimetype: str | None = None self.custom_mimetype: str | None = None
self.tile: list[_Tile] = [] self.tile: list[_Tile] = []
""" A list of tile descriptors, or ``None`` """ """ A list of tile descriptors """
self.readonly = 1 # until we know better self.readonly = 1 # until we know better
@ -130,7 +130,7 @@ class ImageFile(Image.Image):
if is_path(fp): if is_path(fp):
# filename # filename
self.fp = open(fp, "rb") self.fp = open(fp, "rb")
self.filename = os.path.realpath(os.fspath(fp)) self.filename = os.fspath(fp)
self._exclusive_fp = True self._exclusive_fp = True
else: else:
# stream # stream

View File

@ -270,7 +270,7 @@ class FreeTypeFont:
) )
if is_path(font): if is_path(font):
font = os.path.realpath(os.fspath(font)) font = os.fspath(font)
if sys.platform == "win32": if sys.platform == "win32":
font_bytes_path = font if isinstance(font, bytes) else font.encode() font_bytes_path = font if isinstance(font, bytes) else font.encode()
try: try:

View File

@ -213,4 +213,7 @@ def toqimage(im: Image.Image | str | QByteArray) -> ImageQt:
def toqpixmap(im: Image.Image | str | QByteArray) -> QPixmap: def toqpixmap(im: Image.Image | str | QByteArray) -> QPixmap:
qimage = toqimage(im) qimage = toqimage(im)
return getattr(QPixmap, "fromImage")(qimage) pixmap = getattr(QPixmap, "fromImage")(qimage)
if qt_version == "6":
pixmap.detach()
return pixmap

View File

@ -72,7 +72,7 @@ def APP(self: JpegImageFile, marker: int) -> None:
n = i16(self.fp.read(2)) - 2 n = i16(self.fp.read(2)) - 2
s = ImageFile._safe_read(self.fp, n) s = ImageFile._safe_read(self.fp, n)
app = "APP%d" % (marker & 15) app = f"APP{marker & 15}"
self.app[app] = s # compatibility self.app[app] = s # compatibility
self.applist.append((app, s)) self.applist.append((app, s))

View File

@ -86,7 +86,7 @@ class PcxImageFile(ImageFile.ImageFile):
elif bits == 1 and planes in (2, 4): elif bits == 1 and planes in (2, 4):
mode = "P" mode = "P"
rawmode = "P;%dL" % planes rawmode = f"P;{planes}L"
self.palette = ImagePalette.raw("RGB", s[16:64]) self.palette = ImagePalette.raw("RGB", s[16:64])
elif version == 5 and bits == 8 and planes == 1: elif version == 5 and bits == 8 and planes == 1:

View File

@ -523,7 +523,7 @@ class PngStream(ChunkStream):
assert self.fp is not None assert self.fp is not None
s = ImageFile._safe_read(self.fp, length) s = ImageFile._safe_read(self.fp, length)
raw_vals = struct.unpack(">%dI" % (len(s) // 4), s) raw_vals = struct.unpack(f">{len(s) // 4}I", s)
self.im_info["chromaticity"] = tuple(elt / 100000.0 for elt in raw_vals) self.im_info["chromaticity"] = tuple(elt / 100000.0 for elt in raw_vals)
return s return s

View File

@ -935,9 +935,9 @@ class ImageFileDirectory_v2(_IFDv2Base):
self._tagdata[tag] = data self._tagdata[tag] = data
self.tagtype[tag] = typ self.tagtype[tag] = typ
msg += " - value: " + ( msg += " - value: "
"<table: %d bytes>" % size if size > 32 else repr(data) msg += f"<table: {size} bytes>" if size > 32 else repr(data)
)
logger.debug(msg) logger.debug(msg)
(self.next,) = ( (self.next,) = (
@ -981,10 +981,8 @@ class ImageFileDirectory_v2(_IFDv2Base):
tagname = TiffTags.lookup(tag, self.group).name tagname = TiffTags.lookup(tag, self.group).name
typname = "ifd" if is_ifd else TYPES.get(typ, "unknown") typname = "ifd" if is_ifd else TYPES.get(typ, "unknown")
msg = f"save: {tagname} ({tag}) - type: {typname} ({typ})" msg = f"save: {tagname} ({tag}) - type: {typname} ({typ}) - value: "
msg += " - value: " + ( msg += f"<table: {len(data)} bytes>" if len(data) >= 16 else str(values)
"<table: %d bytes>" % len(data) if len(data) >= 16 else str(values)
)
logger.debug(msg) logger.debug(msg)
# count is sum of lengths for string and arbitrary data # count is sum of lengths for string and arbitrary data
@ -1216,10 +1214,6 @@ class TiffImageFile(ImageFile.ImageFile):
def _seek(self, frame: int) -> None: def _seek(self, frame: int) -> None:
self.fp = self._fp self.fp = self._fp
# reset buffered io handle in case fp
# was passed to libtiff, invalidating the buffer
self.fp.tell()
while len(self._frame_pos) <= frame: while len(self._frame_pos) <= frame:
if not self.__next: if not self.__next:
msg = "no more images in TIFF file" msg = "no more images in TIFF file"
@ -1303,10 +1297,6 @@ class TiffImageFile(ImageFile.ImageFile):
if not self.is_animated: if not self.is_animated:
self._close_exclusive_fp_after_loading = True self._close_exclusive_fp_after_loading = True
# reset buffered io handle in case fp
# was passed to libtiff, invalidating the buffer
self.fp.tell()
# load IFD data from fp before it is closed # load IFD data from fp before it is closed
exif = self.getexif() exif = self.getexif()
for key in TiffTags.TAGS_V2_GROUPS: for key in TiffTags.TAGS_V2_GROUPS:
@ -1381,8 +1371,17 @@ class TiffImageFile(ImageFile.ImageFile):
logger.debug("have fileno, calling fileno version of the decoder.") logger.debug("have fileno, calling fileno version of the decoder.")
if not close_self_fp: if not close_self_fp:
self.fp.seek(0) self.fp.seek(0)
# Save and restore the file position, because libtiff will move it
# outside of the Python runtime, and that will confuse
# io.BufferedReader and possibly others.
# NOTE: This must use os.lseek(), and not fp.tell()/fp.seek(),
# because the buffer read head already may not equal the actual
# file position, and fp.seek() may just adjust its internal
# pointer and not actually seek the OS file handle.
pos = os.lseek(fp, 0, os.SEEK_CUR)
# 4 bytes, otherwise the trace might error out # 4 bytes, otherwise the trace might error out
n, err = decoder.decode(b"fpfp") n, err = decoder.decode(b"fpfp")
os.lseek(fp, pos, os.SEEK_SET)
else: else:
# we have something else. # we have something else.
logger.debug("don't have fileno or getvalue. just reading") logger.debug("don't have fileno or getvalue. just reading")
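
A standalone sketch (separate from the plugin code) of why the comment above prefers os.lseek(): a buffered reader's read-ahead means fp.tell() and the OS-level file offset can disagree.

    import os
    import tempfile

    with tempfile.TemporaryFile() as fp:  # io.BufferedRandom
        fp.write(b"\0" * 100_000)
        fp.seek(0)
        fp.read(1)  # fills the read-ahead buffer
        print(fp.tell())  # 1 (logical position)
        print(os.lseek(fp.fileno(), 0, os.SEEK_CUR))  # e.g. 8192 (OS offset)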

View File

@ -92,6 +92,9 @@ class WmfStubImageFile(ImageFile.StubImageFile):
# get units per inch # get units per inch
self._inch = word(s, 14) self._inch = word(s, 14)
if self._inch == 0:
msg = "Invalid inch"
raise ValueError(msg)
# get bounding box # get bounding box
x0 = short(s, 6) x0 = short(s, 6)
@ -128,7 +131,7 @@ class WmfStubImageFile(ImageFile.StubImageFile):
size = x1 - x0, y1 - y0 size = x1 - x0, y1 - y0
# calculate dots per inch from bbox and frame # calculate dots per inch from bbox and frame
xdpi = 2540.0 * (x1 - y0) / (frame[2] - frame[0]) xdpi = 2540.0 * (x1 - x0) / (frame[2] - frame[0])
ydpi = 2540.0 * (y1 - y0) / (frame[3] - frame[1]) ydpi = 2540.0 * (y1 - y0) / (frame[3] - frame[1])
self.info["wmf_bbox"] = x0, y0, x1, y1 self.info["wmf_bbox"] = x0, y0, x1, y1

View File

@ -44,10 +44,10 @@ _T_co = TypeVar("_T_co", covariant=True)
class SupportsRead(Protocol[_T_co]): class SupportsRead(Protocol[_T_co]):
def read(self, __length: int = ...) -> _T_co: ... def read(self, length: int = ..., /) -> _T_co: ...
StrOrBytesPath = Union[str, bytes, "os.PathLike[str]", "os.PathLike[bytes]"] StrOrBytesPath = Union[str, bytes, os.PathLike[str], os.PathLike[bytes]]
__all__ = ["Buffer", "IntegralLike", "StrOrBytesPath", "SupportsRead", "TypeGuard"] __all__ = ["Buffer", "IntegralLike", "StrOrBytesPath", "SupportsRead", "TypeGuard"]

View File

@ -127,6 +127,7 @@ features: dict[str, tuple[str, str | bool, str | None]] = {
"fribidi": ("PIL._imagingft", "HAVE_FRIBIDI", "fribidi_version"), "fribidi": ("PIL._imagingft", "HAVE_FRIBIDI", "fribidi_version"),
"harfbuzz": ("PIL._imagingft", "HAVE_HARFBUZZ", "harfbuzz_version"), "harfbuzz": ("PIL._imagingft", "HAVE_HARFBUZZ", "harfbuzz_version"),
"libjpeg_turbo": ("PIL._imaging", "HAVE_LIBJPEGTURBO", "libjpeg_turbo_version"), "libjpeg_turbo": ("PIL._imaging", "HAVE_LIBJPEGTURBO", "libjpeg_turbo_version"),
"zlib_ng": ("PIL._imaging", "HAVE_ZLIBNG", "zlib_ng_version"),
"libimagequant": ("PIL._imaging", "HAVE_LIBIMAGEQUANT", "imagequant_version"), "libimagequant": ("PIL._imaging", "HAVE_LIBIMAGEQUANT", "imagequant_version"),
"xcb": ("PIL._imaging", "HAVE_XCB", None), "xcb": ("PIL._imaging", "HAVE_XCB", None),
} }
@ -308,7 +309,11 @@ def pilinfo(out: IO[str] | None = None, supported_formats: bool = True) -> None:
# this check is also in src/_imagingcms.c:setup_module() # this check is also in src/_imagingcms.c:setup_module()
version_static = tuple(int(x) for x in v.split(".")) < (2, 7) version_static = tuple(int(x) for x in v.split(".")) < (2, 7)
t = "compiled for" if version_static else "loaded" t = "compiled for" if version_static else "loaded"
if name == "raqm": if name == "zlib":
zlib_ng_version = version_feature("zlib_ng")
if zlib_ng_version is not None:
v += ", compiled for zlib-ng " + zlib_ng_version
elif name == "raqm":
for f in ("fribidi", "harfbuzz"): for f in ("fribidi", "harfbuzz"):
v2 = version_feature(f) v2 = version_feature(f)
if v2 is not None: if v2 is not None:

View File

@ -4397,6 +4397,20 @@ setup_module(PyObject *m) {
} }
#endif #endif
PyObject *have_zlibng;
#ifdef ZLIBNG_VERSION
have_zlibng = Py_True;
{
PyObject *v = PyUnicode_FromString(ZLIBNG_VERSION);
PyDict_SetItemString(d, "zlib_ng_version", v ? v : Py_None);
Py_XDECREF(v);
}
#else
have_zlibng = Py_False;
#endif
Py_INCREF(have_zlibng);
PyModule_AddObject(m, "HAVE_ZLIBNG", have_zlibng);
#ifdef HAVE_LIBTIFF #ifdef HAVE_LIBTIFF
{ {
extern const char *ImagingTiffVersion(void); extern const char *ImagingTiffVersion(void);

View File

@ -82,6 +82,9 @@ struct {
/* font objects */ /* font objects */
static FT_Library library; static FT_Library library;
#ifdef Py_GIL_DISABLED
static PyMutex ft_library_mutex;
#endif
typedef struct { typedef struct {
PyObject_HEAD FT_Face face; PyObject_HEAD FT_Face face;
@ -187,7 +190,9 @@ getfont(PyObject *self_, PyObject *args, PyObject *kw) {
if (filename && font_bytes_size <= 0) { if (filename && font_bytes_size <= 0) {
self->font_bytes = NULL; self->font_bytes = NULL;
MUTEX_LOCK(&ft_library_mutex);
error = FT_New_Face(library, filename, index, &self->face); error = FT_New_Face(library, filename, index, &self->face);
MUTEX_UNLOCK(&ft_library_mutex);
} else { } else {
/* need to have allocated storage for font_bytes for the life of the object.*/ /* need to have allocated storage for font_bytes for the life of the object.*/
/* Don't free this before FT_Done_Face */ /* Don't free this before FT_Done_Face */
@ -197,6 +202,7 @@ getfont(PyObject *self_, PyObject *args, PyObject *kw) {
} }
if (!error) { if (!error) {
memcpy(self->font_bytes, font_bytes, (size_t)font_bytes_size); memcpy(self->font_bytes, font_bytes, (size_t)font_bytes_size);
MUTEX_LOCK(&ft_library_mutex);
error = FT_New_Memory_Face( error = FT_New_Memory_Face(
library, library,
(FT_Byte *)self->font_bytes, (FT_Byte *)self->font_bytes,
@ -204,6 +210,7 @@ getfont(PyObject *self_, PyObject *args, PyObject *kw) {
index, index,
&self->face &self->face
); );
MUTEX_UNLOCK(&ft_library_mutex);
} }
} }
@ -1433,7 +1440,9 @@ font_setvaraxes(FontObject *self, PyObject *args) {
static void static void
font_dealloc(FontObject *self) { font_dealloc(FontObject *self) {
if (self->face) { if (self->face) {
MUTEX_LOCK(&ft_library_mutex);
FT_Done_Face(self->face); FT_Done_Face(self->face);
MUTEX_UNLOCK(&ft_library_mutex);
} }
if (self->font_bytes) { if (self->font_bytes) {
PyMem_Free(self->font_bytes); PyMem_Free(self->font_bytes);

View File

@ -7,7 +7,10 @@
// https://github.com/python/pythoncapi_compat // https://github.com/python/pythoncapi_compat
// //
// Latest version: // Latest version:
// https://raw.githubusercontent.com/python/pythoncapi_compat/master/pythoncapi_compat.h // https://raw.githubusercontent.com/python/pythoncapi-compat/main/pythoncapi_compat.h
//
// This file was vendored from the following commit:
// https://github.com/python/pythoncapi-compat/commit/0041177c4f348c8952b4c8980b2c90856e61c7c7
// //
// SPDX-License-Identifier: 0BSD // SPDX-License-Identifier: 0BSD
@ -45,6 +48,13 @@ extern "C" {
# define _PyObject_CAST(op) _Py_CAST(PyObject*, op) # define _PyObject_CAST(op) _Py_CAST(PyObject*, op)
#endif #endif
#ifndef Py_BUILD_ASSERT
# define Py_BUILD_ASSERT(cond) \
do { \
(void)sizeof(char [1 - 2 * !(cond)]); \
} while(0)
#endif
// bpo-42262 added Py_NewRef() to Python 3.10.0a3 // bpo-42262 added Py_NewRef() to Python 3.10.0a3
#if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_NewRef) #if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_NewRef)
@ -1338,6 +1348,166 @@ PyDict_SetDefaultRef(PyObject *d, PyObject *key, PyObject *default_value,
} }
#endif #endif
#if PY_VERSION_HEX < 0x030D00B3
# define Py_BEGIN_CRITICAL_SECTION(op) {
# define Py_END_CRITICAL_SECTION() }
# define Py_BEGIN_CRITICAL_SECTION2(a, b) {
# define Py_END_CRITICAL_SECTION2() }
#endif
#if PY_VERSION_HEX < 0x030E0000 && PY_VERSION_HEX >= 0x03060000 && !defined(PYPY_VERSION)
typedef struct PyUnicodeWriter PyUnicodeWriter;
static inline void PyUnicodeWriter_Discard(PyUnicodeWriter *writer)
{
_PyUnicodeWriter_Dealloc((_PyUnicodeWriter*)writer);
PyMem_Free(writer);
}
static inline PyUnicodeWriter* PyUnicodeWriter_Create(Py_ssize_t length)
{
if (length < 0) {
PyErr_SetString(PyExc_ValueError,
"length must be positive");
return NULL;
}
const size_t size = sizeof(_PyUnicodeWriter);
PyUnicodeWriter *pub_writer = (PyUnicodeWriter *)PyMem_Malloc(size);
if (pub_writer == _Py_NULL) {
PyErr_NoMemory();
return _Py_NULL;
}
_PyUnicodeWriter *writer = (_PyUnicodeWriter *)pub_writer;
_PyUnicodeWriter_Init(writer);
if (_PyUnicodeWriter_Prepare(writer, length, 127) < 0) {
PyUnicodeWriter_Discard(pub_writer);
return NULL;
}
writer->overallocate = 1;
return pub_writer;
}
static inline PyObject* PyUnicodeWriter_Finish(PyUnicodeWriter *writer)
{
PyObject *str = _PyUnicodeWriter_Finish((_PyUnicodeWriter*)writer);
assert(((_PyUnicodeWriter*)writer)->buffer == NULL);
PyMem_Free(writer);
return str;
}
static inline int
PyUnicodeWriter_WriteChar(PyUnicodeWriter *writer, Py_UCS4 ch)
{
if (ch > 0x10ffff) {
PyErr_SetString(PyExc_ValueError,
"character must be in range(0x110000)");
return -1;
}
return _PyUnicodeWriter_WriteChar((_PyUnicodeWriter*)writer, ch);
}
static inline int
PyUnicodeWriter_WriteStr(PyUnicodeWriter *writer, PyObject *obj)
{
PyObject *str = PyObject_Str(obj);
if (str == NULL) {
return -1;
}
int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str);
Py_DECREF(str);
return res;
}
static inline int
PyUnicodeWriter_WriteRepr(PyUnicodeWriter *writer, PyObject *obj)
{
PyObject *str = PyObject_Repr(obj);
if (str == NULL) {
return -1;
}
int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str);
Py_DECREF(str);
return res;
}
static inline int
PyUnicodeWriter_WriteUTF8(PyUnicodeWriter *writer,
const char *str, Py_ssize_t size)
{
if (size < 0) {
size = (Py_ssize_t)strlen(str);
}
PyObject *str_obj = PyUnicode_FromStringAndSize(str, size);
if (str_obj == _Py_NULL) {
return -1;
}
int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str_obj);
Py_DECREF(str_obj);
return res;
}
static inline int
PyUnicodeWriter_WriteWideChar(PyUnicodeWriter *writer,
const wchar_t *str, Py_ssize_t size)
{
if (size < 0) {
size = (Py_ssize_t)wcslen(str);
}
PyObject *str_obj = PyUnicode_FromWideChar(str, size);
if (str_obj == _Py_NULL) {
return -1;
}
int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str_obj);
Py_DECREF(str_obj);
return res;
}
static inline int
PyUnicodeWriter_WriteSubstring(PyUnicodeWriter *writer, PyObject *str,
Py_ssize_t start, Py_ssize_t end)
{
if (!PyUnicode_Check(str)) {
PyErr_Format(PyExc_TypeError, "expect str, not %T", str);
return -1;
}
if (start < 0 || start > end) {
PyErr_Format(PyExc_ValueError, "invalid start argument");
return -1;
}
if (end > PyUnicode_GET_LENGTH(str)) {
PyErr_Format(PyExc_ValueError, "invalid end argument");
return -1;
}
return _PyUnicodeWriter_WriteSubstring((_PyUnicodeWriter*)writer, str,
start, end);
}
static inline int
PyUnicodeWriter_Format(PyUnicodeWriter *writer, const char *format, ...)
{
va_list vargs;
va_start(vargs, format);
PyObject *str = PyUnicode_FromFormatV(format, vargs);
va_end(vargs);
if (str == _Py_NULL) {
return -1;
}
int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str);
Py_DECREF(str);
return res;
}
#endif // PY_VERSION_HEX < 0x030E0000
// gh-116560 added PyLong_GetSign() to Python 3.14.0a0 // gh-116560 added PyLong_GetSign() to Python 3.14.0a0
#if PY_VERSION_HEX < 0x030E00A0 #if PY_VERSION_HEX < 0x030E00A0
@ -1354,6 +1524,175 @@ static inline int PyLong_GetSign(PyObject *obj, int *sign)
#endif #endif
// gh-124502 added PyUnicode_Equal() to Python 3.14.0a0
#if PY_VERSION_HEX < 0x030E00A0
static inline int PyUnicode_Equal(PyObject *str1, PyObject *str2)
{
if (!PyUnicode_Check(str1)) {
PyErr_Format(PyExc_TypeError, "first argument must be str, not %s",
Py_TYPE(str1)->tp_name);
return -1;
}
if (!PyUnicode_Check(str2)) {
PyErr_Format(PyExc_TypeError, "second argument must be str, not %s",
Py_TYPE(str2)->tp_name);
return -1;
}
#if PY_VERSION_HEX >= 0x030d0000 && !defined(PYPY_VERSION)
PyAPI_FUNC(int) _PyUnicode_Equal(PyObject *str1, PyObject *str2);
return _PyUnicode_Equal(str1, str2);
#elif PY_VERSION_HEX >= 0x03060000 && !defined(PYPY_VERSION)
return _PyUnicode_EQ(str1, str2);
#elif PY_VERSION_HEX >= 0x03090000 && defined(PYPY_VERSION)
return _PyUnicode_EQ(str1, str2);
#else
return (PyUnicode_Compare(str1, str2) == 0);
#endif
}
#endif
// gh-121645 added PyBytes_Join() to Python 3.14.0a0
#if PY_VERSION_HEX < 0x030E00A0
static inline PyObject* PyBytes_Join(PyObject *sep, PyObject *iterable)
{
return _PyBytes_Join(sep, iterable);
}
#endif
#if PY_VERSION_HEX < 0x030E00A0
static inline Py_hash_t Py_HashBuffer(const void *ptr, Py_ssize_t len)
{
#if PY_VERSION_HEX >= 0x03000000 && !defined(PYPY_VERSION)
PyAPI_FUNC(Py_hash_t) _Py_HashBytes(const void *src, Py_ssize_t len);
return _Py_HashBytes(ptr, len);
#else
Py_hash_t hash;
PyObject *bytes = PyBytes_FromStringAndSize((const char*)ptr, len);
if (bytes == NULL) {
return -1;
}
hash = PyObject_Hash(bytes);
Py_DECREF(bytes);
return hash;
#endif
}
#endif
#if PY_VERSION_HEX < 0x030E00A0
static inline int PyIter_NextItem(PyObject *iter, PyObject **item)
{
iternextfunc tp_iternext;
assert(iter != NULL);
assert(item != NULL);
tp_iternext = Py_TYPE(iter)->tp_iternext;
if (tp_iternext == NULL) {
*item = NULL;
PyErr_Format(PyExc_TypeError, "expected an iterator, got '%s'",
Py_TYPE(iter)->tp_name);
return -1;
}
if ((*item = tp_iternext(iter))) {
return 1;
}
if (!PyErr_Occurred()) {
return 0;
}
if (PyErr_ExceptionMatches(PyExc_StopIteration)) {
PyErr_Clear();
return 0;
}
return -1;
}
#endif
#if PY_VERSION_HEX < 0x030E00A0
static inline PyObject* PyLong_FromInt32(int32_t value)
{
Py_BUILD_ASSERT(sizeof(long) >= 4);
return PyLong_FromLong(value);
}
static inline PyObject* PyLong_FromInt64(int64_t value)
{
Py_BUILD_ASSERT(sizeof(long long) >= 8);
return PyLong_FromLongLong(value);
}
static inline PyObject* PyLong_FromUInt32(uint32_t value)
{
Py_BUILD_ASSERT(sizeof(unsigned long) >= 4);
return PyLong_FromUnsignedLong(value);
}
static inline PyObject* PyLong_FromUInt64(uint64_t value)
{
Py_BUILD_ASSERT(sizeof(unsigned long long) >= 8);
return PyLong_FromUnsignedLongLong(value);
}
static inline int PyLong_AsInt32(PyObject *obj, int32_t *pvalue)
{
Py_BUILD_ASSERT(sizeof(int) == 4);
int value = PyLong_AsInt(obj);
if (value == -1 && PyErr_Occurred()) {
return -1;
}
*pvalue = (int32_t)value;
return 0;
}
static inline int PyLong_AsInt64(PyObject *obj, int64_t *pvalue)
{
Py_BUILD_ASSERT(sizeof(long long) == 8);
long long value = PyLong_AsLongLong(obj);
if (value == -1 && PyErr_Occurred()) {
return -1;
}
*pvalue = (int64_t)value;
return 0;
}
static inline int PyLong_AsUInt32(PyObject *obj, uint32_t *pvalue)
{
Py_BUILD_ASSERT(sizeof(long) >= 4);
unsigned long value = PyLong_AsUnsignedLong(obj);
if (value == (unsigned long)-1 && PyErr_Occurred()) {
return -1;
}
#if SIZEOF_LONG > 4
if ((unsigned long)UINT32_MAX < value) {
PyErr_SetString(PyExc_OverflowError,
"Python int too large to convert to C uint32_t");
return -1;
}
#endif
*pvalue = (uint32_t)value;
return 0;
}
static inline int PyLong_AsUInt64(PyObject *obj, uint64_t *pvalue)
{
Py_BUILD_ASSERT(sizeof(long long) == 8);
unsigned long long value = PyLong_AsUnsignedLongLong(obj);
if (value == (unsigned long long)-1 && PyErr_Occurred()) {
return -1;
}
*pvalue = (uint64_t)value;
return 0;
}
#endif
#ifdef __cplusplus #ifdef __cplusplus
} }
#endif #endif

@ -1 +1 @@
Subproject commit 452dd2d1705f6b2375369a6570c415beb3163f70 Subproject commit 42d761728d141d8462cd9943f4329f12fe62b155

View File

@ -7,6 +7,7 @@ import re
import shutil import shutil
import struct import struct
import subprocess import subprocess
import sys
from typing import Any from typing import Any
@ -112,27 +113,25 @@ V = {
"BROTLI": "1.1.0", "BROTLI": "1.1.0",
"FREETYPE": "2.13.3", "FREETYPE": "2.13.3",
"FRIBIDI": "1.0.16", "FRIBIDI": "1.0.16",
"HARFBUZZ": "10.0.1", "HARFBUZZ": "10.1.0",
"JPEGTURBO": "3.0.4", "JPEGTURBO": "3.1.0",
"LCMS2": "2.16", "LCMS2": "2.16",
"LIBPNG": "1.6.44", "LIBPNG": "1.6.44",
"LIBWEBP": "1.4.0", "LIBWEBP": "1.4.0",
"OPENJPEG": "2.5.2", "OPENJPEG": "2.5.3",
"TIFF": "4.6.0", "TIFF": "4.6.0",
"XZ": "5.6.3", "XZ": "5.6.3",
"ZLIB": "1.3.1", "ZLIBNG": "2.2.2",
} }
V["LIBPNG_DOTLESS"] = V["LIBPNG"].replace(".", "") V["LIBPNG_DOTLESS"] = V["LIBPNG"].replace(".", "")
V["LIBPNG_XY"] = "".join(V["LIBPNG"].split(".")[:2]) V["LIBPNG_XY"] = "".join(V["LIBPNG"].split(".")[:2])
V["ZLIB_DOTLESS"] = V["ZLIB"].replace(".", "")
# dependencies, listed in order of compilation # dependencies, listed in order of compilation
DEPS: dict[str, dict[str, Any]] = { DEPS: dict[str, dict[str, Any]] = {
"libjpeg": { "libjpeg": {
"url": f"{SF_PROJECTS}/libjpeg-turbo/files/{V['JPEGTURBO']}/FILENAME/download", "url": f"https://github.com/libjpeg-turbo/libjpeg-turbo/releases/download/{V['JPEGTURBO']}/libjpeg-turbo-{V['JPEGTURBO']}.tar.gz",
"filename": f"libjpeg-turbo-{V['JPEGTURBO']}.tar.gz", "filename": f"libjpeg-turbo-{V['JPEGTURBO']}.tar.gz",
"dir": f"libjpeg-turbo-{V['JPEGTURBO']}",
"license": ["README.ijg", "LICENSE.md"], "license": ["README.ijg", "LICENSE.md"],
"license_pattern": ( "license_pattern": (
"(LEGAL ISSUES\n============\n\n.+?)\n\nREFERENCES\n==========" "(LEGAL ISSUES\n============\n\n.+?)\n\nREFERENCES\n=========="
@ -155,28 +154,30 @@ DEPS: dict[str, dict[str, Any]] = {
cmd_copy("cjpeg-static.exe", "cjpeg.exe"), cmd_copy("cjpeg-static.exe", "cjpeg.exe"),
cmd_copy("djpeg-static.exe", "djpeg.exe"), cmd_copy("djpeg-static.exe", "djpeg.exe"),
], ],
"headers": ["j*.h"], "headers": ["jconfig.h", r"src\j*.h"],
"libs": ["libjpeg.lib"], "libs": ["libjpeg.lib"],
"bins": ["cjpeg.exe", "djpeg.exe"], "bins": ["cjpeg.exe", "djpeg.exe"],
}, },
"zlib": { "zlib": {
"url": "https://zlib.net/FILENAME", "url": f"https://github.com/zlib-ng/zlib-ng/archive/refs/tags/{V['ZLIBNG']}.tar.gz",
"filename": f"zlib{V['ZLIB_DOTLESS']}.zip", "filename": f"zlib-ng-{V['ZLIBNG']}.tar.gz",
"dir": f"zlib-{V['ZLIB']}", "license": "LICENSE.md",
"license": "README", "patch": {
"license_pattern": "Copyright notice:\n\n(.+)$", r"CMakeLists.txt": {
"set_target_properties(zlib PROPERTIES OUTPUT_NAME zlibstatic${{SUFFIX}})": "set_target_properties(zlib PROPERTIES OUTPUT_NAME zlib)", # noqa: E501
},
},
"build": [ "build": [
cmd_nmake(r"win32\Makefile.msc", "clean"), *cmds_cmake(
cmd_nmake(r"win32\Makefile.msc", "zlib.lib"), "zlib", "-DBUILD_SHARED_LIBS:BOOL=OFF", "-DZLIB_COMPAT:BOOL=ON"
cmd_copy("zlib.lib", "z.lib"), ),
], ],
"headers": [r"z*.h"], "headers": [r"z*.h"],
"libs": [r"*.lib"], "libs": [r"zlib.lib"],
}, },
"xz": { "xz": {
"url": f"https://github.com/tukaani-project/xz/releases/download/v{V['XZ']}/FILENAME", "url": f"https://github.com/tukaani-project/xz/releases/download/v{V['XZ']}/FILENAME",
"filename": f"xz-{V['XZ']}.tar.gz", "filename": f"xz-{V['XZ']}.tar.gz",
"dir": f"xz-{V['XZ']}",
"license": "COPYING", "license": "COPYING",
"build": [ "build": [
*cmds_cmake("liblzma", "-DBUILD_SHARED_LIBS:BOOL=OFF"), *cmds_cmake("liblzma", "-DBUILD_SHARED_LIBS:BOOL=OFF"),
@ -189,7 +190,6 @@ DEPS: dict[str, dict[str, Any]] = {
"libwebp": { "libwebp": {
"url": "http://downloads.webmproject.org/releases/webp/FILENAME", "url": "http://downloads.webmproject.org/releases/webp/FILENAME",
"filename": f"libwebp-{V['LIBWEBP']}.tar.gz", "filename": f"libwebp-{V['LIBWEBP']}.tar.gz",
"dir": f"libwebp-{V['LIBWEBP']}",
"license": "COPYING", "license": "COPYING",
"patch": { "patch": {
r"src\enc\picture_csp_enc.c": { r"src\enc\picture_csp_enc.c": {
@ -211,7 +211,6 @@ DEPS: dict[str, dict[str, Any]] = {
"libtiff": { "libtiff": {
"url": "https://download.osgeo.org/libtiff/FILENAME", "url": "https://download.osgeo.org/libtiff/FILENAME",
"filename": f"tiff-{V['TIFF']}.tar.gz", "filename": f"tiff-{V['TIFF']}.tar.gz",
"dir": f"tiff-{V['TIFF']}",
"license": "LICENSE.md", "license": "LICENSE.md",
"patch": { "patch": {
r"libtiff\tif_lzma.c": { r"libtiff\tif_lzma.c": {
@ -244,7 +243,6 @@ DEPS: dict[str, dict[str, Any]] = {
"url": f"{SF_PROJECTS}/libpng/files/libpng{V['LIBPNG_XY']}/{V['LIBPNG']}/" "url": f"{SF_PROJECTS}/libpng/files/libpng{V['LIBPNG_XY']}/{V['LIBPNG']}/"
f"lpng{V['LIBPNG_DOTLESS']}.zip/download", f"lpng{V['LIBPNG_DOTLESS']}.zip/download",
"filename": f"lpng{V['LIBPNG_DOTLESS']}.zip", "filename": f"lpng{V['LIBPNG_DOTLESS']}.zip",
"dir": f"lpng{V['LIBPNG_DOTLESS']}",
"license": "LICENSE", "license": "LICENSE",
"build": [ "build": [
*cmds_cmake("png_static", "-DPNG_SHARED:BOOL=OFF", "-DPNG_TESTS:BOOL=OFF"), *cmds_cmake("png_static", "-DPNG_SHARED:BOOL=OFF", "-DPNG_TESTS:BOOL=OFF"),
@ -258,7 +256,6 @@ DEPS: dict[str, dict[str, Any]] = {
"brotli": { "brotli": {
"url": f"https://github.com/google/brotli/archive/refs/tags/v{V['BROTLI']}.tar.gz", "url": f"https://github.com/google/brotli/archive/refs/tags/v{V['BROTLI']}.tar.gz",
"filename": f"brotli-{V['BROTLI']}.tar.gz", "filename": f"brotli-{V['BROTLI']}.tar.gz",
"dir": f"brotli-{V['BROTLI']}",
"license": "LICENSE", "license": "LICENSE",
"build": [ "build": [
*cmds_cmake(("brotlicommon", "brotlidec"), "-DBUILD_SHARED_LIBS:BOOL=OFF"), *cmds_cmake(("brotlicommon", "brotlidec"), "-DBUILD_SHARED_LIBS:BOOL=OFF"),
@ -269,7 +266,6 @@ DEPS: dict[str, dict[str, Any]] = {
"freetype": { "freetype": {
"url": "https://download.savannah.gnu.org/releases/freetype/FILENAME", "url": "https://download.savannah.gnu.org/releases/freetype/FILENAME",
"filename": f"freetype-{V['FREETYPE']}.tar.gz", "filename": f"freetype-{V['FREETYPE']}.tar.gz",
"dir": f"freetype-{V['FREETYPE']}",
"license": ["LICENSE.TXT", r"docs\FTL.TXT", r"docs\GPLv2.TXT"], "license": ["LICENSE.TXT", r"docs\FTL.TXT", r"docs\GPLv2.TXT"],
"patch": { "patch": {
r"builds\windows\vc2010\freetype.vcxproj": { r"builds\windows\vc2010\freetype.vcxproj": {
@ -304,7 +300,6 @@ DEPS: dict[str, dict[str, Any]] = {
"lcms2": { "lcms2": {
"url": f"{SF_PROJECTS}/lcms/files/lcms/{V['LCMS2']}/FILENAME/download", "url": f"{SF_PROJECTS}/lcms/files/lcms/{V['LCMS2']}/FILENAME/download",
"filename": f"lcms2-{V['LCMS2']}.tar.gz", "filename": f"lcms2-{V['LCMS2']}.tar.gz",
"dir": f"lcms2-{V['LCMS2']}",
"license": "LICENSE", "license": "LICENSE",
"patch": { "patch": {
r"Projects\VC2022\lcms2_static\lcms2_static.vcxproj": { r"Projects\VC2022\lcms2_static\lcms2_static.vcxproj": {
@ -330,7 +325,6 @@ DEPS: dict[str, dict[str, Any]] = {
"openjpeg": { "openjpeg": {
"url": f"https://github.com/uclouvain/openjpeg/archive/v{V['OPENJPEG']}.tar.gz", "url": f"https://github.com/uclouvain/openjpeg/archive/v{V['OPENJPEG']}.tar.gz",
"filename": f"openjpeg-{V['OPENJPEG']}.tar.gz", "filename": f"openjpeg-{V['OPENJPEG']}.tar.gz",
"dir": f"openjpeg-{V['OPENJPEG']}",
"license": "LICENSE", "license": "LICENSE",
"build": [ "build": [
*cmds_cmake( *cmds_cmake(
@ -345,7 +339,6 @@ DEPS: dict[str, dict[str, Any]] = {
# commit: Merge branch 'master' into msvc (matches 2.17.0 tag) # commit: Merge branch 'master' into msvc (matches 2.17.0 tag)
"url": "https://github.com/ImageOptim/libimagequant/archive/e4c1334be0eff290af5e2b4155057c2953a313ab.zip", "url": "https://github.com/ImageOptim/libimagequant/archive/e4c1334be0eff290af5e2b4155057c2953a313ab.zip",
"filename": "libimagequant-e4c1334be0eff290af5e2b4155057c2953a313ab.zip", "filename": "libimagequant-e4c1334be0eff290af5e2b4155057c2953a313ab.zip",
"dir": "libimagequant-e4c1334be0eff290af5e2b4155057c2953a313ab",
"license": "COPYRIGHT", "license": "COPYRIGHT",
"patch": { "patch": {
"CMakeLists.txt": { "CMakeLists.txt": {
@ -365,7 +358,6 @@ DEPS: dict[str, dict[str, Any]] = {
"harfbuzz": { "harfbuzz": {
"url": f"https://github.com/harfbuzz/harfbuzz/archive/{V['HARFBUZZ']}.zip", "url": f"https://github.com/harfbuzz/harfbuzz/archive/{V['HARFBUZZ']}.zip",
"filename": f"harfbuzz-{V['HARFBUZZ']}.zip", "filename": f"harfbuzz-{V['HARFBUZZ']}.zip",
"dir": f"harfbuzz-{V['HARFBUZZ']}",
"license": "COPYING", "license": "COPYING",
"build": [ "build": [
*cmds_cmake( *cmds_cmake(
@ -380,7 +372,6 @@ DEPS: dict[str, dict[str, Any]] = {
"fribidi": { "fribidi": {
"url": f"https://github.com/fribidi/fribidi/archive/v{V['FRIBIDI']}.zip", "url": f"https://github.com/fribidi/fribidi/archive/v{V['FRIBIDI']}.zip",
"filename": f"fribidi-{V['FRIBIDI']}.zip", "filename": f"fribidi-{V['FRIBIDI']}.zip",
"dir": f"fribidi-{V['FRIBIDI']}",
"license": "COPYING", "license": "COPYING",
"build": [ "build": [
cmd_copy(r"COPYING", rf"{{bin_dir}}\fribidi-{V['FRIBIDI']}-COPYING"), cmd_copy(r"COPYING", rf"{{bin_dir}}\fribidi-{V['FRIBIDI']}-COPYING"),
@ -517,7 +508,10 @@ def extract_dep(url: str, filename: str, prefs: dict[str, str]) -> None:
if sources_dir_abs != member_prefix: if sources_dir_abs != member_prefix:
msg = "Attempted Path Traversal in Tar File" msg = "Attempted Path Traversal in Tar File"
raise RuntimeError(msg) raise RuntimeError(msg)
tgz.extractall(sources_dir) if sys.version_info >= (3, 12):
tgz.extractall(sources_dir, filter="data")
else:
tgz.extractall(sources_dir)
else: else:
msg = "Unknown archive type: " + filename msg = "Unknown archive type: " + filename
raise RuntimeError(msg) raise RuntimeError(msg)
@ -760,6 +754,8 @@ def main() -> None:
} }
for k, v in DEPS.items(): for k, v in DEPS.items():
if "dir" not in v:
v["dir"] = re.sub(r"\.(tar\.gz|zip)", "", v["filename"])
prefs[f"dir_{k}"] = os.path.join(sources_dir, v["dir"]) prefs[f"dir_{k}"] = os.path.join(sources_dir, v["dir"])
print() print()
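
For illustration (a hypothetical snippet, not part of the build script), the new default derives each source directory from the archive filename when no explicit "dir" is given:

    import re

    filename = "zlib-ng-2.2.2.tar.gz"  # example entry from DEPS above
    print(re.sub(r"\.(tar\.gz|zip)", "", filename))  # zlib-ng-2.2.2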