Merge branch 'main' into jpeg-app-segments

commit 7df5cfea7c
Andrew Murray, 2025-03-04 08:51:08 +11:00, committed by GitHub
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
305 changed files with 6764 additions and 4694 deletions


@@ -1,100 +0,0 @@
skip_commits:
files:
- ".github/**/*"
- ".gitmodules"
- "docs/**/*"
- "wheels/**/*"
version: '{build}'
clone_folder: c:\pillow
init:
- ECHO %PYTHON%
#- ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
# Uncomment previous line to get RDP access during the build.
environment:
COVERAGE_CORE: sysmon
EXECUTABLE: python.exe
TEST_OPTIONS:
DEPLOY: YES
matrix:
- PYTHON: C:/Python312
ARCHITECTURE: x86
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2022
- PYTHON: C:/Python39-x64
ARCHITECTURE: AMD64
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
install:
- '%PYTHON%\%EXECUTABLE% --version'
- '%PYTHON%\%EXECUTABLE% -m pip install --upgrade pip'
- curl -fsSL -o pillow-test-images.zip https://github.com/python-pillow/test-images/archive/main.zip
- 7z x pillow-test-images.zip -oc:\
- xcopy /S /Y c:\test-images-main\* c:\pillow\tests\images
- curl -fsSL -o nasm-win64.zip https://raw.githubusercontent.com/python-pillow/pillow-depends/main/nasm-2.16.03-win64.zip
- 7z x nasm-win64.zip -oc:\
- choco install ghostscript --version=10.3.1
- path c:\nasm-2.16.03;C:\Program Files\gs\gs10.03.1\bin;%PATH%
- cd c:\pillow\winbuild\
- ps: |
c:\python39\python.exe c:\pillow\winbuild\build_prepare.py -v --depends=C:\pillow-depends\
c:\pillow\winbuild\build\build_dep_all.cmd
$host.SetShouldExit(0)
- path C:\pillow\winbuild\build\bin;%PATH%
build_script:
- cd c:\pillow
- winbuild\build\build_env.cmd
- '%PYTHON%\%EXECUTABLE% -m pip install -v -C raqm=vendor -C fribidi=vendor .'
- '%PYTHON%\%EXECUTABLE% selftest.py --installed'
test_script:
- cd c:\pillow
- '%PYTHON%\%EXECUTABLE% -m pip install pytest pytest-cov pytest-timeout defusedxml numpy olefile pyroma'
- c:\"Program Files (x86)"\"Windows Kits"\10\Debuggers\x86\gflags.exe /p /enable %PYTHON%\%EXECUTABLE%
- '%PYTHON%\%EXECUTABLE% -c "from PIL import Image"'
- '%PYTHON%\%EXECUTABLE% -m pytest -vx --cov PIL --cov Tests --cov-report term --cov-report xml Tests'
#- '%PYTHON%\%EXECUTABLE% test-installed.py -v -s %TEST_OPTIONS%' TODO TEST_OPTIONS with pytest?
after_test:
- curl -Os https://uploader.codecov.io/latest/windows/codecov.exe
- .\codecov.exe --file coverage.xml --name %PYTHON% --flags AppVeyor
matrix:
fast_finish: true
cache:
- '%LOCALAPPDATA%\pip\Cache'
artifacts:
- path: pillow\*.egg
name: egg
- path: pillow\*.whl
name: wheel
before_deploy:
- cd c:\pillow
- '%PYTHON%\%EXECUTABLE% -m pip wheel -v -C raqm=vendor -C fribidi=vendor .'
- ps: Get-ChildItem .\*.whl | % { Push-AppveyorArtifact $_.FullName -FileName $_.Name }
deploy:
provider: S3
region: us-west-2
access_key_id: AKIAIRAXC62ZNTVQJMOQ
secret_access_key:
secure: Hwb6klTqtBeMgxAjRoDltiiqpuH8xbwD4UooDzBSiCWXjuFj1lyl4kHgHwTCCGqi
bucket: pillow-nightly
folder: win/$(APPVEYOR_BUILD_NUMBER)/
artifact: /.*egg|wheel/
on:
APPVEYOR_REPO_NAME: python-pillow/Pillow
branch: main
deploy: YES
# Uncomment the following lines to get RDP access after the build/test and block for
# up to the timeout limit (~1hr)
#
#on_finish:
#- ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))


@@ -2,8 +2,4 @@
# gather the coverage data # gather the coverage data
python3 -m pip install coverage python3 -m pip install coverage
if [[ $MATRIX_DOCKER ]]; then python3 -m coverage xml
python3 -m coverage xml --ignore-errors
else
python3 -m coverage xml
fi


@@ -3,8 +3,5 @@
set -e set -e
python3 -m coverage erase python3 -m coverage erase
if [ $(uname) == "Darwin" ]; then
export CPPFLAGS="-I/usr/local/miniconda/include";
fi
make clean make clean
make install-coverage make install-coverage


@@ -2,12 +2,12 @@
aptget_update() aptget_update()
{ {
if [ ! -z $1 ]; then if [ -n "$1" ]; then
echo "" echo ""
echo "Retrying apt-get update..." echo "Retrying apt-get update..."
echo "" echo ""
fi fi
output=`sudo apt-get update 2>&1` output=$(sudo apt-get update 2>&1)
echo "$output" echo "$output"
if [[ $output == *[WE]:\ * ]]; then if [[ $output == *[WE]:\ * ]]; then
return 1 return 1
@ -21,7 +21,7 @@ set -e
if [[ $(uname) != CYGWIN* ]]; then if [[ $(uname) != CYGWIN* ]]; then
sudo apt-get -qq install libfreetype6-dev liblcms2-dev python3-tk\ sudo apt-get -qq install libfreetype6-dev liblcms2-dev python3-tk\
ghostscript libffi-dev libjpeg-turbo-progs libopenjp2-7-dev\ ghostscript libjpeg-turbo8-dev libopenjp2-7-dev\
cmake meson imagemagick libharfbuzz-dev libfribidi-dev\ cmake meson imagemagick libharfbuzz-dev libfribidi-dev\
sway wl-clipboard libopenblas-dev sway wl-clipboard libopenblas-dev
fi fi
@@ -30,6 +30,7 @@ python3 -m pip install --upgrade pip
python3 -m pip install --upgrade wheel python3 -m pip install --upgrade wheel
python3 -m pip install coverage python3 -m pip install coverage
python3 -m pip install defusedxml python3 -m pip install defusedxml
python3 -m pip install ipython
python3 -m pip install olefile python3 -m pip install olefile
python3 -m pip install -U pytest python3 -m pip install -U pytest
python3 -m pip install -U pytest-cov python3 -m pip install -U pytest-cov
@@ -37,12 +38,7 @@ python3 -m pip install -U pytest-timeout
python3 -m pip install pyroma python3 -m pip install pyroma
if [[ $(uname) != CYGWIN* ]]; then if [[ $(uname) != CYGWIN* ]]; then
# TODO Update condition when NumPy supports free-threading python3 -m pip install numpy
if [[ "$PYTHON_GIL" == "0" ]]; then
python3 -m pip install numpy --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple
else
python3 -m pip install numpy
fi
# PyQt6 doesn't support PyPy3 # PyQt6 doesn't support PyPy3
if [[ $GHA_PYTHON_VERSION == 3.* ]]; then if [[ $GHA_PYTHON_VERSION == 3.* ]]; then
@@ -52,10 +48,7 @@ if [[ $(uname) != CYGWIN* ]]; then
fi fi
# Pyroma uses non-isolated build and fails with old setuptools # Pyroma uses non-isolated build and fails with old setuptools
if [[ if [[ $GHA_PYTHON_VERSION == 3.9 ]]; then
$GHA_PYTHON_VERSION == pypy3.9
|| $GHA_PYTHON_VERSION == 3.9
]]; then
# To match pyproject.toml # To match pyproject.toml
python3 -m pip install "setuptools>=67.8" python3 -m pip install "setuptools>=67.8"
fi fi


@@ -1 +1 @@
cibuildwheel==2.19.2 cibuildwheel==2.23.0


@@ -1 +1,12 @@
mypy==1.11.0 mypy==1.15.0
IceSpringPySideStubs-PyQt6
IceSpringPySideStubs-PySide6
ipython
numpy
packaging
pytest
sphinx
types-atheris
types-defusedxml
types-olefile
types-setuptools

.ci/test.cmd (new file)

@@ -0,0 +1,3 @@
python.exe -c "from PIL import Image"
IF ERRORLEVEL 1 EXIT /B
python.exe -bb -m pytest -v -x -W always --cov PIL --cov Tests --cov-report term --cov-report xml Tests


@@ -4,4 +4,4 @@ set -e
python3 -c "from PIL import Image" python3 -c "from PIL import Image"
python3 -bb -m pytest -v -x -W always --cov PIL --cov Tests --cov-report term Tests $REVERSE python3 -bb -m pytest -v -x -W always --cov PIL --cov Tests --cov-report term --cov-report xml Tests $REVERSE


@@ -9,7 +9,7 @@ Please send a pull request to the `main` branch. Please include [documentation](
- Fork the Pillow repository. - Fork the Pillow repository.
- Create a branch from `main`. - Create a branch from `main`.
- Develop bug fixes, features, tests, etc. - Develop bug fixes, features, tests, etc.
- Run the test suite. You can enable GitHub Actions (https://github.com/MY-USERNAME/Pillow/actions) and [AppVeyor](https://ci.appveyor.com/projects/new) on your repo to catch test failures prior to the pull request, and [Codecov](https://codecov.io/gh) to see if the changed code is covered by tests. - Run the test suite. You can enable GitHub Actions (https://github.com/MY-USERNAME/Pillow/actions) on your repo to catch test failures prior to the pull request, and [Codecov](https://codecov.io/gh) to see if the changed code is covered by tests.
- Create a pull request to pull the changes from your branch to the Pillow `main`. - Create a pull request to pull the changes from your branch to the Pillow `main`.
### Guidelines ### Guidelines
@@ -17,9 +17,8 @@ Please send a pull request to the `main` branch. Please include [documentation](
- Separate code commits from reformatting commits. - Separate code commits from reformatting commits.
- Provide tests for any newly added code. - Provide tests for any newly added code.
- Follow PEP 8. - Follow PEP 8.
- When committing only documentation changes please include `[ci skip]` in the commit message to avoid running tests on AppVeyor. - When committing only documentation changes please include `[ci skip]` in the commit message to avoid running extra tests.
- Include [release notes](https://github.com/python-pillow/Pillow/tree/main/docs/releasenotes) as needed or appropriate with your bug fixes, feature additions and tests. - Include [release notes](https://github.com/python-pillow/Pillow/tree/main/docs/releasenotes) as needed or appropriate with your bug fixes, feature additions and tests.
- Do not add to the [changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst) for proposed changes, as that is updated after changes are merged.
## Reporting Issues ## Reporting Issues

.github/mergify.yml

@@ -9,7 +9,6 @@ pull_request_rules:
- status-success=Windows Test Successful - status-success=Windows Test Successful
- status-success=MinGW - status-success=MinGW
- status-success=Cygwin Test Successful - status-success=Cygwin Test Successful
- status-success=continuous-integration/appveyor/pr
actions: actions:
merge: merge:
method: merge method: merge


@@ -3,18 +3,19 @@ tag-template: "$NEXT_MINOR_VERSION"
change-template: '- $TITLE #$NUMBER [@$AUTHOR]' change-template: '- $TITLE #$NUMBER [@$AUTHOR]'
categories: categories:
- title: "Dependencies" - title: "Removals"
label: "Dependency" label: "Removal"
- title: "Deprecations" - title: "Deprecations"
label: "Deprecation" label: "Deprecation"
- title: "Documentation" - title: "Documentation"
label: "Documentation" label: "Documentation"
- title: "Removals" - title: "Dependencies"
label: "Removal" label: "Dependency"
- title: "Testing" - title: "Testing"
label: "Testing" label: "Testing"
- title: "Type hints" - title: "Type hints"
label: "Type hints" label: "Type hints"
- title: "Other changes"
exclude-labels: exclude-labels:
- "changelog: skip" - "changelog: skip"
@@ -23,6 +24,4 @@ template: |
https://pillow.readthedocs.io/en/stable/releasenotes/$NEXT_MINOR_VERSION.html https://pillow.readthedocs.io/en/stable/releasenotes/$NEXT_MINOR_VERSION.html
## Changes
$CHANGES $CHANGES

.github/renovate.json

@@ -1,7 +1,7 @@
{ {
"$schema": "https://docs.renovatebot.com/renovate-schema.json", "$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [ "extends": [
"config:base" "config:recommended"
], ],
"labels": [ "labels": [
"Dependency" "Dependency"
@@ -9,9 +9,13 @@
"packageRules": [ "packageRules": [
{ {
"groupName": "github-actions", "groupName": "github-actions",
"matchManagers": ["github-actions"], "matchManagers": [
"separateMajorMinor": "false" "github-actions"
],
"separateMajorMinor": false
} }
], ],
"schedule": ["on the 3rd day of the month"] "schedule": [
"on the 3rd day of the month"
]
} }


@@ -6,11 +6,13 @@ on:
- "**" - "**"
paths: paths:
- ".github/workflows/cifuzz.yml" - ".github/workflows/cifuzz.yml"
- ".github/workflows/wheels-dependencies.sh"
- "**.c" - "**.c"
- "**.h" - "**.h"
pull_request: pull_request:
paths: paths:
- ".github/workflows/cifuzz.yml" - ".github/workflows/cifuzz.yml"
- ".github/workflows/wheels-dependencies.sh"
- "**.c" - "**.c"
- "**.h" - "**.h"
workflow_dispatch: workflow_dispatch:
@@ -24,8 +26,6 @@ concurrency:
jobs: jobs:
Fuzzing: Fuzzing:
# Disabled until google/oss-fuzz#11419 upgrades Python to 3.9+
if: false
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Build Fuzzers - name: Build Fuzzers


@@ -33,6 +33,8 @@ jobs:
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up Python - name: Set up Python
uses: actions/setup-python@v5 uses: actions/setup-python@v5


@@ -21,6 +21,8 @@ jobs:
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with:
persist-credentials: false
- name: pre-commit cache - name: pre-commit cache
uses: actions/cache@v4 uses: actions/cache@v4


@@ -2,24 +2,24 @@
set -e set -e
if [[ "$ImageOS" == "macos13" ]]; then
brew uninstall gradle maven
fi
brew install \ brew install \
freetype \ freetype \
ghostscript \ ghostscript \
jpeg-turbo \
libimagequant \ libimagequant \
libjpeg \ libraqm \
libtiff \ libtiff \
little-cms2 \ little-cms2 \
openjpeg \ openjpeg \
webp webp
if [[ "$ImageOS" == "macos13" ]]; then
brew install --ignore-dependencies libraqm
else
brew install libraqm
fi
export PKG_CONFIG_PATH="/usr/local/opt/openblas/lib/pkgconfig" export PKG_CONFIG_PATH="/usr/local/opt/openblas/lib/pkgconfig"
python3 -m pip install coverage python3 -m pip install coverage
python3 -m pip install defusedxml python3 -m pip install defusedxml
python3 -m pip install ipython
python3 -m pip install olefile python3 -m pip install olefile
python3 -m pip install -U pytest python3 -m pip install -U pytest
python3 -m pip install -U pytest-cov python3 -m pip install -U pytest-cov


@@ -6,7 +6,7 @@ on:
workflow_dispatch: workflow_dispatch:
permissions: permissions:
issues: write contents: read
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.ref }} group: ${{ github.workflow }}-${{ github.ref }}
@@ -15,6 +15,8 @@ concurrency:
jobs: jobs:
stale: stale:
if: github.repository_owner == 'python-pillow' if: github.repository_owner == 'python-pillow'
permissions:
issues: write
runs-on: ubuntu-latest runs-on: ubuntu-latest


@@ -48,9 +48,11 @@ jobs:
- name: Checkout Pillow - name: Checkout Pillow
uses: actions/checkout@v4 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Install Cygwin - name: Install Cygwin
uses: cygwin/cygwin-install-action@v4 uses: cygwin/cygwin-install-action@v5
with: with:
packages: > packages: >
gcc-g++ gcc-g++
@@ -74,6 +76,7 @@ jobs:
perl perl
python3${{ matrix.python-minor-version }}-cython python3${{ matrix.python-minor-version }}-cython
python3${{ matrix.python-minor-version }}-devel python3${{ matrix.python-minor-version }}-devel
python3${{ matrix.python-minor-version }}-ipython
python3${{ matrix.python-minor-version }}-numpy python3${{ matrix.python-minor-version }}-numpy
python3${{ matrix.python-minor-version }}-sip python3${{ matrix.python-minor-version }}-sip
python3${{ matrix.python-minor-version }}-tkinter python3${{ matrix.python-minor-version }}-tkinter
@@ -130,11 +133,12 @@ jobs:
- name: After success - name: After success
run: | run: |
bash.exe .ci/after_success.sh bash.exe .ci/after_success.sh
rm C:\cygwin\bin\bash.EXE
- name: Upload coverage - name: Upload coverage
uses: codecov/codecov-action@v4 uses: codecov/codecov-action@v5
with: with:
file: ./coverage.xml files: ./coverage.xml
flags: GHA_Cygwin flags: GHA_Cygwin
name: Cygwin Python 3.${{ matrix.python-minor-version }} name: Cygwin Python 3.${{ matrix.python-minor-version }}
token: ${{ secrets.CODECOV_ORG_TOKEN }} token: ${{ secrets.CODECOV_ORG_TOKEN }}


@@ -29,42 +29,46 @@ concurrency:
jobs: jobs:
build: build:
runs-on: ubuntu-latest runs-on: ${{ matrix.os }}
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
os: ["ubuntu-latest"]
docker: [ docker: [
# Run slower jobs first to give them a headstart and reduce waiting time
ubuntu-22.04-jammy-arm64v8,
ubuntu-24.04-noble-ppc64le,
ubuntu-24.04-noble-s390x,
# Then run the remainder
alpine, alpine,
amazon-2-amd64, amazon-2-amd64,
amazon-2023-amd64, amazon-2023-amd64,
arch, arch,
centos-stream-9-amd64, centos-stream-9-amd64,
centos-stream-10-amd64,
debian-12-bookworm-x86, debian-12-bookworm-x86,
debian-12-bookworm-amd64, debian-12-bookworm-amd64,
fedora-39-amd64,
fedora-40-amd64, fedora-40-amd64,
fedora-41-amd64,
gentoo, gentoo,
ubuntu-22.04-jammy-amd64, ubuntu-22.04-jammy-amd64,
ubuntu-24.04-noble-amd64, ubuntu-24.04-noble-amd64,
] ]
dockerTag: [main] dockerTag: [main]
include: include:
- docker: "ubuntu-22.04-jammy-arm64v8"
qemu-arch: "aarch64"
- docker: "ubuntu-24.04-noble-ppc64le" - docker: "ubuntu-24.04-noble-ppc64le"
os: "ubuntu-22.04"
qemu-arch: "ppc64le" qemu-arch: "ppc64le"
dockerTag: main
- docker: "ubuntu-24.04-noble-s390x" - docker: "ubuntu-24.04-noble-s390x"
os: "ubuntu-22.04"
qemu-arch: "s390x" qemu-arch: "s390x"
dockerTag: main
- docker: "ubuntu-24.04-noble-arm64v8"
os: "ubuntu-24.04-arm"
dockerTag: main
name: ${{ matrix.docker }} name: ${{ matrix.docker }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with:
persist-credentials: false
- name: Build system information - name: Build system information
run: python3 .github/workflows/system-info.py run: python3 .github/workflows/system-info.py
@@ -87,22 +91,21 @@ jobs:
- name: After success - name: After success
run: | run: |
PATH="$PATH:~/.local/bin"
docker start pillow_container docker start pillow_container
sudo docker cp pillow_container:/Pillow /Pillow
sudo chown -R runner /Pillow
pil_path=`docker exec pillow_container /vpy3/bin/python -c 'import os, PIL;print(os.path.realpath(os.path.dirname(PIL.__file__)))'` pil_path=`docker exec pillow_container /vpy3/bin/python -c 'import os, PIL;print(os.path.realpath(os.path.dirname(PIL.__file__)))'`
docker stop pillow_container docker stop pillow_container
sudo mkdir -p $pil_path sudo mkdir -p $pil_path
sudo cp src/PIL/*.py $pil_path sudo cp src/PIL/*.py $pil_path
cd /Pillow
.ci/after_success.sh .ci/after_success.sh
env:
MATRIX_DOCKER: ${{ matrix.docker }}
- name: Upload coverage - name: Upload coverage
uses: codecov/codecov-action@v4 uses: codecov/codecov-action@v5
with: with:
flags: GHA_Docker flags: GHA_Docker
name: ${{ matrix.docker }} name: ${{ matrix.docker }}
gcov: true
token: ${{ secrets.CODECOV_ORG_TOKEN }} token: ${{ secrets.CODECOV_ORG_TOKEN }}
success: success:


@@ -46,6 +46,8 @@ jobs:
steps: steps:
- name: Checkout Pillow - name: Checkout Pillow
uses: actions/checkout@v4 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up shell - name: Set up shell
run: echo "C:\msys64\usr\bin\" >> $env:GITHUB_PATH run: echo "C:\msys64\usr\bin\" >> $env:GITHUB_PATH
@@ -64,29 +66,28 @@ jobs:
mingw-w64-x86_64-libtiff \ mingw-w64-x86_64-libtiff \
mingw-w64-x86_64-libwebp \ mingw-w64-x86_64-libwebp \
mingw-w64-x86_64-openjpeg2 \ mingw-w64-x86_64-openjpeg2 \
mingw-w64-x86_64-python3-numpy \ mingw-w64-x86_64-python-numpy \
mingw-w64-x86_64-python3-olefile \ mingw-w64-x86_64-python-olefile \
mingw-w64-x86_64-python3-setuptools \ mingw-w64-x86_64-python-pip \
mingw-w64-x86_64-python-pytest \
mingw-w64-x86_64-python-pytest-cov \
mingw-w64-x86_64-python-pytest-timeout \
mingw-w64-x86_64-python-pyqt6 mingw-w64-x86_64-python-pyqt6
python3 -m ensurepip
python3 -m pip install pyroma pytest pytest-cov pytest-timeout
pushd depends && ./install_extra_test_images.sh && popd pushd depends && ./install_extra_test_images.sh && popd
- name: Build Pillow - name: Build Pillow
run: SETUPTOOLS_USE_DISTUTILS="stdlib" CFLAGS="-coverage" python3 -m pip install . run: CFLAGS="-coverage" python3 -m pip install .
- name: Test Pillow - name: Test Pillow
run: | run: |
python3 selftest.py --installed python3 selftest.py --installed
python3 -c "from PIL import Image" .ci/test.sh
python3 -m pytest -vx --cov PIL --cov Tests --cov-report term --cov-report xml Tests
- name: Upload coverage - name: Upload coverage
uses: codecov/codecov-action@v4 uses: codecov/codecov-action@v5
with: with:
file: ./coverage.xml files: ./coverage.xml
flags: GHA_Windows flags: GHA_Windows
name: "MSYS2 MinGW" name: "MSYS2 MinGW"
token: ${{ secrets.CODECOV_ORG_TOKEN }} token: ${{ secrets.CODECOV_ORG_TOKEN }}


@@ -40,6 +40,8 @@ jobs:
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with:
persist-credentials: false
- name: Build system information - name: Build system information
run: python3 .github/workflows/system-info.py run: python3 .github/workflows/system-info.py


@@ -31,29 +31,38 @@ env:
jobs: jobs:
build: build:
runs-on: windows-latest runs-on: ${{ matrix.os }}
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
python-version: ["pypy3.10", "pypy3.9", "3.9", "3.10", "3.11", "3.12", "3.13"] python-version: ["pypy3.11", "pypy3.10", "3.10", "3.11", "3.12", "3.13", "3.14"]
architecture: ["x64"]
os: ["windows-latest"]
include:
# Test the oldest Python on 32-bit
- { python-version: "3.9", architecture: "x86", os: "windows-2019" }
timeout-minutes: 30 timeout-minutes: 30
name: Python ${{ matrix.python-version }} name: Python ${{ matrix.python-version }} (${{ matrix.architecture }})
steps: steps:
- name: Checkout Pillow - name: Checkout Pillow
uses: actions/checkout@v4 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Checkout cached dependencies - name: Checkout cached dependencies
uses: actions/checkout@v4 uses: actions/checkout@v4
with: with:
persist-credentials: false
repository: python-pillow/pillow-depends repository: python-pillow/pillow-depends
path: winbuild\depends path: winbuild\depends
- name: Checkout extra test images - name: Checkout extra test images
uses: actions/checkout@v4 uses: actions/checkout@v4
with: with:
persist-credentials: false
repository: python-pillow/test-images repository: python-pillow/test-images
path: Tests\test-images path: Tests\test-images
@@ -63,22 +72,21 @@ jobs:
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
allow-prereleases: true allow-prereleases: true
architecture: ${{ matrix.architecture }}
cache: pip cache: pip
cache-dependency-path: ".github/workflows/test-windows.yml" cache-dependency-path: ".github/workflows/test-windows.yml"
- name: Print build system information - name: Print build system information
run: python3 .github/workflows/system-info.py run: python3 .github/workflows/system-info.py
- name: Install Python dependencies - name: Upgrade pip
run: > run: |
python3 -m pip install python3 -m pip install --upgrade pip
coverage>=7.4.2
defusedxml - name: Install CPython dependencies
olefile if: "!contains(matrix.python-version, 'pypy') && matrix.architecture != 'x86'"
pyroma run: |
pytest python3 -m pip install PyQt6
pytest-cov
pytest-timeout
- name: Install dependencies - name: Install dependencies
id: install id: install
@@ -86,8 +94,8 @@ jobs:
choco install nasm --no-progress choco install nasm --no-progress
echo "C:\Program Files\NASM" >> $env:GITHUB_PATH echo "C:\Program Files\NASM" >> $env:GITHUB_PATH
choco install ghostscript --version=10.3.1 --no-progress choco install ghostscript --version=10.4.0 --no-progress
echo "C:\Program Files\gs\gs10.00.0\bin" >> $env:GITHUB_PATH echo "C:\Program Files\gs\gs10.04.0\bin" >> $env:GITHUB_PATH
# Install extra test images # Install extra test images
xcopy /S /Y Tests\test-images\* Tests\images xcopy /S /Y Tests\test-images\* Tests\images
@@ -178,7 +186,7 @@ jobs:
- name: Build Pillow - name: Build Pillow
run: | run: |
$FLAGS="-C raqm=vendor -C fribidi=vendor" $FLAGS="-C raqm=vendor -C fribidi=vendor"
cmd /c "winbuild\build\build_env.cmd && $env:pythonLocation\python.exe -m pip install -v $FLAGS ." cmd /c "winbuild\build\build_env.cmd && $env:pythonLocation\python.exe -m pip install -v $FLAGS .[tests]"
& $env:pythonLocation\python.exe selftest.py --installed & $env:pythonLocation\python.exe selftest.py --installed
shell: pwsh shell: pwsh
@@ -190,8 +198,8 @@ jobs:
- name: Test Pillow - name: Test Pillow
run: | run: |
path %GITHUB_WORKSPACE%\\winbuild\\build\\bin;%PATH% path %GITHUB_WORKSPACE%\winbuild\build\bin;%PATH%
python.exe -m pytest -vx -W always --cov PIL --cov Tests --cov-report term --cov-report xml Tests .ci\test.cmd
shell: cmd shell: cmd
- name: Prepare to upload errors - name: Prepare to upload errors
@@ -213,9 +221,9 @@ jobs:
shell: pwsh shell: pwsh
- name: Upload coverage - name: Upload coverage
uses: codecov/codecov-action@v4 uses: codecov/codecov-action@v5
with: with:
file: ./coverage.xml files: ./coverage.xml
flags: GHA_Windows flags: GHA_Windows
name: ${{ runner.os }} Python ${{ matrix.python-version }} name: ${{ runner.os }} Python ${{ matrix.python-version }}
token: ${{ secrets.CODECOV_ORG_TOKEN }} token: ${{ secrets.CODECOV_ORG_TOKEN }}


@@ -37,12 +37,14 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
os: [ os: [
"macos-14", "macos-latest",
"ubuntu-latest", "ubuntu-latest",
] ]
python-version: [ python-version: [
"pypy3.11",
"pypy3.10", "pypy3.10",
"pypy3.9", "3.14",
"3.13t",
"3.13", "3.13",
"3.12", "3.12",
"3.11", "3.11",
@@ -53,21 +55,22 @@ jobs:
- { python-version: "3.11", PYTHONOPTIMIZE: 1, REVERSE: "--reverse" } - { python-version: "3.11", PYTHONOPTIMIZE: 1, REVERSE: "--reverse" }
- { python-version: "3.10", PYTHONOPTIMIZE: 2 } - { python-version: "3.10", PYTHONOPTIMIZE: 2 }
# Free-threaded # Free-threaded
- { os: "ubuntu-latest", python-version: "3.13-dev", disable-gil: true } - { python-version: "3.13t", disable-gil: true }
# M1 only available for 3.10+ # M1 only available for 3.10+
- { os: "macos-13", python-version: "3.9" } - { os: "macos-13", python-version: "3.9" }
exclude: exclude:
- { os: "macos-14", python-version: "3.9" } - { os: "macos-latest", python-version: "3.9" }
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
name: ${{ matrix.os }} Python ${{ matrix.python-version }} ${{ matrix.disable-gil && 'free-threaded' || '' }} name: ${{ matrix.os }} Python ${{ matrix.python-version }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5 uses: Quansight-Labs/setup-python@v5
if: "${{ !matrix.disable-gil }}"
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
allow-prereleases: true allow-prereleases: true
@@ -76,13 +79,6 @@ jobs:
".ci/*.sh" ".ci/*.sh"
"pyproject.toml" "pyproject.toml"
- name: Set up Python ${{ matrix.python-version }} (free-threaded)
uses: deadsnakes/action@v3.1.0
if: "${{ matrix.disable-gil }}"
with:
python-version: ${{ matrix.python-version }}
nogil: ${{ matrix.disable-gil }}
- name: Set PYTHON_GIL - name: Set PYTHON_GIL
if: "${{ matrix.disable-gil }}" if: "${{ matrix.disable-gil }}"
run: | run: |
@@ -115,7 +111,7 @@ jobs:
GHA_PYTHON_VERSION: ${{ matrix.python-version }} GHA_PYTHON_VERSION: ${{ matrix.python-version }}
- name: Register gcc problem matcher - name: Register gcc problem matcher
if: "matrix.os == 'ubuntu-latest' && matrix.python-version == '3.12'" if: "matrix.os == 'ubuntu-latest' && matrix.python-version == '3.13'"
run: echo "::add-matcher::.github/problem-matchers/gcc.json" run: echo "::add-matcher::.github/problem-matchers/gcc.json"
- name: Build - name: Build
@@ -155,11 +151,10 @@ jobs:
.ci/after_success.sh .ci/after_success.sh
- name: Upload coverage - name: Upload coverage
uses: codecov/codecov-action@v4 uses: codecov/codecov-action@v5
with: with:
flags: ${{ matrix.os == 'ubuntu-latest' && 'GHA_Ubuntu' || 'GHA_macOS' }} flags: ${{ matrix.os == 'ubuntu-latest' && 'GHA_Ubuntu' || 'GHA_macOS' }}
name: ${{ matrix.os }} Python ${{ matrix.python-version }} name: ${{ matrix.os }} Python ${{ matrix.python-version }}
gcov: true
token: ${{ secrets.CODECOV_ORG_TOKEN }} token: ${{ secrets.CODECOV_ORG_TOKEN }}
success: success:


@@ -1,11 +1,33 @@
#!/bin/bash #!/bin/bash
# Define custom utilities
# Test for macOS with [ -n "$IS_MACOS" ] # Setup that needs to be done before multibuild utils are invoked
if [ -z "$IS_MACOS" ]; then PROJECTDIR=$(pwd)
export MB_ML_LIBC=${AUDITWHEEL_POLICY::9} if [[ "$(uname -s)" == "Darwin" ]]; then
export MB_ML_VER=${AUDITWHEEL_POLICY:9} # Safety check - macOS builds require that CIBW_ARCHS is set, and that it
# only contains a single value (even though cibuildwheel allows multiple
# values in CIBW_ARCHS).
if [[ -z "$CIBW_ARCHS" ]]; then
echo "ERROR: Pillow macOS builds require CIBW_ARCHS be defined."
exit 1
fi
if [[ "$CIBW_ARCHS" == *" "* ]]; then
echo "ERROR: Pillow macOS builds only support a single architecture in CIBW_ARCHS."
exit 1
fi
# Build macOS dependencies in `build/darwin`
# Install them into `build/deps/darwin`
WORKDIR=$(pwd)/build/darwin
BUILD_PREFIX=$(pwd)/build/deps/darwin
else
# Build prefix will default to /usr/local
WORKDIR=$(pwd)/build
MB_ML_LIBC=${AUDITWHEEL_POLICY::9}
MB_ML_VER=${AUDITWHEEL_POLICY:9}
fi fi
export PLAT=$CIBW_ARCHS PLAT=$CIBW_ARCHS
# Define custom utilities
source wheels/multibuild/common_utils.sh source wheels/multibuild/common_utils.sh
source wheels/multibuild/library_builders.sh source wheels/multibuild/library_builders.sh
if [ -z "$IS_MACOS" ]; then if [ -z "$IS_MACOS" ]; then
@@ -15,90 +37,111 @@ fi
ARCHIVE_SDIR=pillow-depends-main ARCHIVE_SDIR=pillow-depends-main
# Package versions for fresh source builds # Package versions for fresh source builds
FREETYPE_VERSION=2.13.2 FREETYPE_VERSION=2.13.3
HARFBUZZ_VERSION=8.5.0 HARFBUZZ_VERSION=10.4.0
LIBPNG_VERSION=1.6.43 LIBPNG_VERSION=1.6.47
JPEGTURBO_VERSION=3.0.3 JPEGTURBO_VERSION=3.1.0
OPENJPEG_VERSION=2.5.2 OPENJPEG_VERSION=2.5.3
XZ_VERSION=5.4.5 XZ_VERSION=5.6.4
TIFF_VERSION=4.6.0 TIFF_VERSION=4.6.0
LCMS2_VERSION=2.16 LCMS2_VERSION=2.17
if [[ -n "$IS_MACOS" ]]; then ZLIB_NG_VERSION=2.2.4
GIFLIB_VERSION=5.2.2 LIBWEBP_VERSION=1.5.0
else
GIFLIB_VERSION=5.2.1
fi
if [[ -n "$IS_MACOS" ]] || [[ "$MB_ML_VER" != 2014 ]]; then
ZLIB_VERSION=1.3.1
else
ZLIB_VERSION=1.2.8
fi
LIBWEBP_VERSION=1.4.0
BZIP2_VERSION=1.0.8 BZIP2_VERSION=1.0.8
LIBXCB_VERSION=1.17.0 LIBXCB_VERSION=1.17.0
BROTLI_VERSION=1.1.0 BROTLI_VERSION=1.1.0
if [[ -n "$IS_MACOS" ]] && [[ "$CIBW_ARCHS" == "x86_64" ]]; then function build_pkg_config {
function build_openjpeg { if [ -e pkg-config-stamp ]; then return; fi
local out_dir=$(fetch_unpack https://github.com/uclouvain/openjpeg/archive/v${OPENJPEG_VERSION}.tar.gz openjpeg-${OPENJPEG_VERSION}.tar.gz) # This essentially duplicates the Homebrew recipe
(cd $out_dir \ CFLAGS="$CFLAGS -Wno-int-conversion" build_simple pkg-config 0.29.2 https://pkg-config.freedesktop.org/releases tar.gz \
&& cmake -DCMAKE_INSTALL_PREFIX=$BUILD_PREFIX -DCMAKE_INSTALL_NAME_DIR=$BUILD_PREFIX/lib . \ --disable-debug --disable-host-tool --with-internal-glib \
&& make install) --with-pc-path=$BUILD_PREFIX/share/pkgconfig:$BUILD_PREFIX/lib/pkgconfig \
touch openjpeg-stamp --with-system-include-path=$(xcrun --show-sdk-path --sdk macosx)/usr/include
} export PKG_CONFIG=$BUILD_PREFIX/bin/pkg-config
fi touch pkg-config-stamp
}
function build_zlib_ng {
if [ -e zlib-stamp ]; then return; fi
fetch_unpack https://github.com/zlib-ng/zlib-ng/archive/$ZLIB_NG_VERSION.tar.gz zlib-ng-$ZLIB_NG_VERSION.tar.gz
(cd zlib-ng-$ZLIB_NG_VERSION \
&& ./configure --prefix=$BUILD_PREFIX --zlib-compat \
&& make -j4 \
&& make install)
if [ -n "$IS_MACOS" ]; then
# Ensure that on macOS, the library name is an absolute path, not an
# @rpath, so that delocate picks up the right library (and doesn't need
# DYLD_LIBRARY_PATH to be set). The default Makefile doesn't have an
# option to control the install_name.
install_name_tool -id $BUILD_PREFIX/lib/libz.1.dylib $BUILD_PREFIX/lib/libz.1.dylib
fi
touch zlib-stamp
}
function build_brotli { function build_brotli {
local cmake=$(get_modern_cmake) if [ -e brotli-stamp ]; then return; fi
local out_dir=$(fetch_unpack https://github.com/google/brotli/archive/v$BROTLI_VERSION.tar.gz brotli-1.1.0.tar.gz) local out_dir=$(fetch_unpack https://github.com/google/brotli/archive/v$BROTLI_VERSION.tar.gz brotli-$BROTLI_VERSION.tar.gz)
(cd $out_dir \ (cd $out_dir \
&& $cmake -DCMAKE_INSTALL_PREFIX=$BUILD_PREFIX -DCMAKE_INSTALL_NAME_DIR=$BUILD_PREFIX/lib . \ && cmake -DCMAKE_INSTALL_PREFIX=$BUILD_PREFIX -DCMAKE_INSTALL_LIBDIR=$BUILD_PREFIX/lib -DCMAKE_INSTALL_NAME_DIR=$BUILD_PREFIX/lib . \
&& make install) && make install)
if [[ "$MB_ML_LIBC" == "manylinux" ]]; then touch brotli-stamp
cp /usr/local/lib64/libbrotli* /usr/local/lib }
cp /usr/local/lib64/pkgconfig/libbrotli* /usr/local/lib/pkgconfig
fi function build_harfbuzz {
if [ -e harfbuzz-stamp ]; then return; fi
python3 -m pip install meson ninja
local out_dir=$(fetch_unpack https://github.com/harfbuzz/harfbuzz/releases/download/$HARFBUZZ_VERSION/harfbuzz-$HARFBUZZ_VERSION.tar.xz harfbuzz-$HARFBUZZ_VERSION.tar.xz)
(cd $out_dir \
&& meson setup build --prefix=$BUILD_PREFIX --libdir=$BUILD_PREFIX/lib --buildtype=release -Dfreetype=enabled -Dglib=disabled)
(cd $out_dir/build \
&& meson install)
touch harfbuzz-stamp
} }
function build { function build {
if [[ -n "$IS_MACOS" ]] && [[ "$CIBW_ARCHS" == "arm64" ]]; then
sudo chown -R runner /usr/local
fi
build_xz build_xz
if [ -z "$IS_ALPINE" ] && [ -z "$IS_MACOS" ]; then if [ -z "$IS_ALPINE" ] && [ -z "$SANITIZER" ] && [ -z "$IS_MACOS" ]; then
yum remove -y zlib-devel yum remove -y zlib-devel
fi fi
build_new_zlib build_zlib_ng
build_simple xcb-proto 1.17.0 https://xorg.freedesktop.org/archive/individual/proto build_simple xcb-proto 1.17.0 https://xorg.freedesktop.org/archive/individual/proto
if [ -n "$IS_MACOS" ]; then if [ -n "$IS_MACOS" ]; then
build_simple xorgproto 2024.1 https://www.x.org/pub/individual/proto build_simple xorgproto 2024.1 https://www.x.org/pub/individual/proto
build_simple libXau 1.0.11 https://www.x.org/pub/individual/lib build_simple libXau 1.0.12 https://www.x.org/pub/individual/lib
build_simple libpthread-stubs 0.5 https://xcb.freedesktop.org/dist build_simple libpthread-stubs 0.5 https://xcb.freedesktop.org/dist
if [[ "$CIBW_ARCHS" == "arm64" ]]; then
cp /usr/local/share/pkgconfig/xcb-proto.pc /usr/local/lib/pkgconfig
fi
else else
sed s/\${pc_sysrootdir\}// /usr/local/share/pkgconfig/xcb-proto.pc > /usr/local/lib/pkgconfig/xcb-proto.pc sed s/\${pc_sysrootdir\}// $BUILD_PREFIX/share/pkgconfig/xcb-proto.pc > $BUILD_PREFIX/lib/pkgconfig/xcb-proto.pc
fi fi
build_simple libxcb $LIBXCB_VERSION https://www.x.org/releases/individual/lib build_simple libxcb $LIBXCB_VERSION https://www.x.org/releases/individual/lib
build_libjpeg_turbo build_libjpeg_turbo
build_tiff if [ -n "$IS_MACOS" ]; then
# Custom tiff build to include jpeg; by default, configure won't include
# headers/libs in the custom macOS prefix. Explicitly disable webp,
# libdeflate and zstd, because on x86_64 macs, it will pick up the
# Homebrew versions of those libraries from /usr/local.
build_simple tiff $TIFF_VERSION https://download.osgeo.org/libtiff tar.gz \
--with-jpeg-include-dir=$BUILD_PREFIX/include --with-jpeg-lib-dir=$BUILD_PREFIX/lib \
--disable-webp --disable-libdeflate --disable-zstd
else
build_tiff
fi
build_libpng build_libpng
build_lcms2 build_lcms2
build_openjpeg build_openjpeg
if [ -f /usr/local/lib64/libopenjp2.so ]; then
cp /usr/local/lib64/libopenjp2.so /usr/local/lib
fi
ORIGINAL_CFLAGS=$CFLAGS webp_cflags="-O3 -DNDEBUG"
CFLAGS="$CFLAGS -O3 -DNDEBUG"
if [[ -n "$IS_MACOS" ]]; then if [[ -n "$IS_MACOS" ]]; then
CFLAGS="$CFLAGS -Wl,-headerpad_max_install_names" webp_cflags="$webp_cflags -Wl,-headerpad_max_install_names"
fi fi
build_libwebp CFLAGS="$CFLAGS $webp_cflags" build_simple libwebp $LIBWEBP_VERSION \
CFLAGS=$ORIGINAL_CFLAGS https://storage.googleapis.com/downloads.webmproject.org/releases/webp tar.gz \
--enable-libwebpmux --enable-libwebpdemux
build_brotli build_brotli
@@ -109,42 +152,50 @@ function build {
build_freetype build_freetype
fi fi
if [ -z "$IS_MACOS" ]; then build_harfbuzz
export FREETYPE_LIBS=-lfreetype
export FREETYPE_CFLAGS=-I/usr/local/include/freetype2/
fi
build_simple harfbuzz $HARFBUZZ_VERSION https://github.com/harfbuzz/harfbuzz/releases/download/$HARFBUZZ_VERSION tar.xz --with-freetype=yes --with-glib=no
if [ -z "$IS_MACOS" ]; then
export FREETYPE_LIBS=""
export FREETYPE_CFLAGS=""
fi
} }
# Perform all dependency builds in the build subfolder.
mkdir -p $WORKDIR
pushd $WORKDIR > /dev/null
# Any stuff that you need to do before you start building the wheels # Any stuff that you need to do before you start building the wheels
# Runs in the root directory of this repository. # Runs in the root directory of this repository.
curl -fsSL -o pillow-depends-main.zip https://github.com/python-pillow/pillow-depends/archive/main.zip if [[ ! -d $WORKDIR/pillow-depends-main ]]; then
untar pillow-depends-main.zip if [[ ! -f $PROJECTDIR/pillow-depends-main.zip ]]; then
echo "Download pillow dependency sources..."
curl -fSL -o $PROJECTDIR/pillow-depends-main.zip https://github.com/python-pillow/pillow-depends/archive/main.zip
fi
echo "Unpacking pillow dependency sources..."
untar $PROJECTDIR/pillow-depends-main.zip
fi
if [[ -n "$IS_MACOS" ]]; then if [[ -n "$IS_MACOS" ]]; then
# libtiff and libxcb cause a conflict with building libtiff and libxcb # Homebrew (or similar packaging environments) install can contain some of
# libxau and libxdmcp cause an issue on macOS < 11 # the libraries that we're going to build. However, they may be compiled
# remove cairo to fix building harfbuzz on arm64 # with a MACOSX_DEPLOYMENT_TARGET that doesn't match what we want to use,
# remove lcms2 and libpng to fix building openjpeg on arm64 # and they may bring in other dependencies that we don't want. The same will
# remove jpeg-turbo to avoid inclusion on arm64 # be true of any other locations on the path. To avoid conflicts, strip the
# remove webp and zstd to avoid inclusion on x86_64 # path down to the bare minimum (which, on macOS, won't include any
# curl from brew requires zstd, use system curl # development dependencies).
brew remove --ignore-dependencies libpng libtiff libxcb libxau libxdmcp curl cairo lcms2 zstd export PATH="$BUILD_PREFIX/bin:$(dirname $(which python3)):/usr/bin:/bin:/usr/sbin:/sbin:/Library/Apple/usr/bin"
if [[ "$CIBW_ARCHS" == "arm64" ]]; then export CMAKE_PREFIX_PATH=$BUILD_PREFIX
brew remove --ignore-dependencies jpeg-turbo
else
brew remove --ignore-dependencies webp
fi
brew install pkg-config # Ensure the basic structure of the build prefix directory exists.
mkdir -p "$BUILD_PREFIX/bin"
mkdir -p "$BUILD_PREFIX/lib"
# Ensure pkg-config is available
build_pkg_config
# Ensure cmake is available
python3 -m pip install cmake
fi fi
wrap_wheel_builder build wrap_wheel_builder build
# Return to the project root to finish the build
popd > /dev/null
# Append licenses # Append licenses
for filename in wheels/dependency_licenses/*; do for filename in wheels/dependency_licenses/*; do
echo -e "\n\n----\n\n$(basename $filename | cut -f 1 -d '.')\n" | cat >> LICENSE echo -e "\n\n----\n\n$(basename $filename | cut -f 1 -d '.')\n" | cat >> LICENSE


@@ -11,6 +11,9 @@ if ("$venv" -like "*\cibw-run-*\pp*-win_amd64\*") {
$env:path += ";$pillow\winbuild\build\bin\" $env:path += ";$pillow\winbuild\build\bin\"
& "$venv\Scripts\activate.ps1" & "$venv\Scripts\activate.ps1"
& reg add "HKLM\SOFTWARE\Microsoft\Windows NT\CurrentVersion\Image File Execution Options\python.exe" /v "GlobalFlag" /t REG_SZ /d "0x02000000" /f & reg add "HKLM\SOFTWARE\Microsoft\Windows NT\CurrentVersion\Image File Execution Options\python.exe" /v "GlobalFlag" /t REG_SZ /d "0x02000000" /f
if ("$venv" -like "*\cibw-run-*-win_amd64\*") {
& python -m pip install numpy
}
cd $pillow cd $pillow
& python -VV & python -VV
if (!$?) { exit $LASTEXITCODE } if (!$?) { exit $LASTEXITCODE }


@@ -1,26 +1,31 @@
#!/bin/bash #!/bin/bash
set -e set -e
# Ensure fribidi is installed by the system.
if [[ "$OSTYPE" == "darwin"* ]]; then if [[ "$OSTYPE" == "darwin"* ]]; then
brew install fribidi # If Homebrew is on the path during the build, it may leak into the wheels.
export PKG_CONFIG_PATH="/usr/local/opt/openblas/lib/pkgconfig" # However, we *do* need Homebrew to provide a copy of fribidi for
if [ -f /opt/homebrew/lib/libfribidi.dylib ]; then # testing purposes so that we can verify the fribidi shim works as expected.
sudo cp /opt/homebrew/lib/libfribidi.dylib /usr/local/lib if [[ "$(uname -m)" == "x86_64" ]]; then
HOMEBREW_PREFIX=/usr/local
else
HOMEBREW_PREFIX=/opt/homebrew
fi fi
$HOMEBREW_PREFIX/bin/brew install fribidi
# Add the lib folder for fribidi so that the vendored library can be found.
# Don't use $HOMEWBREW_PREFIX/lib directly - use the lib folder where the
# installed copy of fribidi is cellared. This ensures we don't pick up the
# Homebrew version of any other library that we're dependent on (most notably,
# freetype).
export DYLD_LIBRARY_PATH=$(dirname $(realpath $HOMEBREW_PREFIX/lib/libfribidi.dylib))
elif [ "${AUDITWHEEL_POLICY::9}" == "musllinux" ]; then elif [ "${AUDITWHEEL_POLICY::9}" == "musllinux" ]; then
apk add curl fribidi apk add curl fribidi
else else
yum install -y fribidi yum install -y fribidi
fi fi
if [ "${AUDITWHEEL_POLICY::9}" != "musllinux" ]; then python3 -m pip install numpy
# TODO Update condition when NumPy supports free-threading
if [ $(python3 -c "import sysconfig;print(sysconfig.get_config_var('Py_GIL_DISABLED'))") == "1" ]; then
python3 -m pip install numpy --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple
else
python3 -m pip install numpy
fi
fi
if [ ! -d "test-images-main" ]; then if [ ! -d "test-images-main" ]; then
curl -fsSL -o pillow-test-images.zip https://github.com/python-pillow/test-images/archive/main.zip curl -fsSL -o pillow-test-images.zip https://github.com/python-pillow/test-images/archive/main.zip


@@ -13,6 +13,7 @@ on:
paths: paths:
- ".ci/requirements-cibw.txt" - ".ci/requirements-cibw.txt"
- ".github/workflows/wheel*" - ".github/workflows/wheel*"
- "pyproject.toml"
- "setup.py" - "setup.py"
- "wheels/*" - "wheels/*"
- "winbuild/build_prepare.py" - "winbuild/build_prepare.py"
@@ -23,6 +24,7 @@ on:
paths: paths:
- ".ci/requirements-cibw.txt" - ".ci/requirements-cibw.txt"
- ".github/workflows/wheel*" - ".github/workflows/wheel*"
- "pyproject.toml"
- "setup.py" - "setup.py"
- "wheels/*" - "wheels/*"
- "winbuild/build_prepare.py" - "winbuild/build_prepare.py"
@@ -40,63 +42,7 @@ env:
FORCE_COLOR: 1 FORCE_COLOR: 1
jobs: jobs:
build-1-QEMU-emulated-wheels: build-native-wheels:
if: github.event_name != 'schedule' && github.event_name != 'workflow_dispatch'
name: aarch64 ${{ matrix.python-version }} ${{ matrix.spec }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version:
- pp39
- pp310
- cp3{9,10,11}
- cp3{12,13}
spec:
- manylinux2014
- manylinux_2_28
- musllinux
exclude:
- { python-version: pp39, spec: musllinux }
- { python-version: pp310, spec: musllinux }
steps:
- uses: actions/checkout@v4
with:
submodules: true
- uses: actions/setup-python@v5
with:
python-version: "3.x"
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Install cibuildwheel
run: |
python3 -m pip install -r .ci/requirements-cibw.txt
- name: Build wheels
run: |
python3 -m cibuildwheel --output-dir wheelhouse
env:
# Build only the currently selected Linux architecture (so we can
# parallelise for speed).
CIBW_ARCHS: "aarch64"
# Likewise, select only one Python version per job to speed this up.
CIBW_BUILD: "${{ matrix.python-version }}-${{ matrix.spec == 'musllinux' && 'musllinux' || 'manylinux' }}*"
CIBW_PRERELEASE_PYTHONS: True
# Extra options for manylinux.
CIBW_MANYLINUX_AARCH64_IMAGE: ${{ matrix.spec }}
CIBW_MANYLINUX_PYPY_AARCH64_IMAGE: ${{ matrix.spec }}
- uses: actions/upload-artifact@v4
with:
name: dist-qemu-${{ matrix.python-version }}-${{ matrix.spec }}
path: ./wheelhouse/*.whl
build-2-native-wheels:
if: github.event_name != 'schedule' || github.repository_owner == 'python-pillow' if: github.event_name != 'schedule' || github.repository_owner == 'python-pillow'
name: ${{ matrix.name }} name: ${{ matrix.name }}
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
@@ -104,12 +50,23 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
include: include:
- name: "macOS x86_64" - name: "macOS 10.10 x86_64"
os: macos-13 os: macos-13
cibw_arch: x86_64 cibw_arch: x86_64
build: "cp3{9,10,11}*"
macosx_deployment_target: "10.10" macosx_deployment_target: "10.10"
- name: "macOS 10.13 x86_64"
os: macos-13
cibw_arch: x86_64
build: "cp3{12,13}*"
macosx_deployment_target: "10.13"
- name: "macOS 10.15 x86_64"
os: macos-13
cibw_arch: x86_64
build: "pp3*"
macosx_deployment_target: "10.15"
- name: "macOS arm64" - name: "macOS arm64"
os: macos-14 os: macos-latest
cibw_arch: arm64 cibw_arch: arm64
macosx_deployment_target: "11.0" macosx_deployment_target: "11.0"
- name: "manylinux2014 and musllinux x86_64" - name: "manylinux2014 and musllinux x86_64"
@@ -120,9 +77,18 @@ jobs:
cibw_arch: x86_64 cibw_arch: x86_64
build: "*manylinux*" build: "*manylinux*"
manylinux: "manylinux_2_28" manylinux: "manylinux_2_28"
- name: "manylinux2014 and musllinux aarch64"
os: ubuntu-24.04-arm
cibw_arch: aarch64
- name: "manylinux_2_28 aarch64"
os: ubuntu-24.04-arm
cibw_arch: aarch64
build: "*manylinux*"
manylinux: "manylinux_2_28"
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
persist-credentials: false
submodules: true submodules: true
- uses: actions/setup-python@v5 - uses: actions/setup-python@v5
@@ -139,15 +105,17 @@ jobs:
env: env:
CIBW_ARCHS: ${{ matrix.cibw_arch }} CIBW_ARCHS: ${{ matrix.cibw_arch }}
CIBW_BUILD: ${{ matrix.build }} CIBW_BUILD: ${{ matrix.build }}
CIBW_FREE_THREADED_SUPPORT: True CIBW_ENABLE: cpython-prerelease cpython-freethreading pypy
CIBW_MANYLINUX_AARCH64_IMAGE: ${{ matrix.manylinux }}
CIBW_MANYLINUX_PYPY_AARCH64_IMAGE: ${{ matrix.manylinux }}
CIBW_MANYLINUX_PYPY_X86_64_IMAGE: ${{ matrix.manylinux }} CIBW_MANYLINUX_PYPY_X86_64_IMAGE: ${{ matrix.manylinux }}
CIBW_MANYLINUX_X86_64_IMAGE: ${{ matrix.manylinux }} CIBW_MANYLINUX_X86_64_IMAGE: ${{ matrix.manylinux }}
CIBW_PRERELEASE_PYTHONS: True CIBW_SKIP: pp39-*
MACOSX_DEPLOYMENT_TARGET: ${{ matrix.macosx_deployment_target }} MACOSX_DEPLOYMENT_TARGET: ${{ matrix.macosx_deployment_target }}
- uses: actions/upload-artifact@v4 - uses: actions/upload-artifact@v4
with: with:
name: dist-${{ matrix.os }}-${{ matrix.cibw_arch }}${{ matrix.manylinux && format('-{0}', matrix.manylinux) }} name: dist-${{ matrix.os }}${{ matrix.macosx_deployment_target && format('-{0}', matrix.macosx_deployment_target) }}-${{ matrix.cibw_arch }}${{ matrix.manylinux && format('-{0}', matrix.manylinux) }}
path: ./wheelhouse/*.whl path: ./wheelhouse/*.whl
windows: windows:
@@ -163,10 +131,13 @@ jobs:
- cibw_arch: ARM64 - cibw_arch: ARM64
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with:
persist-credentials: false
- name: Checkout extra test images - name: Checkout extra test images
uses: actions/checkout@v4 uses: actions/checkout@v4
with: with:
persist-credentials: false
repository: python-pillow/test-images repository: python-pillow/test-images
path: Tests\test-images path: Tests\test-images
@@ -213,8 +184,8 @@ jobs:
CIBW_ARCHS: ${{ matrix.cibw_arch }} CIBW_ARCHS: ${{ matrix.cibw_arch }}
CIBW_BEFORE_ALL: "{package}\\winbuild\\build\\build_dep_all.cmd" CIBW_BEFORE_ALL: "{package}\\winbuild\\build\\build_dep_all.cmd"
CIBW_CACHE_PATH: "C:\\cibw" CIBW_CACHE_PATH: "C:\\cibw"
CIBW_FREE_THREADED_SUPPORT: True CIBW_ENABLE: cpython-prerelease cpython-freethreading pypy
CIBW_PRERELEASE_PYTHONS: True CIBW_SKIP: pp39-*
CIBW_TEST_SKIP: "*-win_arm64" CIBW_TEST_SKIP: "*-win_arm64"
CIBW_TEST_COMMAND: 'docker run --rm CIBW_TEST_COMMAND: 'docker run --rm
-v {project}:C:\pillow -v {project}:C:\pillow
@@ -242,13 +213,13 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up Python - name: Set up Python
uses: actions/setup-python@v5 uses: actions/setup-python@v5
with: with:
python-version: "3.x" python-version: "3.x"
cache: pip
cache-dependency-path: "Makefile"
- run: make sdist - run: make sdist
@@ -259,7 +230,7 @@ jobs:
scientific-python-nightly-wheels-publish: scientific-python-nightly-wheels-publish:
if: github.repository_owner == 'python-pillow' && (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') if: github.repository_owner == 'python-pillow' && (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch')
needs: [build-2-native-wheels, windows] needs: [build-native-wheels, windows]
runs-on: ubuntu-latest runs-on: ubuntu-latest
name: Upload wheels to scientific-python-nightly-wheels name: Upload wheels to scientific-python-nightly-wheels
steps: steps:
@@ -269,14 +240,14 @@ jobs:
path: dist path: dist
merge-multiple: true merge-multiple: true
- name: Upload wheels to scientific-python-nightly-wheels - name: Upload wheels to scientific-python-nightly-wheels
uses: scientific-python/upload-nightly-action@b67d7fcc0396e1128a474d1ab2b48aa94680f9fc # 0.5.0 uses: scientific-python/upload-nightly-action@82396a2ed4269ba06c6b2988bb4fd568ef3c3d6b # 0.6.1
with: with:
artifacts_path: dist artifacts_path: dist
anaconda_nightly_upload_token: ${{ secrets.ANACONDA_ORG_UPLOAD_TOKEN }} anaconda_nightly_upload_token: ${{ secrets.ANACONDA_ORG_UPLOAD_TOKEN }}
pypi-publish: pypi-publish:
if: github.repository_owner == 'python-pillow' && github.event_name == 'push' && startsWith(github.ref, 'refs/tags') if: github.repository_owner == 'python-pillow' && github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
needs: [build-1-QEMU-emulated-wheels, build-2-native-wheels, windows, sdist] needs: [build-native-wheels, windows, sdist]
runs-on: ubuntu-latest runs-on: ubuntu-latest
name: Upload release to PyPI name: Upload release to PyPI
environment: environment:
@@ -292,3 +263,5 @@ jobs:
merge-multiple: true merge-multiple: true
- name: Publish to PyPI - name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@release/v1 uses: pypa/gh-action-pypi-publish@release/v1
with:
attestations: true

.gitignore

@@ -19,6 +19,7 @@ lib64/
parts/ parts/
sdist/ sdist/
var/ var/
wheelhouse/
*.egg-info/ *.egg-info/
.installed.cfg .installed.cfg
*.egg *.egg
@@ -90,5 +91,9 @@ Tests/images/msp
Tests/images/picins Tests/images/picins
Tests/images/sunraster Tests/images/sunraster
# Test and dependency downloads
pillow-depends-main.zip
pillow-test-images.zip
# pyinstaller # pyinstaller
*.spec *.spec


@@ -1,17 +1,17 @@
repos: repos:
- repo: https://github.com/astral-sh/ruff-pre-commit - repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.5.0 rev: v0.9.9
hooks: hooks:
- id: ruff - id: ruff
args: [--exit-non-zero-on-fix] args: [--exit-non-zero-on-fix]
- repo: https://github.com/psf/black-pre-commit-mirror - repo: https://github.com/psf/black-pre-commit-mirror
rev: 24.4.2 rev: 25.1.0
hooks: hooks:
- id: black - id: black
- repo: https://github.com/PyCQA/bandit - repo: https://github.com/PyCQA/bandit
rev: 1.7.9 rev: 1.8.3
hooks: hooks:
- id: bandit - id: bandit
args: [--severity-level=high] args: [--severity-level=high]
@@ -24,7 +24,7 @@ repos:
exclude: (Makefile$|\.bat$|\.cmake$|\.eps$|\.fits$|\.gd$|\.opt$) exclude: (Makefile$|\.bat$|\.cmake$|\.eps$|\.fits$|\.gd$|\.opt$)
- repo: https://github.com/pre-commit/mirrors-clang-format - repo: https://github.com/pre-commit/mirrors-clang-format
rev: v18.1.8 rev: v19.1.7
hooks: hooks:
- id: clang-format - id: clang-format
types: [c] types: [c]
@@ -36,7 +36,7 @@ repos:
- id: rst-backticks - id: rst-backticks
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0 rev: v5.0.0
hooks: hooks:
- id: check-executables-have-shebangs - id: check-executables-have-shebangs
- id: check-shebang-scripts-are-executable - id: check-shebang-scripts-are-executable
@@ -50,29 +50,35 @@ repos:
exclude: ^.github/.*TEMPLATE|^Tests/(fonts|images)/ exclude: ^.github/.*TEMPLATE|^Tests/(fonts|images)/
- repo: https://github.com/python-jsonschema/check-jsonschema - repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.28.6 rev: 0.31.2
hooks: hooks:
- id: check-github-workflows - id: check-github-workflows
- id: check-readthedocs - id: check-readthedocs
- id: check-renovate - id: check-renovate
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.4.1
hooks:
- id: zizmor
- repo: https://github.com/sphinx-contrib/sphinx-lint - repo: https://github.com/sphinx-contrib/sphinx-lint
rev: v0.9.1 rev: v1.0.0
hooks: hooks:
- id: sphinx-lint - id: sphinx-lint
- repo: https://github.com/tox-dev/pyproject-fmt - repo: https://github.com/tox-dev/pyproject-fmt
rev: 2.1.3 rev: v2.5.1
hooks: hooks:
- id: pyproject-fmt - id: pyproject-fmt
- repo: https://github.com/abravalheri/validate-pyproject - repo: https://github.com/abravalheri/validate-pyproject
rev: v0.18 rev: v0.23
hooks: hooks:
- id: validate-pyproject - id: validate-pyproject
additional_dependencies: [trove-classifiers>=2024.10.12]
- repo: https://github.com/tox-dev/tox-ini-fmt - repo: https://github.com/tox-dev/tox-ini-fmt
rev: 1.3.1 rev: 1.5.0
hooks: hooks:
- id: tox-ini-fmt - id: tox-ini-fmt


@@ -1,5 +1,8 @@
version: 2 version: 2
sphinx:
configuration: docs/conf.py
formats: [pdf] formats: [pdf]
build: build:


@@ -2,9 +2,157 @@
Changelog (Pillow) Changelog (Pillow)
================== ==================
11.0.0 (unreleased) 11.1.0 and newer
----------------
See GitHub Releases:
- https://github.com/python-pillow/Pillow/releases
11.0.0 (2024-10-15)
------------------- -------------------
- Update licence to MIT-CMU #8460
[hugovk]
- Conditionally define ImageCms type hint to avoid requiring core #8197
[radarhere]
- Support writing LONG8 offsets in AppendingTiffWriter #8417
[radarhere]
- Use ImageFile.MAXBLOCK when saving TIFF images #8461
[radarhere]
- Do not close provided file handles with libtiff when saving #8458
[radarhere]
- Support ImageFilter.BuiltinFilter for I;16* images #8438
[radarhere]
- Use ImagingCore.ptr instead of ImagingCore.id #8341
[homm, radarhere, hugovk]
- Updated EPS mode when opening images without transparency #8281
[Yay295, radarhere]
- Use transparency when combining P frames from APNGs #8443
[radarhere]
- Support all resampling filters when resizing I;16* images #8422
[radarhere]
- Free memory on early return #8413
[radarhere]
- Cast int before potentially exceeding INT_MAX #8402
[radarhere]
- Check image value before use #8400
[radarhere]
- Improved copying imagequant libraries #8420
[radarhere]
- Use Capsule for WebP saving #8386
[homm, radarhere]
- Fixed writing multiple StripOffsets to TIFF #8317
[Yay295, radarhere]
- Fix dereference before checking for NULL in ImagingTransformAffine #8398
[PavlNekrasov]
- Use transposed size after opening for TIFF images #8390
[radarhere, homm]
- Improve ImageFont error messages #8338
[yngvem, radarhere, hugovk]
- Mention MAX_TEXT_CHUNK limit in PNG error message #8391
[radarhere]
- Cast Dib handle to int #8385
[radarhere]
- Accept float stroke widths #8369
[radarhere]
- Deprecate ICNS (width, height, scale) sizes in favour of load(scale) #8352
[radarhere]
- Improved handling of RGBA palettes when saving GIF images #8366
[radarhere]
- Deprecate isImageType #8364
[radarhere]
- Support converting more modes to LAB by converting to RGBA first #8358
[radarhere]
- Deprecate support for FreeType 2.9.0 #8356
[hugovk, radarhere]
- Removed unused TiffImagePlugin IFD_LEGACY_API #8355
[radarhere]
- Handle duplicate EXIF header #8350
[zakajd, radarhere]
- Return early from BoxBlur if either width or height is zero #8347
[radarhere]
- Check text is either string or bytes #8308
[radarhere]
- Added writing XMP bytes to JPEG #8286
[radarhere]
- Support JPEG2000 RGBA palettes #8256
[radarhere]
- Expand C image to match GIF frame image size #8237
[radarhere]
- Allow saving I;16 images as PPM #8231
[radarhere]
- When IFD is missing, connect get_ifd() dictionary to Exif #8230
[radarhere]
- Skip truncated ICO mask if LOAD_TRUNCATED_IMAGES is enabled #8180
[radarhere]
- Treat unknown JPEG2000 colorspace as unspecified #8343
[radarhere]
- Updated error message when saving WebP with invalid width or height #8322
[radarhere, hugovk]
- Remove warning if NumPy failed to raise an error during conversion #8326
[radarhere]
- If left and right sides meet in ImageDraw.rounded_rectangle(), do not draw rectangle to fill gap #8304
[radarhere]
- Remove WebP support without anim, mux/demux, and with buggy alpha #8213
[homm, radarhere]
- Add missing TIFF CMYK;16B reader #8298
[homm]
- Remove all WITH_* flags from _imaging.c and other flags #8211
[homm]
- Improve ImageDraw2 shape methods #8265
[radarhere]
- Lock around usages of imaging memory arenas #8238
[lysnikolaou]
- Deprecate JpegImageFile huffman_ac and huffman_dc #8274
[radarhere]
- Deprecate ImageMath lambda_eval and unsafe_eval options argument #8242 - Deprecate ImageMath lambda_eval and unsafe_eval options argument #8242
[radarhere] [radarhere]

View File

@ -5,9 +5,9 @@ The Python Imaging Library (PIL) is
Pillow is the friendly PIL fork. It is Pillow is the friendly PIL fork. It is
Copyright © 2010-2024 by Jeffrey A. Clark and contributors Copyright © 2010 by Jeffrey A. Clark and contributors
Like PIL, Pillow is licensed under the open source HPND License: Like PIL, Pillow is licensed under the open source MIT-CMU License:
By obtaining, using, and/or copying this software and/or its associated By obtaining, using, and/or copying this software and/or its associated
documentation, you agree that you have read, understood, and will comply documentation, you agree that you have read, understood, and will comply

View File

@ -20,7 +20,6 @@ graft docs
graft _custom_build graft _custom_build
# build/src control detritus # build/src control detritus
exclude .appveyor.yml
exclude .clang-format exclude .clang-format
exclude .coveragerc exclude .coveragerc
exclude .editorconfig exclude .editorconfig

View File

@ -17,12 +17,10 @@ coverage:
.PHONY: doc .PHONY: doc
.PHONY: html .PHONY: html
doc html: doc html:
python3 -c "import PIL" > /dev/null 2>&1 || python3 -m pip install .
$(MAKE) -C docs html $(MAKE) -C docs html
.PHONY: htmlview .PHONY: htmlview
htmlview: htmlview:
python3 -c "import PIL" > /dev/null 2>&1 || python3 -m pip install .
$(MAKE) -C docs htmlview $(MAKE) -C docs htmlview
.PHONY: doccheck .PHONY: doccheck
@ -117,7 +115,7 @@ lint-fix:
python3 -c "import black" > /dev/null 2>&1 || python3 -m pip install black python3 -c "import black" > /dev/null 2>&1 || python3 -m pip install black
python3 -m black . python3 -m black .
python3 -c "import ruff" > /dev/null 2>&1 || python3 -m pip install ruff python3 -c "import ruff" > /dev/null 2>&1 || python3 -m pip install ruff
python3 -m ruff --fix . python3 -m ruff check --fix .
.PHONY: mypy .PHONY: mypy
mypy: mypy:

View File

@ -42,16 +42,13 @@ As of 2019, Pillow development is
<a href="https://github.com/python-pillow/Pillow/actions/workflows/test-docker.yml"><img <a href="https://github.com/python-pillow/Pillow/actions/workflows/test-docker.yml"><img
alt="GitHub Actions build status (Test Docker)" alt="GitHub Actions build status (Test Docker)"
src="https://github.com/python-pillow/Pillow/workflows/Test%20Docker/badge.svg"></a> src="https://github.com/python-pillow/Pillow/workflows/Test%20Docker/badge.svg"></a>
<a href="https://ci.appveyor.com/project/python-pillow/Pillow"><img
alt="AppVeyor CI build status (Windows)"
src="https://img.shields.io/appveyor/build/python-pillow/Pillow/main.svg?label=Windows%20build"></a>
<a href="https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml"><img <a href="https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml"><img
alt="GitHub Actions build status (Wheels)" alt="GitHub Actions build status (Wheels)"
src="https://github.com/python-pillow/Pillow/workflows/Wheels/badge.svg"></a> src="https://github.com/python-pillow/Pillow/workflows/Wheels/badge.svg"></a>
<a href="https://app.codecov.io/gh/python-pillow/Pillow"><img <a href="https://app.codecov.io/gh/python-pillow/Pillow"><img
alt="Code coverage" alt="Code coverage"
src="https://codecov.io/gh/python-pillow/Pillow/branch/main/graph/badge.svg"></a> src="https://codecov.io/gh/python-pillow/Pillow/branch/main/graph/badge.svg"></a>
<a href="https://bugs.chromium.org/p/oss-fuzz/issues/list?sort=-opened&can=1&q=proj:pillow"><img <a href="https://issues.oss-fuzz.com/issues?q=title:pillow"><img
alt="Fuzzing Status" alt="Fuzzing Status"
src="https://oss-fuzz-build-logs.storage.googleapis.com/badges/pillow.svg"></a> src="https://oss-fuzz-build-logs.storage.googleapis.com/badges/pillow.svg"></a>
</td> </td>
@ -107,7 +104,7 @@ The core image library is designed for fast access to data stored in a few basic
- [Issues](https://github.com/python-pillow/Pillow/issues) - [Issues](https://github.com/python-pillow/Pillow/issues)
- [Pull requests](https://github.com/python-pillow/Pillow/pulls) - [Pull requests](https://github.com/python-pillow/Pillow/pulls)
- [Release notes](https://pillow.readthedocs.io/en/stable/releasenotes/index.html) - [Release notes](https://pillow.readthedocs.io/en/stable/releasenotes/index.html)
- [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst) - [Changelog](https://github.com/python-pillow/Pillow/releases)
- [Pre-fork](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst#pre-fork) - [Pre-fork](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst#pre-fork)
## Report a Vulnerability ## Report a Vulnerability

View File

@ -9,10 +9,9 @@ Released quarterly on January 2nd, April 1st, July 1st and October 15th.
* [ ] Open a release ticket e.g. https://github.com/python-pillow/Pillow/issues/3154 * [ ] Open a release ticket e.g. https://github.com/python-pillow/Pillow/issues/3154
* [ ] Develop and prepare release in `main` branch. * [ ] Develop and prepare release in `main` branch.
* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) and [AppVeyor](https://ci.appveyor.com/project/python-pillow/Pillow) to confirm passing tests in `main` branch. * [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) to confirm passing tests in `main` branch.
* [ ] Check that all the wheel builds pass the tests in the [GitHub Actions "Wheels" workflow](https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml) jobs by manually triggering them. * [ ] Check that all the wheel builds pass the tests in the [GitHub Actions "Wheels" workflow](https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml) jobs by manually triggering them.
* [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/), update version identifier in `src/PIL/_version.py` * [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/), update version identifier in `src/PIL/_version.py`
* [ ] Update `CHANGES.rst`.
* [ ] Run pre-release check via `make release-test` in a freshly cloned repo. * [ ] Run pre-release check via `make release-test` in a freshly cloned repo.
* [ ] Create branch and tag for release e.g.: * [ ] Create branch and tag for release e.g.:
```bash ```bash
@ -34,13 +33,12 @@ Released quarterly on January 2nd, April 1st, July 1st and October 15th.
Released as needed for security, installation or critical bug fixes. Released as needed for security, installation or critical bug fixes.
* [ ] Make necessary changes in `main` branch. * [ ] Make necessary changes in `main` branch.
* [ ] Update `CHANGES.rst`.
* [ ] Check out release branch e.g.: * [ ] Check out release branch e.g.:
```bash ```bash
git checkout -t remotes/origin/5.2.x git checkout -t remotes/origin/5.2.x
``` ```
* [ ] Cherry pick individual commits from `main` branch to release branch e.g. `5.2.x`, then `git push`. * [ ] Cherry pick individual commits from `main` branch to release branch e.g. `5.2.x`, then `git push`.
* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) and [AppVeyor](https://ci.appveyor.com/project/python-pillow/Pillow) to confirm passing tests in release branch e.g. `5.2.x`. * [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) to confirm passing tests in release branch e.g. `5.2.x`.
* [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/), update version identifier in `src/PIL/_version.py` * [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/), update version identifier in `src/PIL/_version.py`
* [ ] Run pre-release check via `make release-test`. * [ ] Run pre-release check via `make release-test`.
* [ ] Create tag for release e.g.: * [ ] Create tag for release e.g.:

View File

@ -3,26 +3,25 @@ from __future__ import annotations
import zlib import zlib
from io import BytesIO from io import BytesIO
import pytest
from PIL import Image, ImageFile, PngImagePlugin from PIL import Image, ImageFile, PngImagePlugin
TEST_FILE = "Tests/images/png_decompression_dos.png" TEST_FILE = "Tests/images/png_decompression_dos.png"
def test_ignore_dos_text() -> None: def test_ignore_dos_text(monkeypatch: pytest.MonkeyPatch) -> None:
ImageFile.LOAD_TRUNCATED_IMAGES = True monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
try: with Image.open(TEST_FILE) as im:
im = Image.open(TEST_FILE)
im.load() im.load()
finally:
ImageFile.LOAD_TRUNCATED_IMAGES = False
assert isinstance(im, PngImagePlugin.PngImageFile) assert isinstance(im, PngImagePlugin.PngImageFile)
for s in im.text.values(): for s in im.text.values():
assert len(s) < 1024 * 1024, "Text chunk larger than 1M" assert len(s) < 1024 * 1024, "Text chunk larger than 1M"
for s in im.info.values(): for s in im.info.values():
assert len(s) < 1024 * 1024, "Text chunk larger than 1M" assert len(s) < 1024 * 1024, "Text chunk larger than 1M"
def test_dos_text() -> None: def test_dos_text() -> None:
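The hunk above replaces the manual save/restore of `ImageFile.LOAD_TRUNCATED_IMAGES` with pytest's `monkeypatch` fixture, which resets the flag automatically even when the test fails. A minimal sketch of the same pattern (the test body is illustrative):

```python
import pytest
from PIL import Image, ImageFile


def test_load_truncated(monkeypatch: pytest.MonkeyPatch) -> None:
    # monkeypatch restores the previous value of the flag when the test ends
    monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
    with Image.open("Tests/images/png_decompression_dos.png") as im:
        im.load()
```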

View File

@ -34,6 +34,7 @@ def test_wheel_features() -> None:
"fribidi", "fribidi",
"harfbuzz", "harfbuzz",
"libjpeg_turbo", "libjpeg_turbo",
"zlib_ng",
"xcb", "xcb",
} }

View File

@ -9,7 +9,6 @@ import os
import shutil import shutil
import subprocess import subprocess
import sys import sys
import sysconfig
import tempfile import tempfile
from collections.abc import Sequence from collections.abc import Sequence
from functools import lru_cache from functools import lru_cache
@ -140,18 +139,11 @@ def assert_image_similar_tofile(
filename: str, filename: str,
epsilon: float, epsilon: float,
msg: str | None = None, msg: str | None = None,
mode: str | None = None,
) -> None: ) -> None:
with Image.open(filename) as img: with Image.open(filename) as img:
if mode:
img = img.convert(mode)
assert_image_similar(a, img, epsilon, msg) assert_image_similar(a, img, epsilon, msg)
def assert_all_same(items: Sequence[Any], msg: str | None = None) -> None:
assert items.count(items[0]) == len(items), msg
def assert_not_all_same(items: Sequence[Any], msg: str | None = None) -> None: def assert_not_all_same(items: Sequence[Any], msg: str | None = None) -> None:
assert items.count(items[0]) != len(items), msg assert items.count(items[0]) != len(items), msg
@ -327,16 +319,7 @@ def magick_command() -> list[str] | None:
return None return None
def on_appveyor() -> bool:
return "APPVEYOR" in os.environ
def on_github_actions() -> bool:
return "GITHUB_ACTIONS" in os.environ
def on_ci() -> bool: def on_ci() -> bool:
# GitHub Actions and AppVeyor have "CI"
return "CI" in os.environ return "CI" in os.environ
@ -358,10 +341,6 @@ def is_pypy() -> bool:
return hasattr(sys, "pypy_translation_info") return hasattr(sys, "pypy_translation_info")
def is_mingw() -> bool:
return sysconfig.get_platform() == "mingw"
class CachedProperty: class CachedProperty:
def __init__(self, func: Callable[[Any], Any]) -> None: def __init__(self, func: Callable[[Any], Any]) -> None:
self.func = func self.func = func

BIN
Tests/images/eps/1.bmp Normal file (1.2 KiB)

Several additional binary test images were added or updated in this commit (sizes ranging from roughly 391 B to 17 KiB); binary files are not shown in this diff.
View File

@ -16,8 +16,9 @@
import atheris import atheris
from atheris.import_hook import instrument_imports
with atheris.instrument_imports(): with instrument_imports():
import sys import sys
import fuzzers import fuzzers

View File

@ -14,8 +14,9 @@
import atheris import atheris
from atheris.import_hook import instrument_imports
with atheris.instrument_imports(): with instrument_imports():
import sys import sys
import fuzzers import fuzzers
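Both fuzz harnesses now import `instrument_imports` from `atheris.import_hook` instead of calling `atheris.instrument_imports()`. A minimal stand-alone harness using the updated import might look like this; the target function is hypothetical:

```python
import atheris
from atheris.import_hook import instrument_imports

with instrument_imports():
    import sys


def fuzz_target(data: bytes) -> None:
    # Placeholder logic; a real harness would feed `data` to the code under test.
    if data.startswith(b"FUZZ"):
        raise ValueError("boom")


if __name__ == "__main__":
    atheris.Setup(sys.argv, fuzz_target)
    atheris.Fuzz()
```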

View File

@ -1,5 +1,5 @@
{ {
<py3_8_encode_current_locale> <py3_10_encode_current_locale>
Memcheck:Cond Memcheck:Cond
... ...
fun:encode_current_locale fun:encode_current_locale

View File

@ -7,7 +7,7 @@ import fuzzers
import packaging import packaging
import pytest import pytest
from PIL import Image, UnidentifiedImageError, features from PIL import Image, features
from Tests.helper import skip_unless_feature from Tests.helper import skip_unless_feature
if sys.platform.startswith("win32"): if sys.platform.startswith("win32"):
@ -32,21 +32,17 @@ def test_fuzz_images(path: str) -> None:
fuzzers.fuzz_image(f.read()) fuzzers.fuzz_image(f.read())
assert True assert True
except ( except (
# Known exceptions from Pillow
OSError, OSError,
SyntaxError, SyntaxError,
MemoryError, MemoryError,
ValueError, ValueError,
NotImplementedError, NotImplementedError,
OverflowError, OverflowError,
): # Known Image.* exceptions
# Known exceptions that are through from Pillow
assert True
except (
Image.DecompressionBombError, Image.DecompressionBombError,
Image.DecompressionBombWarning, Image.DecompressionBombWarning,
UnidentifiedImageError,
): ):
# Known Image.* exceptions
assert True assert True
finally: finally:
fuzzers.disable_decompressionbomb_error() fuzzers.disable_decompressionbomb_error()

View File

@ -22,6 +22,8 @@ def test_bad() -> None:
for f in get_files("b"): for f in get_files("b"):
# Assert that there is no unclosed file warning # Assert that there is no unclosed file warning
with warnings.catch_warnings(): with warnings.catch_warnings():
warnings.simplefilter("error")
try: try:
with Image.open(f) as im: with Image.open(f) as im:
im.load() im.load()
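This test, like several below, now calls `warnings.simplefilter("error")` inside `warnings.catch_warnings()`, so that any warning, such as an unclosed-file `ResourceWarning`, fails the test instead of being silently recorded. The pattern in isolation (the file name is hypothetical):

```python
import warnings

from PIL import Image

with warnings.catch_warnings():
    warnings.simplefilter("error")  # promote every warning to an exception
    with Image.open("example.png") as im:  # hypothetical file
        im.load()
```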

View File

@ -71,6 +71,11 @@ def test_color_modes() -> None:
box_blur(sample.convert("YCbCr")) box_blur(sample.convert("YCbCr"))
@pytest.mark.parametrize("size", ((0, 1), (1, 0)))
def test_zero_dimension(size: tuple[int, int]) -> None:
assert box_blur(Image.new("L", size)).size == size
def test_radius_0() -> None: def test_radius_0() -> None:
assert_blur( assert_blur(
sample, sample,
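The new `test_zero_dimension` case covers images whose width or height is zero; since #8347 BoxBlur returns early for such images. A sketch of the same check through the public filter API, assuming it takes the same code path as the core helper used in the test:

```python
from PIL import Image, ImageFilter

for size in ((0, 1), (1, 0)):
    im = Image.new("L", size)
    # Blurring a degenerate image should simply return an image of the same size.
    assert im.filter(ImageFilter.BoxBlur(2)).size == size
```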

View File

@ -19,7 +19,7 @@ except ImportError:
class TestColorLut3DCoreAPI: class TestColorLut3DCoreAPI:
def generate_identity_table( def generate_identity_table(
self, channels: int, size: int | tuple[int, int, int] self, channels: int, size: int | tuple[int, int, int]
) -> tuple[int, int, int, int, list[float]]: ) -> tuple[int, tuple[int, int, int], list[float]]:
if isinstance(size, tuple): if isinstance(size, tuple):
size_1d, size_2d, size_3d = size size_1d, size_2d, size_3d = size
else: else:
@ -39,9 +39,7 @@ class TestColorLut3DCoreAPI:
] ]
return ( return (
channels, channels,
size_1d, (size_1d, size_2d, size_3d),
size_2d,
size_3d,
[item for sublist in table for item in sublist], [item for sublist in table for item in sublist],
) )
@ -89,107 +87,81 @@ class TestColorLut3DCoreAPI:
with pytest.raises(ValueError, match=r"size1D \* size2D \* size3D"): with pytest.raises(ValueError, match=r"size1D \* size2D \* size3D"):
im.im.color_lut_3d( im.im.color_lut_3d(
"RGB", Image.Resampling.BILINEAR, 3, 2, 2, 2, [0, 0, 0] * 7 "RGB", Image.Resampling.BILINEAR, 3, (2, 2, 2), [0, 0, 0] * 7
) )
with pytest.raises(ValueError, match=r"size1D \* size2D \* size3D"): with pytest.raises(ValueError, match=r"size1D \* size2D \* size3D"):
im.im.color_lut_3d( im.im.color_lut_3d(
"RGB", Image.Resampling.BILINEAR, 3, 2, 2, 2, [0, 0, 0] * 9 "RGB", Image.Resampling.BILINEAR, 3, (2, 2, 2), [0, 0, 0] * 9
) )
with pytest.raises(TypeError): with pytest.raises(TypeError):
im.im.color_lut_3d( im.im.color_lut_3d(
"RGB", Image.Resampling.BILINEAR, 3, 2, 2, 2, [0, 0, "0"] * 8 "RGB", Image.Resampling.BILINEAR, 3, (2, 2, 2), [0, 0, "0"] * 8
) )
with pytest.raises(TypeError): with pytest.raises(TypeError):
im.im.color_lut_3d("RGB", Image.Resampling.BILINEAR, 3, 2, 2, 2, 16) im.im.color_lut_3d("RGB", Image.Resampling.BILINEAR, 3, (2, 2, 2), 16)
def test_correct_args(self) -> None:
im = Image.new("RGB", (10, 10), 0)
im.im.color_lut_3d(
"RGB", Image.Resampling.BILINEAR, *self.generate_identity_table(3, 3)
)
im.im.color_lut_3d(
"CMYK", Image.Resampling.BILINEAR, *self.generate_identity_table(4, 3)
)
im.im.color_lut_3d(
"RGB",
Image.Resampling.BILINEAR,
*self.generate_identity_table(3, (2, 3, 3)),
)
im.im.color_lut_3d(
"RGB",
Image.Resampling.BILINEAR,
*self.generate_identity_table(3, (65, 3, 3)),
)
im.im.color_lut_3d(
"RGB",
Image.Resampling.BILINEAR,
*self.generate_identity_table(3, (3, 65, 3)),
)
im.im.color_lut_3d(
"RGB",
Image.Resampling.BILINEAR,
*self.generate_identity_table(3, (3, 3, 65)),
)
def test_wrong_mode(self) -> None:
with pytest.raises(ValueError, match="wrong mode"):
im = Image.new("L", (10, 10), 0)
im.im.color_lut_3d(
"RGB", Image.Resampling.BILINEAR, *self.generate_identity_table(3, 3)
)
with pytest.raises(ValueError, match="wrong mode"):
im = Image.new("RGB", (10, 10), 0)
im.im.color_lut_3d(
"L", Image.Resampling.BILINEAR, *self.generate_identity_table(3, 3)
)
with pytest.raises(ValueError, match="wrong mode"):
im = Image.new("L", (10, 10), 0)
im.im.color_lut_3d(
"L", Image.Resampling.BILINEAR, *self.generate_identity_table(3, 3)
)
with pytest.raises(ValueError, match="wrong mode"):
im = Image.new("RGB", (10, 10), 0)
im.im.color_lut_3d(
"RGBA", Image.Resampling.BILINEAR, *self.generate_identity_table(3, 3)
)
with pytest.raises(ValueError, match="wrong mode"):
im = Image.new("RGB", (10, 10), 0)
im.im.color_lut_3d(
"RGB", Image.Resampling.BILINEAR, *self.generate_identity_table(4, 3)
)
def test_correct_mode(self) -> None:
im = Image.new("RGBA", (10, 10), 0)
im.im.color_lut_3d(
"RGBA", Image.Resampling.BILINEAR, *self.generate_identity_table(3, 3)
)
im = Image.new("RGBA", (10, 10), 0)
im.im.color_lut_3d(
"RGBA", Image.Resampling.BILINEAR, *self.generate_identity_table(4, 3)
)
@pytest.mark.parametrize(
"lut_mode, table_channels, table_size",
[
("RGB", 3, 3),
("CMYK", 4, 3),
("RGB", 3, (2, 3, 3)),
("RGB", 3, (65, 3, 3)),
("RGB", 3, (3, 65, 3)),
("RGB", 3, (2, 3, 65)),
],
)
def test_correct_args(
self, lut_mode: str, table_channels: int, table_size: int | tuple[int, int, int]
) -> None:
im = Image.new("RGB", (10, 10), 0) im = Image.new("RGB", (10, 10), 0)
im.im.color_lut_3d( im.im.color_lut_3d(
"HSV", Image.Resampling.BILINEAR, *self.generate_identity_table(3, 3) lut_mode,
Image.Resampling.BILINEAR,
*self.generate_identity_table(table_channels, table_size),
) )
im = Image.new("RGB", (10, 10), 0) @pytest.mark.parametrize(
"image_mode, lut_mode, table_channels, table_size",
[
("L", "RGB", 3, 3),
("RGB", "L", 3, 3),
("L", "L", 3, 3),
("RGB", "RGBA", 3, 3),
("RGB", "RGB", 4, 3),
],
)
def test_wrong_mode(
self, image_mode: str, lut_mode: str, table_channels: int, table_size: int
) -> None:
with pytest.raises(ValueError, match="wrong mode"):
im = Image.new(image_mode, (10, 10), 0)
im.im.color_lut_3d(
lut_mode,
Image.Resampling.BILINEAR,
*self.generate_identity_table(table_channels, table_size),
)
@pytest.mark.parametrize(
"image_mode, lut_mode, table_channels, table_size",
[
("RGBA", "RGBA", 3, 3),
("RGBA", "RGBA", 4, 3),
("RGB", "HSV", 3, 3),
("RGB", "RGBA", 4, 3),
],
)
def test_correct_mode(
self, image_mode: str, lut_mode: str, table_channels: int, table_size: int
) -> None:
im = Image.new(image_mode, (10, 10), 0)
im.im.color_lut_3d( im.im.color_lut_3d(
"RGBA", Image.Resampling.BILINEAR, *self.generate_identity_table(4, 3) lut_mode,
Image.Resampling.BILINEAR,
*self.generate_identity_table(table_channels, table_size),
) )
def test_identities(self) -> None: def test_identities(self) -> None:
@ -290,7 +262,7 @@ class TestColorLut3DCoreAPI:
assert_image_equal( assert_image_equal(
Image.merge('RGB', im.split()[::-1]), Image.merge('RGB', im.split()[::-1]),
im._new(im.im.color_lut_3d('RGB', Image.Resampling.BILINEAR, im._new(im.im.color_lut_3d('RGB', Image.Resampling.BILINEAR,
3, 2, 2, 2, [ 3, (2, 2, 2), [
0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1,
0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1,
@ -312,7 +284,7 @@ class TestColorLut3DCoreAPI:
# fmt: off # fmt: off
transformed = im._new(im.im.color_lut_3d('RGB', Image.Resampling.BILINEAR, transformed = im._new(im.im.color_lut_3d('RGB', Image.Resampling.BILINEAR,
3, 2, 2, 2, 3, (2, 2, 2),
[ [
-1, -1, -1, 2, -1, -1, -1, -1, -1, 2, -1, -1,
-1, 2, -1, 2, 2, -1, -1, 2, -1, 2, 2, -1,
@ -333,7 +305,7 @@ class TestColorLut3DCoreAPI:
# fmt: off # fmt: off
transformed = im._new(im.im.color_lut_3d('RGB', Image.Resampling.BILINEAR, transformed = im._new(im.im.color_lut_3d('RGB', Image.Resampling.BILINEAR,
3, 2, 2, 2, 3, (2, 2, 2),
[ [
-3, -3, -3, 5, -3, -3, -3, -3, -3, 5, -3, -3,
-3, 5, -3, 5, 5, -3, -3, 5, -3, 5, 5, -3,
@ -414,10 +386,12 @@ class TestColorLut3DFilter:
table = numpy.ones((7 * 6 * 5, 3), dtype=numpy.float16) table = numpy.ones((7 * 6 * 5, 3), dtype=numpy.float16)
lut = ImageFilter.Color3DLUT((5, 6, 7), table) lut = ImageFilter.Color3DLUT((5, 6, 7), table)
assert isinstance(lut.table, numpy.ndarray)
assert lut.table.shape == (table.size,) assert lut.table.shape == (table.size,)
table = numpy.ones((7 * 6 * 5 * 3), dtype=numpy.float16) table = numpy.ones((7 * 6 * 5 * 3), dtype=numpy.float16)
lut = ImageFilter.Color3DLUT((5, 6, 7), table) lut = ImageFilter.Color3DLUT((5, 6, 7), table)
assert isinstance(lut.table, numpy.ndarray)
assert lut.table.shape == (table.size,) assert lut.table.shape == (table.size,)
# Check application # Check application
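These core-level `color_lut_3d` calls now take the table size as a single tuple rather than three separate integers. At the public API level the same machinery is reached through `ImageFilter.Color3DLUT`; a small sketch applying an identity LUT, which should leave the image unchanged:

```python
from PIL import Image, ImageFilter

im = Image.new("RGB", (10, 10), (255, 0, 0))
# Identity lookup table: every (r, g, b) in [0, 1] maps to itself.
identity = ImageFilter.Color3DLUT.generate(5, lambda r, g, b: (r, g, b))
assert im.filter(identity).getpixel((0, 0)) == (255, 0, 0)
```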

View File

@ -12,19 +12,16 @@ ORIGINAL_LIMIT = Image.MAX_IMAGE_PIXELS
class TestDecompressionBomb: class TestDecompressionBomb:
def teardown_method(self) -> None:
Image.MAX_IMAGE_PIXELS = ORIGINAL_LIMIT
def test_no_warning_small_file(self) -> None: def test_no_warning_small_file(self) -> None:
# Implicit assert: no warning. # Implicit assert: no warning.
# A warning would cause a failure. # A warning would cause a failure.
with Image.open(TEST_FILE): with Image.open(TEST_FILE):
pass pass
def test_no_warning_no_limit(self) -> None: def test_no_warning_no_limit(self, monkeypatch: pytest.MonkeyPatch) -> None:
# Arrange # Arrange
# Turn limit off # Turn limit off
Image.MAX_IMAGE_PIXELS = None monkeypatch.setattr(Image, "MAX_IMAGE_PIXELS", None)
assert Image.MAX_IMAGE_PIXELS is None assert Image.MAX_IMAGE_PIXELS is None
# Act / Assert # Act / Assert
@ -33,18 +30,18 @@ class TestDecompressionBomb:
with Image.open(TEST_FILE): with Image.open(TEST_FILE):
pass pass
def test_warning(self) -> None: def test_warning(self, monkeypatch: pytest.MonkeyPatch) -> None:
# Set limit to trigger warning on the test file # Set limit to trigger warning on the test file
Image.MAX_IMAGE_PIXELS = 128 * 128 - 1 monkeypatch.setattr(Image, "MAX_IMAGE_PIXELS", 128 * 128 - 1)
assert Image.MAX_IMAGE_PIXELS == 128 * 128 - 1 assert Image.MAX_IMAGE_PIXELS == 128 * 128 - 1
with pytest.warns(Image.DecompressionBombWarning): with pytest.warns(Image.DecompressionBombWarning):
with Image.open(TEST_FILE): with Image.open(TEST_FILE):
pass pass
def test_exception(self) -> None: def test_exception(self, monkeypatch: pytest.MonkeyPatch) -> None:
# Set limit to trigger exception on the test file # Set limit to trigger exception on the test file
Image.MAX_IMAGE_PIXELS = 64 * 128 - 1 monkeypatch.setattr(Image, "MAX_IMAGE_PIXELS", 64 * 128 - 1)
assert Image.MAX_IMAGE_PIXELS == 64 * 128 - 1 assert Image.MAX_IMAGE_PIXELS == 64 * 128 - 1
with pytest.raises(Image.DecompressionBombError): with pytest.raises(Image.DecompressionBombError):
@ -66,9 +63,9 @@ class TestDecompressionBomb:
with pytest.raises(Image.DecompressionBombError): with pytest.raises(Image.DecompressionBombError):
im.seek(1) im.seek(1)
def test_exception_gif_zero_width(self) -> None: def test_exception_gif_zero_width(self, monkeypatch: pytest.MonkeyPatch) -> None:
# Set limit to trigger exception on the test file # Set limit to trigger exception on the test file
Image.MAX_IMAGE_PIXELS = 4 * 64 * 128 monkeypatch.setattr(Image, "MAX_IMAGE_PIXELS", 4 * 64 * 128)
assert Image.MAX_IMAGE_PIXELS == 4 * 64 * 128 assert Image.MAX_IMAGE_PIXELS == 4 * 64 * 128
with pytest.raises(Image.DecompressionBombError): with pytest.raises(Image.DecompressionBombError):
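For reference, the thresholds exercised above come from `Image.MAX_IMAGE_PIXELS`: exceeding the limit emits `DecompressionBombWarning`, and exceeding twice the limit raises `DecompressionBombError`. A hedged sketch with an assumed 128x128 input file:

```python
import pytest
from PIL import Image

Image.MAX_IMAGE_PIXELS = 64 * 128 - 1  # a 128x128 file is now more than twice the limit
try:
    with pytest.raises(Image.DecompressionBombError):
        with Image.open("large_example.png"):  # hypothetical 128x128 image
            pass
finally:
    Image.MAX_IMAGE_PIXELS = None  # disables the check; restore a real limit in production code
```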

View File

@ -10,11 +10,6 @@ from PIL import features
from .helper import skip_unless_feature from .helper import skip_unless_feature
try:
from PIL import _webp
except ImportError:
pass
def test_check() -> None: def test_check() -> None:
# Check the correctness of the convenience function # Check the correctness of the convenience function
@ -23,7 +18,11 @@ def test_check() -> None:
for codec in features.codecs: for codec in features.codecs:
assert features.check_codec(codec) == features.check(codec) assert features.check_codec(codec) == features.check(codec)
for feature in features.features: for feature in features.features:
assert features.check_feature(feature) == features.check(feature) if "webp" in feature:
with pytest.warns(DeprecationWarning):
assert features.check_feature(feature) == features.check(feature)
else:
assert features.check_feature(feature) == features.check(feature)
def test_version() -> None: def test_version() -> None:
@ -37,10 +36,11 @@ def test_version() -> None:
else: else:
assert function(name) == version assert function(name) == version
if name != "PIL": if name != "PIL":
if name == "zlib" and version is not None: if version is not None:
version = re.sub(".zlib-ng$", "", version) if name == "zlib" and features.check_feature("zlib_ng"):
elif name == "libtiff" and version is not None: version = re.sub(".zlib-ng$", "", version)
version = re.sub("t$", "", version) elif name == "libtiff":
version = re.sub("t$", "", version)
assert version is None or re.search(r"\d+(\.\d+)*$", version) assert version is None or re.search(r"\d+(\.\d+)*$", version)
for module in features.modules: for module in features.modules:
@ -48,23 +48,26 @@ def test_version() -> None:
for codec in features.codecs: for codec in features.codecs:
test(codec, features.version_codec) test(codec, features.version_codec)
for feature in features.features: for feature in features.features:
test(feature, features.version_feature) if "webp" in feature:
with pytest.warns(DeprecationWarning):
test(feature, features.version_feature)
else:
test(feature, features.version_feature)
@skip_unless_feature("webp")
def test_webp_transparency() -> None: def test_webp_transparency() -> None:
assert features.check("transp_webp") != _webp.WebPDecoderBuggyAlpha() with pytest.warns(DeprecationWarning):
assert features.check("transp_webp") == _webp.HAVE_TRANSPARENCY assert (features.check("transp_webp") or False) == features.check_module("webp")
@skip_unless_feature("webp")
def test_webp_mux() -> None: def test_webp_mux() -> None:
assert features.check("webp_mux") == _webp.HAVE_WEBPMUX with pytest.warns(DeprecationWarning):
assert (features.check("webp_mux") or False) == features.check_module("webp")
@skip_unless_feature("webp")
def test_webp_anim() -> None: def test_webp_anim() -> None:
assert features.check("webp_anim") == _webp.HAVE_WEBPANIM with pytest.warns(DeprecationWarning):
assert (features.check("webp_anim") or False) == features.check_module("webp")
@skip_unless_feature("libjpeg_turbo") @skip_unless_feature("libjpeg_turbo")
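The zlib-ng handling above reflects the new `zlib_ng` feature flag, while the WebP sub-feature checks (`transp_webp`, `webp_mux`, `webp_anim`) now raise a `DeprecationWarning` and simply mirror the module check. A short sketch of the feature API, whose output depends on how Pillow was built:

```python
from PIL import features

print(features.check("webp"))             # module availability
print(features.check_feature("zlib_ng"))  # True when Pillow was built against zlib-ng
version = features.version("zlib")
print(version)                            # may end in ".zlib-ng" on such builds
```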

View File

@ -258,8 +258,8 @@ def test_apng_mode() -> None:
assert im.mode == "P" assert im.mode == "P"
im.seek(im.n_frames - 1) im.seek(im.n_frames - 1)
im = im.convert("RGBA") im = im.convert("RGBA")
assert im.getpixel((0, 0)) == (255, 0, 0, 0) assert im.getpixel((0, 0)) == (0, 255, 0, 255)
assert im.getpixel((64, 32)) == (255, 0, 0, 0) assert im.getpixel((64, 32)) == (0, 255, 0, 255)
with Image.open("Tests/images/apng/mode_palette_1bit_alpha.png") as im: with Image.open("Tests/images/apng/mode_palette_1bit_alpha.png") as im:
assert im.mode == "P" assert im.mode == "P"
@ -307,13 +307,8 @@ def test_apng_syntax_errors() -> None:
im.load() im.load()
# we can handle this case gracefully # we can handle this case gracefully
exception = None
with Image.open("Tests/images/apng/syntax_num_frames_low.png") as im: with Image.open("Tests/images/apng/syntax_num_frames_low.png") as im:
try: im.seek(im.n_frames - 1)
im.seek(im.n_frames - 1)
except Exception as e:
exception = e
assert exception is None
with pytest.raises(OSError): with pytest.raises(OSError):
with Image.open("Tests/images/apng/syntax_num_frames_high.png") as im: with Image.open("Tests/images/apng/syntax_num_frames_high.png") as im:
@ -405,13 +400,8 @@ def test_apng_save_split_fdat(tmp_path: Path) -> None:
append_images=frames, append_images=frames,
) )
with Image.open(test_file) as im: with Image.open(test_file) as im:
exception = None im.seek(im.n_frames - 1)
try: im.load()
im.seek(im.n_frames - 1)
im.load()
except Exception as e:
exception = e
assert exception is None
def test_apng_save_duration_loop(tmp_path: Path) -> None: def test_apng_save_duration_loop(tmp_path: Path) -> None:

View File

@ -4,7 +4,7 @@ from pathlib import Path
import pytest import pytest
from PIL import Image from PIL import BlpImagePlugin, Image
from .helper import ( from .helper import (
assert_image_equal, assert_image_equal,
@ -19,6 +19,7 @@ def test_load_blp1() -> None:
assert_image_equal_tofile(im, "Tests/images/blp/blp1_jpeg.png") assert_image_equal_tofile(im, "Tests/images/blp/blp1_jpeg.png")
with Image.open("Tests/images/blp/blp1_jpeg2.blp") as im: with Image.open("Tests/images/blp/blp1_jpeg2.blp") as im:
assert im.mode == "RGBA"
im.load() im.load()
@ -37,6 +38,13 @@ def test_load_blp2_dxt1a() -> None:
assert_image_equal_tofile(im, "Tests/images/blp/blp2_dxt1a.png") assert_image_equal_tofile(im, "Tests/images/blp/blp2_dxt1a.png")
def test_invalid_file() -> None:
invalid_file = "Tests/images/flower.jpg"
with pytest.raises(BlpImagePlugin.BLPFormatError):
BlpImagePlugin.BlpImageFile(invalid_file)
def test_save(tmp_path: Path) -> None: def test_save(tmp_path: Path) -> None:
f = str(tmp_path / "temp.blp") f = str(tmp_path / "temp.blp")

View File

@ -83,4 +83,4 @@ def test_handler(tmp_path: Path) -> None:
im.save(temp_file) im.save(temp_file)
assert handler.saved assert handler.saved
BufrStubImagePlugin._handler = None BufrStubImagePlugin.register_handler(None)
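Clearing the handler now goes through the public `register_handler()` rather than the private `_handler` attribute. A hedged sketch of registering and then clearing an application-level handler; the handler class is hypothetical and only illustrates the expected `open`/`load` interface:

```python
from PIL import BufrStubImagePlugin, Image


class DummyBufrHandler:
    def open(self, im: Image.Image) -> None:
        pass

    def load(self, im: Image.Image) -> Image.Image:
        return Image.new("RGB", im.size)


BufrStubImagePlugin.register_handler(DummyBufrHandler())
# ... Image.open() on BUFR files would now delegate to the handler ...
BufrStubImagePlugin.register_handler(None)  # clear it again, as the updated test does
```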

View File

@ -4,8 +4,6 @@ import pytest
from PIL import ContainerIO, Image from PIL import ContainerIO, Image
from .helper import hopper
TEST_FILE = "Tests/images/dummy.container" TEST_FILE = "Tests/images/dummy.container"
@ -15,15 +13,15 @@ def test_sanity() -> None:
def test_isatty() -> None: def test_isatty() -> None:
with hopper() as im: with open(TEST_FILE, "rb") as fh:
container = ContainerIO.ContainerIO(im, 0, 0) container = ContainerIO.ContainerIO(fh, 0, 0)
assert container.isatty() is False assert container.isatty() is False
def test_seekable() -> None: def test_seekable() -> None:
with hopper() as im: with open(TEST_FILE, "rb") as fh:
container = ContainerIO.ContainerIO(im, 0, 0) container = ContainerIO.ContainerIO(fh, 0, 0)
assert container.seekable() is True assert container.seekable() is True
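The fixture change above also shows the intended use of `ContainerIO`: it wraps an already-open binary file and exposes a fixed (offset, length) region as a file-like object. A small sketch, with illustrative offset and length:

```python
from PIL import ContainerIO

with open("Tests/images/dummy.container", "rb") as fh:
    region = ContainerIO.ContainerIO(fh, 0, 100)  # first 100 bytes of the file
    data = region.read()                          # reads the rest of the region
    assert not region.isatty()
    assert region.seekable()
```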

View File

@ -26,16 +26,18 @@ def test_sanity() -> None:
@pytest.mark.skipif(is_pypy(), reason="Requires CPython") @pytest.mark.skipif(is_pypy(), reason="Requires CPython")
def test_unclosed_file() -> None: def test_unclosed_file() -> None:
def open() -> None: def open_test_image() -> None:
im = Image.open(TEST_FILE) im = Image.open(TEST_FILE)
im.load() im.load()
with pytest.warns(ResourceWarning): with pytest.warns(ResourceWarning):
open() open_test_image()
def test_closed_file() -> None: def test_closed_file() -> None:
with warnings.catch_warnings(): with warnings.catch_warnings():
warnings.simplefilter("error")
im = Image.open(TEST_FILE) im = Image.open(TEST_FILE)
im.load() im.load()
im.close() im.close()
@ -43,6 +45,8 @@ def test_closed_file() -> None:
def test_context_manager() -> None: def test_context_manager() -> None:
with warnings.catch_warnings(): with warnings.catch_warnings():
warnings.simplefilter("error")
with Image.open(TEST_FILE) as im: with Image.open(TEST_FILE) as im:
im.load() im.load()

View File

@ -152,7 +152,7 @@ def test_sanity_ati2_bc5u(image_path: str) -> None:
@pytest.mark.parametrize( @pytest.mark.parametrize(
("image_path", "expected_path"), "image_path, expected_path",
( (
# hexeditted to be typeless # hexeditted to be typeless
(TEST_FILE_DX10_BC5_TYPELESS, TEST_FILE_DX10_BC5_UNORM), (TEST_FILE_DX10_BC5_TYPELESS, TEST_FILE_DX10_BC5_UNORM),
@ -248,7 +248,7 @@ def test_dx10_r8g8b8a8_unorm_srgb() -> None:
@pytest.mark.parametrize( @pytest.mark.parametrize(
("mode", "size", "test_file"), "mode, size, test_file",
[ [
("L", (128, 128), TEST_FILE_UNCOMPRESSED_L), ("L", (128, 128), TEST_FILE_UNCOMPRESSED_L),
("LA", (128, 128), TEST_FILE_UNCOMPRESSED_L_WITH_ALPHA), ("LA", (128, 128), TEST_FILE_UNCOMPRESSED_L_WITH_ALPHA),
@ -331,11 +331,13 @@ def test_dxt5_colorblock_alpha_issue_4142() -> None:
with Image.open("Tests/images/dxt5-colorblock-alpha-issue-4142.dds") as im: with Image.open("Tests/images/dxt5-colorblock-alpha-issue-4142.dds") as im:
px = im.getpixel((0, 0)) px = im.getpixel((0, 0))
assert isinstance(px, tuple)
assert px[0] != 0 assert px[0] != 0
assert px[1] != 0 assert px[1] != 0
assert px[2] != 0 assert px[2] != 0
px = im.getpixel((1, 0)) px = im.getpixel((1, 0))
assert isinstance(px, tuple)
assert px[0] != 0 assert px[0] != 0
assert px[1] != 0 assert px[1] != 0
assert px[2] != 0 assert px[2] != 0
@ -373,7 +375,7 @@ def test_save_unsupported_mode(tmp_path: Path) -> None:
@pytest.mark.parametrize( @pytest.mark.parametrize(
("mode", "test_file"), "mode, test_file",
[ [
("L", "Tests/images/linear_gradient.png"), ("L", "Tests/images/linear_gradient.png"),
("LA", "Tests/images/uncompressed_la.png"), ("LA", "Tests/images/uncompressed_la.png"),

View File

@ -8,6 +8,7 @@ import pytest
from PIL import EpsImagePlugin, Image, UnidentifiedImageError, features from PIL import EpsImagePlugin, Image, UnidentifiedImageError, features
from .helper import ( from .helper import (
assert_image_equal_tofile,
assert_image_similar, assert_image_similar,
assert_image_similar_tofile, assert_image_similar_tofile,
hopper, hopper,
@ -19,18 +20,18 @@ from .helper import (
HAS_GHOSTSCRIPT = EpsImagePlugin.has_ghostscript() HAS_GHOSTSCRIPT = EpsImagePlugin.has_ghostscript()
# Our two EPS test files (they are identical except for their bounding boxes) # Our two EPS test files (they are identical except for their bounding boxes)
FILE1 = "Tests/images/zero_bb.eps" FILE1 = "Tests/images/eps/zero_bb.eps"
FILE2 = "Tests/images/non_zero_bb.eps" FILE2 = "Tests/images/eps/non_zero_bb.eps"
# Due to palletization, we'll need to convert these to RGB after load # Due to palletization, we'll need to convert these to RGB after load
FILE1_COMPARE = "Tests/images/zero_bb.png" FILE1_COMPARE = "Tests/images/eps/zero_bb.png"
FILE1_COMPARE_SCALE2 = "Tests/images/zero_bb_scale2.png" FILE1_COMPARE_SCALE2 = "Tests/images/eps/zero_bb_scale2.png"
FILE2_COMPARE = "Tests/images/non_zero_bb.png" FILE2_COMPARE = "Tests/images/eps/non_zero_bb.png"
FILE2_COMPARE_SCALE2 = "Tests/images/non_zero_bb_scale2.png" FILE2_COMPARE_SCALE2 = "Tests/images/eps/non_zero_bb_scale2.png"
# EPS test files with binary preview # EPS test files with binary preview
FILE3 = "Tests/images/binary_preview_map.eps" FILE3 = "Tests/images/eps/binary_preview_map.eps"
# Three unsigned 32bit little-endian values: # Three unsigned 32bit little-endian values:
# 0xC6D3D0C5 magic number # 0xC6D3D0C5 magic number
@ -80,9 +81,7 @@ simple_eps_file_with_long_binary_data = (
@pytest.mark.skipif(not HAS_GHOSTSCRIPT, reason="Ghostscript not available") @pytest.mark.skipif(not HAS_GHOSTSCRIPT, reason="Ghostscript not available")
@pytest.mark.parametrize( @pytest.mark.parametrize("filename, size", ((FILE1, (460, 352)), (FILE2, (360, 252))))
("filename", "size"), ((FILE1, (460, 352)), (FILE2, (360, 252)))
)
@pytest.mark.parametrize("scale", (1, 2)) @pytest.mark.parametrize("scale", (1, 2))
def test_sanity(filename: str, size: tuple[int, int], scale: int) -> None: def test_sanity(filename: str, size: tuple[int, int], scale: int) -> None:
expected_size = tuple(s * scale for s in size) expected_size = tuple(s * scale for s in size)
@ -96,10 +95,14 @@ def test_sanity(filename: str, size: tuple[int, int], scale: int) -> None:
@pytest.mark.skipif(not HAS_GHOSTSCRIPT, reason="Ghostscript not available") @pytest.mark.skipif(not HAS_GHOSTSCRIPT, reason="Ghostscript not available")
def test_load() -> None: def test_load() -> None:
with Image.open(FILE1) as im: with Image.open(FILE1) as im:
assert im.load()[0, 0] == (255, 255, 255) px = im.load()
assert px is not None
assert px[0, 0] == (255, 255, 255)
# Test again now that it has already been loaded once # Test again now that it has already been loaded once
assert im.load()[0, 0] == (255, 255, 255) px = im.load()
assert px is not None
assert px[0, 0] == (255, 255, 255)
def test_binary() -> None: def test_binary() -> None:
@ -128,6 +131,15 @@ def test_binary_header_only() -> None:
EpsImagePlugin.EpsImageFile(data) EpsImagePlugin.EpsImageFile(data)
@pytest.mark.parametrize("prefix", (b"", simple_binary_header))
def test_simple_eps_file(prefix: bytes) -> None:
data = io.BytesIO(prefix + b"\n".join(simple_eps_file))
with Image.open(data) as img:
assert img.mode == "RGB"
assert img.size == (100, 100)
assert img.format == "EPS"
@pytest.mark.parametrize("prefix", (b"", simple_binary_header)) @pytest.mark.parametrize("prefix", (b"", simple_binary_header))
def test_missing_version_comment(prefix: bytes) -> None: def test_missing_version_comment(prefix: bytes) -> None:
data = io.BytesIO(prefix + b"\n".join(simple_eps_file_without_version)) data = io.BytesIO(prefix + b"\n".join(simple_eps_file_without_version))
@ -143,23 +155,21 @@ def test_missing_boundingbox_comment(prefix: bytes) -> None:
@pytest.mark.parametrize("prefix", (b"", simple_binary_header)) @pytest.mark.parametrize("prefix", (b"", simple_binary_header))
def test_invalid_boundingbox_comment(prefix: bytes) -> None: @pytest.mark.parametrize(
data = io.BytesIO(prefix + b"\n".join(simple_eps_file_with_invalid_boundingbox)) "file_lines",
(
simple_eps_file_with_invalid_boundingbox,
simple_eps_file_with_invalid_boundingbox_valid_imagedata,
),
)
def test_invalid_boundingbox_comment(
prefix: bytes, file_lines: tuple[bytes, ...]
) -> None:
data = io.BytesIO(prefix + b"\n".join(file_lines))
with pytest.raises(OSError, match="cannot determine EPS bounding box"): with pytest.raises(OSError, match="cannot determine EPS bounding box"):
EpsImagePlugin.EpsImageFile(data) EpsImagePlugin.EpsImageFile(data)
@pytest.mark.parametrize("prefix", (b"", simple_binary_header))
def test_invalid_boundingbox_comment_valid_imagedata_comment(prefix: bytes) -> None:
data = io.BytesIO(
prefix + b"\n".join(simple_eps_file_with_invalid_boundingbox_valid_imagedata)
)
with Image.open(data) as img:
assert img.mode == "RGB"
assert img.size == (100, 100)
assert img.format == "EPS"
@pytest.mark.parametrize("prefix", (b"", simple_binary_header)) @pytest.mark.parametrize("prefix", (b"", simple_binary_header))
def test_ascii_comment_too_long(prefix: bytes) -> None: def test_ascii_comment_too_long(prefix: bytes) -> None:
data = io.BytesIO(prefix + b"\n".join(simple_eps_file_with_long_ascii_comment)) data = io.BytesIO(prefix + b"\n".join(simple_eps_file_with_long_ascii_comment))
@ -179,7 +189,7 @@ def test_load_long_binary_data(prefix: bytes) -> None:
data = io.BytesIO(prefix + b"\n".join(simple_eps_file_with_long_binary_data)) data = io.BytesIO(prefix + b"\n".join(simple_eps_file_with_long_binary_data))
with Image.open(data) as img: with Image.open(data) as img:
img.load() img.load()
assert img.mode == "RGB" assert img.mode == "1"
assert img.size == (100, 100) assert img.size == (100, 100)
assert img.format == "EPS" assert img.format == "EPS"
@ -189,7 +199,7 @@ def test_load_long_binary_data(prefix: bytes) -> None:
) )
@pytest.mark.skipif(not HAS_GHOSTSCRIPT, reason="Ghostscript not available") @pytest.mark.skipif(not HAS_GHOSTSCRIPT, reason="Ghostscript not available")
def test_cmyk() -> None: def test_cmyk() -> None:
with Image.open("Tests/images/pil_sample_cmyk.eps") as cmyk_image: with Image.open("Tests/images/eps/pil_sample_cmyk.eps") as cmyk_image:
assert cmyk_image.mode == "CMYK" assert cmyk_image.mode == "CMYK"
assert cmyk_image.size == (100, 100) assert cmyk_image.size == (100, 100)
assert cmyk_image.format == "EPS" assert cmyk_image.format == "EPS"
@ -206,8 +216,8 @@ def test_cmyk() -> None:
@pytest.mark.skipif(not HAS_GHOSTSCRIPT, reason="Ghostscript not available") @pytest.mark.skipif(not HAS_GHOSTSCRIPT, reason="Ghostscript not available")
def test_showpage() -> None: def test_showpage() -> None:
# See https://github.com/python-pillow/Pillow/issues/2615 # See https://github.com/python-pillow/Pillow/issues/2615
with Image.open("Tests/images/reqd_showpage.eps") as plot_image: with Image.open("Tests/images/eps/reqd_showpage.eps") as plot_image:
with Image.open("Tests/images/reqd_showpage.png") as target: with Image.open("Tests/images/eps/reqd_showpage.png") as target:
# should not crash/hang # should not crash/hang
plot_image.load() plot_image.load()
# fonts could be slightly different # fonts could be slightly different
@ -216,11 +226,11 @@ def test_showpage() -> None:
@pytest.mark.skipif(not HAS_GHOSTSCRIPT, reason="Ghostscript not available") @pytest.mark.skipif(not HAS_GHOSTSCRIPT, reason="Ghostscript not available")
def test_transparency() -> None: def test_transparency() -> None:
with Image.open("Tests/images/reqd_showpage.eps") as plot_image: with Image.open("Tests/images/eps/reqd_showpage.eps") as plot_image:
plot_image.load(transparency=True) plot_image.load(transparency=True)
assert plot_image.mode == "RGBA" assert plot_image.mode == "RGBA"
with Image.open("Tests/images/reqd_showpage_transparency.png") as target: with Image.open("Tests/images/eps/reqd_showpage_transparency.png") as target:
# fonts could be slightly different # fonts could be slightly different
assert_image_similar(plot_image, target, 6) assert_image_similar(plot_image, target, 6)
@ -247,9 +257,19 @@ def test_bytesio_object() -> None:
assert_image_similar(img, image1_scale1_compare, 5) assert_image_similar(img, image1_scale1_compare, 5)
def test_1_mode() -> None: @pytest.mark.skipif(not HAS_GHOSTSCRIPT, reason="Ghostscript not available")
with Image.open("Tests/images/1.eps") as im: @pytest.mark.parametrize(
assert im.mode == "1" # These images have an "ImageData" descriptor.
"filename",
(
"Tests/images/eps/1.eps",
"Tests/images/eps/1_boundingbox_after_imagedata.eps",
"Tests/images/eps/1_second_imagedata.eps",
),
)
def test_1(filename: str) -> None:
with Image.open(filename) as im:
assert_image_equal_tofile(im, "Tests/images/eps/1.bmp")
def test_image_mode_not_supported(tmp_path: Path) -> None: def test_image_mode_not_supported(tmp_path: Path) -> None:
@ -304,7 +324,9 @@ def test_render_scale2() -> None:
@pytest.mark.skipif(not HAS_GHOSTSCRIPT, reason="Ghostscript not available") @pytest.mark.skipif(not HAS_GHOSTSCRIPT, reason="Ghostscript not available")
@pytest.mark.parametrize("filename", (FILE1, FILE2, "Tests/images/illu10_preview.eps")) @pytest.mark.parametrize(
"filename", (FILE1, FILE2, "Tests/images/eps/illu10_preview.eps")
)
def test_resize(filename: str) -> None: def test_resize(filename: str) -> None:
with Image.open(filename) as im: with Image.open(filename) as im:
new_size = (100, 100) new_size = (100, 100)
@ -346,10 +368,10 @@ def test_readline(prefix: bytes, line_ending: bytes) -> None:
@pytest.mark.parametrize( @pytest.mark.parametrize(
"filename", "filename",
( (
"Tests/images/illu10_no_preview.eps", "Tests/images/eps/illu10_no_preview.eps",
"Tests/images/illu10_preview.eps", "Tests/images/eps/illu10_preview.eps",
"Tests/images/illuCS6_no_preview.eps", "Tests/images/eps/illuCS6_no_preview.eps",
"Tests/images/illuCS6_preview.eps", "Tests/images/eps/illuCS6_preview.eps",
), ),
) )
def test_open_eps(filename: str) -> None: def test_open_eps(filename: str) -> None:
@ -361,7 +383,7 @@ def test_open_eps(filename: str) -> None:
@pytest.mark.skipif(not HAS_GHOSTSCRIPT, reason="Ghostscript not available") @pytest.mark.skipif(not HAS_GHOSTSCRIPT, reason="Ghostscript not available")
def test_emptyline() -> None: def test_emptyline() -> None:
# Test file includes an empty line in the header data # Test file includes an empty line in the header data
emptyline_file = "Tests/images/zero_bb_emptyline.eps" emptyline_file = "Tests/images/eps/zero_bb_emptyline.eps"
with Image.open(emptyline_file) as image: with Image.open(emptyline_file) as image:
image.load() image.load()
@ -373,7 +395,7 @@ def test_emptyline() -> None:
@pytest.mark.timeout(timeout=5) @pytest.mark.timeout(timeout=5)
@pytest.mark.parametrize( @pytest.mark.parametrize(
"test_file", "test_file",
["Tests/images/timeout-d675703545fee17acab56e5fec644c19979175de.eps"], ["Tests/images/eps/timeout-d675703545fee17acab56e5fec644c19979175de.eps"],
) )
def test_timeout(test_file: str) -> None: def test_timeout(test_file: str) -> None:
with open(test_file, "rb") as f: with open(test_file, "rb") as f:
@ -386,7 +408,7 @@ def test_bounding_box_in_trailer() -> None:
# Check bounding boxes are parsed in the same way # Check bounding boxes are parsed in the same way
# when specified in the header and the trailer # when specified in the header and the trailer
with ( with (
Image.open("Tests/images/zero_bb_trailer.eps") as trailer_image, Image.open("Tests/images/eps/zero_bb_trailer.eps") as trailer_image,
Image.open(FILE1) as header_image, Image.open(FILE1) as header_image,
): ):
assert trailer_image.size == header_image.size assert trailer_image.size == header_image.size
@ -394,12 +416,12 @@ def test_bounding_box_in_trailer() -> None:
def test_eof_before_bounding_box() -> None: def test_eof_before_bounding_box() -> None:
with pytest.raises(OSError): with pytest.raises(OSError):
with Image.open("Tests/images/zero_bb_eof_before_boundingbox.eps"): with Image.open("Tests/images/eps/zero_bb_eof_before_boundingbox.eps"):
pass pass
def test_invalid_data_after_eof() -> None: def test_invalid_data_after_eof() -> None:
with open("Tests/images/illuCS6_preview.eps", "rb") as f: with open("Tests/images/eps/illuCS6_preview.eps", "rb") as f:
img_bytes = io.BytesIO(f.read() + b"\r\n%" + (b" " * 255)) img_bytes = io.BytesIO(f.read() + b"\r\n%" + (b" " * 255))
with Image.open(img_bytes) as img: with Image.open(img_bytes) as img:
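With the EPS test images relocated under `Tests/images/eps/`, the tests above also exercise the two Ghostscript-backed load options; a brief sketch of both (requires Ghostscript on PATH):

```python
from PIL import Image

with Image.open("Tests/images/eps/zero_bb.eps") as im:
    im.load(scale=2)            # rasterise at twice the declared bounding box
    print(im.size)

with Image.open("Tests/images/eps/reqd_showpage.eps") as im:
    im.load(transparency=True)  # rasterise with an alpha channel
    assert im.mode == "RGBA"
```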

View File

@ -35,36 +35,35 @@ def test_sanity() -> None:
assert im.is_animated assert im.is_animated
def test_prefix_chunk() -> None: def test_prefix_chunk(monkeypatch: pytest.MonkeyPatch) -> None:
ImageFile.LOAD_TRUNCATED_IMAGES = True monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
try: with Image.open(animated_test_file_with_prefix_chunk) as im:
with Image.open(animated_test_file_with_prefix_chunk) as im: assert im.mode == "P"
assert im.mode == "P" assert im.size == (320, 200)
assert im.size == (320, 200) assert im.format == "FLI"
assert im.format == "FLI" assert im.info["duration"] == 171
assert im.info["duration"] == 171 assert im.is_animated
assert im.is_animated
palette = im.getpalette() palette = im.getpalette()
assert palette[3:6] == [255, 255, 255] assert palette[3:6] == [255, 255, 255]
assert palette[381:384] == [204, 204, 12] assert palette[381:384] == [204, 204, 12]
assert palette[765:] == [252, 0, 0] assert palette[765:] == [252, 0, 0]
finally:
ImageFile.LOAD_TRUNCATED_IMAGES = False
@pytest.mark.skipif(is_pypy(), reason="Requires CPython") @pytest.mark.skipif(is_pypy(), reason="Requires CPython")
def test_unclosed_file() -> None: def test_unclosed_file() -> None:
def open() -> None: def open_test_image() -> None:
im = Image.open(static_test_file) im = Image.open(static_test_file)
im.load() im.load()
with pytest.warns(ResourceWarning): with pytest.warns(ResourceWarning):
open() open_test_image()
def test_closed_file() -> None: def test_closed_file() -> None:
with warnings.catch_warnings(): with warnings.catch_warnings():
warnings.simplefilter("error")
im = Image.open(static_test_file) im = Image.open(static_test_file)
im.load() im.load()
im.close() im.close()
@ -81,6 +80,8 @@ def test_seek_after_close() -> None:
def test_context_manager() -> None: def test_context_manager() -> None:
with warnings.catch_warnings(): with warnings.catch_warnings():
warnings.simplefilter("error")
with Image.open(static_test_file) as im: with Image.open(static_test_file) as im:
im.load() im.load()

View File

@ -1,5 +1,8 @@
from __future__ import annotations from __future__ import annotations
import io
import struct
import pytest import pytest
from PIL import FtexImagePlugin, Image from PIL import FtexImagePlugin, Image
@ -23,3 +26,15 @@ def test_invalid_file() -> None:
with pytest.raises(SyntaxError): with pytest.raises(SyntaxError):
FtexImagePlugin.FtexImageFile(invalid_file) FtexImagePlugin.FtexImageFile(invalid_file)
def test_invalid_texture() -> None:
with open("Tests/images/ftex_dxt1.ftc", "rb") as fp:
data = fp.read()
# Change texture compression format
data = data[:24] + struct.pack("<i", 2) + data[28:]
with pytest.raises(ValueError, match="Invalid texture compression format: 2"):
with Image.open(io.BytesIO(data)):
pass

View File

@ -14,10 +14,14 @@ def test_gbr_file() -> None:
def test_load() -> None: def test_load() -> None:
with Image.open("Tests/images/gbr.gbr") as im: with Image.open("Tests/images/gbr.gbr") as im:
assert im.load()[0, 0] == (0, 0, 0, 0) px = im.load()
assert px is not None
assert px[0, 0] == (0, 0, 0, 0)
# Test again now that it has already been loaded once # Test again now that it has already been loaded once
assert im.load()[0, 0] == (0, 0, 0, 0) px = im.load()
assert px is not None
assert px[0, 0] == (0, 0, 0, 0)
def test_multiple_load_operations() -> None: def test_multiple_load_operations() -> None:
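The repeated `assert px is not None` guards reflect the typed signature of `Image.Image.load()`, which may return `None`, so type-checked code narrows the result before indexing:

```python
from PIL import Image

with Image.open("Tests/images/gbr.gbr") as im:
    px = im.load()
    assert px is not None  # narrow PixelAccess | None for the type checker
    print(px[0, 0])
```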

View File

@ -4,6 +4,8 @@ import pytest
from PIL import GdImageFile, UnidentifiedImageError from PIL import GdImageFile, UnidentifiedImageError
from .helper import assert_image_similar_tofile
TEST_GD_FILE = "Tests/images/hopper.gd" TEST_GD_FILE = "Tests/images/hopper.gd"
@ -11,6 +13,7 @@ def test_sanity() -> None:
with GdImageFile.open(TEST_GD_FILE) as im: with GdImageFile.open(TEST_GD_FILE) as im:
assert im.size == (128, 128) assert im.size == (128, 128)
assert im.format == "GD" assert im.format == "GD"
assert_image_similar_tofile(im.convert("RGB"), "Tests/images/hopper.jpg", 14)
def test_bad_mode() -> None: def test_bad_mode() -> None:

View File

@ -4,6 +4,7 @@ import warnings
from collections.abc import Generator from collections.abc import Generator
from io import BytesIO from io import BytesIO
from pathlib import Path from pathlib import Path
from typing import Any
import pytest import pytest
@ -21,9 +22,6 @@ from .helper import (
# sample gif stream # sample gif stream
TEST_GIF = "Tests/images/hopper.gif" TEST_GIF = "Tests/images/hopper.gif"
with open(TEST_GIF, "rb") as f:
data = f.read()
def test_sanity() -> None: def test_sanity() -> None:
with Image.open(TEST_GIF) as im: with Image.open(TEST_GIF) as im:
@ -36,16 +34,18 @@ def test_sanity() -> None:
@pytest.mark.skipif(is_pypy(), reason="Requires CPython") @pytest.mark.skipif(is_pypy(), reason="Requires CPython")
def test_unclosed_file() -> None: def test_unclosed_file() -> None:
def open() -> None: def open_test_image() -> None:
im = Image.open(TEST_GIF) im = Image.open(TEST_GIF)
im.load() im.load()
with pytest.warns(ResourceWarning): with pytest.warns(ResourceWarning):
open() open_test_image()
def test_closed_file() -> None: def test_closed_file() -> None:
with warnings.catch_warnings(): with warnings.catch_warnings():
warnings.simplefilter("error")
im = Image.open(TEST_GIF) im = Image.open(TEST_GIF)
im.load() im.load()
im.close() im.close()
@ -67,6 +67,8 @@ def test_seek_after_close() -> None:
def test_context_manager() -> None: def test_context_manager() -> None:
with warnings.catch_warnings(): with warnings.catch_warnings():
warnings.simplefilter("error")
with Image.open(TEST_GIF) as im: with Image.open(TEST_GIF) as im:
im.load() im.load()
@ -81,12 +83,12 @@ def test_invalid_file() -> None:
def test_l_mode_transparency() -> None: def test_l_mode_transparency() -> None:
with Image.open("Tests/images/no_palette_with_transparency.gif") as im: with Image.open("Tests/images/no_palette_with_transparency.gif") as im:
assert im.mode == "L" assert im.mode == "L"
assert im.load()[0, 0] == 128 assert im.getpixel((0, 0)) == 128
assert im.info["transparency"] == 255 assert im.info["transparency"] == 255
im.seek(1) im.seek(1)
assert im.mode == "L" assert im.mode == "L"
assert im.load()[0, 0] == 128 assert im.getpixel((0, 0)) == 128
def test_l_mode_after_rgb() -> None: def test_l_mode_after_rgb() -> None:
@ -104,7 +106,7 @@ def test_palette_not_needed_for_second_frame() -> None:
assert_image_similar(im, hopper("L").convert("RGB"), 8) assert_image_similar(im, hopper("L").convert("RGB"), 8)
def test_strategy() -> None: def test_strategy(monkeypatch: pytest.MonkeyPatch) -> None:
with Image.open("Tests/images/iss634.gif") as im: with Image.open("Tests/images/iss634.gif") as im:
expected_rgb_always = im.convert("RGB") expected_rgb_always = im.convert("RGB")
@ -114,35 +116,36 @@ def test_strategy() -> None:
im.seek(1) im.seek(1)
expected_different = im.convert("RGB") expected_different = im.convert("RGB")
try: monkeypatch.setattr(
GifImagePlugin.LOADING_STRATEGY = GifImagePlugin.LoadingStrategy.RGB_ALWAYS GifImagePlugin, "LOADING_STRATEGY", GifImagePlugin.LoadingStrategy.RGB_ALWAYS
with Image.open("Tests/images/iss634.gif") as im: )
assert im.mode == "RGB" with Image.open("Tests/images/iss634.gif") as im:
assert_image_equal(im, expected_rgb_always) assert im.mode == "RGB"
assert_image_equal(im, expected_rgb_always)
with Image.open("Tests/images/chi.gif") as im: with Image.open("Tests/images/chi.gif") as im:
assert im.mode == "RGBA" assert im.mode == "RGBA"
assert_image_equal(im, expected_rgb_always_rgba) assert_image_equal(im, expected_rgb_always_rgba)
GifImagePlugin.LOADING_STRATEGY = ( monkeypatch.setattr(
GifImagePlugin.LoadingStrategy.RGB_AFTER_DIFFERENT_PALETTE_ONLY GifImagePlugin,
) "LOADING_STRATEGY",
# Stay in P mode with only a global palette GifImagePlugin.LoadingStrategy.RGB_AFTER_DIFFERENT_PALETTE_ONLY,
with Image.open("Tests/images/chi.gif") as im: )
assert im.mode == "P" # Stay in P mode with only a global palette
with Image.open("Tests/images/chi.gif") as im:
assert im.mode == "P"
im.seek(1) im.seek(1)
assert im.mode == "P" assert im.mode == "P"
assert_image_equal(im.convert("RGB"), expected_different) assert_image_equal(im.convert("RGB"), expected_different)
# Change to RGB mode when a frame has an individual palette # Change to RGB mode when a frame has an individual palette
with Image.open("Tests/images/iss634.gif") as im: with Image.open("Tests/images/iss634.gif") as im:
assert im.mode == "P" assert im.mode == "P"
im.seek(1) im.seek(1)
assert im.mode == "RGB" assert im.mode == "RGB"
finally:
GifImagePlugin.LOADING_STRATEGY = GifImagePlugin.LoadingStrategy.RGB_AFTER_FIRST
def test_optimize() -> None: def test_optimize() -> None:
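The `test_strategy` rewrite above relies on `GifImagePlugin.LOADING_STRATEGY`, a module-level switch controlling whether GIF frames stay in `P` mode or are promoted to RGB/RGBA. A sketch of setting and restoring it outside of pytest:

```python
from PIL import GifImagePlugin, Image

GifImagePlugin.LOADING_STRATEGY = GifImagePlugin.LoadingStrategy.RGB_ALWAYS
try:
    with Image.open("Tests/images/iss634.gif") as im:
        assert im.mode == "RGB"
finally:
    # RGB_AFTER_FIRST is the default strategy
    GifImagePlugin.LOADING_STRATEGY = GifImagePlugin.LoadingStrategy.RGB_AFTER_FIRST
```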
@ -304,8 +307,9 @@ def test_roundtrip_save_all_1(tmp_path: Path) -> None:
def test_loading_multiple_palettes(path: str, mode: str) -> None: def test_loading_multiple_palettes(path: str, mode: str) -> None:
with Image.open(path) as im: with Image.open(path) as im:
assert im.mode == "P" assert im.mode == "P"
assert im.palette is not None
first_frame_colors = im.palette.colors.keys() first_frame_colors = im.palette.colors.keys()
original_color = im.convert("RGB").load()[0, 0] original_color = im.convert("RGB").getpixel((0, 0))
im.seek(1) im.seek(1)
assert im.mode == mode assert im.mode == mode
@ -313,10 +317,10 @@ def test_loading_multiple_palettes(path: str, mode: str) -> None:
im = im.convert("RGB") im = im.convert("RGB")
# Check a color only from the old palette # Check a color only from the old palette
assert im.load()[0, 0] == original_color assert im.getpixel((0, 0)) == original_color
# Check a color from the new palette # Check a color from the new palette
assert im.load()[24, 24] not in first_frame_colors assert im.getpixel((24, 24)) not in first_frame_colors
def test_headers_saving_for_animated_gifs(tmp_path: Path) -> None: def test_headers_saving_for_animated_gifs(tmp_path: Path) -> None:
@ -482,8 +486,7 @@ def test_eoferror() -> None:
def test_first_frame_transparency() -> None: def test_first_frame_transparency() -> None:
with Image.open("Tests/images/first_frame_transparency.gif") as im: with Image.open("Tests/images/first_frame_transparency.gif") as im:
px = im.load() assert im.getpixel((0, 0)) == im.info["transparency"]
assert px[0, 0] == im.info["transparency"]
def test_dispose_none() -> None: def test_dispose_none() -> None:
@ -523,6 +526,7 @@ def test_dispose_background_transparency() -> None:
with Image.open("Tests/images/dispose_bgnd_transparency.gif") as img: with Image.open("Tests/images/dispose_bgnd_transparency.gif") as img:
img.seek(2) img.seek(2)
px = img.load() px = img.load()
assert px is not None
assert px[35, 30][3] == 0 assert px[35, 30][3] == 0
@ -550,17 +554,15 @@ def test_dispose_background_transparency() -> None:
def test_transparent_dispose( def test_transparent_dispose(
loading_strategy: GifImagePlugin.LoadingStrategy, loading_strategy: GifImagePlugin.LoadingStrategy,
expected_colors: tuple[tuple[int | tuple[int, int, int, int], ...]], expected_colors: tuple[tuple[int | tuple[int, int, int, int], ...]],
monkeypatch: pytest.MonkeyPatch,
) -> None: ) -> None:
GifImagePlugin.LOADING_STRATEGY = loading_strategy monkeypatch.setattr(GifImagePlugin, "LOADING_STRATEGY", loading_strategy)
try: with Image.open("Tests/images/transparent_dispose.gif") as img:
with Image.open("Tests/images/transparent_dispose.gif") as img: for frame in range(3):
for frame in range(3): img.seek(frame)
img.seek(frame) for x in range(3):
for x in range(3): color = img.getpixel((x, 0))
color = img.getpixel((x, 0)) assert color == expected_colors[frame][x]
assert color == expected_colors[frame][x]
finally:
GifImagePlugin.LOADING_STRATEGY = GifImagePlugin.LoadingStrategy.RGB_AFTER_FIRST
def test_dispose_previous() -> None: def test_dispose_previous() -> None:
@ -759,6 +761,21 @@ def test_dispose2_previous_frame(tmp_path: Path) -> None:
assert im.getpixel((0, 0)) == (0, 0, 0, 255) assert im.getpixel((0, 0)) == (0, 0, 0, 255)
def test_dispose2_without_transparency(tmp_path: Path) -> None:
out = str(tmp_path / "temp.gif")
im = Image.new("P", (100, 100))
im2 = Image.new("P", (100, 100), (0, 0, 0))
im2.putpixel((50, 50), (255, 0, 0))
im.save(out, save_all=True, append_images=[im2], disposal=2)
with Image.open(out) as reloaded:
reloaded.seek(1)
assert reloaded.tile[0].extents == (0, 0, 100, 100)
def test_transparency_in_second_frame(tmp_path: Path) -> None: def test_transparency_in_second_frame(tmp_path: Path) -> None:
out = str(tmp_path / "temp.gif") out = str(tmp_path / "temp.gif")
with Image.open("Tests/images/different_transparency.gif") as im: with Image.open("Tests/images/different_transparency.gif") as im:
@ -978,7 +995,7 @@ def test_webp_background(tmp_path: Path) -> None:
out = str(tmp_path / "temp.gif") out = str(tmp_path / "temp.gif")
# Test opaque WebP background # Test opaque WebP background
if features.check("webp") and features.check("webp_anim"): if features.check("webp"):
with Image.open("Tests/images/hopper.webp") as im: with Image.open("Tests/images/hopper.webp") as im:
assert im.info["background"] == (255, 255, 255, 255) assert im.info["background"] == (255, 255, 255, 255)
im.save(out) im.save(out)
@ -1308,6 +1325,7 @@ def test_palette_save_all_P(tmp_path: Path) -> None:
with Image.open(out) as im: with Image.open(out) as im:
# Assert that the frames are correct, and each frame has the same palette # Assert that the frames are correct, and each frame has the same palette
assert_image_equal(im.convert("RGB"), frames[0].convert("RGB")) assert_image_equal(im.convert("RGB"), frames[0].convert("RGB"))
assert im.palette is not None
assert im.palette.palette == im.global_palette.palette assert im.palette.palette == im.global_palette.palette
im.seek(1) im.seek(1)
@ -1342,32 +1360,30 @@ def test_save_I(tmp_path: Path) -> None:
assert_image_equal(reloaded.convert("L"), im.convert("L")) assert_image_equal(reloaded.convert("L"), im.convert("L"))
def test_getdata() -> None: def test_getdata(monkeypatch: pytest.MonkeyPatch) -> None:
# Test getheader/getdata against legacy values. # Test getheader/getdata against legacy values.
# Create a 'P' image with holes in the palette. # Create a 'P' image with holes in the palette.
im = Image._wedge().resize((16, 16), Image.Resampling.NEAREST) im = Image.linear_gradient(mode="L").resize((16, 16), Image.Resampling.NEAREST)
im.putpalette(ImagePalette.ImagePalette("RGB")) im.putpalette(ImagePalette.ImagePalette("RGB"))
im.info = {"background": 0} im.info = {"background": 0}
passed_palette = bytes(255 - i // 3 for i in range(768)) passed_palette = bytes(255 - i // 3 for i in range(768))
GifImagePlugin._FORCE_OPTIMIZE = True monkeypatch.setattr(GifImagePlugin, "_FORCE_OPTIMIZE", True)
try:
h = GifImagePlugin.getheader(im, passed_palette)
d = GifImagePlugin.getdata(im)
import pickle h = GifImagePlugin.getheader(im, passed_palette)
d = GifImagePlugin.getdata(im)
# Enable to get target values on pre-refactor version import pickle
# with open('Tests/images/gif_header_data.pkl', 'wb') as f:
# pickle.dump((h, d), f, 1)
with open("Tests/images/gif_header_data.pkl", "rb") as f:
(h_target, d_target) = pickle.load(f)
assert h == h_target # Enable to get target values on pre-refactor version
assert d == d_target # with open('Tests/images/gif_header_data.pkl', 'wb') as f:
finally: # pickle.dump((h, d), f, 1)
GifImagePlugin._FORCE_OPTIMIZE = False with open("Tests/images/gif_header_data.pkl", "rb") as f:
(h_target, d_target) = pickle.load(f)
assert h == h_target
assert d == d_target
def test_lzw_bits() -> None: def test_lzw_bits() -> None:
@ -1378,8 +1394,27 @@ def test_lzw_bits() -> None:
im.load() im.load()
def test_extents() -> None: @pytest.mark.parametrize(
with Image.open("Tests/images/test_extents.gif") as im: "test_file, loading_strategy",
(
("test_extents.gif", GifImagePlugin.LoadingStrategy.RGB_AFTER_FIRST),
(
"test_extents.gif",
GifImagePlugin.LoadingStrategy.RGB_AFTER_DIFFERENT_PALETTE_ONLY,
),
(
"test_extents_transparency.gif",
GifImagePlugin.LoadingStrategy.RGB_AFTER_FIRST,
),
),
)
def test_extents(
test_file: str,
loading_strategy: GifImagePlugin.LoadingStrategy,
monkeypatch: pytest.MonkeyPatch,
) -> None:
monkeypatch.setattr(GifImagePlugin, "LOADING_STRATEGY", loading_strategy)
with Image.open("Tests/images/" + test_file) as im:
assert im.size == (100, 100) assert im.size == (100, 100)
# Check that n_frames does not change the size # Check that n_frames does not change the size
@ -1389,6 +1424,9 @@ def test_extents() -> None:
im.seek(1) im.seek(1)
assert im.size == (150, 150) assert im.size == (150, 150)
im.load()
assert im.im.size == (150, 150)
def test_missing_background() -> None: def test_missing_background() -> None:
# The Global Color Table Flag isn't set, so there is no background color index, # The Global Color Table Flag isn't set, so there is no background color index,
@ -1406,3 +1444,22 @@ def test_saving_rgba(tmp_path: Path) -> None:
with Image.open(out) as reloaded: with Image.open(out) as reloaded:
reloaded_rgba = reloaded.convert("RGBA") reloaded_rgba = reloaded.convert("RGBA")
assert reloaded_rgba.load()[0, 0][3] == 0 assert reloaded_rgba.load()[0, 0][3] == 0
@pytest.mark.parametrize("params", ({}, {"disposal": 2, "optimize": False}))
def test_p_rgba(tmp_path: Path, params: dict[str, Any]) -> None:
out = str(tmp_path / "temp.gif")
im1 = Image.new("P", (100, 100))
d = ImageDraw.Draw(im1)
d.ellipse([(40, 40), (60, 60)], fill=1)
data = [0, 0, 0, 0, 0, 0, 0, 255] + [0, 0, 0, 0] * 254
im1.putpalette(data, "RGBA")
im2 = Image.new("P", (100, 100))
im2.putpalette(data, "RGBA")
im1.save(out, save_all=True, append_images=[im2], **params)
with Image.open(out) as reloaded:
assert reloaded.n_frames == 2
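A recurring change in this file is replacing manual set/reset of module-level flags (wrapped in try/finally) with pytest's monkeypatch fixture, which restores the original value automatically when the test ends. A minimal standalone sketch of that pattern, using the same GifImagePlugin flag the tests above touch (the particular GIF path is only illustrative):

import pytest
from PIL import GifImagePlugin, Image

def test_rgb_always(monkeypatch: pytest.MonkeyPatch) -> None:
    # monkeypatch undoes this setattr at teardown, so no try/finally reset is needed
    monkeypatch.setattr(
        GifImagePlugin, "LOADING_STRATEGY", GifImagePlugin.LoadingStrategy.RGB_ALWAYS
    )
    with Image.open("Tests/images/iss634.gif") as im:
        assert im.mode == "RGB"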
View File
@ -83,4 +83,4 @@ def test_handler(tmp_path: Path) -> None:
         im.save(temp_file)
         assert handler.saved
 
-    GribStubImagePlugin._handler = None
+    GribStubImagePlugin.register_handler(None)
View File
@ -85,4 +85,4 @@ def test_handler(tmp_path: Path) -> None:
         im.save(temp_file)
         assert handler.saved
 
-    Hdf5StubImagePlugin._handler = None
+    Hdf5StubImagePlugin.register_handler(None)
View File
@ -21,6 +21,8 @@ def test_sanity() -> None:
     with Image.open(TEST_FILE) as im:
         # Assert that there is no unclosed file warning
         with warnings.catch_warnings():
+            warnings.simplefilter("error")
+
             im.load()
 
         assert im.mode == "RGBA"
@ -30,10 +32,14 @@ def test_sanity() -> None:
 def test_load() -> None:
     with Image.open(TEST_FILE) as im:
-        assert im.load()[0, 0] == (0, 0, 0, 0)
+        px = im.load()
+        assert px is not None
+        assert px[0, 0] == (0, 0, 0, 0)
 
         # Test again now that it has already been loaded once
-        assert im.load()[0, 0] == (0, 0, 0, 0)
+        px = im.load()
+        assert px is not None
+        assert px[0, 0] == (0, 0, 0, 0)
 
 def test_save(tmp_path: Path) -> None:
@ -63,8 +69,8 @@ def test_save_append_images(tmp_path: Path) -> None:
     assert_image_similar_tofile(im, temp_file, 1)
 
     with Image.open(temp_file) as reread:
-        reread.size = (16, 16, 2)
-        reread.load()
+        reread.size = (16, 16)
+        reread.load(2)
         assert_image_equal(reread, provided_im)
@ -87,14 +93,21 @@ def test_sizes() -> None:
         for w, h, r in im.info["sizes"]:
             wr = w * r
             hr = h * r
-            im.size = (w, h, r)
+            with pytest.warns(DeprecationWarning):
+                im.size = (w, h, r)
             im.load()
             assert im.mode == "RGBA"
             assert im.size == (wr, hr)
 
+            # Test using load() with scale
+            im.size = (w, h)
+            im.load(scale=r)
+            assert im.mode == "RGBA"
+            assert im.size == (wr, hr)
+
         # Check that we cannot load an incorrect size
         with pytest.raises(ValueError):
-            im.size = (1, 1)
+            im.size = (1, 2)
 
 def test_older_icon() -> None:
@ -105,8 +118,8 @@ def test_older_icon() -> None:
             wr = w * r
             hr = h * r
             with Image.open("Tests/images/pillow2.icns") as im2:
-                im2.size = (w, h, r)
-                im2.load()
+                im2.size = (w, h)
+                im2.load(r)
                 assert im2.mode == "RGBA"
                 assert im2.size == (wr, hr)
@ -122,8 +135,8 @@ def test_jp2_icon() -> None:
             wr = w * r
             hr = h * r
             with Image.open("Tests/images/pillow3.icns") as im2:
-                im2.size = (w, h, r)
-                im2.load()
+                im2.size = (w, h)
+                im2.load(r)
                 assert im2.mode == "RGBA"
                 assert im2.size == (wr, hr)
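Several hunks above either add an "assert px is not None" after im.load() or switch to Image.getpixel(), because load() is typed as returning an optional pixel-access object, so the None case has to be narrowed before indexing. A small sketch of the two equivalent ways to read a pixel (the file name is only illustrative):

from PIL import Image

with Image.open("Tests/images/pillow2.icns") as im:
    # Option 1: narrow the optional pixel-access object before indexing
    px = im.load()
    assert px is not None
    top_left = px[0, 0]

    # Option 2: getpixel() avoids the optional return entirely
    assert im.getpixel((0, 0)) == top_left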
View File
@ -6,7 +6,7 @@ from pathlib import Path
 
 import pytest
 
-from PIL import IcoImagePlugin, Image, ImageDraw
+from PIL import IcoImagePlugin, Image, ImageDraw, ImageFile
 
 from .helper import assert_image_equal, assert_image_equal_tofile, hopper
@ -24,7 +24,9 @@ def test_sanity() -> None:
 def test_load() -> None:
     with Image.open(TEST_ICO_FILE) as im:
-        assert im.load()[0, 0] == (1, 1, 9, 255)
+        px = im.load()
+        assert px is not None
+        assert px[0, 0] == (1, 1, 9, 255)
 
 def test_mask() -> None:
@ -241,3 +243,25 @@ def test_draw_reloaded(tmp_path: Path) -> None:
     with Image.open(outfile) as im:
         assert_image_equal_tofile(im, "Tests/images/hopper_draw.ico")
+
+def test_truncated_mask(monkeypatch: pytest.MonkeyPatch) -> None:
+    # 1 bpp
+    with open("Tests/images/hopper_mask.ico", "rb") as fp:
+        data = fp.read()
+
+    monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
+
+    data = data[:-3]
+    with Image.open(io.BytesIO(data)) as im:
+        assert im.mode == "1"
+
+    # 32 bpp
+    output = io.BytesIO()
+    expected = hopper("RGBA")
+    expected.save(output, "ico", bitmap_format="bmp")
+
+    data = output.getvalue()[:-1]
+    with Image.open(io.BytesIO(data)) as im:
+        assert im.mode == "RGB"
View File
@ -31,16 +31,18 @@ def test_name_limit(tmp_path: Path) -> None:
 
 @pytest.mark.skipif(is_pypy(), reason="Requires CPython")
 def test_unclosed_file() -> None:
-    def open() -> None:
+    def open_test_image() -> None:
         im = Image.open(TEST_IM)
         im.load()
 
     with pytest.warns(ResourceWarning):
-        open()
+        open_test_image()
 
 def test_closed_file() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         im = Image.open(TEST_IM)
         im.load()
         im.close()
@ -48,6 +50,8 @@ def test_closed_file() -> None:
 
 def test_context_manager() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         with Image.open(TEST_IM) as im:
             im.load()
View File
@ -58,10 +58,7 @@ def test_getiptcinfo_fotostation() -> None:
 
     # Assert
     assert iptc is not None
-    for tag in iptc.keys():
-        if tag[0] == 240:
-            return
-    pytest.fail("FotoStation tag not found")
+    assert 240 in (tag[0] for tag in iptc.keys()), "FotoStation tag not found"
 
 def test_getiptcinfo_zero_padding() -> None:
@ -77,6 +74,16 @@ def test_getiptcinfo_zero_padding() -> None:
     assert len(iptc) == 3
 
+def test_getiptcinfo_tiff() -> None:
+    # Arrange
+    with Image.open("Tests/images/hopper.Lab.tif") as im:
+        # Act
+        iptc = IptcImagePlugin.getiptcinfo(im)
+
+    # Assert
+    assert iptc == {(1, 90): b"\x1b%G", (2, 0): b"\xcf\xc0"}
+
 def test_getiptcinfo_tiff_none() -> None:
     # Arrange
     with Image.open("Tests/images/hopper.tif") as im:
View File
@ -179,7 +179,7 @@ class TestFileJpeg:
         assert k > 0.9
 
     def test_rgb(self) -> None:
-        def getchannels(im: JpegImagePlugin.JpegImageFile) -> tuple[int, int, int]:
+        def getchannels(im: JpegImagePlugin.JpegImageFile) -> tuple[int, ...]:
             return tuple(v[0] for v in im.layer)
 
         im = hopper()
@ -206,6 +206,10 @@ class TestFileJpeg:
         assert test(100, 200) == (100, 200)
         assert test(0) is None  # square pixels
 
+    def test_dpi_jfif_cm(self) -> None:
+        with Image.open("Tests/images/jfif_unit_cm.jpg") as im:
+            assert im.info["dpi"] == (2.54, 5.08)
+
     @mark_if_feature_version(
         pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
     )
@ -302,7 +306,10 @@ class TestFileJpeg:
         assert not im2.info.get("progressive")
         assert im3.info.get("progressive")
 
-        assert_image_equal(im1, im3)
+        if features.check_feature("mozjpeg"):
+            assert_image_similar(im1, im3, 9.39)
+        else:
+            assert_image_equal(im1, im3)
         assert im1_bytes >= im3_bytes
 
     def test_progressive_large_buffer(self, tmp_path: Path) -> None:
@ -374,7 +381,6 @@ class TestFileJpeg:
         assert exif.get_ifd(0x8825) == {}
 
         transposed = ImageOps.exif_transpose(im)
-        assert transposed is not None
         exif = transposed.getexif()
         assert exif.get_ifd(0x8825) == {}
@ -445,8 +451,12 @@ class TestFileJpeg:
         im2 = self.roundtrip(hopper(), progressive=1)
         im3 = self.roundtrip(hopper(), progression=1)  # compatibility
 
-        assert_image_equal(im1, im2)
-        assert_image_equal(im1, im3)
+        if features.check_feature("mozjpeg"):
+            assert_image_similar(im1, im2, 9.39)
+            assert_image_similar(im1, im3, 9.39)
+        else:
+            assert_image_equal(im1, im2)
+            assert_image_equal(im1, im3)
         assert im2.info.get("progressive")
         assert im2.info.get("progression")
         assert im3.info.get("progressive")
@ -545,12 +555,13 @@ class TestFileJpeg:
     @mark_if_feature_version(
         pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
     )
-    def test_truncated_jpeg_should_read_all_the_data(self) -> None:
+    def test_truncated_jpeg_should_read_all_the_data(
+        self, monkeypatch: pytest.MonkeyPatch
+    ) -> None:
         filename = "Tests/images/truncated_jpeg.jpg"
-        ImageFile.LOAD_TRUNCATED_IMAGES = True
+        monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
         with Image.open(filename) as im:
             im.load()
-            ImageFile.LOAD_TRUNCATED_IMAGES = False
             assert im.getbbox() is not None
 
     def test_truncated_jpeg_throws_oserror(self) -> None:
@ -566,12 +577,12 @@ class TestFileJpeg:
     @mark_if_feature_version(
         pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
     )
-    def test_qtables(self, tmp_path: Path) -> None:
+    def test_qtables(self) -> None:
         def _n_qtables_helper(n: int, test_file: str) -> None:
+            b = BytesIO()
             with Image.open(test_file) as im:
-                f = str(tmp_path / "temp.jpg")
-                im.save(f, qtables=[[n] * 64] * n)
-            with Image.open(f) as im:
+                im.save(b, "JPEG", qtables=[[n] * 64] * n)
+            with Image.open(b) as im:
                 assert len(im.quantization) == n
                 reloaded = self.roundtrip(im, qtables="keep")
                 assert im.quantization == reloaded.quantization
@ -875,6 +886,8 @@ class TestFileJpeg:
         out = str(tmp_path / "out.jpg")
         with warnings.catch_warnings():
+            warnings.simplefilter("error")
+
             im.save(out, exif=exif)
 
         with Image.open(out) as reloaded:
@ -946,7 +959,7 @@ class TestFileJpeg:
     def test_jpeg_magic_number(self, monkeypatch: pytest.MonkeyPatch) -> None:
         size = 4097
-        buffer = BytesIO(b"\xFF" * size)  # Many xFF bytes
+        buffer = BytesIO(b"\xff" * size)  # Many xff bytes
         max_pos = 0
         orig_read = buffer.read
@ -1016,23 +1029,44 @@ class TestFileJpeg:
             else:
                 assert im.getxmp() == {"xmpmeta": None}
 
+    def test_save_xmp(self, tmp_path: Path) -> None:
+        f = str(tmp_path / "temp.jpg")
+        im = hopper()
+        im.save(f, xmp=b"XMP test")
+        with Image.open(f) as reloaded:
+            assert reloaded.info["xmp"] == b"XMP test"
+
+            # Check that XMP is not saved from image info
+            reloaded.save(f)
+
+        with Image.open(f) as reloaded:
+            assert "xmp" not in reloaded.info
+
+        im.save(f, xmp=b"1" * 65504)
+        with Image.open(f) as reloaded:
+            assert reloaded.info["xmp"] == b"1" * 65504
+
+        with pytest.raises(ValueError):
+            im.save(f, xmp=b"1" * 65505)
+
     @pytest.mark.timeout(timeout=1)
-    def test_eof(self) -> None:
+    def test_eof(self, monkeypatch: pytest.MonkeyPatch) -> None:
         # Even though this decoder never says that it is finished
         # the image should still end when there is no new data
         class InfiniteMockPyDecoder(ImageFile.PyDecoder):
-            def decode(self, buffer: bytes) -> tuple[int, int]:
+            def decode(
+                self, buffer: bytes | Image.SupportsArrayInterface
+            ) -> tuple[int, int]:
                 return 0, 0
 
         Image.register_decoder("INFINITE", InfiniteMockPyDecoder)
 
         with Image.open(TEST_FILE) as im:
             im.tile = [
-                ("INFINITE", (0, 0, 128, 128), 0, ("RGB", 0, 1)),
+                ImageFile._Tile("INFINITE", (0, 0, 128, 128), 0, ("RGB", 0, 1)),
             ]
-            ImageFile.LOAD_TRUNCATED_IMAGES = True
+            monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
             im.load()
-            ImageFile.LOAD_TRUNCATED_IMAGES = False
 
     def test_separate_tables(self) -> None:
         im = hopper()
@ -1044,13 +1078,16 @@ class TestFileJpeg:
         # SOI, EOI
         for marker in b"\xff\xd8", b"\xff\xd9":
-            assert marker in data[1] and marker in data[2]
+            assert marker in data[1]
+            assert marker in data[2]
         # DHT, DQT
         for marker in b"\xff\xc4", b"\xff\xdb":
-            assert marker in data[1] and marker not in data[2]
+            assert marker in data[1]
+            assert marker not in data[2]
         # SOF0, SOS, APP0 (JFIF header)
         for marker in b"\xff\xc0", b"\xff\xda", b"\xff\xe0":
-            assert marker not in data[1] and marker in data[2]
+            assert marker not in data[1]
+            assert marker in data[2]
 
         with Image.open(BytesIO(data[0])) as interchange_im:
             with Image.open(BytesIO(data[1] + data[2])) as combined_im:
@ -1070,6 +1107,13 @@ class TestFileJpeg:
             assert im._repr_jpeg_() is None
 
+    def test_deprecation(self) -> None:
+        with Image.open(TEST_FILE) as im:
+            with pytest.warns(DeprecationWarning):
+                assert im.huffman_ac == {}
+            with pytest.warns(DeprecationWarning):
+                assert im.huffman_dc == {}
+
 @pytest.mark.skipif(not is_win32(), reason="Windows only")
 @skip_unless_feature("jpg")
View File
@ -2,6 +2,7 @@ from __future__ import annotations
 
 import os
 import re
+from collections.abc import Generator
 from io import BytesIO
 from pathlib import Path
 from typing import Any
@ -29,8 +30,16 @@ EXTRA_DIR = "Tests/images/jpeg2000"
 
 pytestmark = skip_unless_feature("jpg_2000")
 
-test_card = Image.open("Tests/images/test-card.png")
-test_card.load()
+@pytest.fixture
+def card() -> Generator[ImageFile.ImageFile, None, None]:
+    with Image.open("Tests/images/test-card.png") as im:
+        im.load()
+        try:
+            yield im
+        finally:
+            im.close()
 
 # OpenJPEG 2.0.0 outputs this debugging message sometimes; we should
 # ignore it---it doesn't represent a test failure.
@ -54,6 +63,7 @@ def test_sanity() -> None:
     with Image.open("Tests/images/test-card-lossless.jp2") as im:
         px = im.load()
+        assert px is not None
         assert px[0, 0] == (0, 0, 0)
         assert im.mode == "RGB"
         assert im.size == (640, 480)
@ -74,76 +84,76 @@ def test_invalid_file() -> None:
         Jpeg2KImagePlugin.Jpeg2KImageFile(invalid_file)
 
-def test_bytesio() -> None:
+def test_bytesio(card: ImageFile.ImageFile) -> None:
     with open("Tests/images/test-card-lossless.jp2", "rb") as f:
         data = BytesIO(f.read())
     with Image.open(data) as im:
         im.load()
-        assert_image_similar(im, test_card, 1.0e-3)
+        assert_image_similar(im, card, 1.0e-3)
 
 # These two test pre-written JPEG 2000 files that were not written with
 # PIL (they were made using Adobe Photoshop)
 
-def test_lossless(tmp_path: Path) -> None:
+def test_lossless(card: ImageFile.ImageFile, tmp_path: Path) -> None:
     with Image.open("Tests/images/test-card-lossless.jp2") as im:
         im.load()
         outfile = str(tmp_path / "temp_test-card.png")
         im.save(outfile)
-    assert_image_similar(im, test_card, 1.0e-3)
+    assert_image_similar(im, card, 1.0e-3)
 
-def test_lossy_tiled() -> None:
-    assert_image_similar_tofile(
-        test_card, "Tests/images/test-card-lossy-tiled.jp2", 2.0
-    )
+def test_lossy_tiled(card: ImageFile.ImageFile) -> None:
+    assert_image_similar_tofile(card, "Tests/images/test-card-lossy-tiled.jp2", 2.0)
 
-def test_lossless_rt() -> None:
-    im = roundtrip(test_card)
-    assert_image_equal(im, test_card)
+def test_lossless_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card)
+    assert_image_equal(im, card)
 
-def test_lossy_rt() -> None:
-    im = roundtrip(test_card, quality_layers=[20])
-    assert_image_similar(im, test_card, 2.0)
+def test_lossy_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card, quality_layers=[20])
+    assert_image_similar(im, card, 2.0)
 
-def test_tiled_rt() -> None:
-    im = roundtrip(test_card, tile_size=(128, 128))
-    assert_image_equal(im, test_card)
+def test_tiled_rt(card: ImageFile.ImageFile) -> None:
    im = roundtrip(card, tile_size=(128, 128))
+    assert_image_equal(im, card)
 
-def test_tiled_offset_rt() -> None:
-    im = roundtrip(test_card, tile_size=(128, 128), tile_offset=(0, 0), offset=(32, 32))
-    assert_image_equal(im, test_card)
+def test_tiled_offset_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card, tile_size=(128, 128), tile_offset=(0, 0), offset=(32, 32))
+    assert_image_equal(im, card)
 
-def test_tiled_offset_too_small() -> None:
+def test_tiled_offset_too_small(card: ImageFile.ImageFile) -> None:
     with pytest.raises(ValueError):
-        roundtrip(test_card, tile_size=(128, 128), tile_offset=(0, 0), offset=(128, 32))
+        roundtrip(card, tile_size=(128, 128), tile_offset=(0, 0), offset=(128, 32))
 
-def test_irreversible_rt() -> None:
-    im = roundtrip(test_card, irreversible=True, quality_layers=[20])
-    assert_image_similar(im, test_card, 2.0)
+def test_irreversible_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card, irreversible=True, quality_layers=[20])
+    assert_image_similar(im, card, 2.0)
 
-def test_prog_qual_rt() -> None:
-    im = roundtrip(test_card, quality_layers=[60, 40, 20], progression="LRCP")
-    assert_image_similar(im, test_card, 2.0)
+def test_prog_qual_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card, quality_layers=[60, 40, 20], progression="LRCP")
+    assert_image_similar(im, card, 2.0)
 
-def test_prog_res_rt() -> None:
-    im = roundtrip(test_card, num_resolutions=8, progression="RLCP")
-    assert_image_equal(im, test_card)
+def test_prog_res_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card, num_resolutions=8, progression="RLCP")
+    assert_image_equal(im, card)
 
 @pytest.mark.parametrize("num_resolutions", range(2, 6))
-def test_default_num_resolutions(num_resolutions: int) -> None:
+def test_default_num_resolutions(
+    card: ImageFile.ImageFile, num_resolutions: int
+) -> None:
     d = 1 << (num_resolutions - 1)
-    im = test_card.resize((d - 1, d - 1))
+    im = card.resize((d - 1, d - 1))
     with pytest.raises(OSError):
         roundtrip(im, num_resolutions=num_resolutions)
     reloaded = roundtrip(im)
@ -172,14 +182,20 @@ def test_load_dpi() -> None:
     assert "dpi" not in im.info
 
-def test_restricted_icc_profile() -> None:
-    ImageFile.LOAD_TRUNCATED_IMAGES = True
-    try:
-        # JPEG2000 image with a restricted ICC profile and a known colorspace
-        with Image.open("Tests/images/balloon_eciRGBv2_aware.jp2") as im:
-            assert im.mode == "RGB"
-    finally:
-        ImageFile.LOAD_TRUNCATED_IMAGES = False
+def test_restricted_icc_profile(monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
+    # JPEG2000 image with a restricted ICC profile and a known colorspace
+    with Image.open("Tests/images/balloon_eciRGBv2_aware.jp2") as im:
+        assert im.mode == "RGB"
+
+@pytest.mark.skipif(
+    not os.path.exists(EXTRA_DIR), reason="Extra image files not installed"
+)
+def test_unknown_colorspace() -> None:
+    with Image.open(f"{EXTRA_DIR}/file8.jp2") as im:
+        im.load()
+        assert im.mode == "L"
 
 def test_header_errors() -> None:
@ -196,31 +212,31 @@ def test_header_errors() -> None:
         pass
 
-def test_layers_type(tmp_path: Path) -> None:
+def test_layers_type(card: ImageFile.ImageFile, tmp_path: Path) -> None:
     outfile = str(tmp_path / "temp_layers.jp2")
     for quality_layers in [[100, 50, 10], (100, 50, 10), None]:
-        test_card.save(outfile, quality_layers=quality_layers)
+        card.save(outfile, quality_layers=quality_layers)
 
     for quality_layers_str in ["quality_layers", ("100", "50", "10")]:
         with pytest.raises(ValueError):
-            test_card.save(outfile, quality_layers=quality_layers_str)
+            card.save(outfile, quality_layers=quality_layers_str)
 
-def test_layers() -> None:
+def test_layers(card: ImageFile.ImageFile) -> None:
     out = BytesIO()
-    test_card.save(out, "JPEG2000", quality_layers=[100, 50, 10], progression="LRCP")
+    card.save(out, "JPEG2000", quality_layers=[100, 50, 10], progression="LRCP")
     out.seek(0)
 
     with Image.open(out) as im:
         im.layers = 1
         im.load()
-        assert_image_similar(im, test_card, 13)
+        assert_image_similar(im, card, 13)
 
     out.seek(0)
     with Image.open(out) as im:
         im.layers = 3
         im.load()
-        assert_image_similar(im, test_card, 0.4)
+        assert_image_similar(im, card, 0.4)
 
 @pytest.mark.parametrize(
@ -233,27 +249,33 @@ def test_layers() -> None:
         ("foo.jp2", {"no_jp2": True}, 0, b"\xff\x4f"),
         ("foo.j2k", {"no_jp2": False}, 0, b"\xff\x4f"),
         ("foo.jp2", {"no_jp2": False}, 4, b"jP"),
-        ("foo.jp2", {"no_jp2": False}, 4, b"jP"),
+        (None, {"no_jp2": False}, 4, b"jP"),
     ),
 )
-def test_no_jp2(name: str, args: dict[str, bool], offset: int, data: bytes) -> None:
+def test_no_jp2(
+    card: ImageFile.ImageFile,
+    name: str,
+    args: dict[str, bool],
+    offset: int,
+    data: bytes,
+) -> None:
     out = BytesIO()
     if name:
         out.name = name
-    test_card.save(out, "JPEG2000", **args)
+    card.save(out, "JPEG2000", **args)
     out.seek(offset)
     assert out.read(2) == data
 
-def test_mct() -> None:
+def test_mct(card: ImageFile.ImageFile) -> None:
     # Three component
     for val in (0, 1):
         out = BytesIO()
-        test_card.save(out, "JPEG2000", mct=val, no_jp2=True)
+        card.save(out, "JPEG2000", mct=val, no_jp2=True)
 
         assert out.getvalue()[59] == val
         with Image.open(out) as im:
-            assert_image_similar(im, test_card, 1.0e-3)
+            assert_image_similar(im, card, 1.0e-3)
 
     # Single component should have MCT disabled
     for val in (0, 1):
@ -301,6 +323,18 @@ def test_cmyk() -> None:
         assert im.getpixel((0, 0)) == (185, 134, 0, 0)
 
+@pytest.mark.skipif(
+    not os.path.exists(EXTRA_DIR), reason="Extra image files not installed"
+)
+@skip_unless_feature_version("jpg_2000", "2.5.3")
+def test_cmyk_save() -> None:
+    with Image.open(f"{EXTRA_DIR}/issue205.jp2") as jp2:
+        assert jp2.mode == "CMYK"
+
+        im = roundtrip(jp2)
+        assert_image_equal(im, jp2)
+
 @pytest.mark.parametrize("ext", (".j2k", ".jp2"))
 def test_16bit_monochrome_has_correct_mode(ext: str) -> None:
     with Image.open("Tests/images/16bit.cropped" + ext) as im:
@ -388,13 +422,23 @@ def test_subsampling_decode(name: str) -> None:
 def test_pclr() -> None:
     with Image.open(f"{EXTRA_DIR}/issue104_jpxstream.jp2") as im:
         assert im.mode == "P"
+        assert im.palette is not None
         assert len(im.palette.colors) == 256
         assert im.palette.colors[(255, 255, 255)] == 0
 
+    with Image.open(
+        f"{EXTRA_DIR}/147af3f1083de4393666b7d99b01b58b_signal_sigsegv_130c531_6155_5136.jp2"
+    ) as im:
+        assert im.mode == "P"
+        assert im.palette is not None
+        assert len(im.palette.colors) == 139
+        assert im.palette.colors[(0, 0, 0, 0)] == 0
+
 def test_comment() -> None:
-    with Image.open("Tests/images/comment.jp2") as im:
-        assert im.info["comment"] == b"Created by OpenJPEG version 2.5.0"
+    for path in ("Tests/images/9bit.j2k", "Tests/images/comment.jp2"):
+        with Image.open(path) as im:
+            assert im.info["comment"] == b"Created by OpenJPEG version 2.5.0"
 
     # Test an image that is truncated partway through a codestream
     with open("Tests/images/comment.jp2", "rb") as fp:
@ -403,22 +447,22 @@ def test_comment() -> None:
             pass
 
-def test_save_comment() -> None:
+def test_save_comment(card: ImageFile.ImageFile) -> None:
     for comment in ("Created by Pillow", b"Created by Pillow"):
         out = BytesIO()
-        test_card.save(out, "JPEG2000", comment=comment)
+        card.save(out, "JPEG2000", comment=comment)
         with Image.open(out) as im:
             assert im.info["comment"] == b"Created by Pillow"
 
     out = BytesIO()
     long_comment = b" " * 65531
-    test_card.save(out, "JPEG2000", comment=long_comment)
+    card.save(out, "JPEG2000", comment=long_comment)
     with Image.open(out) as im:
         assert im.info["comment"] == long_comment
 
     with pytest.raises(ValueError):
-        test_card.save(out, "JPEG2000", comment=long_comment + b" ")
+        card.save(out, "JPEG2000", comment=long_comment + b" ")
 
 @pytest.mark.parametrize(
@ -441,15 +485,14 @@ def test_crashes(test_file: str) -> None:
 @skip_unless_feature_version("jpg_2000", "2.4.0")
-def test_plt_marker() -> None:
+def test_plt_marker(card: ImageFile.ImageFile) -> None:
     # Search the start of the codesteam for PLT
     out = BytesIO()
-    test_card.save(out, "JPEG2000", no_jp2=True, plt=True)
+    card.save(out, "JPEG2000", no_jp2=True, plt=True)
     out.seek(0)
     while True:
         marker = out.read(2)
-        if not marker:
-            pytest.fail("End of stream without PLT")
+        assert marker, "End of stream without PLT"
 
         jp2_boxid = _binary.i16be(marker)
         if jp2_boxid == 0xFF4F:
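The module-level test_card image is replaced here by a card fixture, so the test image is opened and closed per test instead of staying open for the whole session. A minimal sketch of that fixture shape, independent of the specific image (the path is illustrative):

from collections.abc import Generator

import pytest
from PIL import Image, ImageFile

@pytest.fixture
def card() -> Generator[ImageFile.ImageFile, None, None]:
    with Image.open("Tests/images/test-card.png") as im:
        im.load()
        yield im  # the context manager closes the underlying file after each test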
View File
@ -36,11 +36,7 @@ class LibTiffTestCase:
         im.load()
         im.getdata()
 
-        try:
-            assert im._compression == "group4"
-        except AttributeError:
-            print("No _compression")
-            print(dir(im))
+        assert im._compression == "group4"
 
         # can we write it back out, in a different form.
         out = str(tmp_path / "temp.png")
@ -313,7 +309,7 @@ class TestFileLibTiff(LibTiffTestCase):
         }
 
         def check_tags(
-            tiffinfo: TiffImagePlugin.ImageFileDirectory_v2 | dict[int, str]
+            tiffinfo: TiffImagePlugin.ImageFileDirectory_v2 | dict[int, str],
         ) -> None:
             im = hopper()
@ -1098,6 +1094,27 @@ class TestFileLibTiff(LibTiffTestCase):
             assert_image_similar(base_im, im, 0.7)
 
+    @pytest.mark.parametrize(
+        "test_file",
+        [
+            "Tests/images/old-style-jpeg-compression-no-samplesperpixel.tif",
+            "Tests/images/old-style-jpeg-compression.tif",
+        ],
+    )
+    def test_buffering(self, test_file: str) -> None:
+        # load exif first
+        with open(test_file, "rb", buffering=1048576) as f:
+            with Image.open(f) as im:
+                exif = dict(im.getexif())
+
+        # load image before exif
+        with open(test_file, "rb", buffering=1048576) as f:
+            with Image.open(f) as im2:
+                im2.load()
+                exif_after_load = dict(im2.getexif())
+
+        assert exif == exif_after_load
+
     @pytest.mark.valgrind_known_error(reason="Backtrace in Python Core")
     def test_sampleformat_not_corrupted(self) -> None:
         # Assert that a TIFF image with SampleFormat=UINT tag is not corrupted
@ -1127,7 +1144,7 @@ class TestFileLibTiff(LibTiffTestCase):
                 im.load()
 
             # Assert that the error code is IMAGING_CODEC_MEMORY
-            assert str(e.value) == "-9"
+            assert str(e.value) == "decoder error -9"
 
     @pytest.mark.parametrize("compression", ("tiff_adobe_deflate", "jpeg"))
     def test_save_multistrip(self, compression: str, tmp_path: Path) -> None:
@ -1141,23 +1158,22 @@ class TestFileLibTiff(LibTiffTestCase):
             assert len(im.tag_v2[STRIPOFFSETS]) > 1
 
     @pytest.mark.parametrize("argument", (True, False))
-    def test_save_single_strip(self, argument: bool, tmp_path: Path) -> None:
+    def test_save_single_strip(
+        self, argument: bool, tmp_path: Path, monkeypatch: pytest.MonkeyPatch
+    ) -> None:
         im = hopper("RGB").resize((256, 256))
         out = str(tmp_path / "temp.tif")
 
         if not argument:
-            TiffImagePlugin.STRIP_SIZE = 2**18
-        try:
-            arguments: dict[str, str | int] = {"compression": "tiff_adobe_deflate"}
-            if argument:
-                arguments["strip_size"] = 2**18
-            im.save(out, "TIFF", **arguments)
+            monkeypatch.setattr(TiffImagePlugin, "STRIP_SIZE", 2**18)
+        arguments: dict[str, str | int] = {"compression": "tiff_adobe_deflate"}
+        if argument:
+            arguments["strip_size"] = 2**18
+        im.save(out, "TIFF", **arguments)
 
         with Image.open(out) as im:
             assert isinstance(im, TiffImagePlugin.TiffImageFile)
             assert len(im.tag_v2[STRIPOFFSETS]) == 1
-        finally:
-            TiffImagePlugin.STRIP_SIZE = 65536
 
     @pytest.mark.parametrize("compression", ("tiff_adobe_deflate", None))
     def test_save_zero(self, compression: str | None, tmp_path: Path) -> None:
View File
@ -29,25 +29,32 @@ def roundtrip(im: Image.Image, **options: Any) -> ImageFile.ImageFile:
 @pytest.mark.parametrize("test_file", test_files)
 def test_sanity(test_file: str) -> None:
-    with Image.open(test_file) as im:
+    def check(im: ImageFile.ImageFile) -> None:
         im.load()
         assert im.mode == "RGB"
         assert im.size == (640, 480)
         assert im.format == "MPO"
 
+    with Image.open(test_file) as im:
+        check(im)
+
+    with MpoImagePlugin.MpoImageFile(test_file) as im:
+        check(im)
+
 @pytest.mark.skipif(is_pypy(), reason="Requires CPython")
 def test_unclosed_file() -> None:
-    def open() -> None:
+    def open_test_image() -> None:
         im = Image.open(test_files[0])
         im.load()
 
     with pytest.warns(ResourceWarning):
-        open()
+        open_test_image()
 
 def test_closed_file() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         im = Image.open(test_files[0])
         im.load()
         im.close()
@ -63,6 +70,8 @@ def test_seek_after_close() -> None:
 
 def test_context_manager() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         with Image.open(test_files[0]) as im:
             im.load()
@ -73,8 +82,8 @@ def test_app(test_file: str) -> None:
     with Image.open(test_file) as im:
         assert im.applist[0][0] == "APP1"
         assert im.applist[1][0] == "APP2"
-        assert (
-            im.applist[1][1][:16] == b"MPF\x00MM\x00*\x00\x00\x00\x08\x00\x03\xb0\x00"
+        assert im.applist[1][1].startswith(
+            b"MPF\x00MM\x00*\x00\x00\x00\x08\x00\x03\xb0\x00"
         )
         assert len(im.applist) == 2
@ -293,3 +302,15 @@ def test_save_all() -> None:
     # Test that a single frame image will not be saved as an MPO
     jpg = roundtrip(im, save_all=True)
     assert "mp" not in jpg.info
+
+def test_save_xmp() -> None:
+    im = Image.new("RGB", (1, 1))
+    im2 = Image.new("RGB", (1, 1), "#f00")
+    im2.encoderinfo = {"xmp": b"Second frame"}
+    im_reloaded = roundtrip(im, xmp=b"First frame", save_all=True, append_images=[im2])
+
+    assert im_reloaded.info["xmp"] == b"First frame"
+
+    im_reloaded.seek(1)
+    assert im_reloaded.info["xmp"] == b"Second frame"
Some files were not shown because too many files have changed in this diff.