Compare commits

main ... 9.4.0

No commits in common. "main" and "9.4.0" have entirely different histories.

806 changed files with 19767 additions and 51831 deletions

.appveyor.yml

@@ -0,0 +1,95 @@
version: '{build}'
clone_folder: c:\pillow
init:
- ECHO %PYTHON%
#- ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
# Uncomment previous line to get RDP access during the build.
environment:
EXECUTABLE: python.exe
TEST_OPTIONS:
DEPLOY: YES
matrix:
- PYTHON: C:/Python311
ARCHITECTURE: x86
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2022
- PYTHON: C:/Python37-x64
ARCHITECTURE: x64
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
install:
- '%PYTHON%\%EXECUTABLE% --version'
- curl -fsSL -o pillow-depends.zip https://github.com/python-pillow/pillow-depends/archive/main.zip
- 7z x pillow-depends.zip -oc:\
- mv c:\pillow-depends-main c:\pillow-depends
- xcopy /S /Y c:\pillow-depends\test_images\* c:\pillow\tests\images
- 7z x ..\pillow-depends\nasm-2.15.05-win64.zip -oc:\
- ..\pillow-depends\gs1000w32.exe /S
- path c:\nasm-2.15.05;C:\Program Files (x86)\gs\gs10.0.0\bin;%PATH%
- cd c:\pillow\winbuild\
- ps: |
c:\python37\python.exe c:\pillow\winbuild\build_prepare.py -v --depends=C:\pillow-depends\
c:\pillow\winbuild\build\build_dep_all.cmd
$host.SetShouldExit(0)
- path C:\pillow\winbuild\build\bin;%PATH%
build_script:
- ps: |
c:\pillow\winbuild\build\build_pillow.cmd install
$host.SetShouldExit(0)
- cd c:\pillow
- '%PYTHON%\%EXECUTABLE% selftest.py --installed'
test_script:
- cd c:\pillow
- '%PYTHON%\%EXECUTABLE% -m pip install pytest pytest-cov pytest-timeout'
- c:\"Program Files (x86)"\"Windows Kits"\10\Debuggers\x86\gflags.exe /p /enable %PYTHON%\%EXECUTABLE%
- '%PYTHON%\%EXECUTABLE% -c "from PIL import Image"'
- '%PYTHON%\%EXECUTABLE% -m pytest -vx --cov PIL --cov Tests --cov-report term --cov-report xml Tests'
#- '%PYTHON%\%EXECUTABLE% test-installed.py -v -s %TEST_OPTIONS%' TODO TEST_OPTIONS with pytest?
after_test:
- python -m pip install codecov
- codecov --file coverage.xml --name %PYTHON% --flags AppVeyor
matrix:
fast_finish: true
cache:
- '%LOCALAPPDATA%\pip\Cache'
artifacts:
- path: pillow\dist\*.egg
name: egg
- path: pillow\dist\*.wheel
name: wheel
before_deploy:
- cd c:\pillow
- '%PYTHON%\%EXECUTABLE% -m pip install wheel'
- cd c:\pillow\winbuild\
- c:\pillow\winbuild\build\build_pillow.cmd bdist_wheel
- cd c:\pillow
- ps: Get-ChildItem .\dist\*.* | % { Push-AppveyorArtifact $_.FullName -FileName $_.Name }
deploy:
provider: S3
region: us-west-2
access_key_id: AKIAIRAXC62ZNTVQJMOQ
secret_access_key:
secure: Hwb6klTqtBeMgxAjRoDltiiqpuH8xbwD4UooDzBSiCWXjuFj1lyl4kHgHwTCCGqi
bucket: pillow-nightly
folder: win/$(APPVEYOR_BUILD_NUMBER)/
artifact: /.*egg|wheel/
on:
APPVEYOR_REPO_NAME: python-pillow/Pillow
branch: main
deploy: YES
# Uncomment the following lines to get RDP access after the build/test and block for
# up to the timeout limit (~1hr)
#
#on_finish:
#- ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))


@@ -1,5 +1,9 @@
 #!/bin/bash
 # gather the coverage data
-python3 -m pip install coverage
-python3 -m coverage xml
+python3 -m pip install codecov
+if [[ $MATRIX_DOCKER ]]; then
+  python3 -m coverage xml --ignore-errors
+else
+  python3 -m coverage xml
+fi


@@ -3,5 +3,8 @@
 set -e
 python3 -m coverage erase
+if [ $(uname) == "Darwin" ]; then
+  export CPPFLAGS="-I/usr/local/miniconda/include";
+fi
 make clean
 make install-coverage

@@ -2,59 +2,60 @@
aptget_update() aptget_update()
{ {
if [ -n "$1" ]; then if [ ! -z $1 ]; then
echo "" echo ""
echo "Retrying apt-get update..." echo "Retrying apt-get update..."
echo "" echo ""
fi fi
output=$(sudo apt-get update 2>&1) output=`sudo apt-get update 2>&1`
echo "$output" echo "$output"
if [[ $output == *[WE]:\ * ]]; then if [[ $output == *[WE]:\ * ]]; then
return 1 return 1
fi fi
} }
aptget_update || aptget_update retry || aptget_update retry if [[ $(uname) != CYGWIN* ]]; then
aptget_update || aptget_update retry || aptget_update retry
fi
set -e set -e
sudo apt-get -qq install libfreetype6-dev liblcms2-dev libtiff-dev python3-tk\ if [[ $(uname) != CYGWIN* ]]; then
ghostscript libjpeg-turbo8-dev libopenjp2-7-dev\ sudo apt-get -qq install libfreetype6-dev liblcms2-dev python3-tk\
cmake meson imagemagick libharfbuzz-dev libfribidi-dev\ ghostscript libffi-dev libjpeg-turbo-progs libopenjp2-7-dev\
sway wl-clipboard libopenblas-dev nasm cmake meson imagemagick libharfbuzz-dev libfribidi-dev
fi
python3 -m pip install --upgrade pip python3 -m pip install --upgrade pip
python3 -m pip install --upgrade wheel python3 -m pip install --upgrade wheel
PYTHONOPTIMIZE=0 python3 -m pip install cffi
python3 -m pip install coverage python3 -m pip install coverage
python3 -m pip install defusedxml python3 -m pip install defusedxml
python3 -m pip install ipython
python3 -m pip install numpy
python3 -m pip install olefile python3 -m pip install olefile
python3 -m pip install -U pytest python3 -m pip install -U pytest
python3 -m pip install -U pytest-cov python3 -m pip install -U pytest-cov
python3 -m pip install -U pytest-timeout python3 -m pip install -U pytest-timeout
python3 -m pip install pyroma python3 -m pip install pyroma
# optional test dependency, only install if there's a binary package.
# fails on beta 3.14 and PyPy
python3 -m pip install --only-binary=:all: pyarrow || true
# PyQt6 doesn't support PyPy3 if [[ $(uname) != CYGWIN* ]]; then
if [[ $GHA_PYTHON_VERSION == 3.* ]]; then python3 -m pip install numpy
sudo apt-get -qq install libegl1 libxcb-cursor0 libxcb-icccm4 libxcb-image0 libxcb-keysyms1 libxcb-randr0 libxcb-render-util0 libxcb-shape0 libxkbcommon-x11-0
# TODO Update condition when pyqt6 supports free-threading # PyQt6 doesn't support PyPy3
if ! [[ "$PYTHON_GIL" == "0" ]]; then python3 -m pip install pyqt6 ; fi if [[ $GHA_PYTHON_VERSION == 3.* ]]; then
sudo apt-get -qq install libegl1 libxcb-icccm4 libxcb-image0 libxcb-keysyms1 libxcb-randr0 libxcb-render-util0 libxcb-shape0 libxkbcommon-x11-0
python3 -m pip install pyqt6
fi
# webp
pushd depends && ./install_webp.sh && popd
# libimagequant
pushd depends && ./install_imagequant.sh && popd
# raqm
pushd depends && ./install_raqm.sh && popd
# extra test images
pushd depends && ./install_extra_test_images.sh && popd
else
cd depends && ./install_extra_test_images.sh && cd ..
fi fi
# webp
pushd depends && ./install_webp.sh && popd
# libimagequant
pushd depends && ./install_imagequant.sh && popd
# raqm
pushd depends && ./install_raqm.sh && popd
# libavif
pushd depends && ./install_libavif.sh && popd
# extra test images
pushd depends && ./install_extra_test_images.sh && popd

@@ -1 +0,0 @@
cibuildwheel==3.1.3

@@ -1,14 +0,0 @@
mypy==1.17.1
IceSpringPySideStubs-PyQt6
IceSpringPySideStubs-PySide6
ipython
numpy
packaging
pyarrow-stubs
pybind11
pytest
sphinx
types-atheris
types-defusedxml
types-olefile
types-setuptools

@@ -1,3 +0,0 @@
python.exe -c "from PIL import Image"
IF ERRORLEVEL 1 EXIT /B
python.exe -bb -m pytest -vv -x -W always --cov PIL --cov Tests --cov-report term --cov-report xml Tests


@@ -4,4 +4,4 @@ set -e
 python3 -c "from PIL import Image"
-python3 -bb -m pytest -vv -x -W always --cov PIL --cov Tests --cov-report term --cov-report xml Tests $REVERSE
+python3 -bb -m pytest -v -x -W always --cov PIL --cov Tests --cov-report term Tests $REVERSE

@@ -1,37 +1,16 @@
# A clang-format style that approximates Python's PEP 7 # A clang-format style that approximates Python's PEP 7
# Useful for IDE integration # Useful for IDE integration
Language: C
BasedOnStyle: Google BasedOnStyle: Google
AlwaysBreakAfterReturnType: All AlwaysBreakAfterReturnType: All
AllowShortIfStatementsOnASingleLine: false AllowShortIfStatementsOnASingleLine: false
AlignAfterOpenBracket: BlockIndent AlignAfterOpenBracket: AlwaysBreak
BinPackArguments: false BinPackArguments: false
BinPackParameters: false BinPackParameters: false
BreakBeforeBraces: Attach BreakBeforeBraces: Attach
ColumnLimit: 88 ColumnLimit: 88
DerivePointerAlignment: false DerivePointerAlignment: false
IndentGotoLabels: false
IndentWidth: 4 IndentWidth: 4
PointerAlignment: Right
ReflowComments: true
SortIncludes: false
SpaceBeforeParens: ControlStatements
SpacesInParentheses: false
TabWidth: 4
UseTab: Never
---
Language: Cpp Language: Cpp
BasedOnStyle: Google
AlwaysBreakAfterReturnType: All
AllowShortIfStatementsOnASingleLine: false
AlignAfterOpenBracket: BlockIndent
BinPackArguments: false
BinPackParameters: false
BreakBeforeBraces: Attach
ColumnLimit: 88
DerivePointerAlignment: false
IndentGotoLabels: false
IndentWidth: 4
PointerAlignment: Right PointerAlignment: Right
ReflowComments: true ReflowComments: true
SortIncludes: false SortIncludes: false


@@ -2,21 +2,19 @@
 [report]
 # Regexes for lines to exclude from consideration
-exclude_also =
-    # Don't complain if non-runnable code isn't run
+exclude_lines =
+    # Have to re-enable the standard pragma:
+    pragma: no cover
+    # Don't complain if non-runnable code isn't run:
     if 0:
     if __name__ == .__main__.:
     # Don't complain about debug code
     if DEBUG:
-    # Don't complain about compatibility code for missing optional dependencies
-    except ImportError
-    if TYPE_CHECKING:
-    @abc.abstractmethod
-    # Empty bodies in protocols or abstract methods
-    ^\s*def [a-zA-Z0-9_]+\(.*\)(\s*->.*)?:\s*\.\.\.(\s*#.*)?$
-    ^\s*\.\.\.(\s*#.*)?$

 [run]
 omit =
-    checks/*.py
+    Tests/32bit_segfault_check.py
+    Tests/bench_cffi_access.py
+    Tests/check_*.py
     Tests/createfontdatachunk.py


@@ -13,7 +13,11 @@ indent_style = space
 trim_trailing_whitespace = true

-[*.{toml,yml}]
+[*.rst]
+# Four-space indentation
+indent_size = 4
+
+[*.yml]
 # Two-space indentation
 indent_size = 2

@@ -1,6 +0,0 @@
# Flake8
8de95676e0fd89f2326b3953488ab66ff29cd2d0
# Format with Black
53a7e3500437a9fd5826bc04758f7116bd7e52dc
# Format the C code with ClangFormat
46b7e86bab79450ec0a2866c6c0c679afb659d17


@@ -9,7 +9,7 @@ Please send a pull request to the `main` branch. Please include [documentation](
 - Fork the Pillow repository.
 - Create a branch from `main`.
 - Develop bug fixes, features, tests, etc.
-- Run the test suite. You can enable GitHub Actions (https://github.com/MY-USERNAME/Pillow/actions) on your repo to catch test failures prior to the pull request, and [Codecov](https://codecov.io/gh) to see if the changed code is covered by tests.
+- Run the test suite. You can enable GitHub Actions (https://github.com/MY-USERNAME/Pillow/actions) and [AppVeyor](https://ci.appveyor.com/projects/new) on your repo to catch test failures prior to the pull request, and [Codecov](https://codecov.io/gh) to see if the changed code is covered by tests.
 - Create a pull request to pull the changes from your branch to the Pillow `main`.

 ### Guidelines
@@ -17,7 +17,7 @@ Please send a pull request to the `main` branch. Please include [documentation](
 - Separate code commits from reformatting commits.
 - Provide tests for any newly added code.
 - Follow PEP 8.
-- When committing only documentation changes please include `[ci skip]` in the commit message to avoid running extra tests.
+- When committing only documentation changes please include `[ci skip]` in the commit message to avoid running tests on AppVeyor.
 - Include [release notes](https://github.com/python-pillow/Pillow/tree/main/docs/releasenotes) as needed or appropriate with your bug fixes, feature additions and tests.

 ## Reporting Issues

.github/FUNDING.yml

@@ -1 +1 @@
-tidelift: "pypi/pillow"
+tidelift: "pypi/Pillow"

@@ -48,21 +48,6 @@ Thank you.
* Python: * Python:
* Pillow: * Pillow:
```text
Please paste here the output of running:
python3 -m PIL.report
or
python3 -m PIL --report
Or the output of the following Python code:
from PIL import report
# or
from PIL import features
features.pilinfo(supported_formats=False)
```
<!-- <!--
Please include **code** that reproduces the issue and whenever possible, an **image** that demonstrates the issue. Please upload images to GitHub, not to third-party file hosting sites. If necessary, add the image to a zip or tar archive. Please include **code** that reproduces the issue and whenever possible, an **image** that demonstrates the issue. Please upload images to GitHub, not to third-party file hosting sites. If necessary, add the image to a zip or tar archive.

@@ -1,46 +0,0 @@
---
name: "Maintainers only: Release"
about: For maintainers to schedule a quarterly release
labels: Release
---
## Main release
Released quarterly on January 2nd, April 1st, July 1st and October 15th.
* [ ] Open a release ticket e.g. https://github.com/python-pillow/Pillow/issues/3154
* [ ] Develop and prepare release in `main` branch.
* [ ] Add release notes e.g. https://github.com/python-pillow/Pillow/pull/8885
* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) to confirm passing tests in `main` branch.
* [ ] Check that all the wheel builds pass the tests in the [GitHub Actions "Wheels" workflow](https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml) jobs by manually triggering them.
* [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/), update version identifier in `src/PIL/_version.py`
* [ ] Run pre-release check via `make release-test` in a freshly cloned repo.
* [ ] Create branch and tag for release e.g.:
```bash
git branch [[MAJOR.MINOR]].x
git tag [[MAJOR.MINOR]].0
git push --tags
```
* [ ] Check the [GitHub Actions "Wheels" workflow](https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml) has passed, including the "Upload release to PyPI" job. This will have been triggered by the new tag.
* [ ] Publish the [release on GitHub](https://github.com/python-pillow/Pillow/releases).
* [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/), increment and append `.dev0` to version identifier in `src/PIL/_version.py` and then:
```bash
git push --all
```
## Publicize release
* [ ] Announce release availability via [Mastodon](https://fosstodon.org/@pillow) e.g. https://fosstodon.org/@pillow/110639450470725321
## Documentation
* [ ] Make sure the [default version for Read the Docs](https://pillow.readthedocs.io/en/stable/) is up-to-date with the release changes
## Docker images
* [ ] Update Pillow in the Docker Images repository
```bash
git clone https://github.com/python-pillow/docker-images
cd docker-images
./update-pillow-tag.sh [[release tag]]
```

.github/mergify.yml

@@ -7,7 +7,9 @@ pull_request_rules:
       - status-success=Test Successful
       - status-success=Docker Test Successful
       - status-success=Windows Test Successful
-      - status-success=MinGW
+      - status-success=MinGW Test Successful
+      - status-success=Cygwin Test Successful
+      - status-success=continuous-integration/appveyor/pr
     actions:
       merge:
         method: merge

@@ -1,18 +0,0 @@
{
"__comment": "Based on vscode-cpptools' Extension/package.json gcc rule",
"problemMatcher": [
{
"owner": "gcc-problem-matcher",
"pattern": [
{
"regexp": "^\\s*(.*):(\\d+):(\\d+):\\s+(?:fatal\\s+)?(warning|error):\\s+(.*)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5
}
]
}
]
}

@@ -3,19 +3,16 @@ tag-template: "$NEXT_MINOR_VERSION"
change-template: '- $TITLE #$NUMBER [@$AUTHOR]' change-template: '- $TITLE #$NUMBER [@$AUTHOR]'
categories: categories:
- title: "Removals" - title: "Dependencies"
label: "Removal" label: "Dependency"
- title: "Deprecations" - title: "Deprecations"
label: "Deprecation" label: "Deprecation"
- title: "Documentation" - title: "Documentation"
label: "Documentation" label: "Documentation"
- title: "Dependencies" - title: "Removals"
label: "Dependency" label: "Removal"
- title: "Testing" - title: "Testing"
label: "Testing" label: "Testing"
- title: "Type hints"
label: "Type hints"
- title: "Other changes"
exclude-labels: exclude-labels:
- "changelog: skip" - "changelog: skip"
@@ -24,4 +21,6 @@ template: |
https://pillow.readthedocs.io/en/stable/releasenotes/$NEXT_MINOR_VERSION.html https://pillow.readthedocs.io/en/stable/releasenotes/$NEXT_MINOR_VERSION.html
## Changes
$CHANGES $CHANGES

.github/renovate.json

@@ -1,7 +1,7 @@
{ {
"$schema": "https://docs.renovatebot.com/renovate-schema.json", "$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [ "extends": [
"config:recommended" "config:base"
], ],
"labels": [ "labels": [
"Dependency" "Dependency"
@@ -9,13 +9,9 @@
"packageRules": [ "packageRules": [
{ {
"groupName": "github-actions", "groupName": "github-actions",
"matchManagers": [ "matchManagers": ["github-actions"],
"github-actions" "separateMajorMinor": "false"
],
"separateMajorMinor": false
} }
], ],
"schedule": [ "schedule": ["on the 3rd day of the month"]
"* * 3 * *"
]
} }

@@ -2,17 +2,11 @@ name: CIFuzz
on: on:
push: push:
branches:
- "**"
paths: paths:
- ".github/workflows/cifuzz.yml"
- ".github/workflows/wheels-dependencies.sh"
- "**.c" - "**.c"
- "**.h" - "**.h"
pull_request: pull_request:
paths: paths:
- ".github/workflows/cifuzz.yml"
- ".github/workflows/wheels-dependencies.sh"
- "**.c" - "**.c"
- "**.h" - "**.h"
workflow_dispatch: workflow_dispatch:
@@ -20,7 +14,7 @@ on:
permissions: permissions:
contents: read contents: read
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.ref }} group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true cancel-in-progress: true
@@ -44,13 +38,13 @@ jobs:
language: python language: python
dry-run: false dry-run: false
- name: Upload New Crash - name: Upload New Crash
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
if: failure() && steps.build.outcome == 'success' if: failure() && steps.build.outcome == 'success'
with: with:
name: artifacts name: artifacts
path: ./out/artifacts path: ./out/artifacts
- name: Upload Legacy Crash - name: Upload Legacy Crash
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
if: steps.run.outcome == 'success' if: steps.run.outcome == 'success'
with: with:
name: crash name: crash

@@ -1,71 +0,0 @@
name: Docs
on:
push:
branches:
- "**"
paths:
- ".github/workflows/docs.yml"
- "docs/**"
- "src/PIL/**"
pull_request:
paths:
- ".github/workflows/docs.yml"
- "docs/**"
- "src/PIL/**"
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
FORCE_COLOR: 1
jobs:
build:
runs-on: ubuntu-latest
name: Docs
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.x"
cache: pip
cache-dependency-path: |
".ci/*.sh"
"pyproject.toml"
- name: Build system information
run: python3 .github/workflows/system-info.py
- name: Cache libimagequant
uses: actions/cache@v4
id: cache-libimagequant
with:
path: ~/cache-libimagequant
key: ${{ runner.os }}-libimagequant-${{ hashFiles('depends/install_imagequant.sh') }}
- name: Install Linux dependencies
run: |
.ci/install.sh
env:
GHA_PYTHON_VERSION: "3.x"
GHA_LIBIMAGEQUANT_CACHE_HIT: ${{ steps.cache-libimagequant.outputs.cache-hit }}
- name: Build
run: |
.ci/build.sh
- name: Docs
run: |
make doccheck

@@ -2,9 +2,6 @@ name: Lint
on: [push, pull_request, workflow_dispatch] on: [push, pull_request, workflow_dispatch]
env:
FORCE_COLOR: 1
permissions: permissions:
contents: read contents: read
@@ -20,12 +17,10 @@ jobs:
name: Lint name: Lint
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
with:
persist-credentials: false
- name: pre-commit cache - name: pre-commit cache
uses: actions/cache@v4 uses: actions/cache@v3
with: with:
path: ~/.cache/pre-commit path: ~/.cache/pre-commit
key: lint-pre-commit-${{ hashFiles('**/.pre-commit-config.yaml') }} key: lint-pre-commit-${{ hashFiles('**/.pre-commit-config.yaml') }}
@@ -33,7 +28,7 @@ jobs:
lint-pre-commit- lint-pre-commit-
- name: Set up Python - name: Set up Python
uses: actions/setup-python@v5 uses: actions/setup-python@v4
with: with:
python-version: "3.x" python-version: "3.x"
cache: pip cache: pip
@@ -51,6 +46,3 @@ jobs:
run: tox -e lint run: tox -e lint
env: env:
PRE_COMMIT_COLOR: always PRE_COMMIT_COLOR: always
- name: Mypy
run: tox -e mypy

@@ -2,40 +2,18 @@
set -e set -e
if [[ "$ImageOS" == "macos13" ]]; then brew install libtiff libjpeg openjpeg libimagequant webp little-cms2 freetype libraqm
brew uninstall gradle maven
fi
brew install \
aom \
dav1d \
freetype \
ghostscript \
jpeg-turbo \
libimagequant \
libraqm \
libtiff \
little-cms2 \
openjpeg \
rav1e \
svt-av1 \
webp
export PKG_CONFIG_PATH="/usr/local/opt/openblas/lib/pkgconfig"
PYTHONOPTIMIZE=0 python3 -m pip install cffi
python3 -m pip install coverage python3 -m pip install coverage
python3 -m pip install defusedxml python3 -m pip install defusedxml
python3 -m pip install ipython
python3 -m pip install olefile python3 -m pip install olefile
python3 -m pip install -U pytest python3 -m pip install -U pytest
python3 -m pip install -U pytest-cov python3 -m pip install -U pytest-cov
python3 -m pip install -U pytest-timeout python3 -m pip install -U pytest-timeout
python3 -m pip install pyroma python3 -m pip install pyroma
python3 -m pip install numpy
# optional test dependency, only install if there's a binary package.
# fails on beta 3.14 and PyPy
python3 -m pip install --only-binary=:all: pyarrow || true
# libavif python3 -m pip install numpy
pushd depends && ./install_libavif.sh && popd
# extra test images # extra test images
pushd depends && ./install_extra_test_images.sh && popd pushd depends && ./install_extra_test_images.sh && popd

@@ -10,7 +10,7 @@ on:
permissions: permissions:
contents: read contents: read
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.ref }} group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true cancel-in-progress: true
@@ -23,6 +23,6 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
# Drafts your next release notes as pull requests are merged into "main" # Drafts your next release notes as pull requests are merged into "main"
- uses: release-drafter/release-drafter@v6 - uses: release-drafter/release-drafter@v5
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -6,23 +6,21 @@ on:
   workflow_dispatch:

 permissions:
-  contents: read
+  issues: write

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

 jobs:
   stale:
     if: github.repository_owner == 'python-pillow'
-    permissions:
-      issues: write
     runs-on: ubuntu-latest

     steps:
       - name: "Check issues"
-        uses: actions/stale@v9
+        uses: actions/stale@v7
         with:
           repo-token: ${{ secrets.GITHUB_TOKEN }}
           only-labels: "Awaiting OP Action"


@@ -6,9 +6,6 @@ This sort of info is missing from GitHub Actions.
 Requested here:
 https://github.com/actions/virtual-environments/issues/79
 """
-from __future__ import annotations
-
 import os
 import platform
 import sys

.github/workflows/test-cygwin.yml

@@ -0,0 +1,116 @@
name: Test Cygwin
on: [push, pull_request, workflow_dispatch]
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
build:
runs-on: windows-latest
strategy:
fail-fast: false
matrix:
python-minor-version: [8, 9]
timeout-minutes: 40
name: Python 3.${{ matrix.python-minor-version }}
steps:
- name: Fix line endings
run: |
git config --global core.autocrlf input
- name: Checkout Pillow
uses: actions/checkout@v3
- name: Install Cygwin
uses: cygwin/cygwin-install-action@v3
with:
platform: x86_64
packages: >
ImageMagick gcc-g++ ghostscript jpeg libfreetype-devel
libimagequant-devel libjpeg-devel liblapack-devel
liblcms2-devel libopenjp2-devel libraqm-devel
libtiff-devel libwebp-devel libxcb-devel libxcb-xinerama0
make netpbm perl
python3${{ matrix.python-minor-version }}-cffi
python3${{ matrix.python-minor-version }}-cython
python3${{ matrix.python-minor-version }}-devel
python3${{ matrix.python-minor-version }}-numpy
python3${{ matrix.python-minor-version }}-sip
python3${{ matrix.python-minor-version }}-tkinter
qt5-devel-tools subversion xorg-server-extra zlib-devel
- name: Add Lapack to PATH
uses: egor-tensin/cleanup-path@v3
with:
dirs: 'C:\cygwin\bin;C:\cygwin\lib\lapack'
- name: pip cache
uses: actions/cache@v3
with:
path: 'C:\cygwin\home\runneradmin\.cache\pip'
key: ${{ runner.os }}-cygwin-pip3.${{ matrix.python-minor-version }}-${{ hashFiles('.ci/install.sh') }}
restore-keys: |
${{ runner.os }}-cygwin-pip3.${{ matrix.python-minor-version }}-
- name: Build system information
run: |
dash.exe -c "python3 .github/workflows/system-info.py"
- name: Install dependencies
run: |
bash.exe .ci/install.sh
- name: Install a different NumPy
shell: dash.exe -l "{0}"
run: |
python3 -m pip install -U 'numpy!=1.21.*'
- name: Build
shell: bash.exe -eo pipefail -o igncr "{0}"
run: |
SETUPTOOLS_USE_DISTUTILS=stdlib .ci/build.sh
- name: Test
run: |
bash.exe xvfb-run -s '-screen 0 1024x768x24' .ci/test.sh
- name: Prepare to upload errors
if: failure()
run: |
dash.exe -c "mkdir -p Tests/errors"
- name: Upload errors
uses: actions/upload-artifact@v3
if: failure()
with:
name: errors
path: Tests/errors
- name: After success
run: |
bash.exe .ci/after_success.sh
- name: Upload coverage
uses: codecov/codecov-action@v3
with:
file: ./coverage.xml
flags: GHA_Cygwin
name: Cygwin Python 3.${{ matrix.python-minor-version }}
success:
permissions:
contents: none
needs: build
runs-on: ubuntu-latest
name: Cygwin Test Successful
steps:
- name: Success
run: echo Cygwin Test Successful

@@ -1,23 +1,6 @@
name: Test Docker name: Test Docker
on: on: [push, pull_request, workflow_dispatch]
push:
branches:
- "**"
paths-ignore:
- ".github/workflows/docs.yml"
- ".github/workflows/wheels*"
- ".gitmodules"
- "docs/**"
- "wheels/**"
pull_request:
paths-ignore:
- ".github/workflows/docs.yml"
- ".github/workflows/wheels*"
- ".gitmodules"
- "docs/**"
- "wheels/**"
workflow_dispatch:
permissions: permissions:
contents: read contents: read
@@ -29,55 +12,52 @@ concurrency:
jobs: jobs:
build: build:
runs-on: ${{ matrix.os }} runs-on: ubuntu-latest
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
os: ["ubuntu-latest"]
docker: [ docker: [
# Run slower jobs first to give them a headstart and reduce waiting time # Run slower jobs first to give them a headstart and reduce waiting time
ubuntu-24.04-noble-ppc64le, ubuntu-22.04-jammy-arm64v8,
ubuntu-24.04-noble-s390x, ubuntu-22.04-jammy-ppc64le,
ubuntu-22.04-jammy-s390x,
# Then run the remainder # Then run the remainder
alpine, alpine,
amazon-2-amd64, amazon-2-amd64,
amazon-2023-amd64,
arch, arch,
centos-7-amd64,
centos-stream-8-amd64,
centos-stream-9-amd64, centos-stream-9-amd64,
centos-stream-10-amd64, debian-10-buster-x86,
debian-12-bookworm-x86, debian-11-bullseye-x86,
debian-12-bookworm-amd64, fedora-36-amd64,
fedora-41-amd64, fedora-37-amd64,
fedora-42-amd64,
gentoo, gentoo,
ubuntu-18.04-bionic-amd64,
ubuntu-20.04-focal-amd64,
ubuntu-22.04-jammy-amd64, ubuntu-22.04-jammy-amd64,
ubuntu-24.04-noble-amd64,
] ]
dockerTag: [main] dockerTag: [main]
include: include:
- docker: "ubuntu-24.04-noble-ppc64le" - docker: "ubuntu-22.04-jammy-arm64v8"
qemu-arch: "aarch64"
- docker: "ubuntu-22.04-jammy-ppc64le"
qemu-arch: "ppc64le" qemu-arch: "ppc64le"
- docker: "ubuntu-24.04-noble-s390x" - docker: "ubuntu-22.04-jammy-s390x"
qemu-arch: "s390x" qemu-arch: "s390x"
- docker: "ubuntu-24.04-noble-arm64v8"
os: "ubuntu-24.04-arm"
dockerTag: main
name: ${{ matrix.docker }} name: ${{ matrix.docker }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
with:
persist-credentials: false
- name: Build system information - name: Build system information
run: python3 .github/workflows/system-info.py run: python3 .github/workflows/system-info.py
- name: Set up QEMU - name: Set up QEMU
if: "matrix.qemu-arch" if: "matrix.qemu-arch"
uses: docker/setup-qemu-action@v3 run: |
with: docker run --rm --privileged aptman/qus -s -- -p ${{ matrix.qemu-arch }}
platforms: ${{ matrix.qemu-arch }}
- name: Docker pull - name: Docker pull
run: | run: |
@@ -85,29 +65,28 @@ jobs:
- name: Docker build - name: Docker build
run: | run: |
# The Pillow user in the docker container is UID 1001 # The Pillow user in the docker container is UID 1000
sudo chown -R 1001 $GITHUB_WORKSPACE sudo chown -R 1000 $GITHUB_WORKSPACE
docker run --name pillow_container -v $GITHUB_WORKSPACE:/Pillow pythonpillow/${{ matrix.docker }}:${{ matrix.dockerTag }} docker run --name pillow_container -v $GITHUB_WORKSPACE:/Pillow pythonpillow/${{ matrix.docker }}:${{ matrix.dockerTag }}
sudo chown -R runner $GITHUB_WORKSPACE sudo chown -R runner $GITHUB_WORKSPACE
- name: After success - name: After success
run: | run: |
PATH="$PATH:~/.local/bin"
docker start pillow_container docker start pillow_container
sudo docker cp pillow_container:/Pillow /Pillow
sudo chown -R runner /Pillow
pil_path=`docker exec pillow_container /vpy3/bin/python -c 'import os, PIL;print(os.path.realpath(os.path.dirname(PIL.__file__)))'` pil_path=`docker exec pillow_container /vpy3/bin/python -c 'import os, PIL;print(os.path.realpath(os.path.dirname(PIL.__file__)))'`
docker stop pillow_container docker stop pillow_container
sudo mkdir -p $pil_path sudo mkdir -p $pil_path
sudo cp src/PIL/*.py $pil_path sudo cp src/PIL/*.py $pil_path
cd /Pillow
.ci/after_success.sh .ci/after_success.sh
env:
MATRIX_DOCKER: ${{ matrix.docker }}
- name: Upload coverage - name: Upload coverage
uses: codecov/codecov-action@v5 uses: codecov/codecov-action@v3
with: with:
flags: GHA_Docker flags: GHA_Docker
name: ${{ matrix.docker }} name: ${{ matrix.docker }}
token: ${{ secrets.CODECOV_ORG_TOKEN }}
success: success:
permissions: permissions:

@@ -1,53 +1,42 @@
name: Test MinGW name: Test MinGW
on: on: [push, pull_request, workflow_dispatch]
push:
branches:
- "**"
paths-ignore:
- ".github/workflows/docs.yml"
- ".github/workflows/wheels*"
- ".gitmodules"
- "docs/**"
- "wheels/**"
pull_request:
paths-ignore:
- ".github/workflows/docs.yml"
- ".github/workflows/wheels*"
- ".gitmodules"
- "docs/**"
- "wheels/**"
workflow_dispatch:
permissions: permissions:
contents: read contents: read
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.ref }} group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true cancel-in-progress: true
env:
COVERAGE_CORE: sysmon
jobs: jobs:
build: build:
runs-on: windows-latest runs-on: windows-latest
strategy:
fail-fast: false
matrix:
mingw: ["MINGW32", "MINGW64"]
include:
- mingw: "MINGW32"
name: "MSYS2 MinGW 32-bit"
package: "mingw-w64-i686"
- mingw: "MINGW64"
name: "MSYS2 MinGW 64-bit"
package: "mingw-w64-x86_64"
defaults: defaults:
run: run:
shell: bash.exe --login -eo pipefail "{0}" shell: bash.exe --login -eo pipefail "{0}"
env: env:
MSYSTEM: MINGW64 MSYSTEM: ${{ matrix.mingw }}
CHERE_INVOKING: 1 CHERE_INVOKING: 1
timeout-minutes: 30 timeout-minutes: 30
name: "MinGW" name: ${{ matrix.name }}
steps: steps:
- name: Checkout Pillow - name: Checkout Pillow
uses: actions/checkout@v4 uses: actions/checkout@v3
with:
persist-credentials: false
- name: Set up shell - name: Set up shell
run: echo "C:\msys64\usr\bin\" >> $env:GITHUB_PATH run: echo "C:\msys64\usr\bin\" >> $env:GITHUB_PATH
@@ -56,39 +45,50 @@ jobs:
- name: Install dependencies - name: Install dependencies
run: | run: |
pacman -S --noconfirm \ pacman -S --noconfirm \
mingw-w64-x86_64-freetype \ ${{ matrix.package }}-python3-cffi \
mingw-w64-x86_64-gcc \ ${{ matrix.package }}-python3-numpy \
mingw-w64-x86_64-ghostscript \ ${{ matrix.package }}-python3-olefile \
mingw-w64-x86_64-lcms2 \ ${{ matrix.package }}-python3-pip \
mingw-w64-x86_64-libavif \ ${{ matrix.package }}-python-pyqt6 \
mingw-w64-x86_64-libimagequant \ ${{ matrix.package }}-python3-setuptools \
mingw-w64-x86_64-libjpeg-turbo \ ${{ matrix.package }}-freetype \
mingw-w64-x86_64-libraqm \ ${{ matrix.package }}-gcc \
mingw-w64-x86_64-libtiff \ ${{ matrix.package }}-ghostscript \
mingw-w64-x86_64-libwebp \ ${{ matrix.package }}-lcms2 \
mingw-w64-x86_64-openjpeg2 \ ${{ matrix.package }}-libimagequant \
mingw-w64-x86_64-python-numpy \ ${{ matrix.package }}-libjpeg-turbo \
mingw-w64-x86_64-python-olefile \ ${{ matrix.package }}-libraqm \
mingw-w64-x86_64-python-pip \ ${{ matrix.package }}-libtiff \
mingw-w64-x86_64-python-pytest \ ${{ matrix.package }}-libwebp \
mingw-w64-x86_64-python-pytest-cov \ ${{ matrix.package }}-openjpeg2 \
mingw-w64-x86_64-python-pytest-timeout \ subversion
mingw-w64-x86_64-python-pyqt6
python3 -m pip install pyroma pytest pytest-cov pytest-timeout
pushd depends && ./install_extra_test_images.sh && popd pushd depends && ./install_extra_test_images.sh && popd
- name: Build Pillow - name: Build Pillow
run: CFLAGS="-coverage" python3 -m pip install . run: CFLAGS="-coverage" python3 -m pip install --global-option="build_ext" .
- name: Test Pillow - name: Test Pillow
run: | run: |
python3 selftest.py --installed python3 selftest.py --installed
.ci/test.sh python3 -c "from PIL import Image"
python3 -m pytest -vx --cov PIL --cov Tests --cov-report term --cov-report xml Tests
- name: Upload coverage - name: Upload coverage
uses: codecov/codecov-action@v5 uses: codecov/codecov-action@v3
with: with:
files: ./coverage.xml file: ./coverage.xml
flags: GHA_Windows flags: GHA_Windows
name: "MSYS2 MinGW" name: ${{ matrix.name }}
token: ${{ secrets.CODECOV_ORG_TOKEN }}
success:
permissions:
contents: none
needs: build
runs-on: ubuntu-latest
name: MinGW Test Successful
steps:
- name: Success
run: echo MinGW Test Successful

@@ -1,60 +0,0 @@
name: Test Valgrind Memory Leaks
# like the Docker tests, but running valgrind only on *.c/*.h changes.
# this is very expensive. Only run on the pull request.
on:
# push:
# branches:
# - "**"
# paths:
# - ".github/workflows/test-valgrind.yml"
# - "**.c"
# - "**.h"
pull_request:
paths:
- ".github/workflows/test-valgrind.yml"
- "**.c"
- "**.h"
- "depends/docker-test-valgrind-memory.sh"
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
docker: [
ubuntu-22.04-jammy-amd64-valgrind,
]
dockerTag: [main]
name: ${{ matrix.docker }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Build system information
run: python3 .github/workflows/system-info.py
- name: Docker pull
run: |
docker pull pythonpillow/${{ matrix.docker }}:${{ matrix.dockerTag }}
- name: Build and Run Valgrind
run: |
# The Pillow user in the docker container is UID 1001
sudo chown -R 1001 $GITHUB_WORKSPACE
docker run --name pillow_container -e "PILLOW_VALGRIND_TEST=true" -v $GITHUB_WORKSPACE:/Pillow pythonpillow/${{ matrix.docker }}:${{ matrix.dockerTag }} /Pillow/depends/docker-test-valgrind-memory.sh
sudo chown -R runner $GITHUB_WORKSPACE

@@ -1,18 +1,14 @@
name: Test Valgrind name: Test Valgrind
# like the Docker tests, but running valgrind only on *.c/*.h changes. # like the docker tests, but running valgrind only on *.c/*.h changes.
on: on:
push: push:
branches:
- "**"
paths: paths:
- ".github/workflows/test-valgrind.yml"
- "**.c" - "**.c"
- "**.h" - "**.h"
pull_request: pull_request:
paths: paths:
- ".github/workflows/test-valgrind.yml"
- "**.c" - "**.c"
- "**.h" - "**.h"
workflow_dispatch: workflow_dispatch:
@@ -20,7 +16,7 @@ on:
permissions: permissions:
contents: read contents: read
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.ref }} group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true cancel-in-progress: true
@@ -39,9 +35,7 @@ jobs:
name: ${{ matrix.docker }} name: ${{ matrix.docker }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
with:
persist-credentials: false
- name: Build system information - name: Build system information
run: python3 .github/workflows/system-info.py run: python3 .github/workflows/system-info.py
@@ -52,7 +46,7 @@ jobs:
- name: Build and Run Valgrind - name: Build and Run Valgrind
run: | run: |
# The Pillow user in the docker container is UID 1001 # The Pillow user in the docker container is UID 1000
sudo chown -R 1001 $GITHUB_WORKSPACE sudo chown -R 1000 $GITHUB_WORKSPACE
docker run --name pillow_container -e "PILLOW_VALGRIND_TEST=true" -v $GITHUB_WORKSPACE:/Pillow pythonpillow/${{ matrix.docker }}:${{ matrix.dockerTag }} docker run --name pillow_container -e "PILLOW_VALGRIND_TEST=true" -v $GITHUB_WORKSPACE:/Pillow pythonpillow/${{ matrix.docker }}:${{ matrix.dockerTag }}
sudo chown -R runner $GITHUB_WORKSPACE sudo chown -R runner $GITHUB_WORKSPACE

@@ -1,23 +1,6 @@
name: Test Windows name: Test Windows
on: on: [push, pull_request, workflow_dispatch]
push:
branches:
- "**"
paths-ignore:
- ".github/workflows/docs.yml"
- ".github/workflows/wheels*"
- ".gitmodules"
- "docs/**"
- "wheels/**"
pull_request:
paths-ignore:
- ".github/workflows/docs.yml"
- ".github/workflows/wheels*"
- ".gitmodules"
- "docs/**"
- "wheels/**"
workflow_dispatch:
permissions: permissions:
contents: read contents: read
@@ -26,51 +9,40 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }} group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true cancel-in-progress: true
env:
COVERAGE_CORE: sysmon
jobs: jobs:
build: build:
runs-on: windows-latest runs-on: windows-latest
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
python-version: ["pypy3.11", "3.11", "3.12", "3.13", "3.14"] python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
architecture: ["x64"] architecture: ["x86", "x64"]
include: include:
# Test the oldest Python on 32-bit # PyPy 7.3.4+ only ships 64-bit binaries for Windows
- { python-version: "3.10", architecture: "x86" } - python-version: "pypy3.8"
architecture: "x64"
- python-version: "pypy3.9"
architecture: "x64"
timeout-minutes: 45 timeout-minutes: 30
name: Python ${{ matrix.python-version }} (${{ matrix.architecture }}) name: Python ${{ matrix.python-version }} ${{ matrix.architecture }}
steps: steps:
- name: Checkout Pillow - name: Checkout Pillow
uses: actions/checkout@v4 uses: actions/checkout@v3
with:
persist-credentials: false
- name: Checkout cached dependencies - name: Checkout cached dependencies
uses: actions/checkout@v4 uses: actions/checkout@v3
with: with:
persist-credentials: false
repository: python-pillow/pillow-depends repository: python-pillow/pillow-depends
path: winbuild\depends path: winbuild\depends
- name: Checkout extra test images
uses: actions/checkout@v4
with:
persist-credentials: false
repository: python-pillow/test-images
path: Tests\test-images
# sets env: pythonLocation # sets env: pythonLocation
- name: Set up Python - name: Set up Python
uses: actions/setup-python@v5 uses: actions/setup-python@v4
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
allow-prereleases: true
architecture: ${{ matrix.architecture }} architecture: ${{ matrix.architecture }}
cache: pip cache: pip
cache-dependency-path: ".github/workflows/test-windows.yml" cache-dependency-path: ".github/workflows/test-windows.yml"
@@ -78,30 +50,19 @@ jobs:
- name: Print build system information - name: Print build system information
run: python3 .github/workflows/system-info.py run: python3 .github/workflows/system-info.py
- name: Upgrade pip - name: python3 -m pip install wheel pytest pytest-cov pytest-timeout defusedxml
run: | run: python3 -m pip install wheel pytest pytest-cov pytest-timeout defusedxml
python3 -m pip install --upgrade pip
- name: Install CPython dependencies
if: "!contains(matrix.python-version, 'pypy') && !contains(matrix.python-version, '3.14') && matrix.architecture != 'x86'"
run: |
python3 -m pip install PyQt6
- name: Install PyArrow dependency
run: |
python3 -m pip install --only-binary=:all: pyarrow || true
- name: Install dependencies - name: Install dependencies
id: install id: install
run: | run: |
choco install nasm --no-progress 7z x winbuild\depends\nasm-2.15.05-win64.zip "-o$env:RUNNER_WORKSPACE\"
echo "C:\Program Files\NASM" >> $env:GITHUB_PATH echo "$env:RUNNER_WORKSPACE\nasm-2.15.05" >> $env:GITHUB_PATH
choco install ghostscript --version=10.5.1 --no-progress winbuild\depends\gs1000w32.exe /S
echo "C:\Program Files\gs\gs10.05.1\bin" >> $env:GITHUB_PATH echo "C:\Program Files (x86)\gs\gs10.0.0\bin" >> $env:GITHUB_PATH
# Install extra test images xcopy /S /Y winbuild\depends\test_images\* Tests\images\
xcopy /S /Y Tests\test-images\* Tests\images
# make cache key depend on VS version # make cache key depend on VS version
& "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe" ` & "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe" `
@@ -111,7 +72,7 @@ jobs:
- name: Cache build - name: Cache build
id: build-cache id: build-cache
uses: actions/cache@v4 uses: actions/cache@v3
with: with:
path: winbuild\build path: winbuild\build
key: key:
@@ -120,7 +81,7 @@ jobs:
- name: Prepare build - name: Prepare build
if: steps.build-cache.outputs.cache-hit != 'true' if: steps.build-cache.outputs.cache-hit != 'true'
run: | run: |
& python.exe winbuild\build_prepare.py -v & python.exe winbuild\build_prepare.py -v --python=$env:pythonLocation --srcdir
shell: pwsh shell: pwsh
- name: Build dependencies / libjpeg-turbo - name: Build dependencies / libjpeg-turbo
@@ -148,10 +109,6 @@ jobs:
if: steps.build-cache.outputs.cache-hit != 'true' if: steps.build-cache.outputs.cache-hit != 'true'
run: "& winbuild\\build\\build_dep_libpng.cmd" run: "& winbuild\\build\\build_dep_libpng.cmd"
- name: Build dependencies / libavif
if: steps.build-cache.outputs.cache-hit != 'true' && matrix.architecture == 'x64'
run: "& winbuild\\build\\build_dep_libavif.cmd"
# for FreeType WOFF2 font support # for FreeType WOFF2 font support
- name: Build dependencies / brotli - name: Build dependencies / brotli
if: steps.build-cache.outputs.cache-hit != 'true' if: steps.build-cache.outputs.cache-hit != 'true'
@@ -192,8 +149,9 @@ jobs:
- name: Build Pillow - name: Build Pillow
run: | run: |
$FLAGS="-C raqm=vendor -C fribidi=vendor" $FLAGS=""
cmd /c "winbuild\build\build_env.cmd && $env:pythonLocation\python.exe -m pip install -v $FLAGS .[tests]" if ('${{ github.event_name }}' -ne 'pull_request') { $FLAGS="--disable-imagequant" }
& winbuild\build\build_pillow.cmd $FLAGS install
& $env:pythonLocation\python.exe selftest.py --installed & $env:pythonLocation\python.exe selftest.py --installed
shell: pwsh shell: pwsh
@@ -205,8 +163,8 @@ jobs:
- name: Test Pillow - name: Test Pillow
run: | run: |
path %GITHUB_WORKSPACE%\winbuild\build\bin;%PATH% path %GITHUB_WORKSPACE%\\winbuild\\build\\bin;%PATH%
.ci\test.cmd python.exe -m pytest -vx -W always --cov PIL --cov Tests --cov-report term --cov-report xml Tests
shell: cmd shell: cmd
- name: Prepare to upload errors - name: Prepare to upload errors
@@ -216,7 +174,7 @@ jobs:
shell: bash shell: bash
- name: Upload errors - name: Upload errors
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
if: failure() if: failure()
with: with:
name: errors name: errors
@@ -228,12 +186,51 @@ jobs:
shell: pwsh shell: pwsh
- name: Upload coverage - name: Upload coverage
uses: codecov/codecov-action@v5 uses: codecov/codecov-action@v3
with: with:
files: ./coverage.xml file: ./coverage.xml
flags: GHA_Windows flags: GHA_Windows
name: ${{ runner.os }} Python ${{ matrix.python-version }} name: ${{ runner.os }} Python ${{ matrix.python-version }} ${{ matrix.architecture }}
token: ${{ secrets.CODECOV_ORG_TOKEN }}
- name: Build wheel
id: wheel
if: "github.event_name != 'pull_request'"
run: |
mkdir fribidi\${{ matrix.architecture }}
copy winbuild\build\bin\fribidi* fribidi\${{ matrix.architecture }}
setlocal EnableDelayedExpansion
for %%f in (winbuild\build\license\*) do (
set x=%%~nf
rem Skip FriBiDi license, it is not included in the wheel.
set fribidi=!x:~0,7!
if NOT !fribidi!==fribidi (
rem Skip imagequant license, it is not included in the wheel.
set libimagequant=!x:~0,13!
if NOT !libimagequant!==libimagequant (
echo. >> LICENSE
echo ===== %%~nf ===== >> LICENSE
echo. >> LICENSE
type %%f >> LICENSE
)
)
)
for /f "tokens=3 delims=/" %%a in ("${{ github.ref }}") do echo dist=dist-%%a >> %GITHUB_OUTPUT%
winbuild\\build\\build_pillow.cmd --disable-imagequant bdist_wheel
shell: cmd
- name: Upload wheel
uses: actions/upload-artifact@v3
if: "github.event_name != 'pull_request'"
with:
name: ${{ steps.wheel.outputs.dist }}
path: dist\*.whl
- name: Upload fribidi.dll
if: "github.event_name != 'pull_request' && matrix.python-version == 3.11"
uses: actions/upload-artifact@v3
with:
name: fribidi
path: fribidi\*
success: success:
permissions: permissions:

@@ -1,23 +1,6 @@
name: Test name: Test
on: on: [push, pull_request, workflow_dispatch]
push:
branches:
- "**"
paths-ignore:
- ".github/workflows/docs.yml"
- ".github/workflows/wheels*"
- ".gitmodules"
- "docs/**"
- "wheels/**"
pull_request:
paths-ignore:
- ".github/workflows/docs.yml"
- ".github/workflows/wheels*"
- ".gitmodules"
- "docs/**"
- "wheels/**"
workflow_dispatch:
permissions: permissions:
contents: read contents: read
@@ -26,10 +9,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }} group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true cancel-in-progress: true
env:
COVERAGE_CORE: sysmon
FORCE_COLOR: 1
jobs: jobs:
build: build:
@@ -41,67 +20,43 @@ jobs:
"ubuntu-latest", "ubuntu-latest",
] ]
python-version: [ python-version: [
"pypy3.11", "pypy3.9",
"3.14t", "pypy3.8",
"3.14",
"3.13t",
"3.13",
"3.12",
"3.11", "3.11",
"3.10", "3.10",
"3.9",
"3.8",
"3.7",
] ]
include: include:
- { python-version: "3.12", PYTHONOPTIMIZE: 1, REVERSE: "--reverse" } - python-version: "3.7"
- { python-version: "3.11", PYTHONOPTIMIZE: 2 } PYTHONOPTIMIZE: 1
# Free-threaded REVERSE: "--reverse"
- { python-version: "3.14t", disable-gil: true } - python-version: "3.8"
- { python-version: "3.13t", disable-gil: true } PYTHONOPTIMIZE: 2
# Intel
- { os: "macos-13", python-version: "3.10" }
exclude:
- { os: "macos-latest", python-version: "3.10" }
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
name: ${{ matrix.os }} Python ${{ matrix.python-version }} name: ${{ matrix.os }} Python ${{ matrix.python-version }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
with:
persist-credentials: false
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5 uses: actions/setup-python@v4
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
allow-prereleases: true
cache: pip cache: pip
cache-dependency-path: | cache-dependency-path: ".ci/*.sh"
".ci/*.sh"
"pyproject.toml"
- name: Set PYTHON_GIL
if: "${{ matrix.disable-gil }}"
run: |
echo "PYTHON_GIL=0" >> $GITHUB_ENV
- name: Build system information - name: Build system information
run: python3 .github/workflows/system-info.py run: python3 .github/workflows/system-info.py
- name: Cache libimagequant
if: startsWith(matrix.os, 'ubuntu')
uses: actions/cache@v4
id: cache-libimagequant
with:
path: ~/cache-libimagequant
key: ${{ runner.os }}-libimagequant-${{ hashFiles('depends/install_imagequant.sh') }}
- name: Install Linux dependencies - name: Install Linux dependencies
if: startsWith(matrix.os, 'ubuntu') if: startsWith(matrix.os, 'ubuntu')
run: | run: |
.ci/install.sh .ci/install.sh
env: env:
GHA_PYTHON_VERSION: ${{ matrix.python-version }} GHA_PYTHON_VERSION: ${{ matrix.python-version }}
GHA_LIBIMAGEQUANT_CACHE_HIT: ${{ steps.cache-libimagequant.outputs.cache-hit }}
- name: Install macOS dependencies - name: Install macOS dependencies
if: startsWith(matrix.os, 'macOS') if: startsWith(matrix.os, 'macOS')
@@ -110,10 +65,6 @@ jobs:
env: env:
GHA_PYTHON_VERSION: ${{ matrix.python-version }} GHA_PYTHON_VERSION: ${{ matrix.python-version }}
- name: Register gcc problem matcher
if: "matrix.os == 'ubuntu-latest' && matrix.python-version == '3.13'"
run: echo "::add-matcher::.github/problem-matchers/gcc.json"
- name: Build - name: Build
run: | run: |
.ci/build.sh .ci/build.sh
@@ -124,9 +75,7 @@ jobs:
python3 -m pip install pytest-reverse python3 -m pip install pytest-reverse
fi fi
if [ "${{ matrix.os }}" = "ubuntu-latest" ]; then if [ "${{ matrix.os }}" = "ubuntu-latest" ]; then
xvfb-run -s '-screen 0 1024x768x24' sway& xvfb-run -s '-screen 0 1024x768x24' .ci/test.sh
export WAYLAND_DISPLAY=wayland-1
.ci/test.sh
else else
.ci/test.sh .ci/test.sh
fi fi
@@ -140,22 +89,27 @@ jobs:
mkdir -p Tests/errors mkdir -p Tests/errors
- name: Upload errors - name: Upload errors
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
if: failure() if: failure()
with: with:
name: errors name: errors
path: Tests/errors path: Tests/errors
- name: Docs
if: startsWith(matrix.os, 'ubuntu') && matrix.python-version == 3.11
run: |
make doccheck
- name: After success - name: After success
run: | run: |
.ci/after_success.sh .ci/after_success.sh
- name: Upload coverage - name: Upload coverage
uses: codecov/codecov-action@v5 uses: codecov/codecov-action@v3
with: with:
flags: ${{ matrix.os == 'ubuntu-latest' && 'GHA_Ubuntu' || 'GHA_macOS' }} file: ./coverage.xml
flags: ${{ matrix.os == 'macos-latest' && 'GHA_macOS' || 'GHA_Ubuntu' }}
name: ${{ matrix.os }} Python ${{ matrix.python-version }} name: ${{ matrix.os }} Python ${{ matrix.python-version }}
token: ${{ secrets.CODECOV_ORG_TOKEN }}
success: success:
permissions: permissions:

@@ -1,420 +0,0 @@
#!/bin/bash
# Safety check - Pillow builds require that CIBW_ARCHS is set, and that it only
# contains a single value (even though cibuildwheel allows multiple values in
# CIBW_ARCHS). This check doesn't work on Linux because of how the CIBW_ARCHS
# variable is exposed.
function check_cibw_archs {
if [[ -z "$CIBW_ARCHS" ]]; then
echo "ERROR: Pillow builds require CIBW_ARCHS be defined."
exit 1
fi
if [[ "$CIBW_ARCHS" == *" "* ]]; then
echo "ERROR: Pillow builds only support a single architecture in CIBW_ARCHS."
exit 1
fi
}
# Setup that needs to be done before multibuild utils are invoked. Process
# potential cross-build platforms before native platforms to ensure that we pick
# up the cross environment.
PROJECTDIR=$(pwd)
if [[ "$CIBW_PLATFORM" == "ios" ]]; then
check_cibw_archs
# On iOS, CIBW_ARCHS is actually a multi-arch - arm64_iphoneos,
# arm64_iphonesimulator or x86_64_iphonesimulator. Split into the CPU
# platform, and the iOS SDK.
PLAT=$(echo $CIBW_ARCHS | sed "s/\(.*\)_\(.*\)/\1/")
IOS_SDK=$(echo $CIBW_ARCHS | sed "s/\(.*\)_\(.*\)/\2/")
# Build iOS builds in `build/iphoneos` or `build/iphonesimulator`
# (depending on the build target). Install them into `build/deps/iphoneos`
# or `build/deps/iphonesimulator`
WORKDIR=$(pwd)/build/$IOS_SDK
BUILD_PREFIX=$(pwd)/build/deps/$IOS_SDK
PATCH_DIR=$(pwd)/patches/iOS
# GNU tooling insists on using aarch64 rather than arm64
if [[ $PLAT == "arm64" ]]; then
GNU_ARCH=aarch64
else
GNU_ARCH=x86_64
fi
IOS_SDK_PATH=$(xcrun --sdk $IOS_SDK --show-sdk-path)
CMAKE_SYSTEM_NAME=iOS
IOS_HOST_TRIPLE=$PLAT-apple-ios$IPHONEOS_DEPLOYMENT_TARGET
if [[ "$IOS_SDK" == "iphonesimulator" ]]; then
IOS_HOST_TRIPLE=$IOS_HOST_TRIPLE-simulator
fi
# GNU Autotools doesn't recognize the existence of arm64-apple-ios-simulator
# as a valid host. However, the only difference between arm64-apple-ios and
# arm64-apple-ios-simulator is the choice of sysroot, and that is
# coordinated by CC, CFLAGS etc. From the perspective of configure, the two
# platforms are identical, so we can use arm64-apple-ios consistently.
# This (mostly) avoids us needing to patch config.sub in dependency sources.
HOST_CONFIGURE_FLAGS="--disable-shared --enable-static --host=$GNU_ARCH-apple-ios --build=$GNU_ARCH-apple-darwin"
# CMake has native support for iOS. However, most of that support is based
# on using the Xcode builder, which isn't very helpful for most of Pillow's
# dependencies. Therefore, we lean on the OSX configurations, plus CC, CFLAGS
# etc. to ensure the right sysroot is selected.
HOST_CMAKE_FLAGS="-DCMAKE_SYSTEM_NAME=$CMAKE_SYSTEM_NAME -DCMAKE_SYSTEM_PROCESSOR=$GNU_ARCH -DCMAKE_OSX_DEPLOYMENT_TARGET=$IPHONEOS_DEPLOYMENT_TARGET -DCMAKE_OSX_SYSROOT=$IOS_SDK_PATH -DBUILD_SHARED_LIBS=NO -DENABLE_SHARED=NO"
# Meson needs to be pointed at a cross-platform configuration file
# This will be generated once CC etc. have been evaluated.
HOST_MESON_FLAGS="--cross-file $WORKDIR/meson-cross.txt -Dprefer_static=true -Ddefault_library=static"
elif [[ "$(uname -s)" == "Darwin" ]]; then
check_cibw_archs
# Build macOS dependencies in `build/darwin`
# Install them into `build/deps/darwin`
PLAT=$CIBW_ARCHS
WORKDIR=$(pwd)/build/darwin
BUILD_PREFIX=$(pwd)/build/deps/darwin
else
# Build prefix will default to /usr/local
PLAT="${CIBW_ARCHS:-$AUDITWHEEL_ARCH}"
WORKDIR=$(pwd)/build
MB_ML_LIBC=${AUDITWHEEL_POLICY::9}
MB_ML_VER=${AUDITWHEEL_POLICY:9}
fi
# Define custom utilities
source wheels/multibuild/common_utils.sh
source wheels/multibuild/library_builders.sh
if [[ -z "$IS_MACOS" ]]; then
source wheels/multibuild/manylinux_utils.sh
fi
ARCHIVE_SDIR=pillow-depends-main
# Package versions for fresh source builds. Version numbers with "Patched"
# annotations have a source code patch that is required for some platforms. If
# you change those versions, ensure the patch is also updated.
FREETYPE_VERSION=2.13.3
HARFBUZZ_VERSION=11.2.1
LIBPNG_VERSION=1.6.50
JPEGTURBO_VERSION=3.1.1
OPENJPEG_VERSION=2.5.3
XZ_VERSION=5.8.1
TIFF_VERSION=4.7.0
LCMS2_VERSION=2.17
ZLIB_VERSION=1.3.1
ZLIB_NG_VERSION=2.2.4
LIBWEBP_VERSION=1.6.0
BZIP2_VERSION=1.0.8
LIBXCB_VERSION=1.17.0
BROTLI_VERSION=1.1.0 # Patched; next release won't need patching. See patch file.
LIBAVIF_VERSION=1.3.0
function build_pkg_config {
if [ -e pkg-config-stamp ]; then return; fi
# This essentially duplicates the Homebrew recipe.
# On iOS, we need a binary that can be executed on the build machine; but we
# can create a host-specific pc-path to store iOS .pc files. To ensure a
# macOS-compatible build, we temporarily clear environment flags that set
# iOS-specific values.
if [[ -n "$IOS_SDK" ]]; then
ORIGINAL_HOST_CONFIGURE_FLAGS=$HOST_CONFIGURE_FLAGS
ORIGINAL_IPHONEOS_DEPLOYMENT_TARGET=$IPHONEOS_DEPLOYMENT_TARGET
unset HOST_CONFIGURE_FLAGS
unset IPHONEOS_DEPLOYMENT_TARGET
fi
CFLAGS="$CFLAGS -Wno-int-conversion" CPPFLAGS="" build_simple pkg-config 0.29.2 https://pkg-config.freedesktop.org/releases tar.gz \
--disable-debug --disable-host-tool --with-internal-glib \
--with-pc-path=$BUILD_PREFIX/share/pkgconfig:$BUILD_PREFIX/lib/pkgconfig \
--with-system-include-path=$(xcrun --show-sdk-path --sdk macosx)/usr/include
if [[ -n "$IOS_SDK" ]]; then
HOST_CONFIGURE_FLAGS=$ORIGINAL_HOST_CONFIGURE_FLAGS
IPHONEOS_DEPLOYMENT_TARGET=$ORIGINAL_IPHONEOS_DEPLOYMENT_TARGET
fi;
export PKG_CONFIG=$BUILD_PREFIX/bin/pkg-config
touch pkg-config-stamp
}
function build_zlib_ng {
if [ -e zlib-stamp ]; then return; fi
# zlib-ng uses a "configure" script, but it's not a GNU autotools script, so
# it doesn't honor the usual flags. Temporarily disable any
# cross-compilation flags.
ORIGINAL_HOST_CONFIGURE_FLAGS=$HOST_CONFIGURE_FLAGS
unset HOST_CONFIGURE_FLAGS
build_github zlib-ng/zlib-ng $ZLIB_NG_VERSION --zlib-compat
HOST_CONFIGURE_FLAGS=$ORIGINAL_HOST_CONFIGURE_FLAGS
if [[ -n "$IS_MACOS" ]] && [[ -z "$IOS_SDK" ]]; then
# Ensure that on macOS, the library name is an absolute path, not an
# @rpath, so that delocate picks up the right library (and doesn't need
# DYLD_LIBRARY_PATH to be set). The default Makefile doesn't have an
# option to control the install_name. This isn't needed on iOS, as iOS
# only builds the static library.
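# (To confirm, `otool -D $BUILD_PREFIX/lib/libz.1.dylib` should report the absolute path rather than an @rpath entry.)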
install_name_tool -id $BUILD_PREFIX/lib/libz.1.dylib $BUILD_PREFIX/lib/libz.1.dylib
fi
touch zlib-stamp
}
function build_brotli {
if [ -e brotli-stamp ]; then return; fi
local out_dir=$(fetch_unpack https://github.com/google/brotli/archive/v$BROTLI_VERSION.tar.gz brotli-$BROTLI_VERSION.tar.gz)
(cd $out_dir \
&& cmake -DCMAKE_INSTALL_PREFIX=$BUILD_PREFIX -DCMAKE_INSTALL_LIBDIR=$BUILD_PREFIX/lib -DCMAKE_INSTALL_NAME_DIR=$BUILD_PREFIX/lib $HOST_CMAKE_FLAGS . \
&& make install)
touch brotli-stamp
}
function build_harfbuzz {
if [ -e harfbuzz-stamp ]; then return; fi
python3 -m pip install meson ninja
local out_dir=$(fetch_unpack https://github.com/harfbuzz/harfbuzz/releases/download/$HARFBUZZ_VERSION/harfbuzz-$HARFBUZZ_VERSION.tar.xz harfbuzz-$HARFBUZZ_VERSION.tar.xz)
(cd $out_dir \
&& meson setup build --prefix=$BUILD_PREFIX --libdir=$BUILD_PREFIX/lib --buildtype=minsize -Dfreetype=enabled -Dglib=disabled -Dtests=disabled $HOST_MESON_FLAGS)
(cd $out_dir/build \
&& meson install)
touch harfbuzz-stamp
}
function build_libavif {
if [ -e libavif-stamp ]; then return; fi
python3 -m pip install meson ninja
if ([[ "$PLAT" == "x86_64" ]] && [[ -z "$IOS_SDK" ]]) || [ -n "$SANITIZER" ]; then
build_simple nasm 2.16.03 https://www.nasm.us/pub/nasm/releasebuilds/2.16.03
fi
local build_type=MinSizeRel
local build_shared=ON
local lto=ON
local libavif_cmake_flags
if [[ -n "$IS_MACOS" ]]; then
lto=OFF
libavif_cmake_flags=(
-DCMAKE_C_FLAGS_MINSIZEREL="-Oz -DNDEBUG -flto" \
-DCMAKE_CXX_FLAGS_MINSIZEREL="-Oz -DNDEBUG -flto" \
-DCMAKE_SHARED_LINKER_FLAGS_INIT="-Wl,-S,-x,-dead_strip_dylibs" \
)
if [[ -n "$IOS_SDK" ]]; then
build_shared=OFF
fi
else
if [[ "$MB_ML_VER" == 2014 ]] && [[ "$PLAT" == "x86_64" ]]; then
build_type=Release
fi
libavif_cmake_flags=(-DCMAKE_SHARED_LINKER_FLAGS_INIT="-Wl,--strip-all,-z,relro,-z,now")
fi
if [[ -n "$IOS_SDK" ]] && [[ "$PLAT" == "x86_64" ]]; then
libavif_cmake_flags+=(-DAOM_TARGET_CPU=generic)
else
libavif_cmake_flags+=(
-DAVIF_CODEC_AOM_DECODE=OFF \
-DAVIF_CODEC_DAV1D=LOCAL
)
fi
local out_dir=$(fetch_unpack https://github.com/AOMediaCodec/libavif/archive/refs/tags/v$LIBAVIF_VERSION.tar.gz libavif-$LIBAVIF_VERSION.tar.gz)
# CONFIG_AV1_HIGHBITDEPTH=0 is a flag for libaom (included as a subproject
# of libavif) that disables support for encoding high bit depth images.
(cd $out_dir \
&& cmake \
-DCMAKE_INSTALL_PREFIX=$BUILD_PREFIX \
-DCMAKE_INSTALL_LIBDIR=$BUILD_PREFIX/lib \
-DCMAKE_INSTALL_NAME_DIR=$BUILD_PREFIX/lib \
-DBUILD_SHARED_LIBS=$build_shared \
-DAVIF_LIBSHARPYUV=LOCAL \
-DAVIF_LIBYUV=LOCAL \
-DAVIF_CODEC_AOM=LOCAL \
-DCONFIG_AV1_HIGHBITDEPTH=0 \
-DCMAKE_INTERPROCEDURAL_OPTIMIZATION=$lto \
-DCMAKE_C_VISIBILITY_PRESET=hidden \
-DCMAKE_CXX_VISIBILITY_PRESET=hidden \
-DCMAKE_BUILD_TYPE=$build_type \
"${libavif_cmake_flags[@]}" \
$HOST_CMAKE_FLAGS . )
if [[ -n "$IOS_SDK" ]]; then
# libavif's CMake configuration generates a meson cross file... but it
# doesn't work for iOS cross-compilation. Copy in Pillow-generated
# meson-cross config to replace the cmake-generated version.
cp $WORKDIR/meson-cross.txt $out_dir/crossfile-apple.meson
fi
(cd $out_dir && make install)
touch libavif-stamp
}
function build {
build_xz
if [ -z "$IS_ALPINE" ] && [ -z "$SANITIZER" ] && [ -z "$IS_MACOS" ]; then
yum remove -y zlib-devel
fi
if [[ -n "$IS_MACOS" ]] && [[ "$MACOSX_DEPLOYMENT_TARGET" == "10.10" || "$MACOSX_DEPLOYMENT_TARGET" == "10.13" ]]; then
build_new_zlib
else
build_zlib_ng
fi
build_simple xcb-proto 1.17.0 https://xorg.freedesktop.org/archive/individual/proto
if [[ -n "$IS_MACOS" ]]; then
build_simple xorgproto 2024.1 https://www.x.org/pub/individual/proto
build_simple libXau 1.0.12 https://www.x.org/pub/individual/lib
build_simple libpthread-stubs 0.5 https://xcb.freedesktop.org/dist
else
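# xcb-proto.pc embeds ${pc_sysrootdir} in its paths; strip that variable when copying it into lib/pkgconfig so the paths resolve against the plain build prefix.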
sed "s/\${pc_sysrootdir\}//" $BUILD_PREFIX/share/pkgconfig/xcb-proto.pc > $BUILD_PREFIX/lib/pkgconfig/xcb-proto.pc
fi
build_simple libxcb $LIBXCB_VERSION https://www.x.org/releases/individual/lib
build_libjpeg_turbo
if [[ -n "$IS_MACOS" ]]; then
# Custom tiff build to include jpeg; by default, configure won't include
# headers/libs in the custom macOS/iOS prefix. Explicitly disable webp,
# libdeflate and zstd, because on x86_64 macs, it will pick up the
# Homebrew versions of those libraries from /usr/local.
build_simple tiff $TIFF_VERSION https://download.osgeo.org/libtiff tar.gz \
--with-jpeg-include-dir=$BUILD_PREFIX/include --with-jpeg-lib-dir=$BUILD_PREFIX/lib \
--disable-webp --disable-libdeflate --disable-zstd
else
build_tiff
fi
build_libavif
build_libpng
build_lcms2
build_openjpeg
webp_cflags="-O3 -DNDEBUG"
if [[ -n "$IS_MACOS" ]]; then
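# -Wl,-headerpad_max_install_names reserves extra space in the Mach-O headers so install names can be rewritten later (for example, when the wheel is delocated).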
webp_cflags="$webp_cflags -Wl,-headerpad_max_install_names"
fi
webp_ldflags=""
if [[ -n "$IOS_SDK" ]]; then
webp_ldflags="$webp_ldflags -llzma -lz"
fi
CFLAGS="$CFLAGS $webp_cflags" LDFLAGS="$LDFLAGS $webp_ldflags" build_simple libwebp $LIBWEBP_VERSION \
https://storage.googleapis.com/downloads.webmproject.org/releases/webp tar.gz \
--enable-libwebpmux --enable-libwebpdemux
build_brotli
if [[ -n "$IS_MACOS" ]]; then
# Custom freetype build
build_simple freetype $FREETYPE_VERSION https://download.savannah.gnu.org/releases/freetype tar.gz --with-harfbuzz=no
else
build_freetype
fi
if [[ -z "$IOS_SDK" ]]; then
# On iOS, there's no vendor-provided raqm, and we can't ship it due to
# licensing, so there's no point building harfbuzz.
build_harfbuzz
fi
}
function create_meson_cross_config {
cat << EOF > $WORKDIR/meson-cross.txt
[binaries]
pkg-config = '$BUILD_PREFIX/bin/pkg-config'
cmake = '$(which cmake)'
c = '$CC'
cpp = '$CXX'
strip = '$STRIP'
[built-in options]
c_args = '$CFLAGS -I$BUILD_PREFIX/include'
cpp_args = '$CXXFLAGS -I$BUILD_PREFIX/include'
c_link_args = '$CFLAGS -L$BUILD_PREFIX/lib'
cpp_link_args = '$CFLAGS -L$BUILD_PREFIX/lib'
[host_machine]
system = 'darwin'
subsystem = 'ios'
kernel = 'xnu'
cpu_family = '$(uname -m)'
cpu = '$(uname -m)'
endian = 'little'
EOF
}
# Perform all dependency builds in the build subfolder.
mkdir -p $WORKDIR
pushd $WORKDIR > /dev/null
# Any stuff that you need to do before you start building the wheels
# Runs in the root directory of this repository.
if [[ ! -d $WORKDIR/pillow-depends-main ]]; then
if [[ ! -f $PROJECTDIR/pillow-depends-main.zip ]]; then
echo "Download pillow dependency sources..."
curl -fSL -o $PROJECTDIR/pillow-depends-main.zip https://github.com/python-pillow/pillow-depends/archive/main.zip
fi
echo "Unpacking pillow dependency sources..."
untar $PROJECTDIR/pillow-depends-main.zip
fi
if [[ -n "$IS_MACOS" ]]; then
# Ensure the basic structure of the build prefix directory exists.
mkdir -p "$BUILD_PREFIX/bin"
mkdir -p "$BUILD_PREFIX/lib"
# Ensure pkg-config is available. This is done *before* setting CC, CFLAGS
# etc. to ensure that the build is *always* a macOS build, even when building
# for iOS.
build_pkg_config
# Ensure cmake is available, and that the default prefix used by CMake is
# the build prefix
python3 -m pip install cmake
export CMAKE_PREFIX_PATH=$BUILD_PREFIX
if [[ -n "$IOS_SDK" ]]; then
export AR="$(xcrun --find --sdk $IOS_SDK ar)"
export CPP="$(xcrun --find --sdk $IOS_SDK clang) -E"
export CC=$(xcrun --find --sdk $IOS_SDK clang)
export CXX=$(xcrun --find --sdk $IOS_SDK clang++)
export LD=$(xcrun --find --sdk $IOS_SDK ld)
export STRIP=$(xcrun --find --sdk $IOS_SDK strip)
CPPFLAGS="$CPPFLAGS --sysroot=$IOS_SDK_PATH"
CFLAGS="-target $IOS_HOST_TRIPLE --sysroot=$IOS_SDK_PATH -mios-version-min=$IPHONEOS_DEPLOYMENT_TARGET"
CXXFLAGS="-target $IOS_HOST_TRIPLE --sysroot=$IOS_SDK_PATH -mios-version-min=$IPHONEOS_DEPLOYMENT_TARGET"
# Having IPHONEOS_DEPLOYMENT_TARGET in the environment causes problems
# with some cross-building toolchains, because it introduces implicit
# behavior into clang.
unset IPHONEOS_DEPLOYMENT_TARGET
# Now that we know CC etc., we can create a meson cross-configuration file
create_meson_cross_config
fi
fi
wrap_wheel_builder build
# A safety catch for iOS. iOS can't use dynamic libraries, but when both are
# available, clang prefers to link against the dynamic library rather than the
# static one. The only way to reliably
# prevent this is to not have dynamic libraries available in the first place.
# The build process *shouldn't* generate any dylibs... but just in case, purge
# any dylibs that *have* been installed into the build prefix directory.
if [[ -n "$IOS_SDK" ]]; then
find "$BUILD_PREFIX" -name "*.dylib" -exec rm -rf {} \;
fi
# Return to the project root to finish the build
popd > /dev/null
# Append licenses
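# Each file in wheels/dependency_licenses is appended to LICENSE under a "----" divider followed by the dependency name.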
for filename in wheels/dependency_licenses/*; do
echo -e "\n\n----\n\n$(basename $filename | cut -f 1 -d '.')\n" | cat >> LICENSE
cat $filename >> LICENSE
done


@ -1,26 +0,0 @@
param ([string]$venv, [string]$pillow="C:\pillow")
$ErrorActionPreference = 'Stop'
$ProgressPreference = 'SilentlyContinue'
Set-PSDebug -Trace 1
if ("$venv" -like "*\cibw-run-*\pp*-win_amd64\*") {
# unlike CPython, PyPy requires Visual C++ Redistributable to be installed
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
Invoke-WebRequest -Uri 'https://aka.ms/vs/15/release/vc_redist.x64.exe' -OutFile 'vc_redist.x64.exe'
C:\vc_redist.x64.exe /install /quiet /norestart | Out-Null
}
$env:path += ";$pillow\winbuild\build\bin\"
if (Test-Path $venv\Scripts\pypy.exe) {
$python = "pypy.exe"
} else {
$python = "python.exe"
}
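# Setting GlobalFlag to 0x02000000 (FLG_HEAP_PAGE_ALLOCS) enables page-heap verification for python.exe, roughly the registry equivalent of `gflags.exe /p /enable python.exe`.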
& reg add "HKLM\SOFTWARE\Microsoft\Windows NT\CurrentVersion\Image File Execution Options\python.exe" /v "GlobalFlag" /t REG_SZ /d "0x02000000" /f
cd $pillow
& $venv\Scripts\$python -VV
if (!$?) { exit $LASTEXITCODE }
& $venv\Scripts\$python selftest.py
if (!$?) { exit $LASTEXITCODE }
& $venv\Scripts\$python -m pytest -vv -x checks\check_wheel.py
if (!$?) { exit $LASTEXITCODE }
& $venv\Scripts\$python -m pytest -vv -x Tests
if (!$?) { exit $LASTEXITCODE }


@ -1,37 +0,0 @@
#!/bin/bash
set -e
# Ensure fribidi is installed by the system.
if [[ "$OSTYPE" == "darwin"* ]]; then
# If Homebrew is on the path during the build, it may leak into the wheels.
# However, we *do* need Homebrew to provide a copy of fribidi for
# testing purposes so that we can verify the fribidi shim works as expected.
if [[ "$(uname -m)" == "x86_64" ]]; then
HOMEBREW_PREFIX=/usr/local
else
HOMEBREW_PREFIX=/opt/homebrew
fi
$HOMEBREW_PREFIX/bin/brew install fribidi
# Add the lib folder for fribidi so that the vendored library can be found.
# Don't use $HOMEBREW_PREFIX/lib directly - use the lib folder where the
# installed copy of fribidi is cellared. This ensures we don't pick up the
# Homebrew version of any other library that we're dependent on (most notably,
# freetype).
export DYLD_LIBRARY_PATH=$(dirname $(realpath $HOMEBREW_PREFIX/lib/libfribidi.dylib))
elif [ "${AUDITWHEEL_POLICY::9}" == "musllinux" ]; then
apk add curl fribidi
else
yum install -y fribidi
fi
if [ ! -d "test-images-main" ]; then
curl -fsSL -o pillow-test-images.zip https://github.com/python-pillow/test-images/archive/main.zip
unzip pillow-test-images.zip
mv test-images-main/* Tests/images
fi
# Runs tests
python3 selftest.py
python3 -m pytest -vv -x checks/check_wheel.py
python3 -m pytest -vv -x


@ -1,289 +0,0 @@
name: Wheels
on:
schedule:
# ┌───────────── minute (0 - 59)
# │ ┌───────────── hour (0 - 23)
# │ │ ┌───────────── day of the month (1 - 31)
# │ │ │ ┌───────────── month (1 - 12 or JAN-DEC)
# │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT)
# │ │ │ │ │
- cron: "42 1 * * 0,3"
push:
paths:
- ".ci/requirements-cibw.txt"
- ".github/workflows/wheel*"
- "pyproject.toml"
- "setup.py"
- "wheels/*"
- "winbuild/build_prepare.py"
- "winbuild/fribidi.cmake"
tags:
- "*"
pull_request:
paths:
- ".ci/requirements-cibw.txt"
- ".github/workflows/wheel*"
- "pyproject.toml"
- "setup.py"
- "wheels/*"
- "winbuild/build_prepare.py"
- "winbuild/fribidi.cmake"
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
FORCE_COLOR: 1
jobs:
build-native-wheels:
if: github.event_name != 'schedule' || github.repository_owner == 'python-pillow'
name: ${{ matrix.name }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
include:
- name: "macOS 10.10 x86_64"
platform: macos
os: macos-13
cibw_arch: x86_64
build: "cp3{9,10,11}*"
macosx_deployment_target: "10.10"
- name: "macOS 10.13 x86_64"
platform: macos
os: macos-13
cibw_arch: x86_64
build: "cp3{12,13,14}*"
macosx_deployment_target: "10.13"
- name: "macOS 10.15 x86_64"
platform: macos
os: macos-13
cibw_arch: x86_64
build: "pp3*"
macosx_deployment_target: "10.15"
- name: "macOS arm64"
platform: macos
os: macos-latest
cibw_arch: arm64
macosx_deployment_target: "11.0"
- name: "manylinux2014 and musllinux x86_64"
platform: linux
os: ubuntu-latest
cibw_arch: x86_64
manylinux: "manylinux2014"
- name: "manylinux_2_28 x86_64"
platform: linux
os: ubuntu-latest
cibw_arch: x86_64
build: "*manylinux*"
- name: "manylinux2014 and musllinux aarch64"
platform: linux
os: ubuntu-24.04-arm
cibw_arch: aarch64
manylinux: "manylinux2014"
- name: "manylinux_2_28 aarch64"
platform: linux
os: ubuntu-24.04-arm
cibw_arch: aarch64
build: "*manylinux*"
- name: "iOS arm64 device"
platform: ios
os: macos-latest
cibw_arch: arm64_iphoneos
- name: "iOS arm64 simulator"
platform: ios
os: macos-latest
cibw_arch: arm64_iphonesimulator
- name: "iOS x86_64 simulator"
platform: ios
os: macos-13
cibw_arch: x86_64_iphonesimulator
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
submodules: true
- uses: actions/setup-python@v5
with:
python-version: "3.x"
- name: Install cibuildwheel
run: |
python3 -m pip install -r .ci/requirements-cibw.txt
- name: Build wheels
run: |
python3 -m cibuildwheel --output-dir wheelhouse
env:
CIBW_PLATFORM: ${{ matrix.platform }}
CIBW_ARCHS: ${{ matrix.cibw_arch }}
CIBW_BUILD: ${{ matrix.build }}
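# CIBW_ENABLE opts the build in to pre-release CPython, free-threaded CPython and PyPy wheels.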
CIBW_ENABLE: cpython-prerelease cpython-freethreading pypy
CIBW_MANYLINUX_AARCH64_IMAGE: ${{ matrix.manylinux }}
CIBW_MANYLINUX_PYPY_AARCH64_IMAGE: ${{ matrix.manylinux }}
CIBW_MANYLINUX_PYPY_X86_64_IMAGE: ${{ matrix.manylinux }}
CIBW_MANYLINUX_X86_64_IMAGE: ${{ matrix.manylinux }}
MACOSX_DEPLOYMENT_TARGET: ${{ matrix.macosx_deployment_target }}
- uses: actions/upload-artifact@v4
with:
name: dist-${{ matrix.name }}
path: ./wheelhouse/*.whl
windows:
if: github.event_name != 'schedule' || github.repository_owner == 'python-pillow'
name: Windows ${{ matrix.cibw_arch }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
include:
- cibw_arch: x86
os: windows-latest
- cibw_arch: AMD64
os: windows-latest
- cibw_arch: ARM64
os: windows-11-arm
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Checkout extra test images
uses: actions/checkout@v4
with:
persist-credentials: false
repository: python-pillow/test-images
path: Tests\test-images
- uses: actions/setup-python@v5
with:
python-version: "3.x"
- name: Install cibuildwheel
run: |
python.exe -m pip install -r .ci/requirements-cibw.txt
- name: Prepare for build
run: |
choco install nasm --no-progress
echo "C:\Program Files\NASM" >> $env:GITHUB_PATH
# Install extra test images
xcopy /S /Y Tests\test-images\* Tests\images
& python.exe winbuild\build_prepare.py -v --no-imagequant --architecture=${{ matrix.cibw_arch }}
shell: pwsh
- name: Build wheels
run: |
setlocal EnableDelayedExpansion
for %%f in (winbuild\build\license\*) do (
set x=%%~nf
rem Skip FriBiDi license, it is not included in the wheel.
set fribidi=!x:~0,7!
if NOT !fribidi!==fribidi (
rem Skip imagequant license, it is not included in the wheel.
set libimagequant=!x:~0,13!
if NOT !libimagequant!==libimagequant (
echo. >> LICENSE
echo ===== %%~nf ===== >> LICENSE
echo. >> LICENSE
type %%f >> LICENSE
)
)
)
call winbuild\\build\\build_env.cmd
%pythonLocation%\python.exe -m cibuildwheel . --output-dir wheelhouse
env:
CIBW_ARCHS: ${{ matrix.cibw_arch }}
CIBW_BEFORE_ALL: "{package}\\winbuild\\build\\build_dep_all.cmd"
CIBW_CACHE_PATH: "C:\\cibw"
CIBW_ENABLE: cpython-prerelease cpython-freethreading pypy
CIBW_TEST_SKIP: "*-win_arm64"
CIBW_TEST_COMMAND: 'docker run --rm
-v {project}:C:\pillow
-v C:\cibw:C:\cibw
-v %CD%\..\venv-test:%CD%\..\venv-test
-e CI -e GITHUB_ACTIONS
mcr.microsoft.com/windows/servercore:ltsc2022
powershell C:\pillow\.github\workflows\wheels-test.ps1 %CD%\..\venv-test'
shell: cmd
- name: Upload wheels
uses: actions/upload-artifact@v4
with:
name: dist-windows-${{ matrix.cibw_arch }}
path: ./wheelhouse/*.whl
- name: Upload fribidi.dll
uses: actions/upload-artifact@v4
with:
name: fribidi-windows-${{ matrix.cibw_arch }}
path: winbuild\build\bin\fribidi*
sdist:
if: github.event_name != 'schedule'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.x"
- run: make sdist
- uses: actions/upload-artifact@v4
with:
name: dist-sdist
path: dist/*.tar.gz
scientific-python-nightly-wheels-publish:
if: github.repository_owner == 'python-pillow' && (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch')
needs: [build-native-wheels, windows]
runs-on: ubuntu-latest
name: Upload wheels to scientific-python-nightly-wheels
steps:
- uses: actions/download-artifact@v4
with:
pattern: dist-*
path: dist
merge-multiple: true
- name: Upload wheels to scientific-python-nightly-wheels
uses: scientific-python/upload-nightly-action@b36e8c0c10dbcfd2e05bf95f17ef8c14fd708dbf # 0.6.2
with:
artifacts_path: dist
anaconda_nightly_upload_token: ${{ secrets.ANACONDA_ORG_UPLOAD_TOKEN }}
pypi-publish:
if: github.repository_owner == 'python-pillow' && github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
needs: [build-native-wheels, windows, sdist]
runs-on: ubuntu-latest
name: Upload release to PyPI
environment:
name: release-pypi
url: https://pypi.org/p/Pillow
permissions:
id-token: write
steps:
- uses: actions/download-artifact@v4
with:
pattern: dist-*
path: dist
merge-multiple: true
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
attestations: true

7
.github/zizmor.yml vendored

@ -1,7 +0,0 @@
# Configuration for the zizmor static analysis tool, run via pre-commit in CI
# https://woodruffw.github.io/zizmor/configuration/
rules:
unpinned-uses:
config:
policies:
"*": ref-pin

7
.gitignore vendored

@ -19,7 +19,6 @@ lib64/
parts/ parts/
sdist/ sdist/
var/ var/
wheelhouse/
*.egg-info/ *.egg-info/
.installed.cfg .installed.cfg
*.egg *.egg
@ -80,7 +79,7 @@ docs/_build/
# JetBrains # JetBrains
.idea .idea
# Extra test images installed from python-pillow/test-images # Extra test images installed from pillow-depends/test_images
Tests/images/README.md Tests/images/README.md
Tests/images/crash_1.tif Tests/images/crash_1.tif
Tests/images/crash_2.tif Tests/images/crash_2.tif
@ -91,9 +90,5 @@ Tests/images/msp
Tests/images/picins Tests/images/picins
Tests/images/sunraster Tests/images/sunraster
# Test and dependency downloads
pillow-depends-main.zip
pillow-test-images.zip
# pyinstaller # pyinstaller
*.spec *.spec

3
.gitmodules vendored

@ -1,3 +0,0 @@
[submodule "multibuild"]
path = wheels/multibuild
url = https://github.com/multi-build/multibuild.git


@ -1,92 +1,65 @@
repos: repos:
- repo: https://github.com/astral-sh/ruff-pre-commit - repo: https://github.com/psf/black
rev: v0.12.2 rev: 22.12.0
hooks:
- id: ruff-check
args: [--exit-non-zero-on-fix]
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 25.1.0
hooks: hooks:
- id: black - id: black
args: [--target-version=py37]
# Only .py files, until https://github.com/psf/black/issues/402 resolved
files: \.py$
types: []
- repo: https://github.com/PyCQA/isort
rev: 5.11.1
hooks:
- id: isort
- repo: https://github.com/PyCQA/bandit - repo: https://github.com/PyCQA/bandit
rev: 1.8.6 rev: 1.7.4
hooks: hooks:
- id: bandit - id: bandit
args: [--severity-level=high] args: [--severity-level=high]
files: ^src/ files: ^src/
- repo: https://github.com/asottile/yesqa
rev: v1.4.0
hooks:
- id: yesqa
- repo: https://github.com/Lucas-C/pre-commit-hooks - repo: https://github.com/Lucas-C/pre-commit-hooks
rev: v1.5.5 rev: v1.3.1
hooks: hooks:
- id: remove-tabs - id: remove-tabs
exclude: (Makefile$|\.bat$|\.cmake$|\.eps$|\.fits$|\.gd$|\.opt$|\.patch$) exclude: (Makefile$|\.bat$|\.cmake$|\.eps$|\.fits$|\.opt$)
- repo: https://github.com/pre-commit/mirrors-clang-format - repo: https://github.com/PyCQA/flake8
rev: v20.1.7 rev: 6.0.0
hooks: hooks:
- id: clang-format - id: flake8
types: [c] additional_dependencies:
exclude: ^src/thirdparty/ [flake8-2020, flake8-errmsg, flake8-implicit-str-concat]
- repo: https://github.com/pre-commit/pygrep-hooks - repo: https://github.com/pre-commit/pygrep-hooks
rev: v1.10.0 rev: v1.9.0
hooks: hooks:
- id: python-check-blanket-noqa
- id: rst-backticks - id: rst-backticks
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0 rev: v4.4.0
hooks: hooks:
- id: check-executables-have-shebangs
- id: check-shebang-scripts-are-executable
- id: check-merge-conflict - id: check-merge-conflict
- id: check-json - id: check-json
- id: check-toml
- id: check-yaml - id: check-yaml
args: [--allow-multiple-documents]
- id: end-of-file-fixer
exclude: ^Tests/images/|\.patch$
- id: trailing-whitespace
exclude: ^.github/.*TEMPLATE|^Tests/(fonts|images)/|\.patch$
- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.33.2
hooks:
- id: check-github-workflows
- id: check-readthedocs
- id: check-renovate
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.11.0
hooks:
- id: zizmor
- repo: https://github.com/sphinx-contrib/sphinx-lint - repo: https://github.com/sphinx-contrib/sphinx-lint
rev: v1.0.0 rev: v0.6.7
hooks: hooks:
- id: sphinx-lint - id: sphinx-lint
- repo: https://github.com/tox-dev/pyproject-fmt
rev: v2.6.0
hooks:
- id: pyproject-fmt
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.24.1
hooks:
- id: validate-pyproject
additional_dependencies: [trove-classifiers>=2024.10.12]
- repo: https://github.com/tox-dev/tox-ini-fmt - repo: https://github.com/tox-dev/tox-ini-fmt
rev: 1.5.0 rev: 0.5.2
hooks: hooks:
- id: tox-ini-fmt - id: tox-ini-fmt
- repo: meta
hooks:
- id: check-hooks-apply
- id: check-useless-excludes
ci: ci:
autoupdate_schedule: monthly autoupdate_schedule: monthly


@ -1,19 +1,5 @@
version: 2 version: 2
sphinx:
configuration: docs/conf.py
formats: [pdf]
build:
os: ubuntu-lts-latest
tools:
python: "3"
jobs:
post_checkout:
- git remote add upstream https://github.com/python-pillow/Pillow.git # For forks
- git fetch upstream --tags
python: python:
install: install:
- method: pip - method: pip

File diff suppressed because it is too large

10
LICENSE

@ -1,20 +1,20 @@
The Python Imaging Library (PIL) is The Python Imaging Library (PIL) is
Copyright © 1997-2011 by Secret Labs AB Copyright © 1997-2011 by Secret Labs AB
Copyright © 1995-2011 by Fredrik Lundh and contributors Copyright © 1995-2011 by Fredrik Lundh
Pillow is the friendly PIL fork. It is Pillow is the friendly PIL fork. It is
Copyright © 2010 by Jeffrey A. Clark and contributors Copyright © 2010-2023 by Alex Clark and contributors
Like PIL, Pillow is licensed under the open source MIT-CMU License: Like PIL, Pillow is licensed under the open source HPND License:
By obtaining, using, and/or copying this software and/or its associated By obtaining, using, and/or copying this software and/or its associated
documentation, you agree that you have read, understood, and will comply documentation, you agree that you have read, understood, and will comply
with the following terms and conditions: with the following terms and conditions:
Permission to use, copy, modify and distribute this software and its Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee is hereby granted, associated documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appears in all copies, and that provided that the above copyright notice appears in all copies, and that
both that copyright notice and this permission notice appear in supporting both that copyright notice and this permission notice appear in supporting
documentation, and that the name of Secret Labs AB or the author not be documentation, and that the name of Secret Labs AB or the author not be


@ -5,43 +5,26 @@ include *.md
include *.py include *.py
include *.rst include *.rst
include *.sh include *.sh
include *.toml
include *.txt include *.txt
include *.yaml include *.yaml
include .flake8
include LICENSE include LICENSE
include Makefile include Makefile
include tox.ini include tox.ini
graft Tests graft Tests
graft Tests/images
graft checks
graft patches
graft src graft src
graft depends graft depends
graft winbuild graft winbuild
graft docs graft docs
graft _custom_build
# build/src control detritus # build/src control detritus
exclude .appveyor.yml
exclude .clang-format exclude .clang-format
exclude .coveragerc exclude .coveragerc
exclude .editorconfig exclude .editorconfig
exclude .readthedocs.yml exclude .readthedocs.yml
exclude codecov.yml exclude codecov.yml
exclude renovate.json exclude renovate.json
exclude Tests/images/README.md
exclude Tests/images/crash*.tif
exclude Tests/images/string_dimension.tiff
global-exclude .git* global-exclude .git*
global-exclude *.pyc global-exclude *.pyc
global-exclude *.so global-exclude *.so
prune .ci prune .ci
prune wheels
prune winbuild/build
prune winbuild/depends
prune Tests/errors
prune Tests/images/jpeg2000
prune Tests/images/msp
prune Tests/images/picins
prune Tests/images/sunraster
prune Tests/test-images


@ -2,6 +2,7 @@
.PHONY: clean .PHONY: clean
clean: clean:
python3 setup.py clean
rm src/PIL/*.so || true rm src/PIL/*.so || true
rm -r build || true rm -r build || true
find . -name __pycache__ | xargs rm -r || true find . -name __pycache__ | xargs rm -r || true
@ -15,18 +16,10 @@ coverage:
python3 -m coverage report python3 -m coverage report
.PHONY: doc .PHONY: doc
.PHONY: html doc:
doc html: python3 -c "import PIL" > /dev/null 2>&1 || python3 -m pip install .
$(MAKE) -C docs html $(MAKE) -C docs html
.PHONY: htmlview
htmlview:
$(MAKE) -C docs htmlview
.PHONY: htmllive
htmllive:
$(MAKE) -C docs htmllive
.PHONY: doccheck .PHONY: doccheck
doccheck: doccheck:
$(MAKE) doc $(MAKE) doc
@ -45,16 +38,19 @@ help:
@echo " coverage run coverage test (in progress)" @echo " coverage run coverage test (in progress)"
@echo " doc make HTML docs" @echo " doc make HTML docs"
@echo " docserve run an HTTP server on the docs directory" @echo " docserve run an HTTP server on the docs directory"
@echo " html make HTML docs" @echo " html to make standalone HTML files"
@echo " htmlview open the index page built by the html target in your browser" @echo " inplace make inplace extension"
@echo " htmllive rebuild and reload HTML files in your browser"
@echo " install make and install" @echo " install make and install"
@echo " install-coverage make and install with C coverage" @echo " install-coverage make and install with C coverage"
@echo " lint run the lint checks" @echo " lint run the lint checks"
@echo " lint-fix run Ruff to (mostly) fix lint issues" @echo " lint-fix run Black and isort to (mostly) fix lint issues"
@echo " release-test run code and package tests before release" @echo " release-test run code and package tests before release"
@echo " test run tests on installed Pillow" @echo " test run tests on installed Pillow"
.PHONY: inplace
inplace: clean
python3 -m pip install -e --global-option="build_ext" --global-option="--inplace" .
.PHONY: install .PHONY: install
install: install:
python3 -m pip -v install . python3 -m pip -v install .
@ -62,7 +58,7 @@ install:
.PHONY: install-coverage .PHONY: install-coverage
install-coverage: install-coverage:
CFLAGS="-coverage -Werror=implicit-function-declaration" python3 -m pip -v install . CFLAGS="-coverage -Werror=implicit-function-declaration" python3 -m pip -v install --global-option="build_ext" .
python3 selftest.py python3 selftest.py
.PHONY: debug .PHONY: debug
@ -71,15 +67,16 @@ debug:
# for our stuff, kills optimization, and redirects to dev null so we # for our stuff, kills optimization, and redirects to dev null so we
# see any build failures. # see any build failures.
make clean > /dev/null make clean > /dev/null
CFLAGS='-g -O0' python3 -m pip -v install . > /dev/null CFLAGS='-g -O0' python3 -m pip -v install --global-option="build_ext" . > /dev/null
.PHONY: release-test .PHONY: release-test
release-test: release-test:
python3 checks/check_release_notes.py
python3 -m pip install -e .[tests] python3 -m pip install -e .[tests]
python3 selftest.py python3 selftest.py
python3 -m pytest Tests python3 -m pytest Tests
python3 -m pip install . python3 -m pip install .
-rm dist/*.egg
-rmdir dist
python3 -m pytest -qq python3 -m pytest -qq
python3 -m check_manifest python3 -m check_manifest
python3 -m pyroma . python3 -m pyroma .
@ -97,27 +94,13 @@ test:
python3 -c "import pytest" > /dev/null 2>&1 || python3 -m pip install pytest python3 -c "import pytest" > /dev/null 2>&1 || python3 -m pip install pytest
python3 -m pytest -qq python3 -m pytest -qq
.PHONY: test-p
test-p:
python3 -c "import xdist" > /dev/null 2>&1 || python3 -m pip install pytest-xdist
python3 -m pytest -qq -n auto
.PHONY: valgrind .PHONY: valgrind
valgrind: valgrind:
python3 -c "import pytest_valgrind" > /dev/null 2>&1 || python3 -m pip install pytest-valgrind python3 -c "import pytest_valgrind" > /dev/null 2>&1 || python3 -m pip install pytest-valgrind
PILLOW_VALGRIND_TEST=true PYTHONMALLOC=malloc valgrind --suppressions=Tests/oss-fuzz/python.supp --leak-check=no \ PYTHONMALLOC=malloc valgrind --suppressions=Tests/oss-fuzz/python.supp --leak-check=no \
--log-file=/tmp/valgrind-output \ --log-file=/tmp/valgrind-output \
python3 -m pytest --no-memcheck -vv --valgrind --valgrind-log=/tmp/valgrind-output python3 -m pytest --no-memcheck -vv --valgrind --valgrind-log=/tmp/valgrind-output
.PHONY: valgrind-leak
valgrind-leak:
python3 -c "import pytest_valgrind" > /dev/null 2>&1 || python3 -m pip install pytest-valgrind
PILLOW_VALGRIND_TEST=true PYTHONMALLOC=malloc valgrind --suppressions=Tests/oss-fuzz/python.supp \
--leak-check=full --show-leak-kinds=definite --errors-for-leak-kinds=definite \
--log-file=/tmp/valgrind-output \
python3 -m pytest -vv --valgrind --valgrind-log=/tmp/valgrind-output
.PHONY: readme .PHONY: readme
readme: readme:
python3 -c "import markdown2" > /dev/null 2>&1 || python3 -m pip install markdown2 python3 -c "import markdown2" > /dev/null 2>&1 || python3 -m pip install markdown2
@ -132,11 +115,6 @@ lint:
.PHONY: lint-fix .PHONY: lint-fix
lint-fix: lint-fix:
python3 -c "import black" > /dev/null 2>&1 || python3 -m pip install black python3 -c "import black" > /dev/null 2>&1 || python3 -m pip install black
python3 -m black . python3 -c "import isort" > /dev/null 2>&1 || python3 -m pip install isort
python3 -c "import ruff" > /dev/null 2>&1 || python3 -m pip install ruff python3 -m black --target-version py37 .
python3 -m ruff check --fix . python3 -m isort .
.PHONY: mypy
mypy:
python3 -c "import tox" > /dev/null 2>&1 || python3 -m pip install tox
python3 -m tox -e mypy


@ -6,9 +6,9 @@
## Python Imaging Library (Fork) ## Python Imaging Library (Fork)
Pillow is the friendly PIL fork by [Jeffrey A. Clark and Pillow is the friendly PIL fork by [Alex Clark and
contributors](https://github.com/python-pillow/Pillow/graphs/contributors). Contributors](https://github.com/python-pillow/Pillow/graphs/contributors).
PIL is the Python Imaging Library by Fredrik Lundh and contributors. PIL is the Python Imaging Library by Fredrik Lundh and Contributors.
As of 2019, Pillow development is As of 2019, Pillow development is
[supported by Tidelift](https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=readme&utm_campaign=enterprise). [supported by Tidelift](https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=readme&utm_campaign=enterprise).
@ -36,16 +36,25 @@ As of 2019, Pillow development is
<a href="https://github.com/python-pillow/Pillow/actions/workflows/test-mingw.yml"><img <a href="https://github.com/python-pillow/Pillow/actions/workflows/test-mingw.yml"><img
alt="GitHub Actions build status (Test MinGW)" alt="GitHub Actions build status (Test MinGW)"
src="https://github.com/python-pillow/Pillow/workflows/Test%20MinGW/badge.svg"></a> src="https://github.com/python-pillow/Pillow/workflows/Test%20MinGW/badge.svg"></a>
<a href="https://github.com/python-pillow/Pillow/actions/workflows/test-cygwin.yml"><img
alt="GitHub Actions build status (Test Cygwin)"
src="https://github.com/python-pillow/Pillow/workflows/Test%20Cygwin/badge.svg"></a>
<a href="https://github.com/python-pillow/Pillow/actions/workflows/test-docker.yml"><img <a href="https://github.com/python-pillow/Pillow/actions/workflows/test-docker.yml"><img
alt="GitHub Actions build status (Test Docker)" alt="GitHub Actions build status (Test Docker)"
src="https://github.com/python-pillow/Pillow/workflows/Test%20Docker/badge.svg"></a> src="https://github.com/python-pillow/Pillow/workflows/Test%20Docker/badge.svg"></a>
<a href="https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml"><img <a href="https://ci.appveyor.com/project/python-pillow/Pillow"><img
alt="GitHub Actions build status (Wheels)" alt="AppVeyor CI build status (Windows)"
src="https://github.com/python-pillow/Pillow/workflows/Wheels/badge.svg"></a> src="https://img.shields.io/appveyor/build/python-pillow/Pillow/main.svg?label=Windows%20build"></a>
<a href="https://github.com/python-pillow/pillow-wheels/actions"><img
alt="GitHub Actions wheels build status (Wheels)"
src="https://github.com/python-pillow/pillow-wheels/workflows/Wheels/badge.svg"></a>
<a href="https://app.travis-ci.com/github/python-pillow/pillow-wheels"><img
alt="Travis CI wheels build status (aarch64)"
src="https://img.shields.io/travis/com/python-pillow/pillow-wheels/main.svg?label=aarch64%20wheels"></a>
<a href="https://app.codecov.io/gh/python-pillow/Pillow"><img <a href="https://app.codecov.io/gh/python-pillow/Pillow"><img
alt="Code coverage" alt="Code coverage"
src="https://codecov.io/gh/python-pillow/Pillow/branch/main/graph/badge.svg"></a> src="https://codecov.io/gh/python-pillow/Pillow/branch/main/graph/badge.svg"></a>
<a href="https://issues.oss-fuzz.com/issues?q=title:pillow"><img <a href="https://bugs.chromium.org/p/oss-fuzz/issues/list?sort=-opened&can=1&q=proj:pillow"><img
alt="Fuzzing Status" alt="Fuzzing Status"
src="https://oss-fuzz-build-logs.storage.googleapis.com/badges/pillow.svg"></a> src="https://oss-fuzz-build-logs.storage.googleapis.com/badges/pillow.svg"></a>
</td> </td>
@ -58,16 +67,16 @@ As of 2019, Pillow development is
src="https://zenodo.org/badge/17549/python-pillow/Pillow.svg"></a> src="https://zenodo.org/badge/17549/python-pillow/Pillow.svg"></a>
<a href="https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=badge"><img <a href="https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=badge"><img
alt="Tidelift" alt="Tidelift"
src="https://tidelift.com/badges/package/pypi/pillow?style=flat"></a> src="https://tidelift.com/badges/package/pypi/Pillow?style=flat"></a>
<a href="https://pypi.org/project/pillow/"><img <a href="https://pypi.org/project/Pillow/"><img
alt="Newest PyPI version" alt="Newest PyPI version"
src="https://img.shields.io/pypi/v/pillow.svg"></a> src="https://img.shields.io/pypi/v/pillow.svg"></a>
<a href="https://pypi.org/project/pillow/"><img <a href="https://pypi.org/project/Pillow/"><img
alt="Number of PyPI downloads" alt="Number of PyPI downloads"
src="https://img.shields.io/pypi/dm/pillow.svg"></a> src="https://img.shields.io/pypi/dm/pillow.svg"></a>
<a href="https://www.bestpractices.dev/projects/6331"><img <a href="https://bestpractices.coreinfrastructure.org/projects/6331"><img
alt="OpenSSF Best Practices" alt="OpenSSF Best Practices"
src="https://www.bestpractices.dev/projects/6331/badge"></a> src="https://bestpractices.coreinfrastructure.org/projects/6331/badge"></a>
</td> </td>
</tr> </tr>
<tr> <tr>
@ -76,10 +85,9 @@ As of 2019, Pillow development is
<a href="https://gitter.im/python-pillow/Pillow?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge"><img <a href="https://gitter.im/python-pillow/Pillow?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge"><img
alt="Join the chat at https://gitter.im/python-pillow/Pillow" alt="Join the chat at https://gitter.im/python-pillow/Pillow"
src="https://badges.gitter.im/python-pillow/Pillow.svg"></a> src="https://badges.gitter.im/python-pillow/Pillow.svg"></a>
<a href="https://fosstodon.org/@pillow"><img <a href="https://twitter.com/PythonPillow"><img
alt="Follow on https://fosstodon.org/@pillow" alt="Follow on https://twitter.com/PythonPillow"
src="https://img.shields.io/badge/publish-on%20Mastodon-595aff.svg" src="https://img.shields.io/badge/tweet-on%20Twitter-00aced.svg"></a>
rel="me"></a>
</td> </td>
</tr> </tr>
</table> </table>
@ -92,18 +100,18 @@ This library provides extensive file format support, an efficient internal repre
The core image library is designed for fast access to data stored in a few basic pixel formats. It should provide a solid foundation for a general image processing tool. The core image library is designed for fast access to data stored in a few basic pixel formats. It should provide a solid foundation for a general image processing tool.
## More information ## More Information
- [Documentation](https://pillow.readthedocs.io/) - [Documentation](https://pillow.readthedocs.io/)
- [Installation](https://pillow.readthedocs.io/en/latest/installation/basic-installation.html) - [Installation](https://pillow.readthedocs.io/en/latest/installation.html)
- [Handbook](https://pillow.readthedocs.io/en/latest/handbook/index.html) - [Handbook](https://pillow.readthedocs.io/en/latest/handbook/index.html)
- [Contribute](https://github.com/python-pillow/Pillow/blob/main/.github/CONTRIBUTING.md) - [Contribute](https://github.com/python-pillow/Pillow/blob/main/.github/CONTRIBUTING.md)
- [Issues](https://github.com/python-pillow/Pillow/issues) - [Issues](https://github.com/python-pillow/Pillow/issues)
- [Pull requests](https://github.com/python-pillow/Pillow/pulls) - [Pull requests](https://github.com/python-pillow/Pillow/pulls)
- [Release notes](https://pillow.readthedocs.io/en/stable/releasenotes/index.html) - [Release notes](https://pillow.readthedocs.io/en/stable/releasenotes/index.html)
- [Changelog](https://github.com/python-pillow/Pillow/releases) - [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst)
- [Pre-fork](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst#pre-fork) - [Pre-fork](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst#pre-fork)
## Report a vulnerability ## Report a Vulnerability
To report a security vulnerability, please follow the procedure described in the [Tidelift security policy](https://tidelift.com/docs/security). To report a security vulnerability, please follow the procedure described in the [Tidelift security policy](https://tidelift.com/docs/security).


@ -1,45 +1,75 @@
# Release checklist # Release Checklist
See https://pillow.readthedocs.io/en/stable/releasenotes/versioning.html for See https://pillow.readthedocs.io/en/stable/releasenotes/versioning.html for
information about how the version numbers line up with releases. information about how the version numbers line up with releases.
## Main release ## Main Release
Released quarterly on January 2nd, April 1st, July 1st and October 15th. Released quarterly on January 2nd, April 1st, July 1st and October 15th.
* [ ] Create a new issue and select the "Maintainers only: Release" template. * [ ] Open a release ticket e.g. https://github.com/python-pillow/Pillow/issues/3154
* [ ] Develop and prepare release in `main` branch.
## Point release * [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) and [AppVeyor](https://ci.appveyor.com/project/python-pillow/Pillow) to confirm passing tests in `main` branch.
* [ ] Check that all of the wheel builds [Pillow Wheel Builder](https://github.com/python-pillow/pillow-wheels) pass the tests in Travis CI and GitHub Actions.
Released as needed for security, installation or critical bug fixes. * [ ] In compliance with [PEP 440](https://www.python.org/dev/peps/pep-0440/), update version identifier in `src/PIL/_version.py`
* [ ] Update `CHANGES.rst`.
* [ ] Make necessary changes in `main` branch. * [ ] Run pre-release check via `make release-test` in a freshly cloned repo.
* [ ] Check out release branch e.g.: * [ ] Create branch and tag for release e.g.:
```bash ```bash
git checkout -t remotes/origin/5.2.x git branch 5.2.x
``` git tag 5.2.0
* [ ] Cherry pick individual commits from `main` branch to release branch e.g. `5.2.x`, then `git push`. git push --all
* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) to confirm passing tests in release branch e.g. `5.2.x`.
* [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/), update version identifier in `src/PIL/_version.py`
* [ ] Run pre-release check via `make release-test`.
* [ ] Create tag for release e.g.:
```bash
git tag 5.2.1
git push --tags git push --tags
``` ```
* [ ] Create and check source distribution: * [ ] Create and check source distribution:
```bash ```bash
make sdist make sdist
``` ```
* [ ] Check the [GitHub Actions "Wheels" workflow](https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml) * [ ] Create [binary distributions](https://github.com/python-pillow/Pillow/blob/main/RELEASING.md#binary-distributions)
has passed, including the "Upload release to PyPI" job. This will have been triggered * [ ] Check and upload all binaries and source distributions e.g.:
by the new tag.
* [ ] Publish the [release on GitHub](https://github.com/python-pillow/Pillow/releases) and then:
```bash ```bash
git push python3 -m twine check --strict dist/*
python3 -m twine upload dist/Pillow-5.2.0*
``` ```
* [ ] Publish the [release on GitHub](https://github.com/python-pillow/Pillow/releases)
* [ ] In compliance with [PEP 440](https://www.python.org/dev/peps/pep-0440/), increment and append `.dev0` to version identifier in `src/PIL/_version.py`
## Embargoed release ## Point Release
Released as needed for security, installation or critical bug fixes.
* [ ] Make necessary changes in `main` branch.
* [ ] Update `CHANGES.rst`.
* [ ] Check out release branch e.g.:
```bash
git checkout -t remotes/origin/5.2.x
```
* [ ] Cherry pick individual commits from `main` branch to release branch e.g. `5.2.x`, then `git push`.
* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) and [AppVeyor](https://ci.appveyor.com/project/python-pillow/Pillow) to confirm passing tests in release branch e.g. `5.2.x`.
* [ ] In compliance with [PEP 440](https://www.python.org/dev/peps/pep-0440/), update version identifier in `src/PIL/_version.py`
* [ ] Run pre-release check via `make release-test`.
* [ ] Create tag for release e.g.:
```bash
git tag 5.2.1
git push
git push --tags
```
* [ ] Create and check source distribution:
```bash
make sdist
```
* [ ] Create [binary distributions](https://github.com/python-pillow/Pillow/blob/main/RELEASING.md#binary-distributions)
* [ ] Check and upload all binaries and source distributions e.g.:
```bash
python3 -m twine check --strict dist/*
python3 -m twine upload dist/Pillow-5.2.1*
```
* [ ] Publish the [release on GitHub](https://github.com/python-pillow/Pillow/releases)
## Embargoed Release
Released as needed privately to individual vendors for critical security-related bug fixes. Released as needed privately to individual vendors for critical security-related bug fixes.
@ -53,25 +83,41 @@ Released as needed privately to individual vendors for critical security-related
```bash ```bash
git checkout 2.5.x git checkout 2.5.x
git tag 2.5.3 git tag 2.5.3
git push origin 2.5.x
git push origin --tags git push origin --tags
``` ```
* [ ] Check the [GitHub Actions "Wheels" workflow](https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml) * [ ] Create and check source distribution:
has passed, including the "Upload release to PyPI" job. This will have been triggered
by the new tag.
* [ ] Publish the [release on GitHub](https://github.com/python-pillow/Pillow/releases) and then:
```bash ```bash
git push origin 2.5.x make sdist
``` ```
* [ ] Create [binary distributions](https://github.com/python-pillow/Pillow/blob/main/RELEASING.md#binary-distributions)
* [ ] Publish the [release on GitHub](https://github.com/python-pillow/Pillow/releases)
## Publicize release ## Binary Distributions
* [ ] Announce release availability via [Mastodon](https://fosstodon.org/@pillow) e.g. https://fosstodon.org/@pillow/110639450470725321 ### Windows
* [ ] Download the artifacts from the [GitHub Actions "Test Windows" workflow](https://github.com/python-pillow/Pillow/actions/workflows/test-windows.yml)
and copy into `dist/`
### Mac and Linux
* [ ] Use the [Pillow Wheel Builder](https://github.com/python-pillow/pillow-wheels):
```bash
git clone https://github.com/python-pillow/pillow-wheels
cd pillow-wheels
./update-pillow-tag.sh [[release tag]]
```
* [ ] Download wheels from the [Pillow Wheel Builder release](https://github.com/python-pillow/pillow-wheels/releases)
and copy into `dist/`
## Publicize Release
* [ ] Announce release availability via [Twitter](https://twitter.com/pythonpillow) e.g. https://twitter.com/PythonPillow/status/1013789184354603010
## Documentation ## Documentation
* [ ] Make sure the [default version for Read the Docs](https://pillow.readthedocs.io/en/stable/) is up-to-date with the release changes * [ ] Make sure the [default version for Read the Docs](https://pillow.readthedocs.io/en/stable/) is up-to-date with the release changes
## Docker images ## Docker Images
* [ ] Update Pillow in the Docker Images repository * [ ] Update Pillow in the Docker Images repository
```bash ```bash


@ -1,5 +1,4 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
from __future__ import annotations
import sys import sys


@ -1,4 +1,4 @@
Pillow tests Pillow Tests
============ ============
Test scripts are named ``test_xxx.py``. Helper classes and functions can be found in ``helper.py``. Test scripts are named ``test_xxx.py``. Helper classes and functions can be found in ``helper.py``.


@ -0,0 +1,58 @@
import time
from PIL import PyAccess
from .helper import hopper
# Not running this test by default. No DOS against CI.
def iterate_get(size, access):
(w, h) = size
for x in range(w):
for y in range(h):
access[(x, y)]
def iterate_set(size, access):
(w, h) = size
for x in range(w):
for y in range(h):
access[(x, y)] = (x % 256, y % 256, 0)
def timer(func, label, *args):
iterations = 5000
starttime = time.time()
for x in range(iterations):
func(*args)
if time.time() - starttime > 10:
print(
"{}: breaking at {} iterations, {:.6f} per iteration".format(
label, x + 1, (time.time() - starttime) / (x + 1.0)
)
)
break
if x == iterations - 1:
endtime = time.time()
print(
"{}: {:.4f} s {:.6f} per iteration".format(
label, endtime - starttime, (endtime - starttime) / (x + 1.0)
)
)
def test_direct():
im = hopper()
im.load()
# im = Image.new( "RGB", (2000, 2000), (1, 3, 2))
caccess = im.im.pixel_access(False)
access = PyAccess.new(im, False)
assert caccess[(0, 0)] == access[(0, 0)]
print("Size: %sx%s" % im.size)
timer(iterate_get, "PyAccess - get", im.size, access)
timer(iterate_set, "PyAccess - set", im.size, access)
timer(iterate_get, "C-api - get", im.size, caccess)
timer(iterate_set, "C-api - set", im.size, caccess)


@ -1,4 +1,4 @@
from __future__ import annotations #!/usr/bin/env python3
from PIL import Image from PIL import Image


@ -1,11 +1,10 @@
from __future__ import annotations
from PIL import Image from PIL import Image
TEST_FILE = "Tests/images/fli_overflow.fli" TEST_FILE = "Tests/images/fli_overflow.fli"
def test_fli_overflow() -> None: def test_fli_overflow():
# this should not crash with a malloc error or access violation # this should not crash with a malloc error or access violation
with Image.open(TEST_FILE) as im: with Image.open(TEST_FILE) as im:
im.load() im.load()


@ -1,6 +1,5 @@
# Tests potential DOS of IcnsImagePlugin with 0 length block. # Tests potential DOS of IcnsImagePlugin with 0 length block.
# Run from anywhere that PIL is importable. # Run from anywhere that PIL is importable.
from __future__ import annotations
from io import BytesIO from io import BytesIO


@ -1,9 +1,4 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
from __future__ import annotations
from collections.abc import Callable
from typing import Any
import pytest import pytest
from PIL import Image from PIL import Image
@ -16,37 +11,31 @@ max_iterations = 10000
pytestmark = pytest.mark.skipif(is_win32(), reason="requires Unix or macOS") pytestmark = pytest.mark.skipif(is_win32(), reason="requires Unix or macOS")
def _get_mem_usage() -> float: def _get_mem_usage():
from resource import RUSAGE_SELF, getpagesize, getrusage from resource import RUSAGE_SELF, getpagesize, getrusage
mem = getrusage(RUSAGE_SELF).ru_maxrss mem = getrusage(RUSAGE_SELF).ru_maxrss
return mem * getpagesize() / 1024 / 1024 return mem * getpagesize() / 1024 / 1024
def _test_leak( def _test_leak(min_iterations, max_iterations, fn, *args, **kwargs):
min_iterations: int,
max_iterations: int,
fn: Callable[..., Image.Image | None],
*args: Any,
) -> None:
mem_limit = None mem_limit = None
for i in range(max_iterations): for i in range(max_iterations):
fn(*args) fn(*args, **kwargs)
mem = _get_mem_usage() mem = _get_mem_usage()
if i < min_iterations: if i < min_iterations:
mem_limit = mem + 1 mem_limit = mem + 1
continue continue
msg = f"memory usage limit exceeded after {i + 1} iterations" msg = f"memory usage limit exceeded after {i + 1} iterations"
assert mem_limit is not None
assert mem <= mem_limit, msg assert mem <= mem_limit, msg
def test_leak_putdata() -> None: def test_leak_putdata():
im = Image.new("RGB", (25, 25)) im = Image.new("RGB", (25, 25))
_test_leak(min_iterations, max_iterations, im.putdata, im.getdata()) _test_leak(min_iterations, max_iterations, im.putdata, im.getdata())
def test_leak_getlist() -> None: def test_leak_getlist():
im = Image.new("P", (25, 25)) im = Image.new("P", (25, 25))
_test_leak( _test_leak(
min_iterations, min_iterations,


@ -1,6 +1,5 @@
# Tests potential DOS of Jpeg2kImagePlugin with 0 length block. # Tests potential DOS of Jpeg2kImagePlugin with 0 length block.
# Run from anywhere that PIL is importable. # Run from anywhere that PIL is importable.
from __future__ import annotations
from io import BytesIO from io import BytesIO

6
checks/check_j2k_leaks.py → Tests/check_j2k_leaks.py Normal file → Executable file

@ -1,5 +1,3 @@
from __future__ import annotations
from io import BytesIO from io import BytesIO
import pytest import pytest
@ -20,7 +18,7 @@ pytestmark = [
] ]
def test_leak_load() -> None: def test_leak_load():
from resource import RLIMIT_AS, RLIMIT_STACK, setrlimit from resource import RLIMIT_AS, RLIMIT_STACK, setrlimit
setrlimit(RLIMIT_STACK, (stack_size, stack_size)) setrlimit(RLIMIT_STACK, (stack_size, stack_size))
@ -30,7 +28,7 @@ def test_leak_load() -> None:
im.load() im.load()
def test_leak_save() -> None: def test_leak_save():
from resource import RLIMIT_AS, RLIMIT_STACK, setrlimit from resource import RLIMIT_AS, RLIMIT_STACK, setrlimit
setrlimit(RLIMIT_STACK, (stack_size, stack_size)) setrlimit(RLIMIT_STACK, (stack_size, stack_size))


@ -0,0 +1,10 @@
import pytest
from PIL import Image
def test_j2k_overflow(tmp_path):
im = Image.new("RGBA", (1024, 131584))
target = str(tmp_path / "temp.jpc")
with pytest.raises(OSError):
im.save(target)


@ -1,3 +1,5 @@
#!/usr/bin/env python3
# Reproductions/tests for OOB read errors in FliDecode.c # Reproductions/tests for OOB read errors in FliDecode.c
# When run in python, all of these images should fail for # When run in python, all of these images should fail for
@ -10,7 +12,7 @@
# the output should be empty. There may be python issues # the output should be empty. There may be python issues
# in the valgrind especially if run in a debug python # in the valgrind especially if run in a debug python
# version. # version.
from __future__ import annotations
from PIL import Image from PIL import Image


@ -1,5 +1,3 @@
from __future__ import annotations
from io import BytesIO from io import BytesIO
import pytest import pytest
@ -13,7 +11,7 @@ iterations = 5000
When run on a system without the jpeg leak fixes, When run on a system without the jpeg leak fixes,
the valgrind runs look like this. the valgrind runs look like this.
valgrind --tool=massif python test-installed.py -s -v checks/check_jpeg_leaks.py valgrind --tool=massif python test-installed.py -s -v Tests/check_jpeg_leaks.py
""" """
@ -77,48 +75,49 @@ post-patch:
""" """
standard_l_qtable = ( def test_qtables_leak():
# fmt: off
16, 11, 10, 16, 24, 40, 51, 61,
12, 12, 14, 19, 26, 58, 60, 55,
14, 13, 16, 24, 40, 57, 69, 56,
14, 17, 22, 29, 51, 87, 80, 62,
18, 22, 37, 56, 68, 109, 103, 77,
24, 35, 55, 64, 81, 104, 113, 92,
49, 64, 78, 87, 103, 121, 120, 101,
72, 92, 95, 98, 112, 100, 103, 99,
# fmt: on
)
standard_chrominance_qtable = (
# fmt: off
17, 18, 24, 47, 99, 99, 99, 99,
18, 21, 26, 66, 99, 99, 99, 99,
24, 26, 56, 99, 99, 99, 99, 99,
47, 66, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
# fmt: on
)
@pytest.mark.parametrize(
"qtables",
(
(standard_l_qtable, standard_chrominance_qtable),
[standard_l_qtable, standard_chrominance_qtable],
),
)
def test_qtables_leak(qtables: tuple[tuple[int, ...]] | list[tuple[int, ...]]) -> None:
im = hopper("RGB") im = hopper("RGB")
standard_l_qtable = [
int(s)
for s in """
16 11 10 16 24 40 51 61
12 12 14 19 26 58 60 55
14 13 16 24 40 57 69 56
14 17 22 29 51 87 80 62
18 22 37 56 68 109 103 77
24 35 55 64 81 104 113 92
49 64 78 87 103 121 120 101
72 92 95 98 112 100 103 99
""".split(
None
)
]
standard_chrominance_qtable = [
int(s)
for s in """
17 18 24 47 99 99 99 99
18 21 26 66 99 99 99 99
24 26 56 99 99 99 99 99
47 66 99 99 99 99 99 99
99 99 99 99 99 99 99 99
99 99 99 99 99 99 99 99
99 99 99 99 99 99 99 99
99 99 99 99 99 99 99 99
""".split(
None
)
]
qtables = [standard_l_qtable, standard_chrominance_qtable]
for _ in range(iterations): for _ in range(iterations):
test_output = BytesIO() test_output = BytesIO()
im.save(test_output, "JPEG", qtables=qtables) im.save(test_output, "JPEG", qtables=qtables)
def test_exif_leak() -> None: def test_exif_leak():
""" """
pre patch: pre patch:
@ -181,7 +180,7 @@ def test_exif_leak() -> None:
im.save(test_output, "JPEG", exif=exif) im.save(test_output, "JPEG", exif=exif)
def test_base_save() -> None: def test_base_save():
""" """
base case: base case:
MB MB


@ -1,8 +1,4 @@
from __future__ import annotations
import sys import sys
from pathlib import Path
from types import ModuleType
import pytest import pytest
@ -18,7 +14,6 @@ from PIL import Image
# 2.7 and 3.2. # 2.7 and 3.2.
numpy: ModuleType | None
try: try:
import numpy import numpy
except ImportError: except ImportError:
@ -31,24 +26,23 @@ XDIM = 48000
pytestmark = pytest.mark.skipif(sys.maxsize <= 2**32, reason="requires 64-bit system") pytestmark = pytest.mark.skipif(sys.maxsize <= 2**32, reason="requires 64-bit system")
def _write_png(tmp_path: Path, xdim: int, ydim: int) -> None: def _write_png(tmp_path, xdim, ydim):
f = tmp_path / "temp.png" f = str(tmp_path / "temp.png")
im = Image.new("L", (xdim, ydim), 0) im = Image.new("L", (xdim, ydim), 0)
im.save(f) im.save(f)
def test_large(tmp_path: Path) -> None: def test_large(tmp_path):
"""succeeded prepatch""" """succeeded prepatch"""
_write_png(tmp_path, XDIM, YDIM) _write_png(tmp_path, XDIM, YDIM)
def test_2gpx(tmp_path: Path) -> None: def test_2gpx(tmp_path):
"""failed prepatch""" """failed prepatch"""
_write_png(tmp_path, XDIM, XDIM) _write_png(tmp_path, XDIM, XDIM)
@pytest.mark.skipif(numpy is None, reason="Numpy is not installed") @pytest.mark.skipif(numpy is None, reason="Numpy is not installed")
def test_size_greater_than_int() -> None: def test_size_greater_than_int():
assert numpy is not None
arr = numpy.ndarray(shape=(16394, 16394)) arr = numpy.ndarray(shape=(16394, 16394))
Image.fromarray(arr) Image.fromarray(arr)
View File
@ -1,7 +1,4 @@
from __future__ import annotations
import sys import sys
from pathlib import Path
import pytest import pytest
@ -25,19 +22,19 @@ XDIM = 48000
pytestmark = pytest.mark.skipif(sys.maxsize <= 2**32, reason="requires 64-bit system") pytestmark = pytest.mark.skipif(sys.maxsize <= 2**32, reason="requires 64-bit system")
def _write_png(tmp_path: Path, xdim: int, ydim: int) -> None: def _write_png(tmp_path, xdim, ydim):
dtype = np.uint8 dtype = np.uint8
a = np.zeros((xdim, ydim), dtype=dtype) a = np.zeros((xdim, ydim), dtype=dtype)
f = tmp_path / "temp.png" f = str(tmp_path / "temp.png")
im = Image.fromarray(a, "L") im = Image.fromarray(a, "L")
im.save(f) im.save(f)
def test_large(tmp_path: Path) -> None: def test_large(tmp_path):
"""succeeded prepatch""" """succeeded prepatch"""
_write_png(tmp_path, XDIM, YDIM) _write_png(tmp_path, XDIM, YDIM)
def test_2gpx(tmp_path: Path) -> None: def test_2gpx(tmp_path):
"""failed prepatch""" """failed prepatch"""
_write_png(tmp_path, XDIM, XDIM) _write_png(tmp_path, XDIM, XDIM)
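For orientation, the "2gpx" case is simply an image whose pixel count no longer fits in a signed 32-bit integer, which is what failed before the patch; the arithmetic:

    XDIM = 48000
    assert XDIM * XDIM > 2**31   # 2,304,000,000 pixels vs. a 2,147,483,648 limit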
View File
@ -1,5 +1,3 @@
from __future__ import annotations
import pytest import pytest
from PIL import Image from PIL import Image
@ -7,7 +5,7 @@ from PIL import Image
TEST_FILE = "Tests/images/libtiff_segfault.tif" TEST_FILE = "Tests/images/libtiff_segfault.tif"
def test_libtiff_segfault() -> None: def test_libtiff_segfault():
"""This test should not segfault. It will on Pillow <= 3.1.0 and """This test should not segfault. It will on Pillow <= 3.1.0 and
libtiff >= 4.0.0 libtiff >= 4.0.0
""" """
View File
@ -1,30 +1,29 @@
from __future__ import annotations
import zlib import zlib
from io import BytesIO from io import BytesIO
import pytest
from PIL import Image, ImageFile, PngImagePlugin from PIL import Image, ImageFile, PngImagePlugin
TEST_FILE = "Tests/images/png_decompression_dos.png" TEST_FILE = "Tests/images/png_decompression_dos.png"
def test_ignore_dos_text(monkeypatch: pytest.MonkeyPatch) -> None: def test_ignore_dos_text():
monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True) ImageFile.LOAD_TRUNCATED_IMAGES = True
with Image.open(TEST_FILE) as im: try:
im = Image.open(TEST_FILE)
im.load() im.load()
finally:
ImageFile.LOAD_TRUNCATED_IMAGES = False
assert isinstance(im, PngImagePlugin.PngImageFile) for s in im.text.values():
for s in im.text.values(): assert len(s) < 1024 * 1024, "Text chunk larger than 1M"
assert len(s) < 1024 * 1024, "Text chunk larger than 1M"
for s in im.info.values(): for s in im.info.values():
assert len(s) < 1024 * 1024, "Text chunk larger than 1M" assert len(s) < 1024 * 1024, "Text chunk larger than 1M"
def test_dos_text() -> None: def test_dos_text():
try: try:
im = Image.open(TEST_FILE) im = Image.open(TEST_FILE)
im.load() im.load()
@ -32,12 +31,11 @@ def test_dos_text() -> None:
assert msg, "Decompressed Data Too Large" assert msg, "Decompressed Data Too Large"
return return
assert isinstance(im, PngImagePlugin.PngImageFile)
for s in im.text.values(): for s in im.text.values():
assert len(s) < 1024 * 1024, "Text chunk larger than 1M" assert len(s) < 1024 * 1024, "Text chunk larger than 1M"
def test_dos_total_memory() -> None: def test_dos_total_memory():
im = Image.new("L", (1, 1)) im = Image.new("L", (1, 1))
compressed_data = zlib.compress(b"a" * 1024 * 1023) compressed_data = zlib.compress(b"a" * 1024 * 1023)
@ -54,11 +52,10 @@ def test_dos_total_memory() -> None:
try: try:
im2 = Image.open(b) im2 = Image.open(b)
except ValueError as msg: except ValueError as msg:
assert "Too much memory" in str(msg) assert "Too much memory" in msg
return return
total_len = 0 total_len = 0
assert isinstance(im2, PngImagePlugin.PngImageFile)
for txt in im2.text.values(): for txt in im2.text.values():
total_len += len(txt) total_len += len(txt)
assert total_len < 64 * 1024 * 1024, "Total text chunks greater than 64M" assert total_len < 64 * 1024 * 1024, "Total text chunks greater than 64M"
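The test above builds its compressed zTXt chunks by hand with zlib; for reference, a minimal sketch of producing an oversized text chunk through the public API instead (key name and sizes are arbitrary):

    from io import BytesIO
    from PIL import Image, PngImagePlugin

    info = PngImagePlugin.PngInfo()
    info.add_text("comment", "a" * (1024 * 1023), zip=True)   # stored compressed, expands on load
    out = BytesIO()
    Image.new("L", (1, 1)).save(out, "PNG", pnginfo=info)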
View File
@ -1,11 +1,7 @@
from __future__ import annotations
import io import io
import pytest
def pytest_report_header(config):
def pytest_report_header(config: pytest.Config) -> str:
try: try:
from PIL import features from PIL import features
@ -16,7 +12,7 @@ def pytest_report_header(config: pytest.Config) -> str:
return f"pytest_report_header failed: {e}" return f"pytest_report_header failed: {e}"
def pytest_configure(config: pytest.Config) -> None: def pytest_configure(config):
config.addinivalue_line( config.addinivalue_line(
"markers", "markers",
"pil_noop_mark: A conditional mark where nothing special happens", "pil_noop_mark: A conditional mark where nothing special happens",
View File
@ -1,6 +1,4 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
from __future__ import annotations
import base64 import base64
import os import os
View File
@ -37,4 +37,4 @@ The Font Software may be sold as part of a larger software package but no copy o
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL TAVMJONG BAH BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT SOFTWARE. THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL TAVMJONG BAH BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT SOFTWARE.
Except as contained in this notice, the name of Tavmjong Bah shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Font Software without prior written authorization from Tavmjong Bah. For further information, contact: tavmjong @ free . fr. Except as contained in this notice, the name of Tavmjong Bah shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Font Software without prior written authorization from Tavmjong Bah. For further information, contact: tavmjong @ free . fr.
View File
@ -2,6 +2,7 @@
NotoNastaliqUrdu-Regular.ttf and NotoSansSymbols-Regular.ttf, from https://github.com/googlei18n/noto-fonts NotoNastaliqUrdu-Regular.ttf and NotoSansSymbols-Regular.ttf, from https://github.com/googlei18n/noto-fonts
NotoSans-Regular.ttf, from https://www.google.com/get/noto/ NotoSans-Regular.ttf, from https://www.google.com/get/noto/
NotoSansJP-Thin.otf, from https://www.google.com/get/noto/help/cjk/ NotoSansJP-Thin.otf, from https://www.google.com/get/noto/help/cjk/
NotoColorEmoji.ttf, from https://github.com/googlefonts/noto-emoji
AdobeVFPrototype.ttf, from https://github.com/adobe-fonts/adobe-variable-font-prototype AdobeVFPrototype.ttf, from https://github.com/adobe-fonts/adobe-variable-font-prototype
TINY5x3GX.ttf, from http://velvetyne.fr/fonts/tiny TINY5x3GX.ttf, from http://velvetyne.fr/fonts/tiny
ArefRuqaa-Regular.ttf, from https://github.com/google/fonts/tree/master/ofl/arefruqaa ArefRuqaa-Regular.ttf, from https://github.com/google/fonts/tree/master/ofl/arefruqaa
@ -24,5 +25,3 @@ FreeMono.ttf is licensed under GPLv3.
10x20-ISO8859-1.pcf, from https://packages.ubuntu.com/xenial/xfonts-base 10x20-ISO8859-1.pcf, from https://packages.ubuntu.com/xenial/xfonts-base
"Public domain font. Share and enjoy." "Public domain font. Share and enjoy."
CBDTTestFont.ttf and EBDTTestFont.ttf from https://github.com/nulano/font-tests are public domain.
View File
@ -2,55 +2,57 @@
Helper functions. Helper functions.
""" """
from __future__ import annotations
import logging import logging
import os import os
import shutil import shutil
import subprocess
import sys import sys
import sysconfig
import tempfile import tempfile
from functools import lru_cache
from io import BytesIO from io import BytesIO
import pytest import pytest
from packaging.version import parse as parse_version from packaging.version import parse as parse_version
from PIL import Image, ImageFile, ImageMath, features from PIL import Image, ImageMath, features
TYPE_CHECKING = False
if TYPE_CHECKING:
from collections.abc import Callable, Sequence
from pathlib import Path
from typing import Any
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
uploader = None
if os.environ.get("SHOW_ERRORS"): HAS_UPLOADER = False
uploader = "show"
if os.environ.get("SHOW_ERRORS", None):
# local img.show for errors.
HAS_UPLOADER = True
class test_image_results:
@staticmethod
def upload(a, b):
a.show()
b.show()
elif "GITHUB_ACTIONS" in os.environ: elif "GITHUB_ACTIONS" in os.environ:
uploader = "github_actions" HAS_UPLOADER = True
class test_image_results:
@staticmethod
def upload(a, b):
dir_errors = os.path.join(os.path.dirname(__file__), "errors")
os.makedirs(dir_errors, exist_ok=True)
tmpdir = tempfile.mkdtemp(dir=dir_errors)
a.save(os.path.join(tmpdir, "a.png"))
b.save(os.path.join(tmpdir, "b.png"))
return tmpdir
else:
try:
import test_image_results
HAS_UPLOADER = True
except ImportError:
pass
def upload(a: Image.Image, b: Image.Image) -> str | None: def convert_to_comparable(a, b):
if uploader == "show":
# local img.show for errors.
a.show()
b.show()
elif uploader == "github_actions":
dir_errors = os.path.join(os.path.dirname(__file__), "errors")
os.makedirs(dir_errors, exist_ok=True)
tmpdir = tempfile.mkdtemp(dir=dir_errors)
a.save(os.path.join(tmpdir, "a.png"))
b.save(os.path.join(tmpdir, "b.png"))
return tmpdir
return None
def convert_to_comparable(
a: Image.Image, b: Image.Image
) -> tuple[Image.Image, Image.Image]:
new_a, new_b = a, b new_a, new_b = a, b
if a.mode == "P": if a.mode == "P":
new_a = Image.new("L", a.size) new_a = Image.new("L", a.size)
@ -63,16 +65,14 @@ def convert_to_comparable(
return new_a, new_b return new_a, new_b
def assert_deep_equal(a: Any, b: Any, msg: str | None = None) -> None: def assert_deep_equal(a, b, msg=None):
try: try:
assert len(a) == len(b), msg or f"got length {len(a)}, expected {len(b)}" assert len(a) == len(b), msg or f"got length {len(a)}, expected {len(b)}"
except Exception: except Exception:
assert a == b, msg assert a == b, msg
def assert_image( def assert_image(im, mode, size, msg=None):
im: Image.Image, mode: str, size: tuple[int, int], msg: str | None = None
) -> None:
if mode is not None: if mode is not None:
assert im.mode == mode, ( assert im.mode == mode, (
msg or f"got mode {repr(im.mode)}, expected {repr(mode)}" msg or f"got mode {repr(im.mode)}, expected {repr(mode)}"
@ -84,35 +84,28 @@ def assert_image(
) )
def assert_image_equal(a: Image.Image, b: Image.Image, msg: str | None = None) -> None: def assert_image_equal(a, b, msg=None):
assert a.mode == b.mode, msg or f"got mode {repr(a.mode)}, expected {repr(b.mode)}" assert a.mode == b.mode, msg or f"got mode {repr(a.mode)}, expected {repr(b.mode)}"
assert a.size == b.size, msg or f"got size {repr(a.size)}, expected {repr(b.size)}" assert a.size == b.size, msg or f"got size {repr(a.size)}, expected {repr(b.size)}"
if a.tobytes() != b.tobytes(): if a.tobytes() != b.tobytes():
try: if HAS_UPLOADER:
url = upload(a, b) try:
if url: url = test_image_results.upload(a, b)
logger.error("URL for test images: %s", url) logger.error(f"Url for test images: {url}")
except Exception: except Exception:
pass pass
pytest.fail(msg or "got different content") assert False, msg or "got different content"
def assert_image_equal_tofile( def assert_image_equal_tofile(a, filename, msg=None, mode=None):
a: Image.Image,
filename: str | Path,
msg: str | None = None,
mode: str | None = None,
) -> None:
with Image.open(filename) as img: with Image.open(filename) as img:
if mode: if mode:
img = img.convert(mode) img = img.convert(mode)
assert_image_equal(a, img, msg) assert_image_equal(a, img, msg)
def assert_image_similar( def assert_image_similar(a, b, epsilon, msg=None):
a: Image.Image, b: Image.Image, epsilon: float, msg: str | None = None
) -> None:
assert a.mode == b.mode, msg or f"got mode {repr(a.mode)}, expected {repr(b.mode)}" assert a.mode == b.mode, msg or f"got mode {repr(a.mode)}, expected {repr(b.mode)}"
assert a.size == b.size, msg or f"got size {repr(a.size)}, expected {repr(b.size)}" assert a.size == b.size, msg or f"got size {repr(a.size)}, expected {repr(b.size)}"
@ -120,9 +113,7 @@ def assert_image_similar(
diff = 0 diff = 0
for ach, bch in zip(a.split(), b.split()): for ach, bch in zip(a.split(), b.split()):
chdiff = ImageMath.lambda_eval( chdiff = ImageMath.eval("abs(a - b)", a=ach, b=bch).convert("L")
lambda args: abs(args["a"] - args["b"]), a=ach, b=bch
).convert("L")
diff += sum(i * num for i, num in enumerate(chdiff.histogram())) diff += sum(i * num for i, num in enumerate(chdiff.histogram()))
ave_diff = diff / (a.size[0] * a.size[1]) ave_diff = diff / (a.size[0] * a.size[1])
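For 8-bit bands, the same per-channel difference can be written with ImageChops rather than ImageMath; a rough equivalent of the loop body above (not the helper's actual implementation, and it does not cover the I/F bands the ImageMath form handles):

    from PIL import ImageChops

    chdiff = ImageChops.difference(ach, bch)   # absolute per-pixel difference of one band pair
    diff += sum(i * num for i, num in enumerate(chdiff.histogram()))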
@ -132,75 +123,61 @@ def assert_image_similar(
+ f" average pixel value difference {ave_diff:.4f} > epsilon {epsilon:.4f}" + f" average pixel value difference {ave_diff:.4f} > epsilon {epsilon:.4f}"
) )
except Exception as e: except Exception as e:
try: if HAS_UPLOADER:
url = upload(a, b) try:
if url: url = test_image_results.upload(a, b)
logger.exception("URL for test images: %s", url) logger.error(f"Url for test images: {url}")
except Exception: except Exception:
pass pass
raise e raise e
def assert_image_similar_tofile( def assert_image_similar_tofile(a, filename, epsilon, msg=None, mode=None):
a: Image.Image,
filename: str | Path,
epsilon: float,
msg: str | None = None,
) -> None:
with Image.open(filename) as img: with Image.open(filename) as img:
if mode:
img = img.convert(mode)
assert_image_similar(a, img, epsilon, msg) assert_image_similar(a, img, epsilon, msg)
def assert_not_all_same(items: Sequence[Any], msg: str | None = None) -> None: def assert_all_same(items, msg=None):
assert items.count(items[0]) == len(items), msg
def assert_not_all_same(items, msg=None):
assert items.count(items[0]) != len(items), msg assert items.count(items[0]) != len(items), msg
def assert_tuple_approx_equal( def assert_tuple_approx_equal(actuals, targets, threshold, msg):
actuals: Sequence[int], targets: tuple[int, ...], threshold: int, msg: str
) -> None:
"""Tests if actuals has values within threshold from targets""" """Tests if actuals has values within threshold from targets"""
value = True
for i, target in enumerate(targets): for i, target in enumerate(targets):
if not (target - threshold <= actuals[i] <= target + threshold): value *= target - threshold <= actuals[i] <= target + threshold
pytest.fail(msg + ": " + repr(actuals) + " != " + repr(targets))
assert value, msg + ": " + repr(actuals) + " != " + repr(targets)
def timeout_unless_slower_valgrind(timeout: float) -> pytest.MarkDecorator: def skip_unless_feature(feature):
if "PILLOW_VALGRIND_TEST" in os.environ:
return pytest.mark.pil_noop_mark()
return pytest.mark.timeout(timeout)
def skip_unless_feature(feature: str) -> pytest.MarkDecorator:
reason = f"{feature} not available" reason = f"{feature} not available"
return pytest.mark.skipif(not features.check(feature), reason=reason) return pytest.mark.skipif(not features.check(feature), reason=reason)
def skip_unless_feature_version( def skip_unless_feature_version(feature, version_required, reason=None):
feature: str, required: str, reason: str | None = None if not features.check(feature):
) -> pytest.MarkDecorator:
version = features.version(feature)
if version is None:
return pytest.mark.skip(f"{feature} not available") return pytest.mark.skip(f"{feature} not available")
if reason is None: if reason is None:
reason = f"{feature} is older than {required}" reason = f"{feature} is older than {version_required}"
version_required = parse_version(required) version_required = parse_version(version_required)
version_available = parse_version(version) version_available = parse_version(features.version(feature))
return pytest.mark.skipif(version_available < version_required, reason=reason) return pytest.mark.skipif(version_available < version_required, reason=reason)
def mark_if_feature_version( def mark_if_feature_version(mark, feature, version_blacklist, reason=None):
mark: pytest.MarkDecorator, if not features.check(feature):
feature: str,
version_blacklist: str,
reason: str | None = None,
) -> pytest.MarkDecorator:
version = features.version(feature)
if version is None:
return pytest.mark.pil_noop_mark() return pytest.mark.pil_noop_mark()
if reason is None: if reason is None:
reason = f"{feature} is {version_blacklist}" reason = f"{feature} is {version_blacklist}"
version_required = parse_version(version_blacklist) version_required = parse_version(version_blacklist)
version_available = parse_version(version) version_available = parse_version(features.version(feature))
if ( if (
version_available.major == version_required.major version_available.major == version_required.major
and version_available.minor == version_required.minor and version_available.minor == version_required.minor
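In the test modules these factories are applied as decorators; typical usage looks roughly like this (test names and feature strings are illustrative):

    from .helper import skip_unless_feature, skip_unless_feature_version

    @skip_unless_feature("webp")
    def test_needs_webp() -> None:
        ...

    @skip_unless_feature_version("libtiff", "4.0")
    def test_needs_recent_libtiff() -> None:
        ...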
@ -215,7 +192,7 @@ class PillowLeakTestCase:
iterations = 100 # count iterations = 100 # count
mem_limit = 512 # k mem_limit = 512 # k
def _get_mem_usage(self) -> float: def _get_mem_usage(self):
""" """
Gets the RUSAGE memory usage, returns in K. Encapsulates the difference Gets the RUSAGE memory usage, returns in K. Encapsulates the difference
between macOS and Linux rss reporting between macOS and Linux rss reporting
@ -226,13 +203,18 @@ class PillowLeakTestCase:
from resource import RUSAGE_SELF, getrusage from resource import RUSAGE_SELF, getrusage
mem = getrusage(RUSAGE_SELF).ru_maxrss mem = getrusage(RUSAGE_SELF).ru_maxrss
# man 2 getrusage: if sys.platform == "darwin":
# ru_maxrss # man 2 getrusage:
# This is the maximum resident set size utilized # ru_maxrss
# in bytes on macOS, in kilobytes on Linux # This is the maximum resident set size utilized (in bytes).
return mem / 1024 if sys.platform == "darwin" else mem return mem / 1024 # Kb
# linux
# man 2 getrusage
# ru_maxrss (since Linux 2.6.32)
# This is the maximum resident set size used (in kilobytes).
return mem # Kb
def _test_leak(self, core: Callable[[], None]) -> None: def _test_leak(self, core):
start_mem = self._get_mem_usage() start_mem = self._get_mem_usage()
for cycle in range(self.iterations): for cycle in range(self.iterations):
core() core()
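A leak test subclasses this and hands _test_leak a callable; schematically (class name, limits and the operation being exercised are illustrative):

    class TestSaveLeak(PillowLeakTestCase):
        iterations = 100      # how many times core() runs
        mem_limit = 1024      # allowed RSS growth, in KB

        def test_jpeg_save(self) -> None:
            im = hopper("RGB")

            def core() -> None:
                im.save(BytesIO(), "JPEG")

            self._test_leak(core)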
@ -244,63 +226,52 @@ class PillowLeakTestCase:
# helpers # helpers
def fromstring(data: bytes) -> ImageFile.ImageFile: def fromstring(data):
return Image.open(BytesIO(data)) return Image.open(BytesIO(data))
def tostring(im: Image.Image, string_format: str, **options: Any) -> bytes: def tostring(im, string_format, **options):
out = BytesIO() out = BytesIO()
im.save(out, string_format, **options) im.save(out, string_format, **options)
return out.getvalue() return out.getvalue()
def hopper(mode: str | None = None) -> Image.Image: def hopper(mode=None, cache={}):
# Use caching to reduce reading from disk, but return a copy
# so that the cached image isn't modified by the tests
# (for fast, isolated, repeatable tests).
if mode is None: if mode is None:
# Always return fresh not-yet-loaded version of image. # Always return fresh not-yet-loaded version of image.
# Operations on not-yet-loaded images are a separate class of errors # Operations on not-yet-loaded images is separate class of errors
# that we should catch. # what we should catch.
return Image.open("Tests/images/hopper.ppm") return Image.open("Tests/images/hopper.ppm")
# Use caching to reduce reading from disk but so an original copy is
return _cached_hopper(mode).copy() # returned each time and the cached image isn't modified by tests
# (for fast, isolated, repeatable tests).
im = cache.get(mode)
@lru_cache if im is None:
def _cached_hopper(mode: str) -> Image.Image: if mode == "F":
if mode == "F": im = hopper("L").convert(mode)
im = hopper("L") elif mode[:4] == "I;16":
else: im = hopper("I").convert(mode)
im = hopper()
try:
im = im.convert(mode)
except ImportError:
if mode == "LAB":
im = Image.open("Tests/images/hopper.Lab.tif")
else: else:
raise im = hopper().convert(mode)
return im cache[mode] = im
return im.copy()
def djpeg_available() -> bool: def djpeg_available():
if shutil.which("djpeg"): return bool(shutil.which("djpeg"))
try:
subprocess.check_call(["djpeg", "-version"])
return True
except subprocess.CalledProcessError: # pragma: no cover
return False
return False
def netpbm_available() -> bool: def cjpeg_available():
return bool(shutil.which("cjpeg"))
def netpbm_available():
return bool(shutil.which("ppmquant") and shutil.which("ppmtogif")) return bool(shutil.which("ppmquant") and shutil.which("ppmtogif"))
def magick_command() -> list[str] | None: def magick_command():
if sys.platform == "win32": if sys.platform == "win32":
magickhome = os.environ.get("MAGICK_HOME") magickhome = os.environ.get("MAGICK_HOME", "")
if magickhome: if magickhome:
imagemagick = [os.path.join(magickhome, "convert.exe")] imagemagick = [os.path.join(magickhome, "convert.exe")]
graphicsmagick = [os.path.join(magickhome, "gm.exe"), "convert"] graphicsmagick = [os.path.join(magickhome, "gm.exe"), "convert"]
@ -315,35 +286,47 @@ def magick_command() -> list[str] | None:
return imagemagick return imagemagick
if graphicsmagick and shutil.which(graphicsmagick[0]): if graphicsmagick and shutil.which(graphicsmagick[0]):
return graphicsmagick return graphicsmagick
return None
def on_ci() -> bool: def on_appveyor():
return "APPVEYOR" in os.environ
def on_github_actions():
return "GITHUB_ACTIONS" in os.environ
def on_ci():
# GitHub Actions and AppVeyor have "CI"
return "CI" in os.environ return "CI" in os.environ
def is_big_endian() -> bool: def is_big_endian():
return sys.byteorder == "big" return sys.byteorder == "big"
def is_ppc64le() -> bool: def is_ppc64le():
import platform import platform
return platform.machine() == "ppc64le" return platform.machine() == "ppc64le"
def is_win32() -> bool: def is_win32():
return sys.platform.startswith("win32") return sys.platform.startswith("win32")
def is_pypy() -> bool: def is_pypy():
return hasattr(sys, "pypy_translation_info") return hasattr(sys, "pypy_translation_info")
def is_mingw():
return sysconfig.get_platform() == "mingw"
class CachedProperty: class CachedProperty:
def __init__(self, func: Callable[[Any], Any]) -> None: def __init__(self, func):
self.func = func self.func = func
def __get__(self, instance: Any, cls: type[Any] | None = None) -> Any: def __get__(self, instance, cls=None):
result = instance.__dict__[self.func.__name__] = self.func(instance) result = instance.__dict__[self.func.__name__] = self.func(instance)
return result return result
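CachedProperty works because it is a non-data descriptor: the first access stores the computed value under the same name in the instance __dict__, which then shadows the descriptor on later lookups. A small sketch of the behaviour (class and attribute names are illustrative):

    class Config:
        @CachedProperty
        def expensive(self) -> int:
            print("computed once")
            return 42

    c = Config()
    c.expensive   # prints "computed once" and returns 42
    c.expensive   # returns 42 straight from c.__dict__, no recomputation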
View File
@ -22,3 +22,4 @@ and that the name of ICC shall not be used in advertising or publicity
pertaining to distribution of the software without specific, written pertaining to distribution of the software without specific, written
prior permission. ICC makes no representations about the suitability prior permission. ICC makes no representations about the suitability
of this software for any purpose. of this software for any purpose.
Binary files not shown.

Some files were not shown because too many files have changed in this diff.