Compare commits


7 Commits
main ... 8.0.1

Author SHA1 Message Date
Hugo van Kemenade
d209b7cab2 8.0.1 version bump 2020-10-22 18:05:53 +03:00
Hugo van Kemenade
b60b891273 Update CHANGES.rst 2020-10-22 17:59:18 +03:00
Andrew Murray
78889c72e7 Moved string_dimension image to pillow-depends 2020-10-22 17:49:48 +03:00
Hugo van Kemenade
ef948d998e Clarify wording (Co-authored-by: nulano <nulano@nulano.eu>) 2020-10-22 17:34:06 +03:00
Hugo van Kemenade
44fe3545c3 Add release notes for 8.0.1 2020-10-22 17:33:15 +03:00
Andrew Murray
982b6c27a9 Updated freetype to 2.10.4 2020-10-22 17:32:18 +03:00
Andrew Murray
09ee6f3f43 Clean up dist in release-test 2020-10-22 17:32:15 +03:00
1070 changed files with 39907 additions and 85888 deletions

.appveyor.yml (new file, 93 changed lines)
View File

@ -0,0 +1,93 @@
version: '{build}'
image: Visual Studio 2017
clone_folder: c:\pillow
init:
- ECHO %PYTHON%
#- ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
# Uncomment previous line to get RDP access during the build.
environment:
EXECUTABLE: python.exe
PIP_DIR: Scripts
TEST_OPTIONS:
DEPLOY: YES
matrix:
- PYTHON: C:/Python38
ARCHITECTURE: x86
- PYTHON: C:/Python36-x64
ARCHITECTURE: x64
install:
- curl -fsSL -o pillow-depends.zip https://github.com/python-pillow/pillow-depends/archive/master.zip
- 7z x pillow-depends.zip -oc:\
- mv c:\pillow-depends-master c:\pillow-depends
- xcopy /s c:\pillow-depends\test_images\* c:\pillow\tests\images
- 7z x ..\pillow-depends\nasm-2.14.02-win64.zip -oc:\
- ..\pillow-depends\gs9533w32.exe /S
- path c:\nasm-2.14.02;C:\Program Files (x86)\gs\gs9.53.3\bin;%PATH%
- cd c:\pillow\winbuild\
- ps: |
c:\python37\python.exe c:\pillow\winbuild\build_prepare.py -v --depends=C:\pillow-depends\
c:\pillow\winbuild\build\build_dep_all.cmd
$host.SetShouldExit(0)
- path C:\pillow\winbuild\build\bin;%PATH%
build_script:
- ps: |
c:\pillow\winbuild\build\build_pillow.cmd install
$host.SetShouldExit(0)
- cd c:\pillow
- '%PYTHON%\%EXECUTABLE% selftest.py --installed'
test_script:
- cd c:\pillow
- '%PYTHON%\%PIP_DIR%\pip.exe install pytest pytest-cov'
- c:\"Program Files (x86)"\"Windows Kits"\10\Debuggers\x86\gflags.exe /p /enable %PYTHON%\%EXECUTABLE%
- '%PYTHON%\%EXECUTABLE% -m pytest -vx --cov PIL --cov Tests --cov-report term --cov-report xml Tests'
#- '%PYTHON%\%EXECUTABLE% test-installed.py -v -s %TEST_OPTIONS%' TODO TEST_OPTIONS with pytest?
after_test:
- pip install codecov
- codecov --file coverage.xml --name %PYTHON% --flags AppVeyor
matrix:
fast_finish: true
cache:
- '%LOCALAPPDATA%\pip\Cache'
artifacts:
- path: pillow\dist\*.egg
name: egg
- path: pillow\dist\*.wheel
name: wheel
before_deploy:
- cd c:\pillow
- '%PYTHON%\%PIP_DIR%\pip.exe install wheel'
- cd c:\pillow\winbuild\
- c:\pillow\winbuild\build\build_pillow.cmd bdist_wheel
- cd c:\pillow
- ps: Get-ChildItem .\dist\*.* | % { Push-AppveyorArtifact $_.FullName -FileName $_.Name }
deploy:
provider: S3
region: us-west-2
access_key_id: AKIAIRAXC62ZNTVQJMOQ
secret_access_key:
secure: Hwb6klTqtBeMgxAjRoDltiiqpuH8xbwD4UooDzBSiCWXjuFj1lyl4kHgHwTCCGqi
bucket: pillow-nightly
folder: win/$(APPVEYOR_BUILD_NUMBER)/
artifact: /.*egg|wheel/
on:
APPVEYOR_REPO_NAME: python-pillow/Pillow
branch: master
deploy: YES
# Uncomment the following lines to get RDP access after the build/test and block for
# up to the timeout limit (~1hr)
#
#on_finish:
#- ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))

View File

@ -1,5 +1,19 @@
#!/bin/bash
# gather the coverage data
python3 -m pip install coverage
python3 -m coverage xml
pip3 install codecov
if [[ $MATRIX_DOCKER ]]; then
coverage xml --ignore-errors
else
coverage xml
fi
if [[ $TRAVIS ]]; then
codecov --flags TravisCI
fi
if [ "$TRAVIS_PYTHON_VERSION" == "3.8" ]; then
# Coverage and quality reports on just the latest diff.
depends/diffcover-install.sh
depends/diffcover-run.sh
fi

View File

@ -2,6 +2,9 @@
set -e
python3 -m coverage erase
coverage erase
if [ $(uname) == "Darwin" ]; then
export CPPFLAGS="-I/usr/local/miniconda/include";
fi
make clean
make install-coverage

View File

@ -2,74 +2,62 @@
aptget_update()
{
if [ -n "$1" ]; then
if [ ! -z $1 ]; then
echo ""
echo "Retrying apt-get update..."
echo ""
fi
output=$(sudo apt-get update 2>&1)
output=`sudo apt-get update 2>&1`
echo "$output"
if [[ $output == *[WE]:\ * ]]; then
return 1
fi
}
if [[ $(uname) != CYGWIN* ]]; then
aptget_update || aptget_update retry || aptget_update retry
fi
aptget_update || aptget_update retry || aptget_update retry
set -e
if [[ $(uname) != CYGWIN* ]]; then
sudo apt-get -qq install libfreetype6-dev liblcms2-dev libtiff-dev python3-tk\
ghostscript libjpeg-turbo8-dev libopenjp2-7-dev\
cmake meson imagemagick libharfbuzz-dev libfribidi-dev\
sway wl-clipboard libopenblas-dev nasm
sudo apt-get -qq install libfreetype6-dev liblcms2-dev python3-tk\
ghostscript libffi-dev libjpeg-turbo-progs libopenjp2-7-dev\
cmake imagemagick libharfbuzz-dev libfribidi-dev
if [[ $TRAVIS_CPU_ARCH == "s390x" || $TRAVIS_CPU_ARCH == "ppc64le" ]]; then sudo chown $USER ~/.cache/pip/wheels ; fi
pip install --upgrade pip
PYTHONOPTIMIZE=0 pip install cffi
pip install coverage
pip install olefile
pip install -U pytest
pip install -U pytest-cov
pip install pyroma
pip install test-image-results
pip install numpy
# TODO Remove when 3.8 / 3.9 / PyPy3 includes setuptools 49.3.2+:
if [ "$GHA_PYTHON_VERSION" == "3.8" ]; then pip install -U "setuptools>=49.3.2" ; fi
if [ "$GHA_PYTHON_VERSION" == "3.9" ]; then pip install -U "setuptools>=49.3.2" ; fi
if [ "$TRAVIS_PYTHON_VERSION" == "pypy3.6-7.3.1" ]; then pip install -U "setuptools>=49.3.2" ; fi
if [[ $TRAVIS_PYTHON_VERSION == 3.* ]]; then
# arm64, ppc64le, s390x CPUs:
# "ERROR: Could not find a version that satisfies the requirement pyqt5"
if [[ $TRAVIS_CPU_ARCH == "amd64" ]]; then
sudo apt-get -qq install libxcb-xinerama0 pyqt5-dev-tools
pip install pyqt5
fi
fi
python3 -m pip install --upgrade pip
python3 -m pip install --upgrade wheel
python3 -m pip install coverage
python3 -m pip install defusedxml
python3 -m pip install ipython
python3 -m pip install olefile
python3 -m pip install -U pytest
python3 -m pip install -U pytest-cov
python3 -m pip install -U pytest-timeout
python3 -m pip install pyroma
# optional test dependency, only install if there's a binary package.
# fails on beta 3.14 and PyPy
python3 -m pip install --only-binary=:all: pyarrow || true
# docs only on Python 3.8
if [ "$TRAVIS_PYTHON_VERSION" == "3.8" ]; then pip install -r requirements.txt ; fi
if [[ $(uname) != CYGWIN* ]]; then
python3 -m pip install numpy
# webp
pushd depends && ./install_webp.sh && popd
# PyQt6 doesn't support PyPy3
if [[ $GHA_PYTHON_VERSION == 3.* ]]; then
sudo apt-get -qq install libegl1 libxcb-cursor0 libxcb-icccm4 libxcb-image0 libxcb-keysyms1 libxcb-randr0 libxcb-render-util0 libxcb-shape0 libxkbcommon-x11-0
# TODO Update condition when pyqt6 supports free-threading
if ! [[ "$PYTHON_GIL" == "0" ]]; then python3 -m pip install pyqt6 ; fi
fi
# libimagequant
pushd depends && ./install_imagequant.sh && popd
# Pyroma uses non-isolated build and fails with old setuptools
if [[ $GHA_PYTHON_VERSION == 3.9 ]]; then
# To match pyproject.toml
python3 -m pip install "setuptools>=77"
fi
# raqm
pushd depends && ./install_raqm.sh && popd
# webp
pushd depends && ./install_webp.sh && popd
# libimagequant
pushd depends && ./install_imagequant.sh && popd
# raqm
pushd depends && ./install_raqm.sh && popd
# libavif
pushd depends && ./install_libavif.sh && popd
# extra test images
pushd depends && ./install_extra_test_images.sh && popd
else
cd depends && ./install_extra_test_images.sh && cd ..
fi
# extra test images
pushd depends && ./install_extra_test_images.sh && popd

View File

@ -1 +0,0 @@
cibuildwheel==3.1.2

View File

@ -1,14 +0,0 @@
mypy==1.17.0
IceSpringPySideStubs-PyQt6
IceSpringPySideStubs-PySide6
ipython
numpy
packaging
pyarrow-stubs
pybind11
pytest
sphinx
types-atheris
types-defusedxml
types-olefile
types-setuptools

View File

@ -1,3 +0,0 @@
python.exe -c "from PIL import Image"
IF ERRORLEVEL 1 EXIT /B
python.exe -bb -m pytest -vv -x -W always --cov PIL --cov Tests --cov-report term --cov-report xml Tests

View File

@ -2,6 +2,9 @@
set -e
python3 -c "from PIL import Image"
python -bb -m pytest -v -x -W always --cov PIL --cov Tests --cov-report term Tests
python3 -bb -m pytest -vv -x -W always --cov PIL --cov Tests --cov-report term --cov-report xml Tests $REVERSE
# Docs
if [ "$TRAVIS_PYTHON_VERSION" == "3.8" ] && [ "$TRAVIS_CPU_ARCH" == "amd64" ]; then
make doccheck
fi

View File

@ -1,41 +0,0 @@
# A clang-format style that approximates Python's PEP 7
# Useful for IDE integration
Language: C
BasedOnStyle: Google
AlwaysBreakAfterReturnType: All
AllowShortIfStatementsOnASingleLine: false
AlignAfterOpenBracket: BlockIndent
BinPackArguments: false
BinPackParameters: false
BreakBeforeBraces: Attach
ColumnLimit: 88
DerivePointerAlignment: false
IndentGotoLabels: false
IndentWidth: 4
PointerAlignment: Right
ReflowComments: true
SortIncludes: false
SpaceBeforeParens: ControlStatements
SpacesInParentheses: false
TabWidth: 4
UseTab: Never
---
Language: Cpp
BasedOnStyle: Google
AlwaysBreakAfterReturnType: All
AllowShortIfStatementsOnASingleLine: false
AlignAfterOpenBracket: BlockIndent
BinPackArguments: false
BinPackParameters: false
BreakBeforeBraces: Attach
ColumnLimit: 88
DerivePointerAlignment: false
IndentGotoLabels: false
IndentWidth: 4
PointerAlignment: Right
ReflowComments: true
SortIncludes: false
SpaceBeforeParens: ControlStatements
SpacesInParentheses: false
TabWidth: 4
UseTab: Never

View File

@ -2,21 +2,19 @@
[report]
# Regexes for lines to exclude from consideration
exclude_also =
# Don't complain if non-runnable code isn't run
exclude_lines =
# Have to re-enable the standard pragma:
pragma: no cover
# Don't complain if non-runnable code isn't run:
if 0:
if __name__ == .__main__.:
# Don't complain about debug code
if DEBUG:
# Don't complain about compatibility code for missing optional dependencies
except ImportError
if TYPE_CHECKING:
@abc.abstractmethod
# Empty bodies in protocols or abstract methods
^\s*def [a-zA-Z0-9_]+\(.*\)(\s*->.*)?:\s*\.\.\.(\s*#.*)?$
^\s*\.\.\.(\s*#.*)?$
[run]
omit =
checks/*.py
Tests/32bit_segfault_check.py
Tests/bench_cffi_access.py
Tests/check_*.py
Tests/createfontdatachunk.py

View File

@ -13,9 +13,10 @@ indent_style = space
trim_trailing_whitespace = true
[*.{toml,yml}]
[*.yml]
# Two-space indentation
indent_size = 2
indent_style = space
# Tab indentation (no size specified)
[Makefile]

View File

@ -1,6 +0,0 @@
# Flake8
8de95676e0fd89f2326b3953488ab66ff29cd2d0
# Format with Black
53a7e3500437a9fd5826bc04758f7116bd7e52dc
# Format the C code with ClangFormat
46b7e86bab79450ec0a2866c6c0c679afb659d17

View File

@ -4,21 +4,20 @@ Bug fixes, feature additions, tests, documentation and more can be contributed v
## Bug fixes, feature additions, etc.
Please send a pull request to the `main` branch. Please include [documentation](https://pillow.readthedocs.io) and [tests](../Tests/README.rst) for new features. Tests or documentation without bug fixes or feature additions are welcome too. Feel free to ask questions [via issues](https://github.com/python-pillow/Pillow/issues/new), [discussions](https://github.com/python-pillow/Pillow/discussions/new), [Gitter](https://gitter.im/python-pillow/Pillow) or irc://irc.freenode.net#pil
Please send a pull request to the master branch. Please include [documentation](https://pillow.readthedocs.io) and [tests](../Tests/README.rst) for new features. Tests or documentation without bug fixes or feature additions are welcome too. Feel free to ask questions [via issues](https://github.com/python-pillow/Pillow/issues/new), [Gitter](https://gitter.im/python-pillow/Pillow) or irc://irc.freenode.net#pil
- Fork the Pillow repository.
- Create a branch from `main`.
- Create a branch from master.
- Develop bug fixes, features, tests, etc.
- Run the test suite. You can enable GitHub Actions (https://github.com/MY-USERNAME/Pillow/actions) on your repo to catch test failures prior to the pull request, and [Codecov](https://codecov.io/gh) to see if the changed code is covered by tests.
- Create a pull request to pull the changes from your branch to the Pillow `main`.
- Run the test suite. You can enable [Travis CI](https://travis-ci.org/profile/) and [AppVeyor](https://ci.appveyor.com/projects/new) on your repo to catch test failures prior to the pull request, and [Codecov](https://codecov.io/gh) to see if the changed code is covered by tests.
- Create a pull request to pull the changes from your branch to the Pillow master.
### Guidelines
- Separate code commits from reformatting commits.
- Provide tests for any newly added code.
- Follow PEP 8.
- When committing only documentation changes please include `[ci skip]` in the commit message to avoid running extra tests.
- Include [release notes](https://github.com/python-pillow/Pillow/tree/main/docs/releasenotes) as needed or appropriate with your bug fixes, feature additions and tests.
- When committing only documentation changes please include [ci skip] in the commit message to avoid running tests on Travis-CI and AppVeyor.
## Reporting Issues
@ -35,4 +34,4 @@ The best reproductions are self-contained scripts with minimal dependencies. If
## Security vulnerabilities
Please see our [security policy](https://github.com/python-pillow/Pillow/blob/main/.github/SECURITY.md).
Please see our [security policy](https://github.com/python-pillow/Pillow/blob/master/.github/SECURITY.md).

.github/FUNDING.yml (vendored, 2 changed lines)
View File

@ -1 +1 @@
tidelift: "pypi/pillow"
tidelift: "pypi/Pillow"

View File

@ -48,21 +48,6 @@ Thank you.
* Python:
* Pillow:
```text
Please paste here the output of running:
python3 -m PIL.report
or
python3 -m PIL --report
Or the output of the following Python code:
from PIL import report
# or
from PIL import features
features.pilinfo(supported_formats=False)
```
<!--
Please include **code** that reproduces the issue and whenever possible, an **image** that demonstrates the issue. Please upload images to GitHub, not to third-party file hosting sites. If necessary, add the image to a zip or tar archive.

View File

@ -1,46 +0,0 @@
---
name: "Maintainers only: Release"
about: For maintainers to schedule a quarterly release
labels: Release
---
## Main release
Released quarterly on January 2nd, April 1st, July 1st and October 15th.
* [ ] Open a release ticket e.g. https://github.com/python-pillow/Pillow/issues/3154
* [ ] Develop and prepare release in `main` branch.
* [ ] Add release notes e.g. https://github.com/python-pillow/Pillow/pull/8885
* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) to confirm passing tests in `main` branch.
* [ ] Check that all the wheel builds pass the tests in the [GitHub Actions "Wheels" workflow](https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml) jobs by manually triggering them.
* [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/), update version identifier in `src/PIL/_version.py`
* [ ] Run pre-release check via `make release-test` in a freshly cloned repo.
* [ ] Create branch and tag for release e.g.:
```bash
git branch [[MAJOR.MINOR]].x
git tag [[MAJOR.MINOR]].0
git push --tags
```
* [ ] Check the [GitHub Actions "Wheels" workflow](https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml) has passed, including the "Upload release to PyPI" job. This will have been triggered by the new tag.
* [ ] Publish the [release on GitHub](https://github.com/python-pillow/Pillow/releases).
* [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/), increment and append `.dev0` to version identifier in `src/PIL/_version.py` and then:
```bash
git push --all
```
## Publicize release
* [ ] Announce release availability via [Mastodon](https://fosstodon.org/@pillow) e.g. https://fosstodon.org/@pillow/110639450470725321
## Documentation
* [ ] Make sure the [default version for Read the Docs](https://pillow.readthedocs.io/en/stable/) is up-to-date with the release changes
## Docker images
* [ ] Update Pillow in the Docker Images repository
```bash
git clone https://github.com/python-pillow/docker-images
cd docker-images
./update-pillow-tag.sh [[release tag]]
```

.github/mergify.yml (vendored, 4 changed lines)
View File

@ -7,8 +7,8 @@ pull_request_rules:
- status-success=Test Successful
- status-success=Docker Test Successful
- status-success=Windows Test Successful
- status-success=MinGW
- status-success=Cygwin Test Successful
- status-success=continuous-integration/appveyor/pr
- status-success=continuous-integration/travis-ci/pr
actions:
merge:
method: merge

View File

@ -1,18 +0,0 @@
{
"__comment": "Based on vscode-cpptools' Extension/package.json gcc rule",
"problemMatcher": [
{
"owner": "gcc-problem-matcher",
"pattern": [
{
"regexp": "^\\s*(.*):(\\d+):(\\d+):\\s+(?:fatal\\s+)?(warning|error):\\s+(.*)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5
}
]
}
]
}

View File

@ -1,27 +0,0 @@
name-template: "$NEXT_MINOR_VERSION"
tag-template: "$NEXT_MINOR_VERSION"
change-template: '- $TITLE #$NUMBER [@$AUTHOR]'
categories:
- title: "Removals"
label: "Removal"
- title: "Deprecations"
label: "Deprecation"
- title: "Documentation"
label: "Documentation"
- title: "Dependencies"
label: "Dependency"
- title: "Testing"
label: "Testing"
- title: "Type hints"
label: "Type hints"
- title: "Other changes"
exclude-labels:
- "changelog: skip"
template: |
https://pillow.readthedocs.io/en/stable/releasenotes/$NEXT_MINOR_VERSION.html
$CHANGES

.github/renovate.json (vendored, 21 changed lines)
View File

@ -1,21 +0,0 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:recommended"
],
"labels": [
"Dependency"
],
"packageRules": [
{
"groupName": "github-actions",
"matchManagers": [
"github-actions"
],
"separateMajorMinor": false
}
],
"schedule": [
"* * 3 * *"
]
}

View File

@ -1,62 +0,0 @@
name: CIFuzz
on:
push:
branches:
- "**"
paths:
- ".github/workflows/cifuzz.yml"
- ".github/workflows/wheels-dependencies.sh"
- "**.c"
- "**.h"
pull_request:
paths:
- ".github/workflows/cifuzz.yml"
- ".github/workflows/wheels-dependencies.sh"
- "**.c"
- "**.h"
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
Fuzzing:
runs-on: ubuntu-latest
steps:
- name: Build Fuzzers
id: build
uses: google/oss-fuzz/infra/cifuzz/actions/build_fuzzers@master
with:
oss-fuzz-project-name: 'pillow'
language: python
dry-run: false
- name: Run Fuzzers
id: run
uses: google/oss-fuzz/infra/cifuzz/actions/run_fuzzers@master
with:
oss-fuzz-project-name: 'pillow'
fuzz-seconds: 600
language: python
dry-run: false
- name: Upload New Crash
uses: actions/upload-artifact@v4
if: failure() && steps.build.outcome == 'success'
with:
name: artifacts
path: ./out/artifacts
- name: Upload Legacy Crash
uses: actions/upload-artifact@v4
if: steps.run.outcome == 'success'
with:
name: crash
path: ./out/crash*
- name: Fail on legacy crash
if: success()
run: |
[ ! -e out/crash-* ]
echo No legacy crash detected

View File

@ -1,71 +0,0 @@
name: Docs
on:
push:
branches:
- "**"
paths:
- ".github/workflows/docs.yml"
- "docs/**"
- "src/PIL/**"
pull_request:
paths:
- ".github/workflows/docs.yml"
- "docs/**"
- "src/PIL/**"
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
FORCE_COLOR: 1
jobs:
build:
runs-on: ubuntu-latest
name: Docs
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.x"
cache: pip
cache-dependency-path: |
".ci/*.sh"
"pyproject.toml"
- name: Build system information
run: python3 .github/workflows/system-info.py
- name: Cache libimagequant
uses: actions/cache@v4
id: cache-libimagequant
with:
path: ~/cache-libimagequant
key: ${{ runner.os }}-libimagequant-${{ hashFiles('depends/install_imagequant.sh') }}
- name: Install Linux dependencies
run: |
.ci/install.sh
env:
GHA_PYTHON_VERSION: "3.x"
GHA_LIBIMAGEQUANT_CACHE_HIT: ${{ steps.cache-libimagequant.outputs.cache-hit }}
- name: Build
run: |
.ci/build.sh
- name: Docs
run: |
make doccheck

View File

@ -1,16 +1,6 @@
name: Lint
on: [push, pull_request, workflow_dispatch]
env:
FORCE_COLOR: 1
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
on: [push, pull_request]
jobs:
build:
@ -20,12 +10,18 @@ jobs:
name: Lint
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v2
- name: pip cache
uses: actions/cache@v2
with:
persist-credentials: false
path: ~/.cache/pip
key: lint-pip-${{ hashFiles('**/setup.py') }}
restore-keys: |
lint-pip-
- name: pre-commit cache
uses: actions/cache@v4
uses: actions/cache@v2
with:
path: ~/.cache/pre-commit
key: lint-pre-commit-${{ hashFiles('**/.pre-commit-config.yaml') }}
@ -33,24 +29,20 @@ jobs:
lint-pre-commit-
- name: Set up Python
uses: actions/setup-python@v5
uses: actions/setup-python@v2
with:
python-version: "3.x"
cache: pip
cache-dependency-path: "setup.py"
python-version: 3.8
- name: Build system information
run: python3 .github/workflows/system-info.py
run: python .github/workflows/system-info.py
- name: Install dependencies
run: |
python3 -m pip install -U pip
python3 -m pip install -U tox
python -m pip install -U pip
python -m pip install -U tox
- name: Lint
run: tox -e lint
env:
PRE_COMMIT_COLOR: always
- name: Mypy
run: tox -e mypy

View File

@ -2,40 +2,22 @@
set -e
if [[ "$ImageOS" == "macos13" ]]; then
brew uninstall gradle maven
fi
brew install \
aom \
dav1d \
freetype \
ghostscript \
jpeg-turbo \
libimagequant \
libraqm \
libtiff \
little-cms2 \
openjpeg \
rav1e \
svt-av1 \
webp
export PKG_CONFIG_PATH="/usr/local/opt/openblas/lib/pkgconfig"
brew install libtiff libjpeg openjpeg libimagequant webp little-cms2 freetype openblas
python3 -m pip install coverage
python3 -m pip install defusedxml
python3 -m pip install ipython
python3 -m pip install olefile
python3 -m pip install -U pytest
python3 -m pip install -U pytest-cov
python3 -m pip install -U pytest-timeout
python3 -m pip install pyroma
python3 -m pip install numpy
# optional test dependency, only install if there's a binary package.
# fails on beta 3.14 and PyPy
python3 -m pip install --only-binary=:all: pyarrow || true
PYTHONOPTIMIZE=0 pip install cffi
pip install coverage
pip install olefile
pip install -U pytest
pip install -U pytest-cov
pip install pyroma
pip install test-image-results
# libavif
pushd depends && ./install_libavif.sh && popd
echo -e "[openblas]\nlibraries = openblas\nlibrary_dirs = /usr/local/opt/openblas/lib" >> ~/.numpy-site.cfg
pip install numpy
# TODO Remove when 3.8 / 3.9 includes setuptools 49.3.2+:
if [ "$GHA_PYTHON_VERSION" == "3.8" ]; then pip install -U "setuptools>=49.3.2" ; fi
if [ "$GHA_PYTHON_VERSION" == "3.9" ]; then pip install -U "setuptools>=49.3.2" ; fi
# extra test images
pushd depends && ./install_extra_test_images.sh && popd

View File

@ -1,28 +0,0 @@
name: Release drafter
on:
push:
# branches to consider in the event; optional, defaults to all
branches:
- main
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
update_release_draft:
permissions:
contents: write # for release-drafter/release-drafter to create a github release
pull-requests: write # for release-drafter/release-drafter to add label to PR
if: github.repository == 'python-pillow/Pillow'
runs-on: ubuntu-latest
steps:
# Drafts your next release notes as pull requests are merged into "main"
- uses: release-drafter/release-drafter@v6
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@ -1,33 +0,0 @@
name: Close stale issues
on:
schedule:
- cron: "10 0 * * *"
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
stale:
if: github.repository_owner == 'python-pillow'
permissions:
issues: write
runs-on: ubuntu-latest
steps:
- name: "Check issues"
uses: actions/stale@v9
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
only-labels: "Awaiting OP Action"
close-issue-message: "Closing this issue as no feedback has been received."
days-before-stale: 7
days-before-issue-close: 0
days-before-pr-close: -1
labels-to-remove-when-unstale: "Awaiting OP Action"

View File

@ -6,9 +6,6 @@ This sort of info is missing from GitHub Actions.
Requested here:
https://github.com/actions/virtual-environments/issues/79
"""
from __future__ import annotations
import os
import platform
import sys

View File

@ -1,150 +0,0 @@
name: Test Cygwin
on:
push:
branches:
- "**"
paths-ignore:
- ".github/workflows/docs.yml"
- ".github/workflows/wheels*"
- ".gitmodules"
- "docs/**"
- "wheels/**"
pull_request:
paths-ignore:
- ".github/workflows/docs.yml"
- ".github/workflows/wheels*"
- ".gitmodules"
- "docs/**"
- "wheels/**"
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
COVERAGE_CORE: sysmon
jobs:
build:
runs-on: windows-latest
strategy:
fail-fast: false
matrix:
python-minor-version: [9]
timeout-minutes: 40
name: Python 3.${{ matrix.python-minor-version }}
steps:
- name: Fix line endings
run: |
git config --global core.autocrlf input
- name: Checkout Pillow
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Install Cygwin
uses: cygwin/cygwin-install-action@v6
with:
packages: >
gcc-g++
ghostscript
git
ImageMagick
jpeg
libfreetype-devel
libimagequant-devel
libjpeg-devel
liblapack-devel
liblcms2-devel
libopenjp2-devel
libraqm-devel
libtiff-devel
libwebp-devel
libxcb-devel
libxcb-xinerama0
make
netpbm
perl
python3${{ matrix.python-minor-version }}-cython
python3${{ matrix.python-minor-version }}-devel
python3${{ matrix.python-minor-version }}-ipython
python3${{ matrix.python-minor-version }}-numpy
python3${{ matrix.python-minor-version }}-sip
python3${{ matrix.python-minor-version }}-tkinter
wget
xorg-server-extra
zlib-devel
- name: Add Lapack to PATH
uses: egor-tensin/cleanup-path@v4
with:
dirs: 'C:\cygwin\bin;C:\cygwin\lib\lapack'
- name: pip cache
uses: actions/cache@v4
with:
path: 'C:\cygwin\home\runneradmin\.cache\pip'
key: ${{ runner.os }}-cygwin-pip3.${{ matrix.python-minor-version }}-${{ hashFiles('.ci/install.sh') }}
restore-keys: |
${{ runner.os }}-cygwin-pip3.${{ matrix.python-minor-version }}-
- name: Build system information
run: |
dash.exe -c "python3 .github/workflows/system-info.py"
- name: Install dependencies
run: |
bash.exe .ci/install.sh
- name: Build
shell: bash.exe -eo pipefail -o igncr "{0}"
run: |
.ci/build.sh
- name: Test
run: |
bash.exe xvfb-run -s '-screen 0 1024x768x24' .ci/test.sh
- name: Prepare to upload errors
if: failure()
run: |
dash.exe -c "mkdir -p Tests/errors"
- name: Upload errors
uses: actions/upload-artifact@v4
if: failure()
with:
name: errors
path: Tests/errors
- name: After success
run: |
bash.exe .ci/after_success.sh
rm C:\cygwin\bin\bash.EXE
- name: Upload coverage
uses: codecov/codecov-action@v5
with:
files: ./coverage.xml
flags: GHA_Cygwin
name: Cygwin Python 3.${{ matrix.python-minor-version }}
token: ${{ secrets.CODECOV_ORG_TOKEN }}
success:
permissions:
contents: none
needs: build
runs-on: ubuntu-latest
name: Cygwin Test Successful
steps:
- name: Success
run: echo Cygwin Test Successful

View File

@ -1,83 +1,37 @@
name: Test Docker
on:
push:
branches:
- "**"
paths-ignore:
- ".github/workflows/docs.yml"
- ".github/workflows/wheels*"
- ".gitmodules"
- "docs/**"
- "wheels/**"
pull_request:
paths-ignore:
- ".github/workflows/docs.yml"
- ".github/workflows/wheels*"
- ".gitmodules"
- "docs/**"
- "wheels/**"
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
on: [push, pull_request]
jobs:
build:
runs-on: ${{ matrix.os }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
os: ["ubuntu-latest"]
docker: [
# Run slower jobs first to give them a headstart and reduce waiting time
ubuntu-24.04-noble-ppc64le,
ubuntu-24.04-noble-s390x,
# Then run the remainder
alpine,
amazon-2-amd64,
amazon-2023-amd64,
arch,
centos-stream-9-amd64,
centos-stream-10-amd64,
debian-12-bookworm-x86,
debian-12-bookworm-amd64,
fedora-41-amd64,
fedora-42-amd64,
gentoo,
ubuntu-22.04-jammy-amd64,
ubuntu-24.04-noble-amd64,
ubuntu-18.04-bionic-amd64,
ubuntu-20.04-focal-amd64,
debian-10-buster-x86,
centos-6-amd64,
centos-7-amd64,
centos-8-amd64,
amazon-1-amd64,
amazon-2-amd64,
fedora-31-amd64,
fedora-32-amd64,
]
dockerTag: [main]
include:
- docker: "ubuntu-24.04-noble-ppc64le"
qemu-arch: "ppc64le"
- docker: "ubuntu-24.04-noble-s390x"
qemu-arch: "s390x"
- docker: "ubuntu-24.04-noble-arm64v8"
os: "ubuntu-24.04-arm"
dockerTag: main
dockerTag: [master]
name: ${{ matrix.docker }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/checkout@v2
- name: Build system information
run: python3 .github/workflows/system-info.py
- name: Set up QEMU
if: "matrix.qemu-arch"
uses: docker/setup-qemu-action@v3
with:
platforms: ${{ matrix.qemu-arch }}
run: python .github/workflows/system-info.py
- name: Docker pull
run: |
@ -85,33 +39,30 @@ jobs:
- name: Docker build
run: |
# The Pillow user in the docker container is UID 1001
sudo chown -R 1001 $GITHUB_WORKSPACE
# The Pillow user in the docker container is UID 1000
sudo chown -R 1000 $GITHUB_WORKSPACE
docker run --name pillow_container -v $GITHUB_WORKSPACE:/Pillow pythonpillow/${{ matrix.docker }}:${{ matrix.dockerTag }}
sudo chown -R runner $GITHUB_WORKSPACE
- name: After success
run: |
PATH="$PATH:~/.local/bin"
docker start pillow_container
sudo docker cp pillow_container:/Pillow /Pillow
sudo chown -R runner /Pillow
pil_path=`docker exec pillow_container /vpy3/bin/python -c 'import os, PIL;print(os.path.realpath(os.path.dirname(PIL.__file__)))'`
docker stop pillow_container
sudo mkdir -p $pil_path
sudo cp src/PIL/*.py $pil_path
cd /Pillow
.ci/after_success.sh
env:
MATRIX_DOCKER: ${{ matrix.docker }}
- name: Upload coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v1
with:
flags: GHA_Docker
name: ${{ matrix.docker }}
token: ${{ secrets.CODECOV_ORG_TOKEN }}
success:
permissions:
contents: none
needs: build
runs-on: ubuntu-latest
name: Docker Test Successful

View File

@ -1,94 +0,0 @@
name: Test MinGW
on:
push:
branches:
- "**"
paths-ignore:
- ".github/workflows/docs.yml"
- ".github/workflows/wheels*"
- ".gitmodules"
- "docs/**"
- "wheels/**"
pull_request:
paths-ignore:
- ".github/workflows/docs.yml"
- ".github/workflows/wheels*"
- ".gitmodules"
- "docs/**"
- "wheels/**"
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
COVERAGE_CORE: sysmon
jobs:
build:
runs-on: windows-latest
defaults:
run:
shell: bash.exe --login -eo pipefail "{0}"
env:
MSYSTEM: MINGW64
CHERE_INVOKING: 1
timeout-minutes: 30
name: "MinGW"
steps:
- name: Checkout Pillow
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up shell
run: echo "C:\msys64\usr\bin\" >> $env:GITHUB_PATH
shell: pwsh
- name: Install dependencies
run: |
pacman -S --noconfirm \
mingw-w64-x86_64-freetype \
mingw-w64-x86_64-gcc \
mingw-w64-x86_64-ghostscript \
mingw-w64-x86_64-lcms2 \
mingw-w64-x86_64-libavif \
mingw-w64-x86_64-libimagequant \
mingw-w64-x86_64-libjpeg-turbo \
mingw-w64-x86_64-libraqm \
mingw-w64-x86_64-libtiff \
mingw-w64-x86_64-libwebp \
mingw-w64-x86_64-openjpeg2 \
mingw-w64-x86_64-python-numpy \
mingw-w64-x86_64-python-olefile \
mingw-w64-x86_64-python-pip \
mingw-w64-x86_64-python-pytest \
mingw-w64-x86_64-python-pytest-cov \
mingw-w64-x86_64-python-pytest-timeout \
mingw-w64-x86_64-python-pyqt6
pushd depends && ./install_extra_test_images.sh && popd
- name: Build Pillow
run: CFLAGS="-coverage" python3 -m pip install .
- name: Test Pillow
run: |
python3 selftest.py --installed
.ci/test.sh
- name: Upload coverage
uses: codecov/codecov-action@v5
with:
files: ./coverage.xml
flags: GHA_Windows
name: "MSYS2 MinGW"
token: ${{ secrets.CODECOV_ORG_TOKEN }}

View File

@ -1,60 +0,0 @@
name: Test Valgrind Memory Leaks
# like the Docker tests, but running valgrind only on *.c/*.h changes.
# this is very expensive. Only run on the pull request.
on:
# push:
# branches:
# - "**"
# paths:
# - ".github/workflows/test-valgrind.yml"
# - "**.c"
# - "**.h"
pull_request:
paths:
- ".github/workflows/test-valgrind.yml"
- "**.c"
- "**.h"
- "depends/docker-test-valgrind-memory.sh"
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
docker: [
ubuntu-22.04-jammy-amd64-valgrind,
]
dockerTag: [main]
name: ${{ matrix.docker }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Build system information
run: python3 .github/workflows/system-info.py
- name: Docker pull
run: |
docker pull pythonpillow/${{ matrix.docker }}:${{ matrix.dockerTag }}
- name: Build and Run Valgrind
run: |
# The Pillow user in the docker container is UID 1001
sudo chown -R 1001 $GITHUB_WORKSPACE
docker run --name pillow_container -e "PILLOW_VALGRIND_TEST=true" -v $GITHUB_WORKSPACE:/Pillow pythonpillow/${{ matrix.docker }}:${{ matrix.dockerTag }} /Pillow/depends/docker-test-valgrind-memory.sh
sudo chown -R runner $GITHUB_WORKSPACE

View File

@ -1,58 +0,0 @@
name: Test Valgrind
# like the Docker tests, but running valgrind only on *.c/*.h changes.
on:
push:
branches:
- "**"
paths:
- ".github/workflows/test-valgrind.yml"
- "**.c"
- "**.h"
pull_request:
paths:
- ".github/workflows/test-valgrind.yml"
- "**.c"
- "**.h"
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
docker: [
ubuntu-22.04-jammy-amd64-valgrind,
]
dockerTag: [main]
name: ${{ matrix.docker }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Build system information
run: python3 .github/workflows/system-info.py
- name: Docker pull
run: |
docker pull pythonpillow/${{ matrix.docker }}:${{ matrix.dockerTag }}
- name: Build and Run Valgrind
run: |
# The Pillow user in the docker container is UID 1001
sudo chown -R 1001 $GITHUB_WORKSPACE
docker run --name pillow_container -e "PILLOW_VALGRIND_TEST=true" -v $GITHUB_WORKSPACE:/Pillow pythonpillow/${{ matrix.docker }}:${{ matrix.dockerTag }}
sudo chown -R runner $GITHUB_WORKSPACE

View File

@ -1,170 +1,120 @@
name: Test Windows
on:
push:
branches:
- "**"
paths-ignore:
- ".github/workflows/docs.yml"
- ".github/workflows/wheels*"
- ".gitmodules"
- "docs/**"
- "wheels/**"
pull_request:
paths-ignore:
- ".github/workflows/docs.yml"
- ".github/workflows/wheels*"
- ".gitmodules"
- "docs/**"
- "wheels/**"
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
COVERAGE_CORE: sysmon
on: [push, pull_request]
jobs:
build:
runs-on: windows-latest
runs-on: windows-2019
strategy:
fail-fast: false
matrix:
python-version: ["pypy3.11", "3.10", "3.11", "3.12", ">=3.13.5", "3.14"]
architecture: ["x64"]
python-version: ["3.6", "3.7", "3.8", "3.9", "pypy3"]
architecture: ["x86", "x64"]
include:
# Test the oldest Python on 32-bit
- { python-version: "3.9", architecture: "x86" }
- architecture: "x86"
platform-vcvars: "x86"
platform-msbuild: "Win32"
- architecture: "x64"
platform-vcvars: "x86_amd64"
platform-msbuild: "x64"
exclude:
# PyPy does not support 64-bit on Windows
- python-version: "pypy3"
architecture: "x64"
timeout-minutes: 30
timeout-minutes: 45
name: Python ${{ matrix.python-version }} (${{ matrix.architecture }})
name: Python ${{ matrix.python-version }} ${{ matrix.architecture }}
steps:
- name: Checkout Pillow
uses: actions/checkout@v4
with:
persist-credentials: false
uses: actions/checkout@v2
- name: Checkout cached dependencies
uses: actions/checkout@v4
uses: actions/checkout@v2
with:
persist-credentials: false
repository: python-pillow/pillow-depends
path: winbuild\depends
- name: Checkout extra test images
uses: actions/checkout@v4
- name: Cache pip
uses: actions/cache@v2
with:
persist-credentials: false
repository: python-pillow/test-images
path: Tests\test-images
path: ~\AppData\Local\pip\Cache
key:
${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.architecture }}-${{ hashFiles('**/.github/workflows/test-windows.yml') }}
restore-keys: |
${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.architecture }}-
${{ runner.os }}-${{ matrix.python-version }}-
# sets env: pythonLocation
- name: Set up Python
uses: actions/setup-python@v5
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
allow-prereleases: true
architecture: ${{ matrix.architecture }}
cache: pip
cache-dependency-path: ".github/workflows/test-windows.yml"
- name: Set up TCL
if: "contains(matrix.python-version, 'pypy')"
run: echo "TCL_LIBRARY=$env:pythonLocation\tcl\tcl8.5" >> $env:GITHUB_ENV
shell: pwsh
- name: Print build system information
run: python3 .github/workflows/system-info.py
run: python .github/workflows/system-info.py
- name: Upgrade pip
run: |
python3 -m pip install --upgrade pip
- name: pip install wheel pytest pytest-cov
run: python -m pip install wheel pytest pytest-cov
- name: Install CPython dependencies
if: "!contains(matrix.python-version, 'pypy') && !contains(matrix.python-version, '3.14') && matrix.architecture != 'x86'"
run: |
python3 -m pip install PyQt6
- name: Install PyArrow dependency
run: |
python3 -m pip install --only-binary=:all: pyarrow || true
# TODO Remove when 3.8 / 3.9 includes setuptools 49.3.2+:
- name: Upgrade setuptools
if: "contains(matrix.python-version, '3.8') || contains(matrix.python-version, '3.9')"
run: python -m pip install -U "setuptools>=49.3.2"
- name: Install dependencies
id: install
run: |
choco install nasm --no-progress
echo "C:\Program Files\NASM" >> $env:GITHUB_PATH
7z x winbuild\depends\nasm-2.14.02-win64.zip "-o$env:RUNNER_WORKSPACE\"
echo "$env:RUNNER_WORKSPACE\nasm-2.14.02" >> $env:GITHUB_PATH
choco install ghostscript --version=10.5.1 --no-progress
echo "C:\Program Files\gs\gs10.05.1\bin" >> $env:GITHUB_PATH
winbuild\depends\gs9533w32.exe /S
echo "C:\Program Files (x86)\gs\gs9.53.3\bin" >> $env:GITHUB_PATH
# Install extra test images
xcopy /S /Y Tests\test-images\* Tests\images
# make cache key depend on VS version
& "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe" `
| find """catalog_buildVersion""" `
| ForEach-Object { $a = $_.split(" ")[1]; echo "vs=$a" >> $env:GITHUB_OUTPUT }
xcopy /s winbuild\depends\test_images\* Tests\images\
shell: pwsh
- name: Cache build
id: build-cache
uses: actions/cache@v4
uses: actions/cache@v2
with:
path: winbuild\build
key:
${{ hashFiles('winbuild\build_prepare.py') }}-${{ hashFiles('.github\workflows\test-windows.yml') }}-${{ env.pythonLocation }}-${{ steps.install.outputs.vs }}
${{ hashFiles('winbuild\build_prepare.py') }}-${{ hashFiles('.github\workflows\test-windows.yml') }}-${{ env.pythonLocation }}
- name: Prepare build
if: steps.build-cache.outputs.cache-hit != 'true'
run: |
& python.exe winbuild\build_prepare.py -v
& python.exe winbuild\build_prepare.py -v --python=$env:pythonLocation --srcdir
shell: pwsh
- name: Build dependencies / libjpeg-turbo
if: steps.build-cache.outputs.cache-hit != 'true'
run: "& winbuild\\build\\build_dep_libjpeg.cmd"
- name: Build dependencies / zlib
if: steps.build-cache.outputs.cache-hit != 'true'
run: "& winbuild\\build\\build_dep_zlib.cmd"
- name: Build dependencies / xz
if: steps.build-cache.outputs.cache-hit != 'true'
run: "& winbuild\\build\\build_dep_xz.cmd"
- name: Build dependencies / WebP
if: steps.build-cache.outputs.cache-hit != 'true'
run: "& winbuild\\build\\build_dep_libwebp.cmd"
- name: Build dependencies / LibTiff
if: steps.build-cache.outputs.cache-hit != 'true'
run: "& winbuild\\build\\build_dep_libtiff.cmd"
# for FreeType CBDT/SBIX font support
- name: Build dependencies / WebP
if: steps.build-cache.outputs.cache-hit != 'true'
run: "& winbuild\\build\\build_dep_libwebp.cmd"
# for FreeType CBDT font support
- name: Build dependencies / libpng
if: steps.build-cache.outputs.cache-hit != 'true'
run: "& winbuild\\build\\build_dep_libpng.cmd"
- name: Build dependencies / libavif
if: steps.build-cache.outputs.cache-hit != 'true' && matrix.architecture == 'x64'
run: "& winbuild\\build\\build_dep_libavif.cmd"
# for FreeType WOFF2 font support
- name: Build dependencies / brotli
if: steps.build-cache.outputs.cache-hit != 'true'
run: "& winbuild\\build\\build_dep_brotli.cmd"
- name: Build dependencies / FreeType
if: steps.build-cache.outputs.cache-hit != 'true'
run: "& winbuild\\build\\build_dep_freetype.cmd"
- name: Build dependencies / LCMS2
if: steps.build-cache.outputs.cache-hit != 'true'
run: "& winbuild\\build\\build_dep_lcms2.cmd"
- name: Build dependencies / OpenJPEG
if: steps.build-cache.outputs.cache-hit != 'true'
run: "& winbuild\\build\\build_dep_openjpeg.cmd"
@ -178,13 +128,14 @@ jobs:
- name: Build dependencies / HarfBuzz
if: steps.build-cache.outputs.cache-hit != 'true'
run: "& winbuild\\build\\build_dep_harfbuzz.cmd"
# Raqm dependencies
- name: Build dependencies / FriBidi
if: steps.build-cache.outputs.cache-hit != 'true'
run: "& winbuild\\build\\build_dep_fribidi.cmd"
- name: Build dependencies / Raqm
if: steps.build-cache.outputs.cache-hit != 'true'
run: "& winbuild\\build\\build_dep_libraqm.cmd"
# trim ~150MB for each job
# trim ~150MB x 9
- name: Optimize build cache
if: steps.build-cache.outputs.cache-hit != 'true'
run: rmdir /S /Q winbuild\build\src
@ -192,31 +143,31 @@ jobs:
- name: Build Pillow
run: |
$FLAGS="-C raqm=vendor -C fribidi=vendor"
cmd /c "winbuild\build\build_env.cmd && $env:pythonLocation\python.exe -m pip install -v $FLAGS .[tests]"
$FLAGS=""
if ('${{ github.event_name }}' -eq 'push') { $FLAGS="--disable-imagequant" }
& winbuild\build\build_pillow.cmd $FLAGS install
& $env:pythonLocation\python.exe selftest.py --installed
shell: pwsh
# skip PyPy for speed
# failing with PyPy3
- name: Enable heap verification
if: "!contains(matrix.python-version, 'pypy')"
run: |
& reg.exe add "HKLM\SOFTWARE\Microsoft\Windows NT\CurrentVersion\Image File Execution Options\python.exe" /v "GlobalFlag" /t REG_SZ /d "0x02000000" /f
run: "& 'C:\\Program Files (x86)\\Windows Kits\\10\\Debuggers\\x86\\gflags.exe' /p /enable $env:pythonLocation\\python.exe"
- name: Test Pillow
run: |
path %GITHUB_WORKSPACE%\winbuild\build\bin;%PATH%
.ci\test.cmd
path %GITHUB_WORKSPACE%\\winbuild\\build\\bin;%PATH%
python.exe -m pytest -vx -W always --cov PIL --cov Tests --cov-report term --cov-report xml Tests
shell: cmd
- name: Prepare to upload errors
if: failure()
run: |
mkdir -p Tests/errors
shell: bash
shell: pwsh
- name: Upload errors
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v2
if: failure()
with:
name: errors
@ -228,17 +179,101 @@ jobs:
shell: pwsh
- name: Upload coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v1
with:
files: ./coverage.xml
file: ./coverage.xml
flags: GHA_Windows
name: ${{ runner.os }} Python ${{ matrix.python-version }}
token: ${{ secrets.CODECOV_ORG_TOKEN }}
name: ${{ runner.os }} Python ${{ matrix.python-version }} ${{ matrix.architecture }}
- name: Build wheel
id: wheel
if: "github.event_name == 'push'"
run: |
for /f "tokens=3 delims=/" %%a in ("${{ github.ref }}") do echo ::set-output name=dist::dist-%%a
winbuild\\build\\build_pillow.cmd --disable-imagequant bdist_wheel
shell: cmd
- uses: actions/upload-artifact@v2
if: "github.event_name == 'push'"
with:
name: ${{ steps.wheel.outputs.dist }}
path: dist\*.whl
msys:
runs-on: windows-2019
strategy:
fail-fast: false
matrix:
mingw: ["MINGW32", "MINGW64"]
include:
- mingw: "MINGW32"
name: "MSYS2 MinGW 32-bit"
package: "mingw-w64-i686"
- mingw: "MINGW64"
name: "MSYS2 MinGW 64-bit"
package: "mingw-w64-x86_64"
defaults:
run:
shell: bash.exe --login -eo pipefail "{0}"
env:
MSYSTEM: ${{ matrix.mingw }}
CHERE_INVOKING: 1
timeout-minutes: 30
name: ${{ matrix.name }}
steps:
- uses: actions/checkout@v2
- name: Set up shell
run: echo "C:\msys64\usr\bin\" >> $env:GITHUB_PATH
shell: pwsh
- name: Install Dependencies
run: |
pacman -S --noconfirm \
${{ matrix.package }}-python3-cffi \
${{ matrix.package }}-python3-numpy \
${{ matrix.package }}-python3-olefile \
${{ matrix.package }}-python3-pip \
${{ matrix.package }}-python3-pyqt5 \
${{ matrix.package }}-python3-pytest \
${{ matrix.package }}-python3-pytest-cov \
${{ matrix.package }}-python3-setuptools \
${{ matrix.package }}-freetype \
${{ matrix.package }}-ghostscript \
${{ matrix.package }}-lcms2 \
${{ matrix.package }}-libimagequant \
${{ matrix.package }}-libjpeg-turbo \
${{ matrix.package }}-libraqm \
${{ matrix.package }}-libtiff \
${{ matrix.package }}-libwebp \
${{ matrix.package }}-openjpeg2 \
subversion
python3 -m pip install pyroma
pushd depends && ./install_extra_test_images.sh && popd
- name: Build Pillow
run: CFLAGS="-coverage" python3 setup.py build_ext install
- name: Test Pillow
run: |
python3 selftest.py --installed
python3 -m pytest -vx --cov PIL --cov Tests --cov-report term --cov-report xml Tests
- name: Upload coverage
run: |
python3 -m pip install codecov
bash <(curl -s https://codecov.io/bash) -F GHA_Windows
env:
CODECOV_NAME: ${{ matrix.name }}
success:
permissions:
contents: none
needs: build
needs: [build, msys]
runs-on: ubuntu-latest
name: Windows Test Successful
steps:

View File

@ -1,34 +1,6 @@
name: Test
on:
push:
branches:
- "**"
paths-ignore:
- ".github/workflows/docs.yml"
- ".github/workflows/wheels*"
- ".gitmodules"
- "docs/**"
- "wheels/**"
pull_request:
paths-ignore:
- ".github/workflows/docs.yml"
- ".github/workflows/wheels*"
- ".gitmodules"
- "docs/**"
- "wheels/**"
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
COVERAGE_CORE: sysmon
FORCE_COLOR: 1
on: [push, pull_request]
jobs:
build:
@ -37,64 +9,54 @@ jobs:
fail-fast: false
matrix:
os: [
"macos-latest",
"ubuntu-latest",
"macOS-latest",
]
python-version: [
"pypy3.11",
"3.14t",
"3.14",
"3.13t",
"3.13",
"3.12",
"3.11",
"3.10",
"pypy3",
"3.9",
"3.8",
"3.7",
"3.6",
]
include:
- { python-version: "3.11", PYTHONOPTIMIZE: 1, REVERSE: "--reverse" }
- { python-version: "3.10", PYTHONOPTIMIZE: 2 }
# Free-threaded
- { python-version: "3.14t", disable-gil: true }
- { python-version: "3.13t", disable-gil: true }
# M1 only available for 3.10+
- { os: "macos-13", python-version: "3.9" }
exclude:
- { os: "macos-latest", python-version: "3.9" }
- python-version: "3.6"
env: PYTHONOPTIMIZE=1
- python-version: "3.7"
env: PYTHONOPTIMIZE=2
# Include new variables for Codecov
- os: ubuntu-latest
codecov-flag: GHA_Ubuntu
- os: macOS-latest
codecov-flag: GHA_macOS
runs-on: ${{ matrix.os }}
name: ${{ matrix.os }} Python ${{ matrix.python-version }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
allow-prereleases: true
cache: pip
cache-dependency-path: |
".ci/*.sh"
"pyproject.toml"
- name: Set PYTHON_GIL
if: "${{ matrix.disable-gil }}"
- name: Get pip cache dir
id: pip-cache
run: |
echo "PYTHON_GIL=0" >> $GITHUB_ENV
echo "::set-output name=dir::$(pip cache dir)"
- name: pip cache
uses: actions/cache@v2
with:
path: ${{ steps.pip-cache.outputs.dir }}
key:
${{ matrix.os }}-${{ matrix.python-version }}-${{ hashFiles('**/.ci/*.sh') }}
restore-keys: |
${{ matrix.os }}-${{ matrix.python-version }}-
- name: Build system information
run: python3 .github/workflows/system-info.py
- name: Cache libimagequant
if: startsWith(matrix.os, 'ubuntu')
uses: actions/cache@v4
id: cache-libimagequant
with:
path: ~/cache-libimagequant
key: ${{ runner.os }}-libimagequant-${{ hashFiles('depends/install_imagequant.sh') }}
run: python .github/workflows/system-info.py
- name: Install Linux dependencies
if: startsWith(matrix.os, 'ubuntu')
@ -102,7 +64,6 @@ jobs:
.ci/install.sh
env:
GHA_PYTHON_VERSION: ${{ matrix.python-version }}
GHA_LIBIMAGEQUANT_CACHE_HIT: ${{ steps.cache-libimagequant.outputs.cache-hit }}
- name: Install macOS dependencies
if: startsWith(matrix.os, 'macOS')
@ -111,56 +72,43 @@ jobs:
env:
GHA_PYTHON_VERSION: ${{ matrix.python-version }}
- name: Register gcc problem matcher
if: "matrix.os == 'ubuntu-latest' && matrix.python-version == '3.13'"
run: echo "::add-matcher::.github/problem-matchers/gcc.json"
- name: Build
run: |
.ci/build.sh
- name: Test
run: |
if [ $REVERSE ]; then
python3 -m pip install pytest-reverse
fi
if [ "${{ matrix.os }}" = "ubuntu-latest" ]; then
xvfb-run -s '-screen 0 1024x768x24' sway&
export WAYLAND_DISPLAY=wayland-1
.ci/test.sh
else
.ci/test.sh
fi
env:
PYTHONOPTIMIZE: ${{ matrix.PYTHONOPTIMIZE }}
REVERSE: ${{ matrix.REVERSE }}
.ci/test.sh
- name: Prepare to upload errors
if: failure()
run: |
mkdir -p Tests/errors
shell: pwsh
- name: Upload errors
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v2
if: failure()
with:
name: errors
path: Tests/errors
- name: Docs
if: startsWith(matrix.os, 'ubuntu') && matrix.python-version == 3.8
run: |
pip install sphinx-removed-in sphinx-rtd-theme
make doccheck
- name: After success
run: |
.ci/after_success.sh
- name: Upload coverage
uses: codecov/codecov-action@v5
with:
flags: ${{ matrix.os == 'ubuntu-latest' && 'GHA_Ubuntu' || 'GHA_macOS' }}
name: ${{ matrix.os }} Python ${{ matrix.python-version }}
token: ${{ secrets.CODECOV_ORG_TOKEN }}
run: bash <(curl -s https://codecov.io/bash) -F ${{ matrix.codecov-flag }}
env:
CODECOV_NAME: ${{ matrix.os }} Python ${{ matrix.python-version }}
success:
permissions:
contents: none
needs: build
runs-on: ubuntu-latest
name: Test Successful

View File

@ -1,403 +0,0 @@
#!/bin/bash
# Safety check - Pillow builds require that CIBW_ARCHS is set, and that it only
# contains a single value (even though cibuildwheel allows multiple values in
# CIBW_ARCHS). This check doesn't work on Linux because of how the CIBW_ARCHS
# variable is exposed.
function check_cibw_archs {
if [[ -z "$CIBW_ARCHS" ]]; then
echo "ERROR: Pillow builds require CIBW_ARCHS be defined."
exit 1
fi
if [[ "$CIBW_ARCHS" == *" "* ]]; then
echo "ERROR: Pillow builds only support a single architecture in CIBW_ARCHS."
exit 1
fi
}
# Setup that needs to be done before multibuild utils are invoked. Process
# potential cross-build platforms before native platforms to ensure that we pick
# up the cross environment.
PROJECTDIR=$(pwd)
if [[ "$CIBW_PLATFORM" == "ios" ]]; then
check_cibw_archs
# On iOS, CIBW_ARCHS is actually a multi-arch - arm64_iphoneos,
# arm64_iphonesimulator or x86_64_iphonesimulator. Split into the CPU
# platform, and the iOS SDK.
PLAT=$(echo $CIBW_ARCHS | sed "s/\(.*\)_\(.*\)/\1/")
IOS_SDK=$(echo $CIBW_ARCHS | sed "s/\(.*\)_\(.*\)/\2/")
# Build iOS builds in `build/iphoneos` or `build/iphonesimulator`
# (depending on the build target). Install them into `build/deps/iphoneos`
# or `build/deps/iphonesimulator`
WORKDIR=$(pwd)/build/$IOS_SDK
BUILD_PREFIX=$(pwd)/build/deps/$IOS_SDK
PATCH_DIR=$(pwd)/patches/iOS
# GNU tooling insists on using aarch64 rather than arm64
if [[ $PLAT == "arm64" ]]; then
GNU_ARCH=aarch64
else
GNU_ARCH=x86_64
fi
IOS_SDK_PATH=$(xcrun --sdk $IOS_SDK --show-sdk-path)
CMAKE_SYSTEM_NAME=iOS
IOS_HOST_TRIPLE=$PLAT-apple-ios$IPHONEOS_DEPLOYMENT_TARGET
if [[ "$IOS_SDK" == "iphonesimulator" ]]; then
IOS_HOST_TRIPLE=$IOS_HOST_TRIPLE-simulator
fi
# GNU Autotools doesn't recognize the existence of arm64-apple-ios-simulator
# as a valid host. However, the only difference between arm64-apple-ios and
# arm64-apple-ios-simulator is the choice of sysroot, and that is
# coordinated by CC, CFLAGS etc. From the perspective of configure, the two
# platforms are identical, so we can use arm64-apple-ios consistently.
# This (mostly) avoids us needing to patch config.sub in dependency sources.
HOST_CONFIGURE_FLAGS="--disable-shared --enable-static --host=$GNU_ARCH-apple-ios --build=$GNU_ARCH-apple-darwin"
# CMake has native support for iOS. However, most of that support is based
# on using the Xcode builder, which isn't very helpful for most of Pillow's
# dependencies. Therefore, we lean on the OSX configurations, plus CC, CFLAGS
# etc. to ensure the right sysroot is selected.
HOST_CMAKE_FLAGS="-DCMAKE_SYSTEM_NAME=$CMAKE_SYSTEM_NAME -DCMAKE_SYSTEM_PROCESSOR=$GNU_ARCH -DCMAKE_OSX_DEPLOYMENT_TARGET=$IPHONEOS_DEPLOYMENT_TARGET -DCMAKE_OSX_SYSROOT=$IOS_SDK_PATH -DBUILD_SHARED_LIBS=NO -DENABLE_SHARED=NO"
# Meson needs to be pointed at a cross-platform configuration file
# This will be generated once CC etc. have been evaluated.
HOST_MESON_FLAGS="--cross-file $WORKDIR/meson-cross.txt -Dprefer_static=true -Ddefault_library=static"
elif [[ "$(uname -s)" == "Darwin" ]]; then
check_cibw_archs
# Build macOS dependencies in `build/darwin`
# Install them into `build/deps/darwin`
PLAT=$CIBW_ARCHS
WORKDIR=$(pwd)/build/darwin
BUILD_PREFIX=$(pwd)/build/deps/darwin
else
# Build prefix will default to /usr/local
PLAT="${CIBW_ARCHS:-$AUDITWHEEL_ARCH}"
WORKDIR=$(pwd)/build
MB_ML_LIBC=${AUDITWHEEL_POLICY::9}
MB_ML_VER=${AUDITWHEEL_POLICY:9}
fi
# Define custom utilities
source wheels/multibuild/common_utils.sh
source wheels/multibuild/library_builders.sh
if [[ -z "$IS_MACOS" ]]; then
source wheels/multibuild/manylinux_utils.sh
fi
ARCHIVE_SDIR=pillow-depends-main
# Package versions for fresh source builds. Version numbers with "Patched"
# annotations have a source code patch that is required for some platforms. If
# you change those versions, ensure the patch is also updated.
FREETYPE_VERSION=2.13.3
HARFBUZZ_VERSION=11.2.1
LIBPNG_VERSION=1.6.50
JPEGTURBO_VERSION=3.1.1
OPENJPEG_VERSION=2.5.3
XZ_VERSION=5.8.1
TIFF_VERSION=4.7.0
LCMS2_VERSION=2.17
ZLIB_VERSION=1.3.1
ZLIB_NG_VERSION=2.2.4
LIBWEBP_VERSION=1.6.0
BZIP2_VERSION=1.0.8
LIBXCB_VERSION=1.17.0
BROTLI_VERSION=1.1.0 # Patched; next release won't need patching. See patch file.
LIBAVIF_VERSION=1.3.0
function build_pkg_config {
if [ -e pkg-config-stamp ]; then return; fi
# This essentially duplicates the Homebrew recipe.
# On iOS, we need a binary that can be executed on the build machine; but we
# can create a host-specific pc-path to store iOS .pc files. To ensure a
# macOS-compatible build, we temporarily clear environment flags that set
# iOS-specific values.
if [[ -n "$IOS_SDK" ]]; then
ORIGINAL_HOST_CONFIGURE_FLAGS=$HOST_CONFIGURE_FLAGS
ORIGINAL_IPHONEOS_DEPLOYMENT_TARGET=$IPHONEOS_DEPLOYMENT_TARGET
unset HOST_CONFIGURE_FLAGS
unset IPHONEOS_DEPLOYMENT_TARGET
fi
CFLAGS="$CFLAGS -Wno-int-conversion" CPPFLAGS="" build_simple pkg-config 0.29.2 https://pkg-config.freedesktop.org/releases tar.gz \
--disable-debug --disable-host-tool --with-internal-glib \
--with-pc-path=$BUILD_PREFIX/share/pkgconfig:$BUILD_PREFIX/lib/pkgconfig \
--with-system-include-path=$(xcrun --show-sdk-path --sdk macosx)/usr/include
if [[ -n "$IOS_SDK" ]]; then
HOST_CONFIGURE_FLAGS=$ORIGINAL_HOST_CONFIGURE_FLAGS
IPHONEOS_DEPLOYMENT_TARGET=$ORIGINAL_IPHONEOS_DEPLOYMENT_TARGET
fi;
export PKG_CONFIG=$BUILD_PREFIX/bin/pkg-config
touch pkg-config-stamp
}
function build_zlib_ng {
if [ -e zlib-stamp ]; then return; fi
# zlib-ng uses a "configure" script, but it's not a GNU autotools script, so
# it doesn't honor the usual flags. Temporarily disable any
# cross-compilation flags.
ORIGINAL_HOST_CONFIGURE_FLAGS=$HOST_CONFIGURE_FLAGS
unset HOST_CONFIGURE_FLAGS
build_github zlib-ng/zlib-ng $ZLIB_NG_VERSION --zlib-compat
HOST_CONFIGURE_FLAGS=$ORIGINAL_HOST_CONFIGURE_FLAGS
if [[ -n "$IS_MACOS" ]] && [[ -z "$IOS_SDK" ]]; then
# Ensure that on macOS, the library name is an absolute path, not an
# @rpath, so that delocate picks up the right library (and doesn't need
# DYLD_LIBRARY_PATH to be set). The default Makefile doesn't have an
# option to control the install_name. This isn't needed on iOS, as iOS
# only builds the static library.
install_name_tool -id $BUILD_PREFIX/lib/libz.1.dylib $BUILD_PREFIX/lib/libz.1.dylib
fi
touch zlib-stamp
}
function build_brotli {
if [ -e brotli-stamp ]; then return; fi
local out_dir=$(fetch_unpack https://github.com/google/brotli/archive/v$BROTLI_VERSION.tar.gz brotli-$BROTLI_VERSION.tar.gz)
(cd $out_dir \
&& cmake -DCMAKE_INSTALL_PREFIX=$BUILD_PREFIX -DCMAKE_INSTALL_LIBDIR=$BUILD_PREFIX/lib -DCMAKE_INSTALL_NAME_DIR=$BUILD_PREFIX/lib $HOST_CMAKE_FLAGS . \
&& make install)
touch brotli-stamp
}
function build_harfbuzz {
if [ -e harfbuzz-stamp ]; then return; fi
python3 -m pip install meson ninja
local out_dir=$(fetch_unpack https://github.com/harfbuzz/harfbuzz/releases/download/$HARFBUZZ_VERSION/harfbuzz-$HARFBUZZ_VERSION.tar.xz harfbuzz-$HARFBUZZ_VERSION.tar.xz)
(cd $out_dir \
&& meson setup build --prefix=$BUILD_PREFIX --libdir=$BUILD_PREFIX/lib --buildtype=minsize -Dfreetype=enabled -Dglib=disabled -Dtests=disabled $HOST_MESON_FLAGS)
(cd $out_dir/build \
&& meson install)
touch harfbuzz-stamp
}
function build_libavif {
if [ -e libavif-stamp ]; then return; fi
python3 -m pip install meson ninja
if [[ "$PLAT" == "x86_64" ]] || [ -n "$SANITIZER" ]; then
build_simple nasm 2.16.03 https://www.nasm.us/pub/nasm/releasebuilds/2.16.03
fi
local build_type=MinSizeRel
local lto=ON
local libavif_cmake_flags
if [ -n "$IS_MACOS" ]; then
lto=OFF
libavif_cmake_flags=(
-DCMAKE_C_FLAGS_MINSIZEREL="-Oz -DNDEBUG -flto" \
-DCMAKE_CXX_FLAGS_MINSIZEREL="-Oz -DNDEBUG -flto" \
-DCMAKE_SHARED_LINKER_FLAGS_INIT="-Wl,-S,-x,-dead_strip_dylibs" \
)
else
if [[ "$MB_ML_VER" == 2014 ]] && [[ "$PLAT" == "x86_64" ]]; then
build_type=Release
fi
libavif_cmake_flags=(-DCMAKE_SHARED_LINKER_FLAGS_INIT="-Wl,--strip-all,-z,relro,-z,now")
fi
local out_dir=$(fetch_unpack https://github.com/AOMediaCodec/libavif/archive/refs/tags/v$LIBAVIF_VERSION.tar.gz libavif-$LIBAVIF_VERSION.tar.gz)
# CONFIG_AV1_HIGHBITDEPTH=0 is a flag for libaom (included as a subproject
# of libavif) that disables support for encoding high bit depth images.
(cd $out_dir \
&& cmake \
-DCMAKE_INSTALL_PREFIX=$BUILD_PREFIX \
-DCMAKE_INSTALL_LIBDIR=$BUILD_PREFIX/lib \
-DCMAKE_INSTALL_NAME_DIR=$BUILD_PREFIX/lib \
-DBUILD_SHARED_LIBS=ON \
-DAVIF_LIBSHARPYUV=LOCAL \
-DAVIF_LIBYUV=LOCAL \
-DAVIF_CODEC_AOM=LOCAL \
-DCONFIG_AV1_HIGHBITDEPTH=0 \
-DAVIF_CODEC_AOM_DECODE=OFF \
-DAVIF_CODEC_DAV1D=LOCAL \
-DCMAKE_INTERPROCEDURAL_OPTIMIZATION=$lto \
-DCMAKE_C_VISIBILITY_PRESET=hidden \
-DCMAKE_CXX_VISIBILITY_PRESET=hidden \
-DCMAKE_BUILD_TYPE=$build_type \
"${libavif_cmake_flags[@]}" \
. \
&& make install)
touch libavif-stamp
}
function build {
build_xz
if [ -z "$IS_ALPINE" ] && [ -z "$SANITIZER" ] && [ -z "$IS_MACOS" ]; then
yum remove -y zlib-devel
fi
if [[ -n "$IS_MACOS" ]] && [[ "$MACOSX_DEPLOYMENT_TARGET" == "10.10" || "$MACOSX_DEPLOYMENT_TARGET" == "10.13" ]]; then
build_new_zlib
else
build_zlib_ng
fi
build_simple xcb-proto 1.17.0 https://xorg.freedesktop.org/archive/individual/proto
if [[ -n "$IS_MACOS" ]]; then
build_simple xorgproto 2024.1 https://www.x.org/pub/individual/proto
build_simple libXau 1.0.12 https://www.x.org/pub/individual/lib
build_simple libpthread-stubs 0.5 https://xcb.freedesktop.org/dist
else
sed "s/\${pc_sysrootdir\}//" $BUILD_PREFIX/share/pkgconfig/xcb-proto.pc > $BUILD_PREFIX/lib/pkgconfig/xcb-proto.pc
fi
build_simple libxcb $LIBXCB_VERSION https://www.x.org/releases/individual/lib
build_libjpeg_turbo
if [[ -n "$IS_MACOS" ]]; then
# Custom tiff build to include jpeg; by default, configure won't include
# headers/libs in the custom macOS/iOS prefix. Explicitly disable webp,
# libdeflate and zstd, because on x86_64 macs, it will pick up the
# Homebrew versions of those libraries from /usr/local.
build_simple tiff $TIFF_VERSION https://download.osgeo.org/libtiff tar.gz \
--with-jpeg-include-dir=$BUILD_PREFIX/include --with-jpeg-lib-dir=$BUILD_PREFIX/lib \
--disable-webp --disable-libdeflate --disable-zstd
else
build_tiff
fi
if [[ -z "$IOS_SDK" ]]; then
# Short term workaround; don't build libavif on iOS
build_libavif
fi
build_libpng
build_lcms2
build_openjpeg
webp_cflags="-O3 -DNDEBUG"
if [[ -n "$IS_MACOS" ]]; then
webp_cflags="$webp_cflags -Wl,-headerpad_max_install_names"
fi
webp_ldflags=""
if [[ -n "$IOS_SDK" ]]; then
webp_ldflags="$webp_ldflags -llzma -lz"
fi
CFLAGS="$CFLAGS $webp_cflags" LDFLAGS="$LDFLAGS $webp_ldflags" build_simple libwebp $LIBWEBP_VERSION \
https://storage.googleapis.com/downloads.webmproject.org/releases/webp tar.gz \
--enable-libwebpmux --enable-libwebpdemux
build_brotli
if [[ -n "$IS_MACOS" ]]; then
# Custom freetype build
build_simple freetype $FREETYPE_VERSION https://download.savannah.gnu.org/releases/freetype tar.gz --with-harfbuzz=no
else
build_freetype
fi
if [[ -z "$IOS_SDK" ]]; then
# On iOS, there's no vendor-provided raqm, and we can't ship it due to
# licensing, so there's no point building harfbuzz.
build_harfbuzz
fi
}
function create_meson_cross_config {
cat << EOF > $WORKDIR/meson-cross.txt
[binaries]
pkg-config = '$BUILD_PREFIX/bin/pkg-config'
cmake = '$(which cmake)'
c = '$CC'
cpp = '$CXX'
strip = '$STRIP'
[built-in options]
c_args = '$CFLAGS -I$BUILD_PREFIX/include'
cpp_args = '$CXXFLAGS -I$BUILD_PREFIX/include'
c_link_args = '$CFLAGS -L$BUILD_PREFIX/lib'
cpp_link_args = '$CFLAGS -L$BUILD_PREFIX/lib'
[host_machine]
system = 'darwin'
subsystem = 'ios'
kernel = 'xnu'
cpu_family = '$(uname -m)'
cpu = '$(uname -m)'
endian = 'little'
EOF
}
# Perform all dependency builds in the build subfolder.
mkdir -p $WORKDIR
pushd $WORKDIR > /dev/null
# Any stuff that you need to do before you start building the wheels
# Runs in the root directory of this repository.
if [[ ! -d $WORKDIR/pillow-depends-main ]]; then
if [[ ! -f $PROJECTDIR/pillow-depends-main.zip ]]; then
echo "Download pillow dependency sources..."
curl -fSL -o $PROJECTDIR/pillow-depends-main.zip https://github.com/python-pillow/pillow-depends/archive/main.zip
fi
echo "Unpacking pillow dependency sources..."
untar $PROJECTDIR/pillow-depends-main.zip
fi
if [[ -n "$IS_MACOS" ]]; then
# Ensure the basic structure of the build prefix directory exists.
mkdir -p "$BUILD_PREFIX/bin"
mkdir -p "$BUILD_PREFIX/lib"
# Ensure pkg-config is available. This is done *before* setting CC, CFLAGS
# etc. to ensure that the build is *always* a macOS build, even when building
# for iOS.
build_pkg_config
# Ensure cmake is available, and that the default prefix used by CMake is
# the build prefix
python3 -m pip install cmake
export CMAKE_PREFIX_PATH=$BUILD_PREFIX
if [[ -n "$IOS_SDK" ]]; then
export AR="$(xcrun --find --sdk $IOS_SDK ar)"
export CPP="$(xcrun --find --sdk $IOS_SDK clang) -E"
export CC=$(xcrun --find --sdk $IOS_SDK clang)
export CXX=$(xcrun --find --sdk $IOS_SDK clang++)
export LD=$(xcrun --find --sdk $IOS_SDK ld)
export STRIP=$(xcrun --find --sdk $IOS_SDK strip)
CPPFLAGS="$CPPFLAGS --sysroot=$IOS_SDK_PATH"
CFLAGS="-target $IOS_HOST_TRIPLE --sysroot=$IOS_SDK_PATH -mios-version-min=$IPHONEOS_DEPLOYMENT_TARGET"
CXXFLAGS="-target $IOS_HOST_TRIPLE --sysroot=$IOS_SDK_PATH -mios-version-min=$IPHONEOS_DEPLOYMENT_TARGET"
# Having IPHONEOS_DEPLOYMENT_TARGET in the environment causes problems
# with some cross-building toolchains, because it introduces implicit
# behavior into clang.
unset IPHONEOS_DEPLOYMENT_TARGET
# Now that we know CC etc., we can create a meson cross-configuration file
create_meson_cross_config
fi
fi
wrap_wheel_builder build
# A safety catch for iOS. iOS can't use dynamic libraries, but clang will prefer
# to link dynamic libraries to static libraries. The only way to reliably
# prevent this is to not have dynamic libraries available in the first place.
# The build process *shouldn't* generate any dylibs... but just in case, purge
# any dylibs that *have* been installed into the build prefix directory.
if [[ -n "$IOS_SDK" ]]; then
find "$BUILD_PREFIX" -name "*.dylib" -exec rm -rf {} \;
fi
# Return to the project root to finish the build
popd > /dev/null
# Append licenses
for filename in wheels/dependency_licenses/*; do
echo -e "\n\n----\n\n$(basename $filename | cut -f 1 -d '.')\n" | cat >> LICENSE
cat $filename >> LICENSE
done

View File

@ -1,26 +0,0 @@
param ([string]$venv, [string]$pillow="C:\pillow")
$ErrorActionPreference = 'Stop'
$ProgressPreference = 'SilentlyContinue'
Set-PSDebug -Trace 1
if ("$venv" -like "*\cibw-run-*\pp*-win_amd64\*") {
# unlike CPython, PyPy requires Visual C++ Redistributable to be installed
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
Invoke-WebRequest -Uri 'https://aka.ms/vs/15/release/vc_redist.x64.exe' -OutFile 'C:\vc_redist.x64.exe'
C:\vc_redist.x64.exe /install /quiet /norestart | Out-Null
}
$env:path += ";$pillow\winbuild\build\bin\"
if (Test-Path $venv\Scripts\pypy.exe) {
$python = "pypy.exe"
} else {
$python = "python.exe"
}
& reg add "HKLM\SOFTWARE\Microsoft\Windows NT\CurrentVersion\Image File Execution Options\python.exe" /v "GlobalFlag" /t REG_SZ /d "0x02000000" /f
cd $pillow
& $venv\Scripts\$python -VV
if (!$?) { exit $LASTEXITCODE }
& $venv\Scripts\$python selftest.py
if (!$?) { exit $LASTEXITCODE }
& $venv\Scripts\$python -m pytest -vv -x checks\check_wheel.py
if (!$?) { exit $LASTEXITCODE }
& $venv\Scripts\$python -m pytest -vv -x Tests
if (!$?) { exit $LASTEXITCODE }

View File

@ -1,37 +0,0 @@
#!/bin/bash
set -e
# Ensure fribidi is installed by the system.
if [[ "$OSTYPE" == "darwin"* ]]; then
# If Homebrew is on the path during the build, it may leak into the wheels.
# However, we *do* need Homebrew to provide a copy of fribidi for
# testing purposes so that we can verify the fribidi shim works as expected.
if [[ "$(uname -m)" == "x86_64" ]]; then
HOMEBREW_PREFIX=/usr/local
else
HOMEBREW_PREFIX=/opt/homebrew
fi
$HOMEBREW_PREFIX/bin/brew install fribidi
# Add the lib folder for fribidi so that the vendored library can be found.
# Don't use $HOMEBREW_PREFIX/lib directly - use the lib folder where the
# installed copy of fribidi is cellared. This ensures we don't pick up the
# Homebrew version of any other library that we're dependent on (most notably,
# freetype).
export DYLD_LIBRARY_PATH=$(dirname $(realpath $HOMEBREW_PREFIX/lib/libfribidi.dylib))
elif [ "${AUDITWHEEL_POLICY::9}" == "musllinux" ]; then
apk add curl fribidi
else
yum install -y fribidi
fi
if [ ! -d "test-images-main" ]; then
curl -fsSL -o pillow-test-images.zip https://github.com/python-pillow/test-images/archive/main.zip
unzip pillow-test-images.zip
mv test-images-main/* Tests/images
fi
# Runs tests
python3 selftest.py
python3 -m pytest -vv -x checks/check_wheel.py
python3 -m pytest -vv -x

View File

@ -1,289 +0,0 @@
name: Wheels
on:
schedule:
# ┌───────────── minute (0 - 59)
# │ ┌───────────── hour (0 - 23)
# │ │ ┌───────────── day of the month (1 - 31)
# │ │ │ ┌───────────── month (1 - 12 or JAN-DEC)
# │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT)
# │ │ │ │ │
- cron: "42 1 * * 0,3"
push:
paths:
- ".ci/requirements-cibw.txt"
- ".github/workflows/wheel*"
- "pyproject.toml"
- "setup.py"
- "wheels/*"
- "winbuild/build_prepare.py"
- "winbuild/fribidi.cmake"
tags:
- "*"
pull_request:
paths:
- ".ci/requirements-cibw.txt"
- ".github/workflows/wheel*"
- "pyproject.toml"
- "setup.py"
- "wheels/*"
- "winbuild/build_prepare.py"
- "winbuild/fribidi.cmake"
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
FORCE_COLOR: 1
jobs:
build-native-wheels:
if: github.event_name != 'schedule' || github.repository_owner == 'python-pillow'
name: ${{ matrix.name }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
include:
- name: "macOS 10.10 x86_64"
platform: macos
os: macos-13
cibw_arch: x86_64
build: "cp3{9,10,11}*"
macosx_deployment_target: "10.10"
- name: "macOS 10.13 x86_64"
platform: macos
os: macos-13
cibw_arch: x86_64
build: "cp3{12,13,14}*"
macosx_deployment_target: "10.13"
- name: "macOS 10.15 x86_64"
platform: macos
os: macos-13
cibw_arch: x86_64
build: "pp3*"
macosx_deployment_target: "10.15"
- name: "macOS arm64"
platform: macos
os: macos-latest
cibw_arch: arm64
macosx_deployment_target: "11.0"
- name: "manylinux2014 and musllinux x86_64"
platform: linux
os: ubuntu-latest
cibw_arch: x86_64
manylinux: "manylinux2014"
- name: "manylinux_2_28 x86_64"
platform: linux
os: ubuntu-latest
cibw_arch: x86_64
build: "*manylinux*"
- name: "manylinux2014 and musllinux aarch64"
platform: linux
os: ubuntu-24.04-arm
cibw_arch: aarch64
manylinux: "manylinux2014"
- name: "manylinux_2_28 aarch64"
platform: linux
os: ubuntu-24.04-arm
cibw_arch: aarch64
build: "*manylinux*"
- name: "iOS arm64 device"
platform: ios
os: macos-latest
cibw_arch: arm64_iphoneos
- name: "iOS arm64 simulator"
platform: ios
os: macos-latest
cibw_arch: arm64_iphonesimulator
- name: "iOS x86_64 simulator"
platform: ios
os: macos-13
cibw_arch: x86_64_iphonesimulator
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
submodules: true
- uses: actions/setup-python@v5
with:
python-version: "3.x"
- name: Install cibuildwheel
run: |
python3 -m pip install -r .ci/requirements-cibw.txt
- name: Build wheels
run: |
python3 -m cibuildwheel --output-dir wheelhouse
env:
CIBW_PLATFORM: ${{ matrix.platform }}
CIBW_ARCHS: ${{ matrix.cibw_arch }}
CIBW_BUILD: ${{ matrix.build }}
CIBW_ENABLE: cpython-prerelease cpython-freethreading pypy
CIBW_MANYLINUX_AARCH64_IMAGE: ${{ matrix.manylinux }}
CIBW_MANYLINUX_PYPY_AARCH64_IMAGE: ${{ matrix.manylinux }}
CIBW_MANYLINUX_PYPY_X86_64_IMAGE: ${{ matrix.manylinux }}
CIBW_MANYLINUX_X86_64_IMAGE: ${{ matrix.manylinux }}
MACOSX_DEPLOYMENT_TARGET: ${{ matrix.macosx_deployment_target }}
- uses: actions/upload-artifact@v4
with:
name: dist-${{ matrix.name }}
path: ./wheelhouse/*.whl
windows:
if: github.event_name != 'schedule' || github.repository_owner == 'python-pillow'
name: Windows ${{ matrix.cibw_arch }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
include:
- cibw_arch: x86
os: windows-latest
- cibw_arch: AMD64
os: windows-latest
- cibw_arch: ARM64
os: windows-11-arm
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Checkout extra test images
uses: actions/checkout@v4
with:
persist-credentials: false
repository: python-pillow/test-images
path: Tests\test-images
- uses: actions/setup-python@v5
with:
python-version: "3.x"
- name: Install cibuildwheel
run: |
python.exe -m pip install -r .ci/requirements-cibw.txt
- name: Prepare for build
run: |
choco install nasm --no-progress
echo "C:\Program Files\NASM" >> $env:GITHUB_PATH
# Install extra test images
xcopy /S /Y Tests\test-images\* Tests\images
& python.exe winbuild\build_prepare.py -v --no-imagequant --architecture=${{ matrix.cibw_arch }}
shell: pwsh
- name: Build wheels
run: |
setlocal EnableDelayedExpansion
for %%f in (winbuild\build\license\*) do (
set x=%%~nf
rem Skip FriBiDi license, it is not included in the wheel.
set fribidi=!x:~0,7!
if NOT !fribidi!==fribidi (
rem Skip imagequant license, it is not included in the wheel.
set libimagequant=!x:~0,13!
if NOT !libimagequant!==libimagequant (
echo. >> LICENSE
echo ===== %%~nf ===== >> LICENSE
echo. >> LICENSE
type %%f >> LICENSE
)
)
)
call winbuild\\build\\build_env.cmd
%pythonLocation%\python.exe -m cibuildwheel . --output-dir wheelhouse
env:
CIBW_ARCHS: ${{ matrix.cibw_arch }}
CIBW_BEFORE_ALL: "{package}\\winbuild\\build\\build_dep_all.cmd"
CIBW_CACHE_PATH: "C:\\cibw"
CIBW_ENABLE: cpython-prerelease cpython-freethreading pypy
CIBW_TEST_SKIP: "*-win_arm64"
CIBW_TEST_COMMAND: 'docker run --rm
-v {project}:C:\pillow
-v C:\cibw:C:\cibw
-v %CD%\..\venv-test:%CD%\..\venv-test
-e CI -e GITHUB_ACTIONS
mcr.microsoft.com/windows/servercore:ltsc2022
powershell C:\pillow\.github\workflows\wheels-test.ps1 %CD%\..\venv-test'
shell: cmd
- name: Upload wheels
uses: actions/upload-artifact@v4
with:
name: dist-windows-${{ matrix.cibw_arch }}
path: ./wheelhouse/*.whl
- name: Upload fribidi.dll
uses: actions/upload-artifact@v4
with:
name: fribidi-windows-${{ matrix.cibw_arch }}
path: winbuild\build\bin\fribidi*
sdist:
if: github.event_name != 'schedule'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.x"
- run: make sdist
- uses: actions/upload-artifact@v4
with:
name: dist-sdist
path: dist/*.tar.gz
scientific-python-nightly-wheels-publish:
if: github.repository_owner == 'python-pillow' && (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch')
needs: [build-native-wheels, windows]
runs-on: ubuntu-latest
name: Upload wheels to scientific-python-nightly-wheels
steps:
- uses: actions/download-artifact@v4
with:
pattern: dist-*
path: dist
merge-multiple: true
- name: Upload wheels to scientific-python-nightly-wheels
uses: scientific-python/upload-nightly-action@b36e8c0c10dbcfd2e05bf95f17ef8c14fd708dbf # 0.6.2
with:
artifacts_path: dist
anaconda_nightly_upload_token: ${{ secrets.ANACONDA_ORG_UPLOAD_TOKEN }}
pypi-publish:
if: github.repository_owner == 'python-pillow' && github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
needs: [build-native-wheels, windows, sdist]
runs-on: ubuntu-latest
name: Upload release to PyPI
environment:
name: release-pypi
url: https://pypi.org/p/Pillow
permissions:
id-token: write
steps:
- uses: actions/download-artifact@v4
with:
pattern: dist-*
path: dist
merge-multiple: true
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
attestations: true

7
.github/zizmor.yml vendored
View File

@ -1,7 +0,0 @@
# Configuration for the zizmor static analysis tool, run via pre-commit in CI
# https://woodruffw.github.io/zizmor/configuration/
rules:
unpinned-uses:
config:
policies:
"*": ref-pin

15
.gitignore vendored
View File

@ -19,7 +19,6 @@ lib64/
parts/
sdist/
var/
wheelhouse/
*.egg-info/
.installed.cfg
*.egg
@ -80,20 +79,8 @@ docs/_build/
# JetBrains
.idea
# Extra test images installed from python-pillow/test-images
# Extra test images installed from pillow-depends/test_images
Tests/images/README.md
Tests/images/crash_1.tif
Tests/images/crash_2.tif
Tests/images/crash-81154a65438ba5aaeca73fd502fa4850fbde60f8.tif
Tests/images/string_dimension.tiff
Tests/images/jpeg2000
Tests/images/msp
Tests/images/picins
Tests/images/sunraster
# Test and dependency downloads
pillow-depends-main.zip
pillow-test-images.zip
# pyinstaller
*.spec

3
.gitmodules vendored
View File

@ -1,3 +0,0 @@
[submodule "multibuild"]
path = wheels/multibuild
url = https://github.com/multi-build/multibuild.git

View File

@ -1,92 +1,43 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.2
hooks:
- id: ruff-check
args: [--exit-non-zero-on-fix]
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 25.1.0
- repo: https://github.com/psf/black
rev: e66be67b9b6811913470f70c28b4d50f94d05b22 # frozen: 20.8b1
hooks:
- id: black
args: ["--target-version", "py36"]
# Only .py files, until https://github.com/psf/black/issues/402 resolved
files: \.py$
types: []
- repo: https://github.com/PyCQA/bandit
rev: 1.8.6
- repo: https://github.com/timothycrosley/isort
rev: 377d260ffa6f746693f97b46d95025afc4bd8275 # frozen: 5.4.2
hooks:
- id: bandit
args: [--severity-level=high]
files: ^src/
- id: isort
- repo: https://github.com/asottile/yesqa
rev: 7a009f3ee493c796827ee334f9058b110a0e0db8 # frozen: v1.2.1
hooks:
- id: yesqa
- repo: https://github.com/Lucas-C/pre-commit-hooks
rev: v1.5.5
rev: f30f4974a08a6b2f6a1eeaf30a4d501cf909163a # frozen: v1.1.9
hooks:
- id: remove-tabs
exclude: (Makefile$|\.bat$|\.cmake$|\.eps$|\.fits$|\.gd$|\.opt$|\.patch$)
exclude: (Makefile$|\.bat$|\.cmake$|\.eps$|\.fits$|\.opt$)
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: v20.1.7
- repo: https://gitlab.com/pycqa/flake8
rev: 05f6544aef321e2fee03a1277ce2eef8880fb927 # frozen: 3.8.3
hooks:
- id: clang-format
types: [c]
exclude: ^src/thirdparty/
- id: flake8
additional_dependencies: [flake8-2020, flake8-implicit-str-concat]
- repo: https://github.com/pre-commit/pygrep-hooks
rev: v1.10.0
rev: eae6397e4c259ed3d057511f6dd5330b92867e62 # frozen: v1.6.0
hooks:
- id: python-check-blanket-noqa
- id: rst-backticks
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
rev: e1668fe86af3810fbca72b8653fe478e66a0afdc # frozen: v3.2.0
hooks:
- id: check-executables-have-shebangs
- id: check-shebang-scripts-are-executable
- id: check-merge-conflict
- id: check-json
- id: check-toml
- id: check-yaml
args: [--allow-multiple-documents]
- id: end-of-file-fixer
exclude: ^Tests/images/|\.patch$
- id: trailing-whitespace
exclude: ^.github/.*TEMPLATE|^Tests/(fonts|images)/|\.patch$
- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.33.2
hooks:
- id: check-github-workflows
- id: check-readthedocs
- id: check-renovate
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.11.0
hooks:
- id: zizmor
- repo: https://github.com/sphinx-contrib/sphinx-lint
rev: v1.0.0
hooks:
- id: sphinx-lint
- repo: https://github.com/tox-dev/pyproject-fmt
rev: v2.6.0
hooks:
- id: pyproject-fmt
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.24.1
hooks:
- id: validate-pyproject
additional_dependencies: [trove-classifiers>=2024.10.12]
- repo: https://github.com/tox-dev/tox-ini-fmt
rev: 1.5.0
hooks:
- id: tox-ini-fmt
- repo: meta
hooks:
- id: check-hooks-apply
- id: check-useless-excludes
ci:
autoupdate_schedule: monthly

View File

@ -1,22 +1,2 @@
version: 2
sphinx:
configuration: docs/conf.py
formats: [pdf]
build:
os: ubuntu-lts-latest
tools:
python: "3"
jobs:
post_checkout:
- git remote add upstream https://github.com/python-pillow/Pillow.git # For forks
- git fetch upstream --tags
python:
install:
- method: pip
path: .
extra_requirements:
- docs
pip_install: true

70
.travis.yml Normal file
View File

@ -0,0 +1,70 @@
dist: xenial
language: python
cache:
pip: true
directories:
- $HOME/.cache/pre-commit
notifications:
irc: "chat.freenode.net#pil"
# Run fast lint first to get fast feedback.
# Run slower CPUs next, to give them a headstart and reduce waiting time.
# Then run the remainder.
matrix:
fast_finish: true
include:
- python: "3.6"
name: "Lint"
env: LINT="true"
- python: "3.6"
arch: arm64
- python: "3.7"
arch: ppc64le
- python: "3.8"
arch: s390x
- python: "pypy3.6-7.3.1"
name: "PyPy3 Xenial"
- python: "3.9-dev"
name: "3.9-dev Xenial"
services: xvfb
- python: "3.8"
name: "3.8 Xenial"
services: xvfb
- python: '3.7'
name: "3.7 Xenial PYTHONOPTIMIZE=2"
env: PYTHONOPTIMIZE=2
services: xvfb
- python: '3.6'
name: "3.6 Xenial PYTHONOPTIMIZE=1"
env: PYTHONOPTIMIZE=1
services: xvfb
allow_failures:
- python: "3.9-dev"
install:
- |
if [ "$LINT" == "true" ]; then
pip install tox
else
.ci/install.sh;
fi
script:
- |
if [ "$LINT" == "true" ]; then
tox -e lint
else
.ci/build.sh
.ci/test.sh
fi
after_success:
- |
if [ "$LINT" == "" ]; then
.ci/after_success.sh
fi

File diff suppressed because it is too large

10
LICENSE
View File

@ -1,20 +1,20 @@
The Python Imaging Library (PIL) is
Copyright © 1997-2011 by Secret Labs AB
Copyright © 1995-2011 by Fredrik Lundh and contributors
Copyright © 1995-2011 by Fredrik Lundh
Pillow is the friendly PIL fork. It is
Copyright © 2010 by Jeffrey A. Clark and contributors
Copyright © 2010-2020 by Alex Clark and contributors
Like PIL, Pillow is licensed under the open source MIT-CMU License:
Like PIL, Pillow is licensed under the open source PIL Software License:
By obtaining, using, and/or copying this software and/or its associated
documentation, you agree that you have read, understood, and will comply
with the following terms and conditions:
Permission to use, copy, modify and distribute this software and its
documentation for any purpose and without fee is hereby granted,
Permission to use, copy, modify, and distribute this software and its
associated documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appears in all copies, and that
both that copyright notice and this permission notice appear in supporting
documentation, and that the name of Secret Labs AB or the author not be

View File

@ -5,43 +5,24 @@ include *.md
include *.py
include *.rst
include *.sh
include *.toml
include *.txt
include *.yaml
include .flake8
include LICENSE
include Makefile
include tox.ini
graft Tests
graft Tests/images
graft checks
graft patches
graft src
graft depends
graft winbuild
graft docs
graft _custom_build
# build/src control detritus
exclude .clang-format
exclude .appveyor.yml
exclude .coveragerc
exclude .editorconfig
exclude .readthedocs.yml
exclude codecov.yml
exclude renovate.json
exclude Tests/images/README.md
exclude Tests/images/crash*.tif
exclude Tests/images/string_dimension.tiff
global-exclude .git*
global-exclude *.pyc
global-exclude *.so
prune .ci
prune wheels
prune winbuild/build
prune winbuild/depends
prune Tests/errors
prune Tests/images/jpeg2000
prune Tests/images/msp
prune Tests/images/picins
prune Tests/images/sunraster
prune Tests/test-images

131
Makefile
View File

@ -1,68 +1,70 @@
.DEFAULT_GOAL := help
.DEFAULT_GOAL := release-test
.PHONY: clean
clean:
python3 setup.py clean
rm src/PIL/*.so || true
rm -r build || true
find . -name __pycache__ | xargs rm -r || true
BRANCHES=`git branch -a | grep -v HEAD | grep -v master | grep remote`
.PHONY: co
co:
-for i in $(BRANCHES) ; do \
git checkout -t $$i ; \
done
.PHONY: coverage
coverage:
python3 -c "import pytest" > /dev/null 2>&1 || python3 -m pip install pytest
python3 -m pytest -qq
pytest -qq
rm -r htmlcov || true
python3 -c "import coverage" > /dev/null 2>&1 || python3 -m pip install coverage
python3 -m coverage report
coverage report
.PHONY: doc
.PHONY: html
doc html:
doc:
$(MAKE) -C docs html
.PHONY: htmlview
htmlview:
$(MAKE) -C docs htmlview
.PHONY: htmllive
htmllive:
$(MAKE) -C docs htmllive
.PHONY: doccheck
doccheck:
$(MAKE) doc
$(MAKE) -C docs html
# Don't make our tests rely on the links in the docs being up every single build.
# We don't control them. But do check, and update them to the target of their redirects.
$(MAKE) -C docs linkcheck || true
.PHONY: docserve
docserve:
cd docs/_build/html && python3 -m http.server 2> /dev/null&
cd docs/_build/html && python3 -mSimpleHTTPServer 2> /dev/null&
.PHONY: help
help:
@echo "Welcome to Pillow development. Please use \`make <target>\` where <target> is one of"
@echo " clean remove build products"
@echo " coverage run coverage test (in progress)"
@echo " doc make HTML docs"
@echo " docserve run an HTTP server on the docs directory"
@echo " html make HTML docs"
@echo " htmlview open the index page built by the html target in your browser"
@echo " htmllive rebuild and reload HTML files in your browser"
@echo " doc make html docs"
@echo " docserve run an http server on the docs directory"
@echo " html to make standalone HTML files"
@echo " inplace make inplace extension"
@echo " install make and install"
@echo " install-coverage make and install with C coverage"
@echo " lint run the lint checks"
@echo " lint-fix run Ruff to (mostly) fix lint issues"
@echo " install-req install documentation and test dependencies"
@echo " install-venv install in virtualenv"
@echo " release-test run code and package tests before release"
@echo " test run tests on installed Pillow"
@echo " test run tests on installed pillow"
@echo " upload build and upload sdists to PyPI"
@echo " upload-test build and upload sdists to test.pythonpackages.com"
.PHONY: inplace
inplace: clean
python3 setup.py develop build_ext --inplace
.PHONY: install
install:
python3 -m pip -v install .
python3 setup.py install
python3 selftest.py
.PHONY: install-coverage
install-coverage:
CFLAGS="-coverage -Werror=implicit-function-declaration" python3 -m pip -v install .
CFLAGS="-coverage -Werror=implicit-function-declaration" python3 setup.py build_ext install
python3 selftest.py
.PHONY: debug
@ -71,72 +73,39 @@ debug:
# for our stuff, kills optimization, and redirects to dev null so we
# see any build failures.
make clean > /dev/null
CFLAGS='-g -O0' python3 -m pip -v install . > /dev/null
CFLAGS='-g -O0' python3 setup.py build_ext install > /dev/null
.PHONY: install-req
install-req:
python3 -m pip install -r requirements.txt
.PHONY: install-venv
install-venv:
virtualenv .
bin/pip install -r requirements.txt
.PHONY: release-test
release-test:
python3 checks/check_release_notes.py
python3 -m pip install -e .[tests]
$(MAKE) install-req
python3 setup.py develop
python3 selftest.py
python3 -m pytest Tests
python3 -m pip install .
python3 setup.py install
-rm dist/*.egg
-rmdir dist
python3 -m pytest -qq
python3 -m check_manifest
python3 -m pyroma .
$(MAKE) readme
check-manifest
pyroma .
viewdoc
.PHONY: sdist
sdist:
python3 -m build --help > /dev/null 2>&1 || python3 -m pip install build
python3 -m build --sdist
python3 -m twine --help > /dev/null 2>&1 || python3 -m pip install twine
python3 -m twine check --strict dist/*
python3 setup.py sdist --format=gztar
.PHONY: test
test:
python3 -c "import pytest" > /dev/null 2>&1 || python3 -m pip install pytest
python3 -m pytest -qq
.PHONY: test-p
test-p:
python3 -c "import xdist" > /dev/null 2>&1 || python3 -m pip install pytest-xdist
python3 -m pytest -qq -n auto
.PHONY: valgrind
valgrind:
python3 -c "import pytest_valgrind" > /dev/null 2>&1 || python3 -m pip install pytest-valgrind
PILLOW_VALGRIND_TEST=true PYTHONMALLOC=malloc valgrind --suppressions=Tests/oss-fuzz/python.supp --leak-check=no \
--log-file=/tmp/valgrind-output \
python3 -m pytest --no-memcheck -vv --valgrind --valgrind-log=/tmp/valgrind-output
.PHONY: valgrind-leak
valgrind-leak:
python3 -c "import pytest_valgrind" > /dev/null 2>&1 || python3 -m pip install pytest-valgrind
PILLOW_VALGRIND_TEST=true PYTHONMALLOC=malloc valgrind --suppressions=Tests/oss-fuzz/python.supp \
--leak-check=full --show-leak-kinds=definite --errors-for-leak-kinds=definite \
--log-file=/tmp/valgrind-output \
python3 -m pytest -vv --valgrind --valgrind-log=/tmp/valgrind-output
pytest -qq
.PHONY: readme
readme:
python3 -c "import markdown2" > /dev/null 2>&1 || python3 -m pip install markdown2
python3 -m markdown2 README.md > .long-description.html && open .long-description.html
.PHONY: lint
lint:
python3 -c "import tox" > /dev/null 2>&1 || python3 -m pip install tox
python3 -m tox -e lint
.PHONY: lint-fix
lint-fix:
python3 -c "import black" > /dev/null 2>&1 || python3 -m pip install black
python3 -m black .
python3 -c "import ruff" > /dev/null 2>&1 || python3 -m pip install ruff
python3 -m ruff check --fix .
.PHONY: mypy
mypy:
python3 -c "import tox" > /dev/null 2>&1 || python3 -m pip install tox
python3 -m tox -e mypy
viewdoc

View File

@ -1,14 +1,14 @@
<p align="center">
<img width="248" height="250" src="https://raw.githubusercontent.com/python-pillow/pillow-logo/main/pillow-logo-248x250.png" alt="Pillow logo">
<img width="248" height="250" src="https://raw.githubusercontent.com/python-pillow/pillow-logo/master/pillow-logo-248x250.png" alt="Pillow logo">
</p>
# Pillow
## Python Imaging Library (Fork)
Pillow is the friendly PIL fork by [Jeffrey A. Clark and
contributors](https://github.com/python-pillow/Pillow/graphs/contributors).
PIL is the Python Imaging Library by Fredrik Lundh and contributors.
Pillow is the friendly PIL fork by [Alex Clark and
Contributors](https://github.com/python-pillow/Pillow/graphs/contributors).
PIL is the Python Imaging Library by Fredrik Lundh and Contributors.
As of 2019, Pillow development is
[supported by Tidelift](https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=readme&utm_campaign=enterprise).
@ -24,33 +24,30 @@ As of 2019, Pillow development is
<tr>
<th>tests</th>
<td>
<a href="https://github.com/python-pillow/Pillow/actions/workflows/lint.yml"><img
<a href="https://travis-ci.org/python-pillow/Pillow"><img
alt="Travis CI build status (Linux)"
src="https://img.shields.io/travis/python-pillow/Pillow/master.svg?label=Linux%20build"></a>
<a href="https://travis-ci.org/python-pillow/pillow-wheels"><img
alt="Travis CI build status (macOS)"
src="https://img.shields.io/travis/python-pillow/pillow-wheels/master.svg?label=macOS%20build"></a>
<a href="https://ci.appveyor.com/project/python-pillow/Pillow"><img
alt="AppVeyor CI build status (Windows)"
src="https://img.shields.io/appveyor/build/python-pillow/Pillow/master.svg?label=Windows%20build"></a>
<a href="https://github.com/python-pillow/Pillow/actions?query=workflow%3ALint"><img
alt="GitHub Actions build status (Lint)"
src="https://github.com/python-pillow/Pillow/workflows/Lint/badge.svg"></a>
<a href="https://github.com/python-pillow/Pillow/actions/workflows/test.yml"><img
<a href="https://github.com/python-pillow/Pillow/actions?query=workflow%3ATest"><img
alt="GitHub Actions build status (Test Linux and macOS)"
src="https://github.com/python-pillow/Pillow/workflows/Test/badge.svg"></a>
<a href="https://github.com/python-pillow/Pillow/actions/workflows/test-windows.yml"><img
<a href="https://github.com/python-pillow/Pillow/actions?query=workflow%3A%22Test+Windows%22"><img
alt="GitHub Actions build status (Test Windows)"
src="https://github.com/python-pillow/Pillow/workflows/Test%20Windows/badge.svg"></a>
<a href="https://github.com/python-pillow/Pillow/actions/workflows/test-mingw.yml"><img
alt="GitHub Actions build status (Test MinGW)"
src="https://github.com/python-pillow/Pillow/workflows/Test%20MinGW/badge.svg"></a>
<a href="https://github.com/python-pillow/Pillow/actions/workflows/test-cygwin.yml"><img
alt="GitHub Actions build status (Test Cygwin)"
src="https://github.com/python-pillow/Pillow/workflows/Test%20Cygwin/badge.svg"></a>
<a href="https://github.com/python-pillow/Pillow/actions/workflows/test-docker.yml"><img
<a href="https://github.com/python-pillow/Pillow/actions?query=workflow%3A%22Test+Docker%22"><img
alt="GitHub Actions build status (Test Docker)"
src="https://github.com/python-pillow/Pillow/workflows/Test%20Docker/badge.svg"></a>
<a href="https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml"><img
alt="GitHub Actions build status (Wheels)"
src="https://github.com/python-pillow/Pillow/workflows/Wheels/badge.svg"></a>
<a href="https://app.codecov.io/gh/python-pillow/Pillow"><img
<a href="https://codecov.io/gh/python-pillow/Pillow"><img
alt="Code coverage"
src="https://codecov.io/gh/python-pillow/Pillow/branch/main/graph/badge.svg"></a>
<a href="https://issues.oss-fuzz.com/issues?q=title:pillow"><img
alt="Fuzzing Status"
src="https://oss-fuzz-build-logs.storage.googleapis.com/badges/pillow.svg"></a>
src="https://codecov.io/gh/python-pillow/Pillow/branch/master/graph/badge.svg"></a>
</td>
</tr>
<tr>
@ -61,16 +58,13 @@ As of 2019, Pillow development is
src="https://zenodo.org/badge/17549/python-pillow/Pillow.svg"></a>
<a href="https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=badge"><img
alt="Tidelift"
src="https://tidelift.com/badges/package/pypi/pillow?style=flat"></a>
<a href="https://pypi.org/project/pillow/"><img
src="https://tidelift.com/badges/package/pypi/Pillow?style=flat"></a>
<a href="https://pypi.org/project/Pillow/"><img
alt="Newest PyPI version"
src="https://img.shields.io/pypi/v/pillow.svg"></a>
<a href="https://pypi.org/project/pillow/"><img
<a href="https://pypi.org/project/Pillow/"><img
alt="Number of PyPI downloads"
src="https://img.shields.io/pypi/dm/pillow.svg"></a>
<a href="https://www.bestpractices.dev/projects/6331"><img
alt="OpenSSF Best Practices"
src="https://www.bestpractices.dev/projects/6331/badge"></a>
</td>
</tr>
<tr>
@ -79,10 +73,9 @@ As of 2019, Pillow development is
<a href="https://gitter.im/python-pillow/Pillow?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge"><img
alt="Join the chat at https://gitter.im/python-pillow/Pillow"
src="https://badges.gitter.im/python-pillow/Pillow.svg"></a>
<a href="https://fosstodon.org/@pillow"><img
alt="Follow on https://fosstodon.org/@pillow"
src="https://img.shields.io/badge/publish-on%20Mastodon-595aff.svg"
rel="me"></a>
<a href="https://twitter.com/PythonPillow"><img
alt="Follow on https://twitter.com/PythonPillow"
src="https://img.shields.io/badge/tweet-on%20Twitter-00aced.svg"></a>
</td>
</tr>
</table>
@ -95,18 +88,17 @@ This library provides extensive file format support, an efficient internal repre
The core image library is designed for fast access to data stored in a few basic pixel formats. It should provide a solid foundation for a general image processing tool.
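For orientation, a minimal usage sketch (the file names below are placeholders, not files shipped with the repository):

```python
from PIL import Image

# Open an image, report its basic properties, and save a resized copy.
with Image.open("hopper.png") as im:
    print(im.format, im.size, im.mode)
    im.thumbnail((128, 128))
    im.save("hopper_thumbnail.png")
```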
## More information
## More Information
- [Documentation](https://pillow.readthedocs.io/)
- [Installation](https://pillow.readthedocs.io/en/latest/installation/basic-installation.html)
- [Installation](https://pillow.readthedocs.io/en/latest/installation.html)
- [Handbook](https://pillow.readthedocs.io/en/latest/handbook/index.html)
- [Contribute](https://github.com/python-pillow/Pillow/blob/main/.github/CONTRIBUTING.md)
- [Contribute](https://github.com/python-pillow/Pillow/blob/master/.github/CONTRIBUTING.md)
- [Issues](https://github.com/python-pillow/Pillow/issues)
- [Pull requests](https://github.com/python-pillow/Pillow/pulls)
- [Release notes](https://pillow.readthedocs.io/en/stable/releasenotes/index.html)
- [Changelog](https://github.com/python-pillow/Pillow/releases)
- [Pre-fork](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst#pre-fork)
- [Changelog](https://github.com/python-pillow/Pillow/blob/master/CHANGES.rst)
- [Pre-fork](https://github.com/python-pillow/Pillow/blob/master/CHANGES.rst#pre-fork)
## Report a vulnerability
## Report a Vulnerability
To report a security vulnerability, please follow the procedure described in the [Tidelift security policy](https://tidelift.com/docs/security).

View File

@ -1,50 +1,75 @@
# Release checklist
# Release Checklist
See https://pillow.readthedocs.io/en/stable/releasenotes/versioning.html for
information about how the version numbers line up with releases.
## Main release
## Main Release
Released quarterly on January 2nd, April 1st, July 1st and October 15th.
* [ ] Create a new issue and select the "Maintainers only: Release" template.
* [ ] Open a release ticket e.g. https://github.com/python-pillow/Pillow/issues/3154
* [ ] Develop and prepare release in `master` branch.
* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions),
[Travis CI](https://travis-ci.org/github/python-pillow/Pillow) and
[AppVeyor](https://ci.appveyor.com/project/python-pillow/Pillow) to confirm
passing tests in `master` branch.
* [ ] Check that all of the wheel builds [Pillow Wheel Builder](https://github.com/python-pillow/pillow-wheels) pass the tests in Travis CI.
* [ ] In compliance with [PEP 440](https://www.python.org/dev/peps/pep-0440/), update version identifier in `src/PIL/_version.py`
* [ ] Update `CHANGES.rst`.
* [ ] Run pre-release check via `make release-test` in a freshly cloned repo.
* [ ] Create branch and tag for release e.g.:
```bash
git branch 5.2.x
git tag 5.2.0
git push --all
git push --tags
```
* [ ] Create source distributions e.g.:
```bash
make sdist
```
* [ ] Create [binary distributions](https://github.com/python-pillow/Pillow/blob/master/RELEASING.md#binary-distributions)
* [ ] Upload all binaries and source distributions e.g. `twine upload dist/Pillow-5.2.0*`
* [ ] Create a [new release on GitHub](https://github.com/python-pillow/Pillow/releases/new)
* [ ] In compliance with [PEP 440](https://www.python.org/dev/peps/pep-0440/), increment and append `.dev0` to version identifier in `src/PIL/_version.py`
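
As an illustration of the two version bumps above (the numbers are placeholders; this assumes `src/PIL/_version.py` holds a single `__version__` assignment):

```python
# src/PIL/_version.py just before tagging the 5.2.0 release:
__version__ = "5.2.0"

# ...and on the development branch immediately after the release,
# incremented with .dev0 appended per PEP 440:
__version__ = "5.3.0.dev0"
```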
## Point release
## Point Release
Released as needed for security, installation or critical bug fixes.
* [ ] Make necessary changes in `main` branch.
* [ ] Make necessary changes in `master` branch.
* [ ] Update `CHANGES.rst`.
* [ ] Check out release branch e.g.:
```bash
git checkout -t remotes/origin/5.2.x
```
* [ ] Cherry pick individual commits from `main` branch to release branch e.g. `5.2.x`, then `git push`.
* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) to confirm passing tests in release branch e.g. `5.2.x`.
* [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/), update version identifier in `src/PIL/_version.py`
* [ ] Cherry pick individual commits from `master` branch to release branch e.g. `5.2.x`.
* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions),
[Travis CI](https://travis-ci.org/github/python-pillow/Pillow) and
[AppVeyor](https://ci.appveyor.com/project/python-pillow/Pillow) to confirm
passing tests in release branch e.g. `5.2.x`.
* [ ] In compliance with [PEP 440](https://www.python.org/dev/peps/pep-0440/), update version identifier in `src/PIL/_version.py`
* [ ] Run pre-release check via `make release-test`.
* [ ] Create tag for release e.g.:
```bash
git tag 5.2.1
git push
git push --tags
```
* [ ] Create and check source distribution:
* [ ] Create source distributions e.g.:
```bash
make sdist
```
* [ ] Check the [GitHub Actions "Wheels" workflow](https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml)
has passed, including the "Upload release to PyPI" job. This will have been triggered
by the new tag.
* [ ] Publish the [release on GitHub](https://github.com/python-pillow/Pillow/releases) and then:
```bash
git push
```
* [ ] Create [binary distributions](https://github.com/python-pillow/Pillow/blob/master/RELEASING.md#binary-distributions)
* [ ] Upload all binaries and source distributions e.g. `twine upload dist/Pillow-5.2.1*`
* [ ] Create a [new release on GitHub](https://github.com/python-pillow/Pillow/releases/new)
## Embargoed release
## Embargoed Release
Released as needed privately to individual vendors for critical security-related bug fixes.
* [ ] Prepare patch for all versions that will get a fix. Test against local installations.
* [ ] Commit against `main`, cherry pick to affected release branches.
* [ ] Commit against master, cherry pick to affected release branches.
* [ ] Run local test matrix on each release & Python version.
* [ ] Privately send to distros.
* [ ] Run pre-release check via `make release-test`
@ -53,25 +78,40 @@ Released as needed privately to individual vendors for critical security-related
```bash
git checkout 2.5.x
git tag 2.5.3
git push origin 2.5.x
git push origin --tags
```
* [ ] Check the [GitHub Actions "Wheels" workflow](https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml)
has passed, including the "Upload release to PyPI" job. This will have been triggered
by the new tag.
* [ ] Publish the [release on GitHub](https://github.com/python-pillow/Pillow/releases) and then:
* [ ] Create source distributions e.g.:
```bash
git push origin 2.5.x
make sdist
```
* [ ] Create [binary distributions](https://github.com/python-pillow/Pillow/blob/master/RELEASING.md#binary-distributions)
* [ ] Create a [new release on GitHub](https://github.com/python-pillow/Pillow/releases/new)
## Publicize release
## Binary Distributions
* [ ] Announce release availability via [Mastodon](https://fosstodon.org/@pillow) e.g. https://fosstodon.org/@pillow/110639450470725321
### Windows
* [ ] Contact `@cgohlke` for Windows binaries via release ticket e.g. https://github.com/python-pillow/Pillow/issues/1174.
* [ ] Download and extract tarball from `@cgohlke` and `twine upload *`.
### Mac and Linux
* [ ] Use the [Pillow Wheel Builder](https://github.com/python-pillow/pillow-wheels):
```bash
git clone https://github.com/python-pillow/pillow-wheels
cd pillow-wheels
./update-pillow-tag.sh [[release tag]]
```
* [ ] Download wheels from the [Pillow Wheel Builder release](https://github.com/python-pillow/pillow-wheels/releases).
## Publicize Release
* [ ] Announce release availability via [Twitter](https://twitter.com/pythonpillow) e.g. https://twitter.com/PythonPillow/status/1013789184354603010
## Documentation
* [ ] Make sure the [default version for Read the Docs](https://pillow.readthedocs.io/en/stable/) is up-to-date with the release changes
## Docker images
## Docker Images
* [ ] Update Pillow in the Docker Images repository
```bash

8
Tests/32bit_segfault_check.py Executable file
View File

@ -0,0 +1,8 @@
#!/usr/bin/env python
import sys
from PIL import Image
if sys.maxsize < 2 ** 32:
im = Image.new("L", (999999, 999999), 0)

View File

@ -1,4 +1,4 @@
Pillow tests
Pillow Tests
============
Test scripts are named ``test_xxx.py``. Helper classes and functions can be found in ``helper.py``.
@ -8,7 +8,7 @@ Dependencies
Install::
python3 -m pip install pytest pytest-cov pytest-timeout
python3 -m pip install pytest pytest-cov
Execution
---------

View File

@ -0,0 +1,58 @@
import time
from PIL import PyAccess
from .helper import hopper
# Not running this test by default. No DOS against Travis CI.
def iterate_get(size, access):
(w, h) = size
for x in range(w):
for y in range(h):
access[(x, y)]
def iterate_set(size, access):
(w, h) = size
for x in range(w):
for y in range(h):
access[(x, y)] = (x % 256, y % 256, 0)
def timer(func, label, *args):
iterations = 5000
starttime = time.time()
for x in range(iterations):
func(*args)
if time.time() - starttime > 10:
print(
"{}: breaking at {} iterations, {:.6f} per iteration".format(
label, x + 1, (time.time() - starttime) / (x + 1.0)
)
)
break
if x == iterations - 1:
endtime = time.time()
print(
"{}: {:.4f} s {:.6f} per iteration".format(
label, endtime - starttime, (endtime - starttime) / (x + 1.0)
)
)
def test_direct():
im = hopper()
im.load()
# im = Image.new( "RGB", (2000, 2000), (1, 3, 2))
caccess = im.im.pixel_access(False)
access = PyAccess.new(im, False)
assert caccess[(0, 0)] == access[(0, 0)]
print("Size: %sx%s" % im.size)
timer(iterate_get, "PyAccess - get", im.size, access)
timer(iterate_set, "PyAccess - set", im.size, access)
timer(iterate_get, "C-api - get", im.size, caccess)
timer(iterate_set, "C-api - set", im.size, caccess)

View File

@ -1,4 +1,4 @@
from __future__ import annotations
#!/usr/bin/env python
from PIL import Image
@ -61,8 +61,8 @@ repro_copy = (
for path in repro_ss2 + repro_lc + repro_advance + repro_brun + repro_copy:
with Image.open(path) as im:
try:
im.load()
except Exception as msg:
print(msg)
im = Image.open(path)
try:
im.load()
except Exception as msg:
print(msg)

View File

@ -1,11 +1,10 @@
from __future__ import annotations
from PIL import Image
TEST_FILE = "Tests/images/fli_overflow.fli"
def test_fli_overflow() -> None:
def test_fli_overflow():
# this should not crash with a malloc error or access violation
with Image.open(TEST_FILE) as im:
im.load()

View File

@ -1,10 +1,8 @@
# Tests potential DOS of IcnsImagePlugin with 0 length block.
# Run from anywhere that PIL is importable.
from __future__ import annotations
from io import BytesIO
from PIL import Image
with Image.open(BytesIO(b"icns\x00\x00\x00\x10hang\x00\x00\x00\x00")):
pass
Image.open(BytesIO(b"icns\x00\x00\x00\x10hang\x00\x00\x00\x00"))

View File

@ -1,8 +1,4 @@
#!/usr/bin/env python3
from __future__ import annotations
from typing import Any, Callable
#!/usr/bin/env python
import pytest
from PIL import Image
@ -15,37 +11,31 @@ max_iterations = 10000
pytestmark = pytest.mark.skipif(is_win32(), reason="requires Unix or macOS")
def _get_mem_usage() -> float:
def _get_mem_usage():
from resource import RUSAGE_SELF, getpagesize, getrusage
mem = getrusage(RUSAGE_SELF).ru_maxrss
return mem * getpagesize() / 1024 / 1024
def _test_leak(
min_iterations: int,
max_iterations: int,
fn: Callable[..., Image.Image | None],
*args: Any,
) -> None:
def _test_leak(min_iterations, max_iterations, fn, *args, **kwargs):
mem_limit = None
for i in range(max_iterations):
fn(*args)
fn(*args, **kwargs)
mem = _get_mem_usage()
if i < min_iterations:
mem_limit = mem + 1
continue
msg = f"memory usage limit exceeded after {i + 1} iterations"
assert mem_limit is not None
assert mem <= mem_limit, msg
def test_leak_putdata() -> None:
def test_leak_putdata():
im = Image.new("RGB", (25, 25))
_test_leak(min_iterations, max_iterations, im.putdata, im.getdata())
def test_leak_getlist() -> None:
def test_leak_getlist():
im = Image.new("P", (25, 25))
_test_leak(
min_iterations,

View File

@ -1,12 +1,8 @@
# Tests potential DOS of Jpeg2kImagePlugin with 0 length block.
# Run from anywhere that PIL is importable.
from __future__ import annotations
from io import BytesIO
from PIL import Image
with Image.open(
BytesIO(b"\x00\x00\x00\x0cjP\x20\x20\x0d\x0a\x87\x0a\x00\x00\x00\x00hang")
):
pass
Image.open(BytesIO(b"\x00\x00\x00\x0cjP\x20\x20\x0d\x0a\x87\x0a\x00\x00\x00\x00hang"))

6
checks/check_j2k_leaks.py → Tests/check_j2k_leaks.py Normal file → Executable file
View File

@ -1,5 +1,3 @@
from __future__ import annotations
from io import BytesIO
import pytest
@ -20,7 +18,7 @@ pytestmark = [
]
def test_leak_load() -> None:
def test_leak_load():
from resource import RLIMIT_AS, RLIMIT_STACK, setrlimit
setrlimit(RLIMIT_STACK, (stack_size, stack_size))
@ -30,7 +28,7 @@ def test_leak_load() -> None:
im.load()
def test_leak_save() -> None:
def test_leak_save():
from resource import RLIMIT_AS, RLIMIT_STACK, setrlimit
setrlimit(RLIMIT_STACK, (stack_size, stack_size))

View File

@ -0,0 +1,10 @@
import pytest
from PIL import Image
def test_j2k_overflow(tmp_path):
im = Image.new("RGBA", (1024, 131584))
target = str(tmp_path / "temp.jpc")
with pytest.raises(OSError):
im.save(target)

View File

@ -1,3 +1,5 @@
#!/usr/bin/env python
# Reproductions/tests for OOB read errors in FliDecode.c
# When run in python, all of these images should fail for
@ -10,15 +12,15 @@
# the output should be empty. There may be python issues
# in the valgrind especially if run in a debug python
# version.
from __future__ import annotations
from PIL import Image
repro = ("00r0_gray_l.jp2", "00r1_graya_la.jp2")
for path in repro:
with Image.open(path) as im:
try:
im.load()
except Exception as msg:
print(msg)
im = Image.open(path)
try:
im.load()
except Exception as msg:
print(msg)

View File

@ -1,5 +1,3 @@
from __future__ import annotations
from io import BytesIO
import pytest
@ -13,7 +11,7 @@ iterations = 5000
When run on a system without the jpeg leak fixes,
the valgrind runs look like this.
valgrind --tool=massif python test-installed.py -s -v checks/check_jpeg_leaks.py
valgrind --tool=massif python test-installed.py -s -v Tests/check_jpeg_leaks.py
"""
@ -77,48 +75,49 @@ post-patch:
"""
standard_l_qtable = (
# fmt: off
16, 11, 10, 16, 24, 40, 51, 61,
12, 12, 14, 19, 26, 58, 60, 55,
14, 13, 16, 24, 40, 57, 69, 56,
14, 17, 22, 29, 51, 87, 80, 62,
18, 22, 37, 56, 68, 109, 103, 77,
24, 35, 55, 64, 81, 104, 113, 92,
49, 64, 78, 87, 103, 121, 120, 101,
72, 92, 95, 98, 112, 100, 103, 99,
# fmt: on
)
standard_chrominance_qtable = (
# fmt: off
17, 18, 24, 47, 99, 99, 99, 99,
18, 21, 26, 66, 99, 99, 99, 99,
24, 26, 56, 99, 99, 99, 99, 99,
47, 66, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
# fmt: on
)
@pytest.mark.parametrize(
"qtables",
(
(standard_l_qtable, standard_chrominance_qtable),
[standard_l_qtable, standard_chrominance_qtable],
),
)
def test_qtables_leak(qtables: tuple[tuple[int, ...]] | list[tuple[int, ...]]) -> None:
def test_qtables_leak():
im = hopper("RGB")
standard_l_qtable = [
int(s)
for s in """
16 11 10 16 24 40 51 61
12 12 14 19 26 58 60 55
14 13 16 24 40 57 69 56
14 17 22 29 51 87 80 62
18 22 37 56 68 109 103 77
24 35 55 64 81 104 113 92
49 64 78 87 103 121 120 101
72 92 95 98 112 100 103 99
""".split(
None
)
]
standard_chrominance_qtable = [
int(s)
for s in """
17 18 24 47 99 99 99 99
18 21 26 66 99 99 99 99
24 26 56 99 99 99 99 99
47 66 99 99 99 99 99 99
99 99 99 99 99 99 99 99
99 99 99 99 99 99 99 99
99 99 99 99 99 99 99 99
99 99 99 99 99 99 99 99
""".split(
None
)
]
qtables = [standard_l_qtable, standard_chrominance_qtable]
for _ in range(iterations):
test_output = BytesIO()
im.save(test_output, "JPEG", qtables=qtables)
def test_exif_leak() -> None:
def test_exif_leak():
"""
pre patch:
@ -181,7 +180,7 @@ def test_exif_leak() -> None:
im.save(test_output, "JPEG", exif=exif)
def test_base_save() -> None:
def test_base_save():
"""
base case:
MB

View File

@ -1,8 +1,4 @@
from __future__ import annotations
import sys
from pathlib import Path
from types import ModuleType
import pytest
@ -18,7 +14,6 @@ from PIL import Image
# 2.7 and 3.2.
numpy: ModuleType | None
try:
import numpy
except ImportError:
@ -28,27 +23,26 @@ YDIM = 32769
XDIM = 48000
pytestmark = pytest.mark.skipif(sys.maxsize <= 2**32, reason="requires 64-bit system")
pytestmark = pytest.mark.skipif(sys.maxsize <= 2 ** 32, reason="requires 64-bit system")
def _write_png(tmp_path: Path, xdim: int, ydim: int) -> None:
f = tmp_path / "temp.png"
def _write_png(tmp_path, xdim, ydim):
f = str(tmp_path / "temp.png")
im = Image.new("L", (xdim, ydim), 0)
im.save(f)
def test_large(tmp_path: Path) -> None:
"""succeeded prepatch"""
def test_large(tmp_path):
""" succeeded prepatch"""
_write_png(tmp_path, XDIM, YDIM)
def test_2gpx(tmp_path: Path) -> None:
def test_2gpx(tmp_path):
"""failed prepatch"""
_write_png(tmp_path, XDIM, XDIM)
@pytest.mark.skipif(numpy is None, reason="Numpy is not installed")
def test_size_greater_than_int() -> None:
assert numpy is not None
def test_size_greater_than_int():
arr = numpy.ndarray(shape=(16394, 16394))
Image.fromarray(arr)

View File

@ -1,7 +1,4 @@
from __future__ import annotations
import sys
from pathlib import Path
import pytest
@ -22,22 +19,22 @@ YDIM = 32769
XDIM = 48000
pytestmark = pytest.mark.skipif(sys.maxsize <= 2**32, reason="requires 64-bit system")
pytestmark = pytest.mark.skipif(sys.maxsize <= 2 ** 32, reason="requires 64-bit system")
def _write_png(tmp_path: Path, xdim: int, ydim: int) -> None:
def _write_png(tmp_path, xdim, ydim):
dtype = np.uint8
a = np.zeros((xdim, ydim), dtype=dtype)
f = tmp_path / "temp.png"
f = str(tmp_path / "temp.png")
im = Image.fromarray(a, "L")
im.save(f)
def test_large(tmp_path: Path) -> None:
"""succeeded prepatch"""
def test_large(tmp_path):
""" succeeded prepatch"""
_write_png(tmp_path, XDIM, YDIM)
def test_2gpx(tmp_path: Path) -> None:
def test_2gpx(tmp_path):
"""failed prepatch"""
_write_png(tmp_path, XDIM, XDIM)

View File

@ -1,5 +1,3 @@
from __future__ import annotations
import pytest
from PIL import Image
@ -7,7 +5,7 @@ from PIL import Image
TEST_FILE = "Tests/images/libtiff_segfault.tif"
def test_libtiff_segfault() -> None:
def test_libtiff_segfault():
"""This test should not segfault. It will on Pillow <= 3.1.0 and
libtiff >= 4.0.0
"""

View File

@ -1,30 +1,29 @@
from __future__ import annotations
import zlib
from io import BytesIO
import pytest
from PIL import Image, ImageFile, PngImagePlugin
TEST_FILE = "Tests/images/png_decompression_dos.png"
def test_ignore_dos_text(monkeypatch: pytest.MonkeyPatch) -> None:
monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
def test_ignore_dos_text():
ImageFile.LOAD_TRUNCATED_IMAGES = True
with Image.open(TEST_FILE) as im:
try:
im = Image.open(TEST_FILE)
im.load()
finally:
ImageFile.LOAD_TRUNCATED_IMAGES = False
assert isinstance(im, PngImagePlugin.PngImageFile)
for s in im.text.values():
assert len(s) < 1024 * 1024, "Text chunk larger than 1M"
for s in im.text.values():
assert len(s) < 1024 * 1024, "Text chunk larger than 1M"
for s in im.info.values():
assert len(s) < 1024 * 1024, "Text chunk larger than 1M"
for s in im.info.values():
assert len(s) < 1024 * 1024, "Text chunk larger than 1M"
def test_dos_text() -> None:
def test_dos_text():
try:
im = Image.open(TEST_FILE)
im.load()
@ -32,12 +31,11 @@ def test_dos_text() -> None:
assert msg, "Decompressed Data Too Large"
return
assert isinstance(im, PngImagePlugin.PngImageFile)
for s in im.text.values():
assert len(s) < 1024 * 1024, "Text chunk larger than 1M"
def test_dos_total_memory() -> None:
def test_dos_total_memory():
im = Image.new("L", (1, 1))
compressed_data = zlib.compress(b"a" * 1024 * 1023)
@ -54,11 +52,10 @@ def test_dos_total_memory() -> None:
try:
im2 = Image.open(b)
except ValueError as msg:
assert "Too much memory" in str(msg)
assert "Too much memory" in msg
return
total_len = 0
assert isinstance(im2, PngImagePlugin.PngImageFile)
for txt in im2.text.values():
total_len += len(txt)
assert total_len < 64 * 1024 * 1024, "Total text chunks greater than 64M"

View File

@ -1,11 +1,7 @@
from __future__ import annotations
import io
import pytest
def pytest_report_header(config: pytest.Config) -> str:
def pytest_report_header(config):
try:
from PIL import features
@ -14,22 +10,3 @@ def pytest_report_header(config: pytest.Config) -> str:
return out.getvalue()
except Exception as e:
return f"pytest_report_header failed: {e}"
def pytest_configure(config: pytest.Config) -> None:
config.addinivalue_line(
"markers",
"pil_noop_mark: A conditional mark where nothing special happens",
)
# We're marking some tests to ignore valgrind errors and XFAIL them.
# Ensure that the mark is defined
# even in cases where pytest-valgrind isn't installed
try:
config.addinivalue_line(
"markers",
"valgrind_known_error: Tests that have known issues with valgrind",
)
except Exception:
# valgrind is already installed
pass

View File

@ -1,6 +1,4 @@
#!/usr/bin/env python3
from __future__ import annotations
#!/usr/bin/env python
import base64
import os

Binary file not shown.

View File

@ -1,40 +0,0 @@
DejaVuSans-24-{1,2,4,8}-stripped.ttf are based on DejaVuSans.ttf converted using FontForge to add bitmap strikes and keep only the ASCII range.
DejaVu Fonts — License
Fonts are © Bitstream (see below). DejaVu changes are in public domain. Explanation of copyright is on Gnome page on Bitstream Vera fonts. Glyphs imported from Arev fonts are © Tavmjong Bah (see below)
Bitstream Vera Fonts Copyright
Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. Bitstream Vera is a trademark of Bitstream, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of the fonts accompanying this license ("Fonts") and associated documentation files (the "Font Software"), to reproduce and distribute the Font Software, including without limitation the rights to use, copy, merge, publish, distribute, and/or sell copies of the Font Software, and to permit persons to whom the Font Software is furnished to do so, subject to the following conditions:
The above copyright and trademark notices and this permission notice shall be included in all copies of one or more of the Font Software typefaces.
The Font Software may be modified, altered, or added to, and in particular the designs of glyphs or characters in the Fonts may be modified and additional glyphs or characters may be added to the Fonts, only if the fonts are renamed to names not containing either the words "Bitstream" or the word "Vera".
This License becomes null and void to the extent applicable to Fonts or Font Software that has been modified and is distributed under the "Bitstream Vera" names.
The Font Software may be sold as part of a larger software package but no copy of one or more of the Font Software typefaces may be sold by itself.
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL BITSTREAM OR THE GNOME FOUNDATION BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT SOFTWARE.
Except as contained in this notice, the names of Gnome, the Gnome Foundation, and Bitstream Inc., shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Font Software without prior written authorization from the Gnome Foundation or Bitstream Inc., respectively. For further information, contact: fonts at gnome dot org.
Arev Fonts Copyright
Original text
Copyright (c) 2006 by Tavmjong Bah. All Rights Reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy of the fonts accompanying this license ("Fonts") and associated documentation files (the "Font Software"), to reproduce and distribute the modifications to the Bitstream Vera Font Software, including without limitation the rights to use, copy, merge, publish, distribute, and/or sell copies of the Font Software, and to permit persons to whom the Font Software is furnished to do so, subject to the following conditions:
The above copyright and trademark notices and this permission notice shall be included in all copies of one or more of the Font Software typefaces.
The Font Software may be modified, altered, or added to, and in particular the designs of glyphs or characters in the Fonts may be modified and additional glyphs or characters may be added to the Fonts, only if the fonts are renamed to names not containing either the words "Tavmjong Bah" or the word "Arev".
This License becomes null and void to the extent applicable to Fonts or Font Software that has been modified and is distributed under the "Tavmjong Bah Arev" names.
The Font Software may be sold as part of a larger software package but no copy of one or more of the Font Software typefaces may be sold by itself.
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL TAVMJONG BAH BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT SOFTWARE.
Except as contained in this notice, the name of Tavmjong Bah shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Font Software without prior written authorization from Tavmjong Bah. For further information, contact: tavmjong @ free . fr.

Binary file not shown.

View File

@ -2,27 +2,20 @@
NotoNastaliqUrdu-Regular.ttf and NotoSansSymbols-Regular.ttf, from https://github.com/googlei18n/noto-fonts
NotoSans-Regular.ttf, from https://www.google.com/get/noto/
NotoSansJP-Thin.otf, from https://www.google.com/get/noto/help/cjk/
NotoColorEmoji.ttf, from https://github.com/googlefonts/noto-emoji
AdobeVFPrototype.ttf, from https://github.com/adobe-fonts/adobe-variable-font-prototype
TINY5x3GX.ttf, from http://velvetyne.fr/fonts/tiny
ArefRuqaa-Regular.ttf, from https://github.com/google/fonts/tree/master/ofl/arefruqaa
ter-x20b.pcf, from http://terminus-font.sourceforge.net/
BungeeColor-Regular_colr_Windows.ttf, from https://github.com/djrrb/bungee
OpenSans.woff2, from https://fonts.googleapis.com/css?family=Open+Sans
All of the above fonts are published under the SIL Open Font License (OFL) v1.1 (http://scripts.sil.org/cms/scripts/page.php?site_id=nrsi&id=OFL), which allows you to copy, modify, and redistribute them if you need to.
FreeMono.ttf is licensed under GPLv3, with the GPL font exception.
OpenSansCondensed-LightItalic.ttf, from https://fonts.google.com/specimen/Open+Sans, under Apache License 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
chromacheck-sbix.woff, from https://github.com/RoelN/ChromaCheck, under The MIT License (MIT), Copyright (c) 2018 Roel Nieskens, https://pixelambacht.nl Copyright (c) 2018 Google LLC
DejaVuSans-24-{1,2,4,8}-stripped.ttf are based on DejaVuSans.ttf converted using FontForge to add bitmap strikes and keep only the ASCII range.
KhmerOSBattambang-Regular.ttf is licensed under LGPL-2.1 or later.
FreeMono.ttf is licensed under GPLv3.
10x20-ISO8859-1.pcf, from https://packages.ubuntu.com/xenial/xfonts-base
"Public domain font. Share and enjoy."
CBDTTestFont.ttf and EBDTTestFont.ttf from https://github.com/nulano/font-tests are public domain.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -1,10 +0,0 @@
STARTFONT
FONT ÿ
SIZE 10
FONTBOUNDINGBOX
CHARS
STARTCHAR
ENCODING
BBX 2 5
ENDCHAR
ENDFONT

View File

@ -2,52 +2,59 @@
Helper functions.
"""
from __future__ import annotations
import logging
import os
import shutil
import subprocess
import sys
import sysconfig
import tempfile
from collections.abc import Sequence
from functools import lru_cache
from io import BytesIO
from pathlib import Path
from typing import Any, Callable
import pytest
from packaging.version import parse as parse_version
from PIL import Image, ImageFile, ImageMath, features
from PIL import Image, ImageMath, features
logger = logging.getLogger(__name__)
uploader = None
if os.environ.get("SHOW_ERRORS"):
uploader = "show"
HAS_UPLOADER = False
if os.environ.get("SHOW_ERRORS", None):
# local img.show for errors.
HAS_UPLOADER = True
class test_image_results:
@staticmethod
def upload(a, b):
a.show()
b.show()
elif "GITHUB_ACTIONS" in os.environ:
uploader = "github_actions"
HAS_UPLOADER = True
class test_image_results:
@staticmethod
def upload(a, b):
dir_errors = os.path.join(os.path.dirname(__file__), "errors")
os.makedirs(dir_errors, exist_ok=True)
tmpdir = tempfile.mkdtemp(dir=dir_errors)
a.save(os.path.join(tmpdir, "a.png"))
b.save(os.path.join(tmpdir, "b.png"))
return tmpdir
def upload(a: Image.Image, b: Image.Image) -> str | None:
if uploader == "show":
# local img.show for errors.
a.show()
b.show()
elif uploader == "github_actions":
dir_errors = os.path.join(os.path.dirname(__file__), "errors")
os.makedirs(dir_errors, exist_ok=True)
tmpdir = tempfile.mkdtemp(dir=dir_errors)
a.save(os.path.join(tmpdir, "a.png"))
b.save(os.path.join(tmpdir, "b.png"))
return tmpdir
return None
else:
try:
import test_image_results
HAS_UPLOADER = True
except ImportError:
pass
def convert_to_comparable(
a: Image.Image, b: Image.Image
) -> tuple[Image.Image, Image.Image]:
def convert_to_comparable(a, b):
new_a, new_b = a, b
if a.mode == "P":
new_a = Image.new("L", a.size)
@ -60,16 +67,14 @@ def convert_to_comparable(
return new_a, new_b
def assert_deep_equal(a: Any, b: Any, msg: str | None = None) -> None:
def assert_deep_equal(a, b, msg=None):
try:
assert len(a) == len(b), msg or f"got length {len(a)}, expected {len(b)}"
except Exception:
assert a == b, msg
def assert_image(
im: Image.Image, mode: str, size: tuple[int, int], msg: str | None = None
) -> None:
def assert_image(im, mode, size, msg=None):
if mode is not None:
assert im.mode == mode, (
msg or f"got mode {repr(im.mode)}, expected {repr(mode)}"
@ -81,35 +86,28 @@ def assert_image(
)
def assert_image_equal(a: Image.Image, b: Image.Image, msg: str | None = None) -> None:
def assert_image_equal(a, b, msg=None):
assert a.mode == b.mode, msg or f"got mode {repr(a.mode)}, expected {repr(b.mode)}"
assert a.size == b.size, msg or f"got size {repr(a.size)}, expected {repr(b.size)}"
if a.tobytes() != b.tobytes():
try:
url = upload(a, b)
if url:
logger.error("URL for test images: %s", url)
except Exception:
pass
if HAS_UPLOADER:
try:
url = test_image_results.upload(a, b)
logger.error(f"Url for test images: {url}")
except Exception:
pass
pytest.fail(msg or "got different content")
assert False, msg or "got different content"
def assert_image_equal_tofile(
a: Image.Image,
filename: str | Path,
msg: str | None = None,
mode: str | None = None,
) -> None:
def assert_image_equal_tofile(a, filename, msg=None, mode=None):
with Image.open(filename) as img:
if mode:
img = img.convert(mode)
assert_image_equal(a, img, msg)
def assert_image_similar(
a: Image.Image, b: Image.Image, epsilon: float, msg: str | None = None
) -> None:
def assert_image_similar(a, b, epsilon, msg=None):
assert a.mode == b.mode, msg or f"got mode {repr(a.mode)}, expected {repr(b.mode)}"
assert a.size == b.size, msg or f"got size {repr(a.size)}, expected {repr(b.size)}"
@ -117,9 +115,7 @@ def assert_image_similar(
diff = 0
for ach, bch in zip(a.split(), b.split()):
chdiff = ImageMath.lambda_eval(
lambda args: abs(args["a"] - args["b"]), a=ach, b=bch
).convert("L")
chdiff = ImageMath.eval("abs(a - b)", a=ach, b=bch).convert("L")
diff += sum(i * num for i, num in enumerate(chdiff.histogram()))
ave_diff = diff / (a.size[0] * a.size[1])
@ -129,90 +125,61 @@ def assert_image_similar(
+ f" average pixel value difference {ave_diff:.4f} > epsilon {epsilon:.4f}"
)
except Exception as e:
try:
url = upload(a, b)
if url:
logger.exception("URL for test images: %s", url)
except Exception:
pass
if HAS_UPLOADER:
try:
url = test_image_results.upload(a, b)
logger.error(f"Url for test images: {url}")
except Exception:
pass
raise e
def assert_image_similar_tofile(
a: Image.Image,
filename: str | Path,
epsilon: float,
msg: str | None = None,
) -> None:
def assert_image_similar_tofile(a, filename, epsilon, msg=None, mode=None):
with Image.open(filename) as img:
if mode:
img = img.convert(mode)
assert_image_similar(a, img, epsilon, msg)
def assert_not_all_same(items: Sequence[Any], msg: str | None = None) -> None:
def assert_all_same(items, msg=None):
assert items.count(items[0]) == len(items), msg
def assert_not_all_same(items, msg=None):
assert items.count(items[0]) != len(items), msg
def assert_tuple_approx_equal(
actuals: Sequence[int], targets: tuple[int, ...], threshold: int, msg: str
) -> None:
def assert_tuple_approx_equal(actuals, targets, threshold, msg):
"""Tests if actuals has values within threshold from targets"""
value = True
for i, target in enumerate(targets):
if not (target - threshold <= actuals[i] <= target + threshold):
pytest.fail(msg + ": " + repr(actuals) + " != " + repr(targets))
value *= target - threshold <= actuals[i] <= target + threshold
assert value, msg + ": " + repr(actuals) + " != " + repr(targets)
def timeout_unless_slower_valgrind(timeout: float) -> pytest.MarkDecorator:
if "PILLOW_VALGRIND_TEST" in os.environ:
return pytest.mark.pil_noop_mark()
return pytest.mark.timeout(timeout)
def skip_unless_feature(feature: str) -> pytest.MarkDecorator:
def skip_unless_feature(feature):
reason = f"{feature} not available"
return pytest.mark.skipif(not features.check(feature), reason=reason)
def skip_unless_feature_version(
feature: str, required: str, reason: str | None = None
) -> pytest.MarkDecorator:
version = features.version(feature)
if version is None:
def skip_unless_feature_version(feature, version_required, reason=None):
if not features.check(feature):
return pytest.mark.skip(f"{feature} not available")
if reason is None:
reason = f"{feature} is older than {required}"
version_required = parse_version(required)
version_available = parse_version(version)
reason = f"{feature} is older than {version_required}"
version_required = parse_version(version_required)
version_available = parse_version(features.version(feature))
return pytest.mark.skipif(version_available < version_required, reason=reason)
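These helpers return pytest mark decorators, so feature gating stays declarative at the test definition. A brief usage sketch (the test names are illustrative only):

from .helper import skip_unless_feature, skip_unless_feature_version


@skip_unless_feature("webp")
def test_webp_roundtrip():
    ...  # skipped entirely when Pillow was built without WebP support


@skip_unless_feature_version("libtiff", "4.0")
def test_requires_modern_libtiff():
    ...  # skipped when libtiff is absent or older than 4.0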
def mark_if_feature_version(
mark: pytest.MarkDecorator,
feature: str,
version_blacklist: str,
reason: str | None = None,
) -> pytest.MarkDecorator:
version = features.version(feature)
if version is None:
return pytest.mark.pil_noop_mark()
if reason is None:
reason = f"{feature} is {version_blacklist}"
version_required = parse_version(version_blacklist)
version_available = parse_version(version)
if (
version_available.major == version_required.major
and version_available.minor == version_required.minor
):
return mark(reason=reason)
return pytest.mark.pil_noop_mark()
@pytest.mark.skipif(sys.platform.startswith("win32"), reason="Requires Unix or macOS")
class PillowLeakTestCase:
# requires unix/macOS
iterations = 100 # count
mem_limit = 512 # k
def _get_mem_usage(self) -> float:
def _get_mem_usage(self):
"""
Gets the RUSAGE memory usage, returns in K. Encapsulates the difference
between macOS and Linux rss reporting
@ -223,13 +190,19 @@ class PillowLeakTestCase:
from resource import RUSAGE_SELF, getrusage
mem = getrusage(RUSAGE_SELF).ru_maxrss
# man 2 getrusage:
# ru_maxrss
# This is the maximum resident set size utilized
# in bytes on macOS, in kilobytes on Linux
return mem / 1024 if sys.platform == "darwin" else mem
if sys.platform == "darwin":
# man 2 getrusage:
# ru_maxrss
# This is the maximum resident set size utilized (in bytes).
return mem / 1024 # Kb
else:
# linux
# man 2 getrusage
# ru_maxrss (since Linux 2.6.32)
# This is the maximum resident set size used (in kilobytes).
return mem # Kb
def _test_leak(self, core: Callable[[], None]) -> None:
def _test_leak(self, core):
start_mem = self._get_mem_usage()
for cycle in range(self.iterations):
core()
@ -241,106 +214,94 @@ class PillowLeakTestCase:
# helpers
def fromstring(data: bytes) -> ImageFile.ImageFile:
def fromstring(data):
return Image.open(BytesIO(data))
def tostring(im: Image.Image, string_format: str, **options: Any) -> bytes:
def tostring(im, string_format, **options):
out = BytesIO()
im.save(out, string_format, **options)
return out.getvalue()
def hopper(mode: str | None = None) -> Image.Image:
# Use caching to reduce reading from disk, but return a copy
# so that the cached image isn't modified by the tests
# (for fast, isolated, repeatable tests).
def hopper(mode=None, cache={}):
if mode is None:
# Always return fresh not-yet-loaded version of image.
# Operations on not-yet-loaded images are a separate class of errors
# that we should catch.
# Operations on not-yet-loaded images is separate class of errors
# what we should catch.
return Image.open("Tests/images/hopper.ppm")
return _cached_hopper(mode).copy()
@lru_cache
def _cached_hopper(mode: str) -> Image.Image:
if mode == "F":
im = hopper("L")
else:
im = hopper()
try:
im = im.convert(mode)
except ImportError:
if mode == "LAB":
im = Image.open("Tests/images/hopper.Lab.tif")
# Use caching to reduce reading from disk but so an original copy is
# returned each time and the cached image isn't modified by tests
# (for fast, isolated, repeatable tests).
im = cache.get(mode)
if im is None:
if mode == "F":
im = hopper("L").convert(mode)
elif mode[:4] == "I;16":
im = hopper("I").convert(mode)
else:
raise
return im
im = hopper().convert(mode)
cache[mode] = im
return im.copy()
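The comments above describe the contract of hopper(): the decode is cached per mode, but each caller receives its own copy, so in-place edits cannot leak between tests. A small usage sketch under that assumption (128x128 is the size of the bundled hopper test image):

from .helper import hopper


def test_mutating_a_copy_is_safe():
    im = hopper("L")            # served from the per-mode cache
    im.putpixel((0, 0), 255)    # mutates only this copy, not the cached original
    assert im.mode == "L"
    assert im.size == (128, 128)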
def djpeg_available() -> bool:
if shutil.which("djpeg"):
try:
subprocess.check_call(["djpeg", "-version"])
return True
except subprocess.CalledProcessError: # pragma: no cover
return False
return False
def djpeg_available():
return bool(shutil.which("djpeg"))
def netpbm_available() -> bool:
def cjpeg_available():
return bool(shutil.which("cjpeg"))
def netpbm_available():
return bool(shutil.which("ppmquant") and shutil.which("ppmtogif"))
def magick_command() -> list[str] | None:
if sys.platform == "win32":
magickhome = os.environ.get("MAGICK_HOME")
if magickhome:
imagemagick = [os.path.join(magickhome, "convert.exe")]
graphicsmagick = [os.path.join(magickhome, "gm.exe"), "convert"]
else:
imagemagick = None
graphicsmagick = None
else:
imagemagick = ["convert"]
graphicsmagick = ["gm", "convert"]
if imagemagick and shutil.which(imagemagick[0]):
return imagemagick
if graphicsmagick and shutil.which(graphicsmagick[0]):
return graphicsmagick
return None
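magick_command() hands back the ImageMagick or GraphicsMagick invocation as an argument list (or None), so callers can splice it straight into subprocess calls. A hedged sketch with a made-up helper name:

import subprocess

from .helper import magick_command


def convert_with_magick(src: str, dst: str) -> None:
    cmd = magick_command()
    if cmd is None:
        raise RuntimeError("neither ImageMagick nor GraphicsMagick is on PATH")
    # Resolves to e.g. ["convert", src, dst] or ["gm", "convert", src, dst]
    subprocess.check_call(cmd + [src, dst])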
def imagemagick_available():
return bool(IMCONVERT and shutil.which(IMCONVERT))
def on_ci() -> bool:
def on_appveyor():
return "APPVEYOR" in os.environ
def on_github_actions():
return "GITHUB_ACTIONS" in os.environ
def on_ci():
# GitHub Actions, Travis and AppVeyor have "CI"
return "CI" in os.environ
def is_big_endian() -> bool:
def is_big_endian():
return sys.byteorder == "big"
def is_ppc64le() -> bool:
import platform
return platform.machine() == "ppc64le"
def is_win32() -> bool:
def is_win32():
return sys.platform.startswith("win32")
def is_pypy() -> bool:
def is_pypy():
return hasattr(sys, "pypy_translation_info")
class CachedProperty:
def __init__(self, func: Callable[[Any], Any]) -> None:
def is_mingw():
return sysconfig.get_platform() == "mingw"
if sys.platform == "win32":
IMCONVERT = os.environ.get("MAGICK_HOME", "")
if IMCONVERT:
IMCONVERT = os.path.join(IMCONVERT, "convert.exe")
else:
IMCONVERT = "convert"
class cached_property:
def __init__(self, func):
self.func = func
def __get__(self, instance: Any, cls: type[Any] | None = None) -> Any:
def __get__(self, instance, cls=None):
result = instance.__dict__[self.func.__name__] = self.func(instance)
return result
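Because CachedProperty (spelled cached_property on the 8.0.1 side of this diff) is a non-data descriptor that writes its result into the instance __dict__, the wrapped function runs once per instance and later lookups bypass the descriptor entirely. A brief illustrative use with a hypothetical class:

from PIL import Image

from .helper import CachedProperty  # cached_property in 8.0.1


class ImageStats:
    def __init__(self, path: str) -> None:
        self.path = path

    @CachedProperty
    def histogram(self):
        # Executed on first access only; the result then shadows the
        # descriptor via the instance __dict__.
        with Image.open(self.path) as im:
            return im.histogram()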

View File

@ -22,3 +22,4 @@ and that the name of ICC shall not be used in advertising or publicity
pertaining to distribution of the software without specific, written
prior permission. ICC makes no representations about the suitability
of this software for any purpose.

Binary file not shown.

Binary file not shown.


Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.


Binary file not shown.


Binary file not shown.


View File


View File


Some files were not shown because too many files have changed in this diff.