Merge branch 'main' into buffer-updates

@@ -1,3 +1,10 @@
+skip_commits:
+files:
+- ".github/**/*"
+- ".gitmodules"
+- "docs/**/*"
+- "wheels/**/*"
+
 version: '{build}'
 clone_folder: c:\pillow
 init:
@@ -6,52 +13,53 @@ init:
 # Uncomment previous line to get RDP access during the build.

 environment:
+COVERAGE_CORE: sysmon
 EXECUTABLE: python.exe
 TEST_OPTIONS:
 DEPLOY: YES
 matrix:
-- PYTHON: C:/Python310
+- PYTHON: C:/Python312
 ARCHITECTURE: x86
 APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2022
-- PYTHON: C:/Python37-x64
+- PYTHON: C:/Python38-x64
-ARCHITECTURE: x64
+ARCHITECTURE: AMD64
 APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017

 install:
 - '%PYTHON%\%EXECUTABLE% --version'
-- curl -fsSL -o pillow-depends.zip https://github.com/python-pillow/pillow-depends/archive/main.zip
+- '%PYTHON%\%EXECUTABLE% -m pip install --upgrade pip'
-- 7z x pillow-depends.zip -oc:\
+- curl -fsSL -o pillow-test-images.zip https://github.com/python-pillow/test-images/archive/main.zip
-- mv c:\pillow-depends-main c:\pillow-depends
+- 7z x pillow-test-images.zip -oc:\
-- xcopy /S /Y c:\pillow-depends\test_images\* c:\pillow\tests\images
+- xcopy /S /Y c:\test-images-main\* c:\pillow\tests\images
-- 7z x ..\pillow-depends\nasm-2.15.05-win64.zip -oc:\
+- curl -fsSL -o nasm-win64.zip https://raw.githubusercontent.com/python-pillow/pillow-depends/main/nasm-2.16.01-win64.zip
-- ..\pillow-depends\gs9561w32.exe /S
+- 7z x nasm-win64.zip -oc:\
-- path c:\nasm-2.15.05;C:\Program Files (x86)\gs\gs9.56.1\bin;%PATH%
+- choco install ghostscript --version=10.3.0
+- path c:\nasm-2.16.01;C:\Program Files\gs\gs10.00.0\bin;%PATH%
 - cd c:\pillow\winbuild\
 - ps: |
-c:\python37\python.exe c:\pillow\winbuild\build_prepare.py -v --depends=C:\pillow-depends\
+c:\python38\python.exe c:\pillow\winbuild\build_prepare.py -v --depends=C:\pillow-depends\
 c:\pillow\winbuild\build\build_dep_all.cmd
 $host.SetShouldExit(0)
 - path C:\pillow\winbuild\build\bin;%PATH%

 build_script:
-- ps: |
-c:\pillow\winbuild\build\build_pillow.cmd install
-$host.SetShouldExit(0)
 - cd c:\pillow
+- winbuild\build\build_env.cmd
+- '%PYTHON%\%EXECUTABLE% -m pip install -v -C raqm=vendor -C fribidi=vendor .'
 - '%PYTHON%\%EXECUTABLE% selftest.py --installed'

 test_script:
 - cd c:\pillow
-- '%PYTHON%\%EXECUTABLE% -m pip install pytest pytest-cov pytest-timeout'
+- '%PYTHON%\%EXECUTABLE% -m pip install pytest pytest-cov pytest-timeout defusedxml numpy olefile pyroma'
 - c:\"Program Files (x86)"\"Windows Kits"\10\Debuggers\x86\gflags.exe /p /enable %PYTHON%\%EXECUTABLE%
 - '%PYTHON%\%EXECUTABLE% -c "from PIL import Image"'
 - '%PYTHON%\%EXECUTABLE% -m pytest -vx --cov PIL --cov Tests --cov-report term --cov-report xml Tests'
 #- '%PYTHON%\%EXECUTABLE% test-installed.py -v -s %TEST_OPTIONS%' TODO TEST_OPTIONS with pytest?

 after_test:
-- python -m pip install codecov
+- curl -Os https://uploader.codecov.io/latest/windows/codecov.exe
-- codecov --file coverage.xml --name %PYTHON% --flags AppVeyor
+- .\codecov.exe --file coverage.xml --name %PYTHON% --flags AppVeyor

 matrix:
 fast_finish: true
@@ -60,18 +68,15 @@ cache:
 - '%LOCALAPPDATA%\pip\Cache'

 artifacts:
-- path: pillow\dist\*.egg
+- path: pillow\*.egg
 name: egg
-- path: pillow\dist\*.wheel
+- path: pillow\*.whl
 name: wheel

 before_deploy:
 - cd c:\pillow
-- '%PYTHON%\%EXECUTABLE% -m pip install wheel'
+- '%PYTHON%\%EXECUTABLE% -m pip wheel -v -C raqm=vendor -C fribidi=vendor .'
-- cd c:\pillow\winbuild\
+- ps: Get-ChildItem .\*.whl | % { Push-AppveyorArtifact $_.FullName -FileName $_.Name }
-- c:\pillow\winbuild\build\build_pillow.cmd bdist_wheel
-- cd c:\pillow
-- ps: Get-ChildItem .\dist\*.* | % { Push-AppveyorArtifact $_.FullName -FileName $_.Name }

 deploy:
 provider: S3
@@ -1,7 +1,7 @@
 #!/bin/bash

 # gather the coverage data
-python3 -m pip install codecov
+python3 -m pip install coverage
 if [[ $MATRIX_DOCKER ]]; then
 python3 -m coverage xml --ignore-errors
 else
@@ -22,12 +22,14 @@ set -e
 if [[ $(uname) != CYGWIN* ]]; then
 sudo apt-get -qq install libfreetype6-dev liblcms2-dev python3-tk\
 ghostscript libffi-dev libjpeg-turbo-progs libopenjp2-7-dev\
-cmake meson imagemagick libharfbuzz-dev libfribidi-dev
+cmake meson imagemagick libharfbuzz-dev libfribidi-dev\
+sway wl-clipboard libopenblas-dev
 fi

 python3 -m pip install --upgrade pip
 python3 -m pip install --upgrade wheel
-PYTHONOPTIMIZE=0 python3 -m pip install cffi
+# TODO Update condition when cffi supports 3.13
+if ! [[ "$GHA_PYTHON_VERSION" == "3.13" ]]; then PYTHONOPTIMIZE=0 python3 -m pip install cffi ; fi
 python3 -m pip install coverage
 python3 -m pip install defusedxml
 python3 -m pip install olefile
@@ -35,18 +37,27 @@ python3 -m pip install -U pytest
 python3 -m pip install -U pytest-cov
 python3 -m pip install -U pytest-timeout
 python3 -m pip install pyroma
-python3 -m pip install test-image-results

 if [[ $(uname) != CYGWIN* ]]; then
-# TODO Remove condition when NumPy supports 3.11
+# TODO Update condition when NumPy supports 3.13
-if ! [ "$GHA_PYTHON_VERSION" == "3.11-dev" ]; then python3 -m pip install numpy ; fi
+if ! [[ "$GHA_PYTHON_VERSION" == "3.13" ]]; then python3 -m pip install numpy ; fi

 # PyQt6 doesn't support PyPy3
 if [[ $GHA_PYTHON_VERSION == 3.* ]]; then
-sudo apt-get -qq install libegl1 libxcb-icccm4 libxcb-image0 libxcb-keysyms1 libxcb-randr0 libxcb-render-util0 libxkbcommon-x11-0
+sudo apt-get -qq install libegl1 libxcb-cursor0 libxcb-icccm4 libxcb-image0 libxcb-keysyms1 libxcb-randr0 libxcb-render-util0 libxcb-shape0 libxkbcommon-x11-0
 python3 -m pip install pyqt6
 fi

+# Pyroma uses non-isolated build and fails with old setuptools
+if [[
+$GHA_PYTHON_VERSION == pypy3.9
+|| $GHA_PYTHON_VERSION == 3.8
+|| $GHA_PYTHON_VERSION == 3.9
+]]; then
+# To match pyproject.toml
+python3 -m pip install "setuptools>=67.8"
+fi

 # webp
 pushd depends && ./install_webp.sh && popd
.ci/requirements-cibw.txt (new file)
@@ -0,0 +1 @@
+cibuildwheel==2.18.1

.ci/requirements-mypy.txt (new file)
@@ -0,0 +1 @@
+mypy==1.10.0
@@ -9,6 +9,7 @@ BinPackParameters: false
 BreakBeforeBraces: Attach
 ColumnLimit: 88
 DerivePointerAlignment: false
+IndentGotoLabels: false
 IndentWidth: 4
 Language: Cpp
 PointerAlignment: Right
.coveragerc
@@ -2,15 +2,19 @@

 [report]
 # Regexes for lines to exclude from consideration
-exclude_lines =
+exclude_also =
-# Have to re-enable the standard pragma:
+# Don't complain if non-runnable code isn't run
-pragma: no cover
-
-# Don't complain if non-runnable code isn't run:
 if 0:
 if __name__ == .__main__.:
 # Don't complain about debug code
 if DEBUG:
+# Don't complain about compatibility code for missing optional dependencies
+except ImportError
+if TYPE_CHECKING:
+@abc.abstractmethod
+# Empty bodies in protocols or abstract methods
+^\s*def [a-zA-Z0-9_]+\(.*\)(\s*->.*)?:\s*\.\.\.(\s*#.*)?$
+^\s*\.\.\.(\s*#.*)?$

 [run]
 omit =
@@ -13,7 +13,7 @@ indent_style = space

 trim_trailing_whitespace = true

-[*.yml]
+[*.{toml,yml}]
 # Two-space indentation
 indent_size = 2
.git-blame-ignore-revs (new file)
@@ -0,0 +1,6 @@
+# Flake8
+8de95676e0fd89f2326b3953488ab66ff29cd2d0
+# Format with Black
+53a7e3500437a9fd5826bc04758f7116bd7e52dc
+# Format the C code with ClangFormat
+46b7e86bab79450ec0a2866c6c0c679afb659d17
.github/CONTRIBUTING.md
@@ -19,6 +19,7 @@ Please send a pull request to the `main` branch. Please include [documentation](
 - Follow PEP 8.
 - When committing only documentation changes please include `[ci skip]` in the commit message to avoid running tests on AppVeyor.
 - Include [release notes](https://github.com/python-pillow/Pillow/tree/main/docs/releasenotes) as needed or appropriate with your bug fixes, feature additions and tests.
+- Do not add to the [changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst) for proposed changes, as that is updated after changes are merged.

 ## Reporting Issues
.github/FUNDING.yml
@@ -1 +1 @@
-tidelift: "pypi/Pillow"
+tidelift: "pypi/pillow"
.github/ISSUE_TEMPLATE/ISSUE_REPORT.md
@@ -48,6 +48,21 @@ Thank you.
 * Python:
 * Pillow:

+```text
+Please paste here the output of running:
+
+python3 -m PIL.report
+or
+python3 -m PIL --report
+
+Or the output of the following Python code:
+
+from PIL import report
+# or
+from PIL import features
+features.pilinfo(supported_formats=False)
+```
+
 <!--
 Please include **code** that reproduces the issue and whenever possible, an **image** that demonstrates the issue. Please upload images to GitHub, not to third-party file hosting sites. If necessary, add the image to a zip or tar archive.
.github/mergify.yml
@@ -7,7 +7,7 @@ pull_request_rules:
 - status-success=Test Successful
 - status-success=Docker Test Successful
 - status-success=Windows Test Successful
-- status-success=MinGW Test Successful
+- status-success=MinGW
 - status-success=Cygwin Test Successful
 - status-success=continuous-integration/appveyor/pr
 actions:
.github/problem-matchers/gcc.json (new file)
@@ -0,0 +1,18 @@
+{
+"__comment": "Based on vscode-cpptools' Extension/package.json gcc rule",
+"problemMatcher": [
+{
+"owner": "gcc-problem-matcher",
+"pattern": [
+{
+"regexp": "^\\s*(.*):(\\d+):(\\d+):\\s+(?:fatal\\s+)?(warning|error):\\s+(.*)$",
+"file": 1,
+"line": 2,
+"column": 3,
+"severity": 4,
+"message": 5
+}
+]
+}
+]
+}
.github/release-drafter.yml
@@ -13,6 +13,8 @@ categories:
 label: "Removal"
 - title: "Testing"
 label: "Testing"
+- title: "Type hints"
+label: "Type hints"

 exclude-labels:
 - "changelog: skip"
.github/renovate.json (new file)
@@ -0,0 +1,17 @@
+{
+"$schema": "https://docs.renovatebot.com/renovate-schema.json",
+"extends": [
+"config:base"
+],
+"labels": [
+"Dependency"
+],
+"packageRules": [
+{
+"groupName": "github-actions",
+"matchManagers": ["github-actions"],
+"separateMajorMinor": "false"
+}
+],
+"schedule": ["on the 3rd day of the month"]
+}
.github/workflows/cifuzz.yml
@@ -2,15 +2,26 @@ name: CIFuzz

 on:
 push:
+branches:
+- "**"
 paths:
+- ".github/workflows/cifuzz.yml"
 - "**.c"
 - "**.h"
 pull_request:
 paths:
+- ".github/workflows/cifuzz.yml"
 - "**.c"
 - "**.h"
 workflow_dispatch:

+permissions:
+contents: read
+
+concurrency:
+group: ${{ github.workflow }}-${{ github.ref }}
+cancel-in-progress: true
+
 jobs:
 Fuzzing:
 runs-on: ubuntu-latest
@@ -31,13 +42,13 @@ jobs:
 language: python
 dry-run: false
 - name: Upload New Crash
-uses: actions/upload-artifact@v3
+uses: actions/upload-artifact@v4
 if: failure() && steps.build.outcome == 'success'
 with:
 name: artifacts
 path: ./out/artifacts
 - name: Upload Legacy Crash
-uses: actions/upload-artifact@v3
+uses: actions/upload-artifact@v4
 if: steps.run.outcome == 'success'
 with:
 name: crash
.github/workflows/docs.yml (new file)
@@ -0,0 +1,69 @@
+name: Docs
+
+on:
+push:
+branches:
+- "**"
+paths:
+- ".github/workflows/docs.yml"
+- "docs/**"
+- "src/PIL/**"
+pull_request:
+paths:
+- ".github/workflows/docs.yml"
+- "docs/**"
+- "src/PIL/**"
+workflow_dispatch:
+
+permissions:
+contents: read
+
+concurrency:
+group: ${{ github.workflow }}-${{ github.ref }}
+cancel-in-progress: true
+
+env:
+FORCE_COLOR: 1
+
+jobs:
+build:
+
+runs-on: ubuntu-latest
+name: Docs
+
+steps:
+- uses: actions/checkout@v4
+
+- name: Set up Python
+uses: actions/setup-python@v5
+with:
+python-version: "3.x"
+cache: pip
+cache-dependency-path: |
+".ci/*.sh"
+"pyproject.toml"
+
+- name: Build system information
+run: python3 .github/workflows/system-info.py
+
+- name: Cache libimagequant
+uses: actions/cache@v4
+id: cache-libimagequant
+with:
+path: ~/cache-libimagequant
+key: ${{ runner.os }}-libimagequant-${{ hashFiles('depends/install_imagequant.sh') }}
+
+- name: Install Linux dependencies
+run: |
+.ci/install.sh
+env:
+GHA_PYTHON_VERSION: "3.x"
+GHA_LIBIMAGEQUANT_CACHE_HIT: ${{ steps.cache-libimagequant.outputs.cache-hit }}
+
+- name: Build
+run: |
+.ci/build.sh
+
+- name: Docs
+run: |
+make doccheck
.github/workflows/lint.yml
@@ -2,9 +2,16 @@ name: Lint

 on: [push, pull_request, workflow_dispatch]

+env:
+FORCE_COLOR: 1
+
 permissions:
 contents: read

+concurrency:
+group: ${{ github.workflow }}-${{ github.ref }}
+cancel-in-progress: true
+
 jobs:
 build:

@@ -13,10 +20,10 @@ jobs:
 name: Lint

 steps:
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4

 - name: pre-commit cache
-uses: actions/cache@v2
+uses: actions/cache@v4
 with:
 path: ~/.cache/pre-commit
 key: lint-pre-commit-${{ hashFiles('**/.pre-commit-config.yaml') }}
@@ -24,9 +31,9 @@ jobs:
 lint-pre-commit-

 - name: Set up Python
-uses: actions/setup-python@v3
+uses: actions/setup-python@v5
 with:
-python-version: "3.10"
+python-version: "3.x"
 cache: pip
 cache-dependency-path: "setup.py"

@@ -42,3 +49,6 @@ jobs:
 run: tox -e lint
 env:
 PRE_COMMIT_COLOR: always
+
+- name: Mypy
+run: tox -e mypy
.github/workflows/macos-install.sh
@@ -2,9 +2,21 @@

 set -e

-brew install libtiff libjpeg openjpeg libimagequant webp little-cms2 freetype openblas libraqm
+brew install \
+freetype \
+ghostscript \
+libimagequant \
+libjpeg \
+libraqm \
+libtiff \
+little-cms2 \
+openjpeg \
+webp
+export PKG_CONFIG_PATH="/usr/local/opt/openblas/lib/pkgconfig"
+
+# TODO Update condition when cffi supports 3.13
+if ! [[ "$GHA_PYTHON_VERSION" == "3.13" ]]; then PYTHONOPTIMIZE=0 python3 -m pip install cffi ; fi
-PYTHONOPTIMIZE=0 python3 -m pip install cffi
 python3 -m pip install coverage
 python3 -m pip install defusedxml
 python3 -m pip install olefile
@@ -12,11 +24,9 @@ python3 -m pip install -U pytest
 python3 -m pip install -U pytest-cov
 python3 -m pip install -U pytest-timeout
 python3 -m pip install pyroma
-python3 -m pip install test-image-results

-echo -e "[openblas]\nlibraries = openblas\nlibrary_dirs = /usr/local/opt/openblas/lib" >> ~/.numpy-site.cfg
+# TODO Update condition when NumPy supports 3.13
-# TODO Remove condition when NumPy supports 3.11
+if ! [[ "$GHA_PYTHON_VERSION" == "3.13" ]]; then python3 -m pip install numpy ; fi
-if ! [ "$GHA_PYTHON_VERSION" == "3.11-dev" ]; then python3 -m pip install numpy ; fi

 # extra test images
 pushd depends && ./install_extra_test_images.sh && popd
.github/workflows/release-drafter.yml
@@ -10,6 +10,10 @@ on:
 permissions:
 contents: read

+concurrency:
+group: ${{ github.workflow }}-${{ github.ref }}
+cancel-in-progress: true
+
 jobs:
 update_release_draft:
 permissions:
@@ -19,6 +23,6 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 # Drafts your next release notes as pull requests are merged into "main"
-- uses: release-drafter/release-drafter@v5
+- uses: release-drafter/release-drafter@v6
 env:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/stale.yml
@@ -8,6 +8,10 @@ on:
 permissions:
 issues: write

+concurrency:
+group: ${{ github.workflow }}-${{ github.ref }}
+cancel-in-progress: true
+
 jobs:
 stale:
 if: github.repository_owner == 'python-pillow'
@@ -16,7 +20,7 @@ jobs:

 steps:
 - name: "Check issues"
-uses: actions/stale@v5
+uses: actions/stale@v9
 with:
 repo-token: ${{ secrets.GITHUB_TOKEN }}
 only-labels: "Awaiting OP Action"
.github/workflows/system-info.py
@@ -6,6 +6,9 @@ This sort of info is missing from GitHub Actions.
 Requested here:
 https://github.com/actions/virtual-environments/issues/79
 """

+from __future__ import annotations
+
 import os
 import platform
 import sys
.github/workflows/test-cygwin.yml
@@ -1,6 +1,33 @@
 name: Test Cygwin

-on: [push, pull_request, workflow_dispatch]
+on:
+push:
+branches:
+- "**"
+paths-ignore:
+- ".github/workflows/docs.yml"
+- ".github/workflows/wheels*"
+- ".gitmodules"
+- "docs/**"
+- "wheels/**"
+pull_request:
+paths-ignore:
+- ".github/workflows/docs.yml"
+- ".github/workflows/wheels*"
+- ".gitmodules"
+- "docs/**"
+- "wheels/**"
+workflow_dispatch:
+
+permissions:
+contents: read
+
+concurrency:
+group: ${{ github.workflow }}-${{ github.ref }}
+cancel-in-progress: true
+
+env:
+COVERAGE_CORE: sysmon

 jobs:
 build:
@@ -8,7 +35,7 @@ jobs:
 strategy:
 fail-fast: false
 matrix:
-python-minor-version: [7, 8, 9]
+python-minor-version: [8, 9]

 timeout-minutes: 40

@@ -20,33 +47,52 @@ jobs:
 git config --global core.autocrlf input

 - name: Checkout Pillow
-uses: actions/checkout@v3
+uses: actions/checkout@v4

 - name: Install Cygwin
-uses: cygwin/cygwin-install-action@v2
+uses: cygwin/cygwin-install-action@v4
 with:
-platform: x86_64
 packages: >
-ImageMagick gcc-g++ ghostscript jpeg libfreetype-devel
+gcc-g++
-libimagequant-devel libjpeg-devel liblapack-devel
+ghostscript
-liblcms2-devel libopenjp2-devel libraqm-devel
+git
-libtiff-devel libwebp-devel libxcb-devel libxcb-xinerama0
+ImageMagick
-make netpbm perl
+jpeg
+libfreetype-devel
+libimagequant-devel
+libjpeg-devel
+liblapack-devel
+liblcms2-devel
+libopenjp2-devel
+libraqm-devel
+libtiff-devel
+libwebp-devel
+libxcb-devel
+libxcb-xinerama0
+make
+netpbm
+perl
 python3${{ matrix.python-minor-version }}-cffi
 python3${{ matrix.python-minor-version }}-cython
 python3${{ matrix.python-minor-version }}-devel
 python3${{ matrix.python-minor-version }}-numpy
 python3${{ matrix.python-minor-version }}-sip
 python3${{ matrix.python-minor-version }}-tkinter
-qt5-devel-tools subversion xorg-server-extra zlib-devel
+wget
+xorg-server-extra
+zlib-devel

 - name: Add Lapack to PATH
-uses: egor-tensin/cleanup-path@v1
+uses: egor-tensin/cleanup-path@v4
 with:
 dirs: 'C:\cygwin\bin;C:\cygwin\lib\lapack'

+- name: Select Python version
+run: |
+ln -sf c:/cygwin/bin/python3.${{ matrix.python-minor-version }} c:/cygwin/bin/python3
+
 - name: pip cache
-uses: actions/cache@v3
+uses: actions/cache@v4
 with:
 path: 'C:\cygwin\home\runneradmin\.cache\pip'
 key: ${{ runner.os }}-cygwin-pip3.${{ matrix.python-minor-version }}-${{ hashFiles('.ci/install.sh') }}
@@ -61,11 +107,6 @@ jobs:
 run: |
 bash.exe .ci/install.sh

-- name: Install a different NumPy
-shell: dash.exe -l "{0}"
-run: |
-python3 -m pip install -U 'numpy!=1.21.*'

 - name: Build
 shell: bash.exe -eo pipefail -o igncr "{0}"
 run: |
@@ -81,7 +122,7 @@ jobs:
 dash.exe -c "mkdir -p Tests/errors"

 - name: Upload errors
-uses: actions/upload-artifact@v3
+uses: actions/upload-artifact@v4
 if: failure()
 with:
 name: errors
@@ -92,11 +133,12 @@ jobs:
 bash.exe .ci/after_success.sh

 - name: Upload coverage
-uses: codecov/codecov-action@v3
+uses: codecov/codecov-action@v4
 with:
 file: ./coverage.xml
 flags: GHA_Cygwin
 name: Cygwin Python 3.${{ matrix.python-minor-version }}
+token: ${{ secrets.CODECOV_ORG_TOKEN }}

 success:
 permissions:
.github/workflows/test-docker.yml
@@ -1,10 +1,31 @@
 name: Test Docker

-on: [push, pull_request, workflow_dispatch]
+on:
+push:
+branches:
+- "**"
+paths-ignore:
+- ".github/workflows/docs.yml"
+- ".github/workflows/wheels*"
+- ".gitmodules"
+- "docs/**"
+- "wheels/**"
+pull_request:
+paths-ignore:
+- ".github/workflows/docs.yml"
+- ".github/workflows/wheels*"
+- ".gitmodules"
+- "docs/**"
+- "wheels/**"
+workflow_dispatch:
+
 permissions:
 contents: read

+concurrency:
+group: ${{ github.workflow }}-${{ github.ref }}
+cancel-in-progress: true
+
 jobs:
 build:

@@ -15,37 +36,37 @@ jobs:
 docker: [
 # Run slower jobs first to give them a headstart and reduce waiting time
 ubuntu-22.04-jammy-arm64v8,
-ubuntu-22.04-jammy-ppc64le,
+ubuntu-24.04-noble-ppc64le,
-ubuntu-22.04-jammy-s390x,
+ubuntu-24.04-noble-s390x,
 # Then run the remainder
 alpine,
 amazon-2-amd64,
+amazon-2023-amd64,
 arch,
-centos-7-amd64,
-centos-stream-8-amd64,
 centos-stream-9-amd64,
-debian-10-buster-x86,
+debian-11-bullseye-amd64,
-debian-11-bullseye-x86,
+debian-12-bookworm-x86,
-fedora-35-amd64,
+debian-12-bookworm-amd64,
-fedora-36-amd64,
+fedora-39-amd64,
+fedora-40-amd64,
 gentoo,
-ubuntu-18.04-bionic-amd64,
 ubuntu-20.04-focal-amd64,
 ubuntu-22.04-jammy-amd64,
+ubuntu-24.04-noble-amd64,
 ]
 dockerTag: [main]
 include:
 - docker: "ubuntu-22.04-jammy-arm64v8"
 qemu-arch: "aarch64"
-- docker: "ubuntu-22.04-jammy-ppc64le"
+- docker: "ubuntu-24.04-noble-ppc64le"
 qemu-arch: "ppc64le"
-- docker: "ubuntu-22.04-jammy-s390x"
+- docker: "ubuntu-24.04-noble-s390x"
 qemu-arch: "s390x"

 name: ${{ matrix.docker }}

 steps:
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4

 - name: Build system information
 run: python3 .github/workflows/system-info.py
@@ -61,8 +82,8 @@ jobs:

 - name: Docker build
 run: |
-# The Pillow user in the docker container is UID 1000
+# The Pillow user in the docker container is UID 1001
-sudo chown -R 1000 $GITHUB_WORKSPACE
+sudo chown -R 1001 $GITHUB_WORKSPACE
 docker run --name pillow_container -v $GITHUB_WORKSPACE:/Pillow pythonpillow/${{ matrix.docker }}:${{ matrix.dockerTag }}
 sudo chown -R runner $GITHUB_WORKSPACE

@@ -79,10 +100,12 @@ jobs:
 MATRIX_DOCKER: ${{ matrix.docker }}

 - name: Upload coverage
-uses: codecov/codecov-action@v1
+uses: codecov/codecov-action@v4
 with:
 flags: GHA_Docker
 name: ${{ matrix.docker }}
+gcov: true
+token: ${{ secrets.CODECOV_ORG_TOKEN }}

 success:
 permissions:
.github/workflows/test-mingw.yml
@@ -1,38 +1,51 @@
 name: Test MinGW

-on: [push, pull_request, workflow_dispatch]
+on:
+push:
+branches:
+- "**"
+paths-ignore:
+- ".github/workflows/docs.yml"
+- ".github/workflows/wheels*"
+- ".gitmodules"
+- "docs/**"
+- "wheels/**"
+pull_request:
+paths-ignore:
+- ".github/workflows/docs.yml"
+- ".github/workflows/wheels*"
+- ".gitmodules"
+- "docs/**"
+- "wheels/**"
+workflow_dispatch:
+
 permissions:
 contents: read

+concurrency:
+group: ${{ github.workflow }}-${{ github.ref }}
+cancel-in-progress: true
+
+env:
+COVERAGE_CORE: sysmon
+
 jobs:
 build:
 runs-on: windows-latest
-strategy:
-fail-fast: false
-matrix:
-mingw: ["MINGW32", "MINGW64"]
-include:
-- mingw: "MINGW32"
-name: "MSYS2 MinGW 32-bit"
-package: "mingw-w64-i686"
-- mingw: "MINGW64"
-name: "MSYS2 MinGW 64-bit"
-package: "mingw-w64-x86_64"

 defaults:
 run:
 shell: bash.exe --login -eo pipefail "{0}"
 env:
-MSYSTEM: ${{ matrix.mingw }}
+MSYSTEM: MINGW64
 CHERE_INVOKING: 1

 timeout-minutes: 30
-name: ${{ matrix.name }}
+name: "MinGW"

 steps:
 - name: Checkout Pillow
-uses: actions/checkout@v3
+uses: actions/checkout@v4

 - name: Set up shell
 run: echo "C:\msys64\usr\bin\" >> $env:GITHUB_PATH
@@ -41,30 +54,29 @@ jobs:
 - name: Install dependencies
 run: |
 pacman -S --noconfirm \
-${{ matrix.package }}-python3-cffi \
+mingw-w64-x86_64-freetype \
-${{ matrix.package }}-python3-numpy \
+mingw-w64-x86_64-gcc \
-${{ matrix.package }}-python3-olefile \
+mingw-w64-x86_64-ghostscript \
-${{ matrix.package }}-python3-pip \
+mingw-w64-x86_64-lcms2 \
-${{ matrix.package }}-python-pyqt6 \
+mingw-w64-x86_64-libimagequant \
-${{ matrix.package }}-python3-setuptools \
+mingw-w64-x86_64-libjpeg-turbo \
-${{ matrix.package }}-freetype \
+mingw-w64-x86_64-libraqm \
-${{ matrix.package }}-gcc \
+mingw-w64-x86_64-libtiff \
-${{ matrix.package }}-ghostscript \
+mingw-w64-x86_64-libwebp \
-${{ matrix.package }}-lcms2 \
+mingw-w64-x86_64-openjpeg2 \
-${{ matrix.package }}-libimagequant \
+mingw-w64-x86_64-python3-cffi \
-${{ matrix.package }}-libjpeg-turbo \
+mingw-w64-x86_64-python3-numpy \
-${{ matrix.package }}-libraqm \
+mingw-w64-x86_64-python3-olefile \
-${{ matrix.package }}-libtiff \
+mingw-w64-x86_64-python3-setuptools \
-${{ matrix.package }}-libwebp \
+mingw-w64-x86_64-python-pyqt6
-${{ matrix.package }}-openjpeg2 \
-subversion

+python3 -m ensurepip
 python3 -m pip install pyroma pytest pytest-cov pytest-timeout

 pushd depends && ./install_extra_test_images.sh && popd

 - name: Build Pillow
-run: CFLAGS="-coverage" python3 -m pip install --global-option="build_ext" .
+run: SETUPTOOLS_USE_DISTUTILS="stdlib" CFLAGS="-coverage" python3 -m pip install .

 - name: Test Pillow
 run: |
@@ -73,18 +85,9 @@ jobs:
 python3 -m pytest -vx --cov PIL --cov Tests --cov-report term --cov-report xml Tests

 - name: Upload coverage
-run: |
+uses: codecov/codecov-action@v4
-python3 -m pip install codecov
+with:
-bash <(curl -s https://codecov.io/bash) -F GHA_Windows
+file: ./coverage.xml
-env:
+flags: GHA_Windows
-CODECOV_NAME: ${{ matrix.name }}
+name: "MSYS2 MinGW"
+token: ${{ secrets.CODECOV_ORG_TOKEN }}
-success:
-permissions:
-contents: none
-needs: build
-runs-on: ubuntu-latest
-name: MinGW Test Successful
-steps:
-- name: Success
-run: echo MinGW Test Successful
.github/workflows/test-valgrind.yml
@@ -1,14 +1,18 @@
 name: Test Valgrind

-# like the docker tests, but running valgrind only on *.c/*.h changes.
+# like the Docker tests, but running valgrind only on *.c/*.h changes.

 on:
 push:
+branches:
+- "**"
 paths:
+- ".github/workflows/test-valgrind.yml"
 - "**.c"
 - "**.h"
 pull_request:
 paths:
+- ".github/workflows/test-valgrind.yml"
 - "**.c"
 - "**.h"
 workflow_dispatch:
@@ -16,6 +20,10 @@ on:
 permissions:
 contents: read

+concurrency:
+group: ${{ github.workflow }}-${{ github.ref }}
+cancel-in-progress: true
+
 jobs:
 build:

@@ -24,14 +32,14 @@ jobs:
 fail-fast: false
 matrix:
 docker: [
-ubuntu-20.04-focal-amd64-valgrind,
+ubuntu-22.04-jammy-amd64-valgrind,
 ]
 dockerTag: [main]

 name: ${{ matrix.docker }}

 steps:
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4

 - name: Build system information
 run: python3 .github/workflows/system-info.py
@@ -42,7 +50,7 @@ jobs:

 - name: Build and Run Valgrind
 run: |
-# The Pillow user in the docker container is UID 1000
+# The Pillow user in the docker container is UID 1001
-sudo chown -R 1000 $GITHUB_WORKSPACE
+sudo chown -R 1001 $GITHUB_WORKSPACE
-docker run --name pillow_container -v $GITHUB_WORKSPACE:/Pillow pythonpillow/${{ matrix.docker }}:${{ matrix.dockerTag }}
+docker run --name pillow_container -e "PILLOW_VALGRIND_TEST=true" -v $GITHUB_WORKSPACE:/Pillow pythonpillow/${{ matrix.docker }}:${{ matrix.dockerTag }}
 sudo chown -R runner $GITHUB_WORKSPACE
.github/workflows/test-windows.yml
@@ -1,72 +1,106 @@
 name: Test Windows

-on: [push, pull_request, workflow_dispatch]
+on:
+push:
+branches:
+- "**"
+paths-ignore:
+- ".github/workflows/docs.yml"
+- ".github/workflows/wheels*"
+- ".gitmodules"
+- "docs/**"
+- "wheels/**"
+pull_request:
+paths-ignore:
+- ".github/workflows/docs.yml"
+- ".github/workflows/wheels*"
+- ".gitmodules"
+- "docs/**"
+- "wheels/**"
+workflow_dispatch:
+
 permissions:
 contents: read

+concurrency:
+group: ${{ github.workflow }}-${{ github.ref }}
+cancel-in-progress: true
+
+env:
+COVERAGE_CORE: sysmon
+
 jobs:
 build:
 runs-on: windows-latest
 strategy:
 fail-fast: false
 matrix:
-python-version: ["3.7", "3.8", "3.9", "3.10", "3.11-dev"]
+python-version: ["pypy3.10", "pypy3.9", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
-architecture: ["x86", "x64"]
-include:
-# PyPy 7.3.4+ only ships 64-bit binaries for Windows
-- python-version: "pypy-3.7"
-architecture: "x64"
-- python-version: "pypy-3.8"
-architecture: "x64"

 timeout-minutes: 30

-name: Python ${{ matrix.python-version }} ${{ matrix.architecture }}
+name: Python ${{ matrix.python-version }}

 steps:
 - name: Checkout Pillow
-uses: actions/checkout@v3
+uses: actions/checkout@v4

 - name: Checkout cached dependencies
-uses: actions/checkout@v3
+uses: actions/checkout@v4
 with:
 repository: python-pillow/pillow-depends
 path: winbuild\depends

+- name: Checkout extra test images
+uses: actions/checkout@v4
+with:
+repository: python-pillow/test-images
+path: Tests\test-images
+
 # sets env: pythonLocation
 - name: Set up Python
-uses: actions/setup-python@v3
+uses: actions/setup-python@v5
 with:
 python-version: ${{ matrix.python-version }}
-architecture: ${{ matrix.architecture }}
+allow-prereleases: true
 cache: pip
 cache-dependency-path: ".github/workflows/test-windows.yml"

 - name: Print build system information
 run: python3 .github/workflows/system-info.py

-- name: python3 -m pip install wheel pytest pytest-cov pytest-timeout defusedxml
+- name: Install Python dependencies
-run: python3 -m pip install wheel pytest pytest-cov pytest-timeout defusedxml
+run: >
+python3 -m pip install
+coverage>=7.4.2
+defusedxml
+olefile
+pyroma
+pytest
+pytest-cov
+pytest-timeout

 - name: Install dependencies
 id: install
 run: |
-7z x winbuild\depends\nasm-2.15.05-win64.zip "-o$env:RUNNER_WORKSPACE\"
+choco install nasm --no-progress
-echo "$env:RUNNER_WORKSPACE\nasm-2.15.05" >> $env:GITHUB_PATH
+echo "C:\Program Files\NASM" >> $env:GITHUB_PATH

-winbuild\depends\gs9561w32.exe /S
+choco install ghostscript --version=10.3.0 --no-progress
-echo "C:\Program Files (x86)\gs\gs9.56.1\bin" >> $env:GITHUB_PATH
+echo "C:\Program Files\gs\gs10.00.0\bin" >> $env:GITHUB_PATH

-xcopy /S /Y winbuild\depends\test_images\* Tests\images\
+# Install extra test images
+xcopy /S /Y Tests\test-images\* Tests\images

 # make cache key depend on VS version
-& "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe" | find """catalog_buildVersion""" | ForEach-Object { $a = $_.split(" ")[1]; echo "::set-output name=vs::$a" }
+& "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe" `
+| find """catalog_buildVersion""" `
+| ForEach-Object { $a = $_.split(" ")[1]; echo "vs=$a" >> $env:GITHUB_OUTPUT }
 shell: pwsh

 - name: Cache build
 id: build-cache
-uses: actions/cache@v2
+uses: actions/cache@v4
 with:
 path: winbuild\build
 key:
@@ -75,7 +109,7 @@ jobs:
 - name: Prepare build
 if: steps.build-cache.outputs.cache-hit != 'true'
 run: |
-& python.exe winbuild\build_prepare.py -v --python=$env:pythonLocation --srcdir
+& python.exe winbuild\build_prepare.py -v
 shell: pwsh

 - name: Build dependencies / libjpeg-turbo
@@ -86,19 +120,28 @@ jobs:
 if: steps.build-cache.outputs.cache-hit != 'true'
 run: "& winbuild\\build\\build_dep_zlib.cmd"

-- name: Build dependencies / LibTiff
+- name: Build dependencies / xz
 if: steps.build-cache.outputs.cache-hit != 'true'
-run: "& winbuild\\build\\build_dep_libtiff.cmd"
+run: "& winbuild\\build\\build_dep_xz.cmd"

 - name: Build dependencies / WebP
 if: steps.build-cache.outputs.cache-hit != 'true'
 run: "& winbuild\\build\\build_dep_libwebp.cmd"

+- name: Build dependencies / LibTiff
+if: steps.build-cache.outputs.cache-hit != 'true'
+run: "& winbuild\\build\\build_dep_libtiff.cmd"
+
 # for FreeType CBDT/SBIX font support
 - name: Build dependencies / libpng
 if: steps.build-cache.outputs.cache-hit != 'true'
 run: "& winbuild\\build\\build_dep_libpng.cmd"

+# for FreeType WOFF2 font support
+- name: Build dependencies / brotli
+if: steps.build-cache.outputs.cache-hit != 'true'
+run: "& winbuild\\build\\build_dep_brotli.cmd"
+
 - name: Build dependencies / FreeType
 if: steps.build-cache.outputs.cache-hit != 'true'
 run: "& winbuild\\build\\build_dep_freetype.cmd"
@@ -126,7 +169,7 @@ jobs:
 if: steps.build-cache.outputs.cache-hit != 'true'
 run: "& winbuild\\build\\build_dep_fribidi.cmd"

-# trim ~150MB x 9
+# trim ~150MB for each job
 - name: Optimize build cache
 if: steps.build-cache.outputs.cache-hit != 'true'
 run: rmdir /S /Q winbuild\build\src
@@ -134,9 +177,8 @@ jobs:

 - name: Build Pillow
 run: |
-$FLAGS=""
+$FLAGS="-C raqm=vendor -C fribidi=vendor"
-if ('${{ github.event_name }}' -ne 'pull_request') { $FLAGS="--disable-imagequant" }
+cmd /c "winbuild\build\build_env.cmd && $env:pythonLocation\python.exe -m pip install -v $FLAGS ."
-& winbuild\build\build_pillow.cmd $FLAGS install
 & $env:pythonLocation\python.exe selftest.py --installed
 shell: pwsh

@@ -159,7 +201,7 @@ jobs:
 shell: bash

 - name: Upload errors
-uses: actions/upload-artifact@v3
+uses: actions/upload-artifact@v4
 if: failure()
 with:
 name: errors
@@ -171,25 +213,12 @@ jobs:
 shell: pwsh

 - name: Upload coverage
-uses: codecov/codecov-action@v1
+uses: codecov/codecov-action@v4
 with:
 file: ./coverage.xml
 flags: GHA_Windows
-name: ${{ runner.os }} Python ${{ matrix.python-version }} ${{ matrix.architecture }}
+name: ${{ runner.os }} Python ${{ matrix.python-version }}
+token: ${{ secrets.CODECOV_ORG_TOKEN }}
-- name: Build wheel
-id: wheel
-if: "github.event_name != 'pull_request'"
-run: |
-for /f "tokens=3 delims=/" %%a in ("${{ github.ref }}") do echo ::set-output name=dist::dist-%%a
-winbuild\\build\\build_pillow.cmd --disable-imagequant bdist_wheel
-shell: cmd

-- uses: actions/upload-artifact@v3
-if: "github.event_name != 'pull_request'"
-with:
-name: ${{ steps.wheel.outputs.dist }}
-path: dist\*.whl

 success:
 permissions:
.github/workflows/test.yml
@@ -1,10 +1,35 @@
 name: Test

-on: [push, pull_request, workflow_dispatch]
+on:
+push:
+branches:
+- "**"
+paths-ignore:
+- ".github/workflows/docs.yml"
+- ".github/workflows/wheels*"
+- ".gitmodules"
+- "docs/**"
+- "wheels/**"
+pull_request:
+paths-ignore:
+- ".github/workflows/docs.yml"
+- ".github/workflows/wheels*"
+- ".gitmodules"
+- "docs/**"
+- "wheels/**"
+workflow_dispatch:
+
 permissions:
 contents: read

+concurrency:
+group: ${{ github.workflow }}-${{ github.ref }}
+cancel-in-progress: true
+
+env:
+COVERAGE_CORE: sysmon
+FORCE_COLOR: 1
+
 jobs:
 build:

@@ -12,52 +37,70 @@ jobs:
 fail-fast: false
 matrix:
 os: [
-"macos-latest",
+"macos-14",
 "ubuntu-latest",
 ]
 python-version: [
-"pypy-3.8",
+"pypy3.10",
-"pypy-3.7",
+"pypy3.9",
-"3.11-dev",
+"3.13",
+"3.12",
+"3.11",
 "3.10",
 "3.9",
 "3.8",
-"3.7",
 ]
 include:
-- python-version: "3.7"
+- python-version: "3.11"
 PYTHONOPTIMIZE: 1
 REVERSE: "--reverse"
-- python-version: "3.8"
+- python-version: "3.10"
 PYTHONOPTIMIZE: 2
-# Include new variables for Codecov
+# M1 only available for 3.10+
-- os: ubuntu-latest
+- os: "macos-13"
-codecov-flag: GHA_Ubuntu
+python-version: "3.9"
-- os: macos-latest
+- os: "macos-13"
-codecov-flag: GHA_macOS
+python-version: "3.8"
+exclude:
+- os: "macos-14"
+python-version: "3.9"
+- os: "macos-14"
+python-version: "3.8"

 runs-on: ${{ matrix.os }}
 name: ${{ matrix.os }} Python ${{ matrix.python-version }}

 steps:
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4

 - name: Set up Python ${{ matrix.python-version }}
-uses: actions/setup-python@v3
+uses: actions/setup-python@v5
 with:
 python-version: ${{ matrix.python-version }}
+allow-prereleases: true
 cache: pip
-cache-dependency-path: ".ci/*.sh"
+cache-dependency-path: |
+".ci/*.sh"
+"pyproject.toml"

 - name: Build system information
 run: python3 .github/workflows/system-info.py

+- name: Cache libimagequant
+if: startsWith(matrix.os, 'ubuntu')
+uses: actions/cache@v4
+id: cache-libimagequant
+with:
+path: ~/cache-libimagequant
+key: ${{ runner.os }}-libimagequant-${{ hashFiles('depends/install_imagequant.sh') }}
+
 - name: Install Linux dependencies
 if: startsWith(matrix.os, 'ubuntu')
 run: |
 .ci/install.sh
 env:
 GHA_PYTHON_VERSION: ${{ matrix.python-version }}
+GHA_LIBIMAGEQUANT_CACHE_HIT: ${{ steps.cache-libimagequant.outputs.cache-hit }}

 - name: Install macOS dependencies
 if: startsWith(matrix.os, 'macOS')
@@ -66,6 +109,10 @@ jobs:
 env:
 GHA_PYTHON_VERSION: ${{ matrix.python-version }}

+- name: Register gcc problem matcher
+if: "matrix.os == 'ubuntu-latest' && matrix.python-version == '3.12'"
+run: echo "::add-matcher::.github/problem-matchers/gcc.json"
+
 - name: Build
 run: |
 .ci/build.sh
@@ -76,7 +123,9 @@ jobs:
 python3 -m pip install pytest-reverse
 fi
 if [ "${{ matrix.os }}" = "ubuntu-latest" ]; then
-xvfb-run -s '-screen 0 1024x768x24' .ci/test.sh
+xvfb-run -s '-screen 0 1024x768x24' sway&
+export WAYLAND_DISPLAY=wayland-1
+.ci/test.sh
 else
 .ci/test.sh
 fi
@@ -90,26 +139,23 @@ jobs:
 mkdir -p Tests/errors

 - name: Upload errors
-uses: actions/upload-artifact@v3
+uses: actions/upload-artifact@v4
 if: failure()
 with:
 name: errors
 path: Tests/errors

-- name: Docs
-if: startsWith(matrix.os, 'ubuntu') && matrix.python-version == 3.10
-run: |
-python3 -m pip install furo sphinx-copybutton sphinx-issues sphinx-removed-in sphinxext-opengraph
-make doccheck

 - name: After success
 run: |
 .ci/after_success.sh

 - name: Upload coverage
-run: bash <(curl -s https://codecov.io/bash) -F ${{ matrix.codecov-flag }}
+uses: codecov/codecov-action@v4
-env:
+with:
-CODECOV_NAME: ${{ matrix.os }} Python ${{ matrix.python-version }}
+flags: ${{ matrix.os == 'ubuntu-latest' && 'GHA_Ubuntu' || 'GHA_macOS' }}
+name: ${{ matrix.os }} Python ${{ matrix.python-version }}
+gcov: true
+token: ${{ secrets.CODECOV_ORG_TOKEN }}

 success:
 permissions:
.github/workflows/tidelift.yml (deleted, 31 lines)
@@ -1,31 +0,0 @@
name: Tidelift Align

on:
  schedule:
    - cron: "30 2 * * *"  # daily at 02:30 UTC
  push:
    paths:
      - "Pipfile*"
      - ".github/workflows/tidelift.yml"
  pull_request:
    paths:
      - "Pipfile*"
      - ".github/workflows/tidelift.yml"
  workflow_dispatch:

permissions:
  contents: read

jobs:
  build:
    if: github.repository_owner == 'python-pillow'
    name: Run Tidelift to ensure approved open source packages are in use
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Scan
        uses: tidelift/alignment-action@main
        env:
          TIDELIFT_API_KEY: ${{ secrets.TIDELIFT_API_KEY }}
          TIDELIFT_ORGANIZATION: team/aclark4life
          TIDELIFT_PROJECT: pillow
.github/workflows/wheels-dependencies.sh (new executable file, 152 lines)
@@ -0,0 +1,152 @@
#!/bin/bash
# Define custom utilities
# Test for macOS with [ -n "$IS_MACOS" ]
if [ -z "$IS_MACOS" ]; then
  export MB_ML_LIBC=${AUDITWHEEL_POLICY::9}
  export MB_ML_VER=${AUDITWHEEL_POLICY:9}
fi
export PLAT=$CIBW_ARCHS
source wheels/multibuild/common_utils.sh
source wheels/multibuild/library_builders.sh
if [ -z "$IS_MACOS" ]; then
  source wheels/multibuild/manylinux_utils.sh
fi

ARCHIVE_SDIR=pillow-depends-main

# Package versions for fresh source builds
FREETYPE_VERSION=2.13.2
HARFBUZZ_VERSION=8.4.0
LIBPNG_VERSION=1.6.43
JPEGTURBO_VERSION=3.0.2
OPENJPEG_VERSION=2.5.2
XZ_VERSION=5.4.5
TIFF_VERSION=4.6.0
LCMS2_VERSION=2.16
if [[ -n "$IS_MACOS" ]]; then
  GIFLIB_VERSION=5.2.2
else
  GIFLIB_VERSION=5.2.1
fi
if [[ -n "$IS_MACOS" ]] || [[ "$MB_ML_VER" != 2014 ]]; then
  ZLIB_VERSION=1.3.1
else
  ZLIB_VERSION=1.2.8
fi
LIBWEBP_VERSION=1.3.2
BZIP2_VERSION=1.0.8
LIBXCB_VERSION=1.16.1
BROTLI_VERSION=1.1.0

if [[ -n "$IS_MACOS" ]] && [[ "$CIBW_ARCHS" == "x86_64" ]]; then
  function build_openjpeg {
    local out_dir=$(fetch_unpack https://github.com/uclouvain/openjpeg/archive/v${OPENJPEG_VERSION}.tar.gz openjpeg-${OPENJPEG_VERSION}.tar.gz)
    (cd $out_dir \
      && cmake -DCMAKE_INSTALL_PREFIX=$BUILD_PREFIX -DCMAKE_INSTALL_NAME_DIR=$BUILD_PREFIX/lib . \
      && make install)
    touch openjpeg-stamp
  }
fi

function build_brotli {
  local cmake=$(get_modern_cmake)
  local out_dir=$(fetch_unpack https://github.com/google/brotli/archive/v$BROTLI_VERSION.tar.gz brotli-1.1.0.tar.gz)
  (cd $out_dir \
    && $cmake -DCMAKE_INSTALL_PREFIX=$BUILD_PREFIX -DCMAKE_INSTALL_NAME_DIR=$BUILD_PREFIX/lib . \
    && make install)
  if [[ "$MB_ML_LIBC" == "manylinux" ]]; then
    cp /usr/local/lib64/libbrotli* /usr/local/lib
    cp /usr/local/lib64/pkgconfig/libbrotli* /usr/local/lib/pkgconfig
  fi
}

function build {
  if [[ -n "$IS_MACOS" ]] && [[ "$CIBW_ARCHS" == "arm64" ]]; then
    sudo chown -R runner /usr/local
  fi
  build_xz
  if [ -z "$IS_ALPINE" ] && [ -z "$IS_MACOS" ]; then
    yum remove -y zlib-devel
  fi
  build_new_zlib

  build_simple xcb-proto 1.16.0 https://xorg.freedesktop.org/archive/individual/proto
  if [ -n "$IS_MACOS" ]; then
    build_simple xorgproto 2024.1 https://www.x.org/pub/individual/proto
    build_simple libXau 1.0.11 https://www.x.org/pub/individual/lib
    build_simple libpthread-stubs 0.5 https://xcb.freedesktop.org/dist
    if [[ "$CIBW_ARCHS" == "arm64" ]]; then
      cp /usr/local/share/pkgconfig/xcb-proto.pc /usr/local/lib/pkgconfig
    fi
  else
    sed s/\${pc_sysrootdir\}// /usr/local/share/pkgconfig/xcb-proto.pc > /usr/local/lib/pkgconfig/xcb-proto.pc
  fi
  build_simple libxcb $LIBXCB_VERSION https://www.x.org/releases/individual/lib

  build_libjpeg_turbo
  build_tiff
  build_libpng
  build_lcms2
  build_openjpeg
  if [ -f /usr/local/lib64/libopenjp2.so ]; then
    cp /usr/local/lib64/libopenjp2.so /usr/local/lib
  fi

  ORIGINAL_CFLAGS=$CFLAGS
  CFLAGS="$CFLAGS -O3 -DNDEBUG"
  if [[ -n "$IS_MACOS" ]]; then
    CFLAGS="$CFLAGS -Wl,-headerpad_max_install_names"
  fi
  build_libwebp
  CFLAGS=$ORIGINAL_CFLAGS

  build_brotli

  if [ -n "$IS_MACOS" ]; then
    # Custom freetype build
    build_simple freetype $FREETYPE_VERSION https://download.savannah.gnu.org/releases/freetype tar.gz --with-harfbuzz=no
  else
    build_freetype
  fi

  if [ -z "$IS_MACOS" ]; then
    export FREETYPE_LIBS=-lfreetype
    export FREETYPE_CFLAGS=-I/usr/local/include/freetype2/
  fi
  build_simple harfbuzz $HARFBUZZ_VERSION https://github.com/harfbuzz/harfbuzz/releases/download/$HARFBUZZ_VERSION tar.xz --with-freetype=yes --with-glib=no
  if [ -z "$IS_MACOS" ]; then
    export FREETYPE_LIBS=""
    export FREETYPE_CFLAGS=""
  fi
}

# Any stuff that you need to do before you start building the wheels
# Runs in the root directory of this repository.
curl -fsSL -o pillow-depends-main.zip https://github.com/python-pillow/pillow-depends/archive/main.zip
untar pillow-depends-main.zip

if [[ -n "$IS_MACOS" ]]; then
  # libtiff and libxcb cause a conflict with building libtiff and libxcb
  # libxau and libxdmcp cause an issue on macOS < 11
  # remove cairo to fix building harfbuzz on arm64
  # remove lcms2 and libpng to fix building openjpeg on arm64
  # remove jpeg-turbo to avoid inclusion on arm64
  # remove webp and zstd to avoid inclusion on x86_64
  # curl from brew requires zstd, use system curl
  brew remove --ignore-dependencies libpng libtiff libxcb libxau libxdmcp curl cairo lcms2 zstd
  if [[ "$CIBW_ARCHS" == "arm64" ]]; then
    brew remove --ignore-dependencies jpeg-turbo
  else
    brew remove --ignore-dependencies webp
  fi

  brew install pkg-config
fi

wrap_wheel_builder build

# Append licenses
for filename in wheels/dependency_licenses/*; do
  echo -e "\n\n----\n\n$(basename $filename | cut -f 1 -d '.')\n" | cat >> LICENSE
  cat $filename >> LICENSE
done
.github/workflows/wheels-test.ps1 (new file, 22 lines)
@@ -0,0 +1,22 @@
param ([string]$venv, [string]$pillow="C:\pillow")
$ErrorActionPreference = 'Stop'
$ProgressPreference = 'SilentlyContinue'
Set-PSDebug -Trace 1
if ("$venv" -like "*\cibw-run-*\pp*-win_amd64\*") {
    # unlike CPython, PyPy requires Visual C++ Redistributable to be installed
    [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
    Invoke-WebRequest -Uri 'https://aka.ms/vs/15/release/vc_redist.x64.exe' -OutFile 'vc_redist.x64.exe'
    C:\vc_redist.x64.exe /install /quiet /norestart | Out-Null
}
$env:path += ";$pillow\winbuild\build\bin\"
& "$venv\Scripts\activate.ps1"
& reg add "HKLM\SOFTWARE\Microsoft\Windows NT\CurrentVersion\Image File Execution Options\python.exe" /v "GlobalFlag" /t REG_SZ /d "0x02000000" /f
cd $pillow
& python -VV
if (!$?) { exit $LASTEXITCODE }
& python selftest.py
if (!$?) { exit $LASTEXITCODE }
& python -m pytest -vx Tests\check_wheel.py
if (!$?) { exit $LASTEXITCODE }
& python -m pytest -vx Tests
if (!$?) { exit $LASTEXITCODE }
.github/workflows/wheels-test.sh (new executable file, 28 lines)
@@ -0,0 +1,28 @@
#!/bin/bash
set -e

if [[ "$OSTYPE" == "darwin"* ]]; then
  brew install fribidi
  export PKG_CONFIG_PATH="/usr/local/opt/openblas/lib/pkgconfig"
  if [ -f /opt/homebrew/lib/libfribidi.dylib ]; then
    sudo cp /opt/homebrew/lib/libfribidi.dylib /usr/local/lib
  fi
elif [ "${AUDITWHEEL_POLICY::9}" == "musllinux" ]; then
  apk add curl fribidi
else
  yum install -y fribidi
fi
if [ "${AUDITWHEEL_POLICY::9}" != "musllinux" ]; then
  python3 -m pip install numpy
fi

if [ ! -d "test-images-main" ]; then
  curl -fsSL -o pillow-test-images.zip https://github.com/python-pillow/test-images/archive/main.zip
  unzip pillow-test-images.zip
  mv test-images-main/* Tests/images
fi

# Runs tests
python3 selftest.py
python3 -m pytest Tests/check_wheel.py
python3 -m pytest
.github/workflows/wheels.yml (new file, 266 lines)
@@ -0,0 +1,266 @@
name: Wheels

on:
  push:
    paths:
      - ".ci/requirements-cibw.txt"
      - ".github/workflows/wheel*"
      - "setup.py"
      - "wheels/*"
      - "winbuild/build_prepare.py"
      - "winbuild/fribidi.cmake"
    tags:
      - "*"
  pull_request:
    paths:
      - ".ci/requirements-cibw.txt"
      - ".github/workflows/wheel*"
      - "setup.py"
      - "wheels/*"
      - "winbuild/build_prepare.py"
      - "winbuild/fribidi.cmake"
  workflow_dispatch:

permissions:
  contents: read

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

env:
  FORCE_COLOR: 1

jobs:
  build-1-QEMU-emulated-wheels:
    name: aarch64 ${{ matrix.python-version }} ${{ matrix.spec }}
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version:
          - pp39
          - pp310
          - cp38
          - cp39
          - cp310
          - cp311
          - cp312
        spec:
          - manylinux2014
          - manylinux_2_28
          - musllinux
        exclude:
          - { python-version: pp39, spec: musllinux }
          - { python-version: pp310, spec: musllinux }

    steps:
      - uses: actions/checkout@v4
        with:
          submodules: true

      - uses: actions/setup-python@v5
        with:
          python-version: "3.x"

      # https://github.com/docker/setup-qemu-action
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Install cibuildwheel
        run: |
          python3 -m pip install -r .ci/requirements-cibw.txt

      - name: Build wheels
        run: |
          python3 -m cibuildwheel --output-dir wheelhouse
        env:
          # Build only the currently selected Linux architecture (so we can
          # parallelise for speed).
          CIBW_ARCHS: "aarch64"
          # Likewise, select only one Python version per job to speed this up.
          CIBW_BUILD: "${{ matrix.python-version }}-${{ matrix.spec == 'musllinux' && 'musllinux' || 'manylinux' }}*"
          # Extra options for manylinux.
          CIBW_MANYLINUX_AARCH64_IMAGE: ${{ matrix.spec }}
          CIBW_MANYLINUX_PYPY_AARCH64_IMAGE: ${{ matrix.spec }}

      - uses: actions/upload-artifact@v4
        with:
          name: dist-qemu-${{ matrix.python-version }}-${{ matrix.spec }}
          path: ./wheelhouse/*.whl

  build-2-native-wheels:
    name: ${{ matrix.name }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - name: "macOS x86_64"
            os: macos-13
            cibw_arch: x86_64
            macosx_deployment_target: "10.10"
          - name: "macOS arm64"
            os: macos-14
            cibw_arch: arm64
            macosx_deployment_target: "11.0"
          - name: "manylinux2014 and musllinux x86_64"
            os: ubuntu-latest
            cibw_arch: x86_64
          - name: "manylinux_2_28 x86_64"
            os: ubuntu-latest
            cibw_arch: x86_64
            build: "*manylinux*"
            manylinux: "manylinux_2_28"
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: true

      - uses: actions/setup-python@v5
        with:
          python-version: "3.x"

      - name: Install cibuildwheel
        run: |
          python3 -m pip install -r .ci/requirements-cibw.txt

      - name: Build wheels
        run: |
          python3 -m cibuildwheel --output-dir wheelhouse
        env:
          CIBW_ARCHS: ${{ matrix.cibw_arch }}
          CIBW_BUILD: ${{ matrix.build }}
          CIBW_MANYLINUX_PYPY_X86_64_IMAGE: ${{ matrix.manylinux }}
          CIBW_MANYLINUX_X86_64_IMAGE: ${{ matrix.manylinux }}
          CIBW_SKIP: pp38-*
          CIBW_TEST_SKIP: cp38-macosx_arm64
          MACOSX_DEPLOYMENT_TARGET: ${{ matrix.macosx_deployment_target }}

      - uses: actions/upload-artifact@v4
        with:
          name: dist-${{ matrix.os }}-${{ matrix.cibw_arch }}${{ matrix.manylinux && format('-{0}', matrix.manylinux) }}
          path: ./wheelhouse/*.whl

  windows:
    name: Windows ${{ matrix.cibw_arch }}
    runs-on: windows-latest
    strategy:
      fail-fast: false
      matrix:
        include:
          - cibw_arch: x86
          - cibw_arch: AMD64
          - cibw_arch: ARM64
    steps:
      - uses: actions/checkout@v4

      - name: Checkout extra test images
        uses: actions/checkout@v4
        with:
          repository: python-pillow/test-images
          path: Tests\test-images

      - uses: actions/setup-python@v5
        with:
          python-version: "3.x"

      - name: Install cibuildwheel
        run: |
          python.exe -m pip install -r .ci/requirements-cibw.txt

      - name: Prepare for build
        run: |
          choco install nasm --no-progress
          echo "C:\Program Files\NASM" >> $env:GITHUB_PATH

          # Install extra test images
          xcopy /S /Y Tests\test-images\* Tests\images

          & python.exe winbuild\build_prepare.py -v --no-imagequant --architecture=${{ matrix.cibw_arch }}
        shell: pwsh

      - name: Build wheels
        run: |
          setlocal EnableDelayedExpansion
          for %%f in (winbuild\build\license\*) do (
            set x=%%~nf
            rem Skip FriBiDi license, it is not included in the wheel.
            set fribidi=!x:~0,7!
            if NOT !fribidi!==fribidi (
              rem Skip imagequant license, it is not included in the wheel.
              set libimagequant=!x:~0,13!
              if NOT !libimagequant!==libimagequant (
                echo. >> LICENSE
                echo ===== %%~nf ===== >> LICENSE
                echo. >> LICENSE
                type %%f >> LICENSE
              )
            )
          )
          call winbuild\\build\\build_env.cmd
          %pythonLocation%\python.exe -m cibuildwheel . --output-dir wheelhouse
        env:
          CIBW_ARCHS: ${{ matrix.cibw_arch }}
          CIBW_BEFORE_ALL: "{package}\\winbuild\\build\\build_dep_all.cmd"
          CIBW_CACHE_PATH: "C:\\cibw"
          CIBW_SKIP: pp38-*
          CIBW_TEST_SKIP: "*-win_arm64"
          CIBW_TEST_COMMAND: 'docker run --rm
            -v {project}:C:\pillow
            -v C:\cibw:C:\cibw
            -v %CD%\..\venv-test:%CD%\..\venv-test
            -e CI -e GITHUB_ACTIONS
            mcr.microsoft.com/windows/servercore:ltsc2022
            powershell C:\pillow\.github\workflows\wheels-test.ps1 %CD%\..\venv-test'
        shell: cmd

      - name: Upload wheels
        uses: actions/upload-artifact@v4
        with:
          name: dist-windows-${{ matrix.cibw_arch }}
          path: ./wheelhouse/*.whl

      - name: Upload fribidi.dll
        uses: actions/upload-artifact@v4
        with:
          name: fribidi-windows-${{ matrix.cibw_arch }}
          path: winbuild\build\bin\fribidi*

  sdist:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.x"
          cache: pip
          cache-dependency-path: "Makefile"

      - run: make sdist

      - uses: actions/upload-artifact@v4
        with:
          name: dist-sdist
          path: dist/*.tar.gz

  pypi-publish:
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
    needs: [build-1-QEMU-emulated-wheels, build-2-native-wheels, windows, sdist]
    runs-on: ubuntu-latest
    name: Upload release to PyPI
    environment:
      name: release-pypi
      url: https://pypi.org/p/Pillow
    permissions:
      id-token: write
    steps:
      - uses: actions/download-artifact@v4
        with:
          pattern: dist-*
          path: dist
          merge-multiple: true
      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
.gitignore (2 lines changed)
@@ -79,7 +79,7 @@ docs/_build/
 # JetBrains
 .idea

-# Extra test images installed from pillow-depends/test_images
+# Extra test images installed from python-pillow/test-images
 Tests/images/README.md
 Tests/images/crash_1.tif
 Tests/images/crash_2.tif
.gitmodules (new file, 3 lines)
@@ -0,0 +1,3 @@
[submodule "multibuild"]
	path = wheels/multibuild
	url = https://github.com/multi-build/multibuild.git
.pre-commit-config.yaml (85 lines changed)
@@ -1,51 +1,85 @@
 repos:
-  - repo: https://github.com/psf/black
-    rev: 22.6.0
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.4.3
+    hooks:
+      - id: ruff
+        args: [--exit-non-zero-on-fix]
+
+  - repo: https://github.com/psf/black-pre-commit-mirror
+    rev: 24.4.2
     hooks:
       - id: black
-        args: ["--target-version", "py37"]
-        # Only .py files, until https://github.com/psf/black/issues/402 resolved
-        files: \.py$
-        types: []

-  - repo: https://github.com/PyCQA/isort
-    rev: 5.10.1
+  - repo: https://github.com/PyCQA/bandit
+    rev: 1.7.8
     hooks:
-      - id: isort
-
-  - repo: https://github.com/asottile/yesqa
-    rev: v1.3.0
-    hooks:
-      - id: yesqa
+      - id: bandit
+        args: [--severity-level=high]
+        files: ^src/

   - repo: https://github.com/Lucas-C/pre-commit-hooks
-    rev: v1.2.0
+    rev: v1.5.5
     hooks:
       - id: remove-tabs
-        exclude: (Makefile$|\.bat$|\.cmake$|\.eps$|\.fits$|\.opt$)
+        exclude: (Makefile$|\.bat$|\.cmake$|\.eps$|\.fits$|\.gd$|\.opt$)

-  - repo: https://github.com/PyCQA/flake8
-    rev: 4.0.1
+  - repo: https://github.com/pre-commit/mirrors-clang-format
+    rev: v18.1.4
     hooks:
-      - id: flake8
-        additional_dependencies: [flake8-2020, flake8-implicit-str-concat]
+      - id: clang-format
+        types: [c]
+        exclude: ^src/thirdparty/

   - repo: https://github.com/pre-commit/pygrep-hooks
-    rev: v1.9.0
+    rev: v1.10.0
     hooks:
-      - id: python-check-blanket-noqa
       - id: rst-backticks

   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.3.0
+    rev: v4.6.0
     hooks:
+      - id: check-executables-have-shebangs
+      - id: check-shebang-scripts-are-executable
       - id: check-merge-conflict
+      - id: check-json
+      - id: check-toml
       - id: check-yaml
+      - id: end-of-file-fixer
+        exclude: ^Tests/images/
+      - id: trailing-whitespace
+        exclude: ^.github/.*TEMPLATE|^Tests/(fonts|images)/
+
+  - repo: https://github.com/python-jsonschema/check-jsonschema
+    rev: 0.28.2
+    hooks:
+      - id: check-github-workflows
+      - id: check-readthedocs
+      - id: check-renovate

   - repo: https://github.com/sphinx-contrib/sphinx-lint
-    rev: v0.6.1
+    rev: v0.9.1
     hooks:
       - id: sphinx-lint

+  - repo: https://github.com/tox-dev/pyproject-fmt
+    rev: 1.8.0
+    hooks:
+      - id: pyproject-fmt
+
+  - repo: https://github.com/abravalheri/validate-pyproject
+    rev: v0.16
+    hooks:
+      - id: validate-pyproject
+
+  - repo: https://github.com/tox-dev/tox-ini-fmt
+    rev: 1.3.1
+    hooks:
+      - id: tox-ini-fmt
+
+  - repo: meta
+    hooks:
+      - id: check-hooks-apply
+      - id: check-useless-excludes
+
 ci:
   autoupdate_schedule: monthly
.readthedocs.yml
@@ -1,5 +1,16 @@
 version: 2
+
+formats: [pdf]
+
+build:
+  os: ubuntu-22.04
+  tools:
+    python: "3"
+  jobs:
+    post_checkout:
+      - git remote add upstream https://github.com/python-pillow/Pillow.git # For forks
+      - git fetch upstream --tags

 python:
   install:
     - method: pip
CHANGES.rst (931 lines changed; diff not expanded)

LICENSE (8 lines changed)
@@ -1,11 +1,11 @@
 The Python Imaging Library (PIL) is

     Copyright © 1997-2011 by Secret Labs AB
-    Copyright © 1995-2011 by Fredrik Lundh
+    Copyright © 1995-2011 by Fredrik Lundh and contributors

 Pillow is the friendly PIL fork. It is

-    Copyright © 2010-2022 by Alex Clark and contributors
+    Copyright © 2010-2024 by Jeffrey A. Clark and contributors

 Like PIL, Pillow is licensed under the open source HPND License:

@@ -13,8 +13,8 @@ By obtaining, using, and/or copying this software and/or its associated
 documentation, you agree that you have read, understood, and will comply
 with the following terms and conditions:

-Permission to use, copy, modify, and distribute this software and its
-associated documentation for any purpose and without fee is hereby granted,
+Permission to use, copy, modify and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
 provided that the above copyright notice appears in all copies, and that
 both that copyright notice and this permission notice appear in supporting
 documentation, and that the name of Secret Labs AB or the author not be
MANIFEST.in
@@ -1,22 +1,23 @@
 include *.c
 include *.h
 include *.in
-include *.lock
 include *.md
 include *.py
 include *.rst
 include *.sh
+include *.toml
 include *.txt
 include *.yaml
+include .flake8
 include LICENSE
 include Makefile
-include Pipfile
 include tox.ini
 graft Tests
 graft src
 graft depends
 graft winbuild
 graft docs
+graft _custom_build

 # build/src control detritus
 exclude .appveyor.yml
@@ -25,7 +26,9 @@ exclude .coveragerc
 exclude .editorconfig
 exclude .readthedocs.yml
 exclude codecov.yml
+exclude renovate.json
 global-exclude .git*
 global-exclude *.pyc
 global-exclude *.so
 prune .ci
+prune wheels
Makefile (37 lines changed)
@@ -2,7 +2,6 @@

 .PHONY: clean
 clean:
-	python3 setup.py clean
 	rm src/PIL/*.so || true
 	rm -r build || true
 	find . -name __pycache__ | xargs rm -r || true
@@ -16,12 +15,19 @@ coverage:
 	python3 -m coverage report

 .PHONY: doc
-doc:
+.PHONY: html
+doc html:
+	python3 -c "import PIL" > /dev/null 2>&1 || python3 -m pip install .
 	$(MAKE) -C docs html

+.PHONY: htmlview
+htmlview:
+	python3 -c "import PIL" > /dev/null 2>&1 || python3 -m pip install .
+	$(MAKE) -C docs htmlview
+
 .PHONY: doccheck
 doccheck:
-	$(MAKE) -C docs html
+	$(MAKE) doc
 # Don't make our tests rely on the links in the docs being up every single build.
 # We don't control them. But do check, and update them to the target of their redirects.
 	$(MAKE) -C docs linkcheck || true
@@ -37,27 +43,23 @@ help:
 	@echo "  coverage          run coverage test (in progress)"
 	@echo "  doc               make HTML docs"
 	@echo "  docserve          run an HTTP server on the docs directory"
-	@echo "  html              to make standalone HTML files"
-	@echo "  inplace           make inplace extension"
+	@echo "  html              make HTML docs"
+	@echo "  htmlview          open the index page built by the html target in your browser"
 	@echo "  install           make and install"
 	@echo "  install-coverage  make and install with C coverage"
 	@echo "  lint              run the lint checks"
-	@echo "  lint-fix          run Black and isort to (mostly) fix lint issues"
+	@echo "  lint-fix          run Ruff to (mostly) fix lint issues"
 	@echo "  release-test      run code and package tests before release"
 	@echo "  test              run tests on installed Pillow"

-.PHONY: inplace
-inplace: clean
-	python3 -m pip install -e --global-option="build_ext" --global-option="--inplace" .
-
 .PHONY: install
 install:
-	python3 -m pip install .
+	python3 -m pip -v install .
 	python3 selftest.py

 .PHONY: install-coverage
 install-coverage:
-	CFLAGS="-coverage -Werror=implicit-function-declaration" python3 -m pip install --global-option="build_ext" .
+	CFLAGS="-coverage -Werror=implicit-function-declaration" python3 -m pip -v install .
 	python3 selftest.py

 .PHONY: debug
@@ -66,16 +68,15 @@ debug:
 # for our stuff, kills optimization, and redirects to dev null so we
 # see any build failures.
 	make clean > /dev/null
-	CFLAGS='-g -O0' python3 -m pip install --global-option="build_ext" . > /dev/null
+	CFLAGS='-g -O0' python3 -m pip -v install . > /dev/null

 .PHONY: release-test
 release-test:
+	python3 Tests/check_release_notes.py
 	python3 -m pip install -e .[tests]
 	python3 selftest.py
 	python3 -m pytest Tests
 	python3 -m pip install .
-	-rm dist/*.egg
-	-rmdir dist
 	python3 -m pytest -qq
 	python3 -m check_manifest
 	python3 -m pyroma .
@@ -114,6 +115,6 @@ lint:
 .PHONY: lint-fix
 lint-fix:
 	python3 -c "import black" > /dev/null 2>&1 || python3 -m pip install black
-	python3 -c "import isort" > /dev/null 2>&1 || python3 -m pip install isort
-	python3 -m black --target-version py37 .
-	python3 -m isort .
+	python3 -m black .
+	python3 -c "import ruff" > /dev/null 2>&1 || python3 -m pip install ruff
+	python3 -m ruff --fix .
Pipfile (deleted, 22 lines)
@@ -1,22 +0,0 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"

[packages]
black = "*"
check-manifest = "*"
coverage = "*"
defusedxml = "*"
packaging = "*"
markdown2 = "*"
olefile = "*"
pyroma = "*"
pytest = "*"
pytest-cov = "*"
pytest-timeout = "*"

[dev-packages]

[requires]
python_version = "3.9"
Pipfile.lock (generated file, deleted, 324 lines): the auto-generated lock file that accompanied the Pipfile above; it pinned the packages listed there plus their transitive dependencies (attrs, build, certifi, charset-normalizer, click, docutils, idna, iniconfig, mypy-extensions, pathspec, pep517, platformdirs, pluggy, py, pygments, pyparsing, requests, setuptools, toml, tomli, typing-extensions, urllib3) with sha256 hashes.
README.md (39 lines changed)
@@ -6,9 +6,9 @@

 ## Python Imaging Library (Fork)

-Pillow is the friendly PIL fork by [Alex Clark and
-Contributors](https://github.com/python-pillow/Pillow/graphs/contributors).
-PIL is the Python Imaging Library by Fredrik Lundh and Contributors.
+Pillow is the friendly PIL fork by [Jeffrey A. Clark and
+contributors](https://github.com/python-pillow/Pillow/graphs/contributors).
+PIL is the Python Imaging Library by Fredrik Lundh and contributors.
 As of 2019, Pillow development is
 [supported by Tidelift](https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=readme&utm_campaign=enterprise).

@@ -45,18 +45,15 @@ As of 2019, Pillow development is
         <a href="https://ci.appveyor.com/project/python-pillow/Pillow"><img
             alt="AppVeyor CI build status (Windows)"
             src="https://img.shields.io/appveyor/build/python-pillow/Pillow/main.svg?label=Windows%20build"></a>
-        <a href="https://github.com/python-pillow/pillow-wheels/actions"><img
-            alt="GitHub Actions wheels build status (Wheels)"
-            src="https://github.com/python-pillow/pillow-wheels/workflows/Wheels/badge.svg"></a>
-        <a href="https://app.travis-ci.com/github/python-pillow/pillow-wheels"><img
-            alt="Travis CI wheels build status (aarch64)"
-            src="https://img.shields.io/travis/com/python-pillow/pillow-wheels/main.svg?label=aarch64%20wheels"></a>
+        <a href="https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml"><img
+            alt="GitHub Actions build status (Wheels)"
+            src="https://github.com/python-pillow/Pillow/workflows/Wheels/badge.svg"></a>
         <a href="https://app.codecov.io/gh/python-pillow/Pillow"><img
             alt="Code coverage"
             src="https://codecov.io/gh/python-pillow/Pillow/branch/main/graph/badge.svg"></a>
-        <a href="https://github.com/python-pillow/Pillow/actions/workflows/tidelift.yml"><img
-            alt="Tidelift Align"
-            src="https://github.com/python-pillow/Pillow/actions/workflows/tidelift.yml/badge.svg"></a>
+        <a href="https://bugs.chromium.org/p/oss-fuzz/issues/list?sort=-opened&can=1&q=proj:pillow"><img
+            alt="Fuzzing Status"
+            src="https://oss-fuzz-build-logs.storage.googleapis.com/badges/pillow.svg"></a>
     </td>
 </tr>
 <tr>
@@ -67,13 +64,16 @@ As of 2019, Pillow development is
             src="https://zenodo.org/badge/17549/python-pillow/Pillow.svg"></a>
         <a href="https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=badge"><img
             alt="Tidelift"
-            src="https://tidelift.com/badges/package/pypi/Pillow?style=flat"></a>
-        <a href="https://pypi.org/project/Pillow/"><img
+            src="https://tidelift.com/badges/package/pypi/pillow?style=flat"></a>
+        <a href="https://pypi.org/project/pillow/"><img
             alt="Newest PyPI version"
             src="https://img.shields.io/pypi/v/pillow.svg"></a>
-        <a href="https://pypi.org/project/Pillow/"><img
+        <a href="https://pypi.org/project/pillow/"><img
             alt="Number of PyPI downloads"
             src="https://img.shields.io/pypi/dm/pillow.svg"></a>
+        <a href="https://www.bestpractices.dev/projects/6331"><img
+            alt="OpenSSF Best Practices"
+            src="https://www.bestpractices.dev/projects/6331/badge"></a>
     </td>
 </tr>
 <tr>
@@ -82,9 +82,10 @@ As of 2019, Pillow development is
         <a href="https://gitter.im/python-pillow/Pillow?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge"><img
             alt="Join the chat at https://gitter.im/python-pillow/Pillow"
             src="https://badges.gitter.im/python-pillow/Pillow.svg"></a>
-        <a href="https://twitter.com/PythonPillow"><img
-            alt="Follow on https://twitter.com/PythonPillow"
-            src="https://img.shields.io/badge/tweet-on%20Twitter-00aced.svg"></a>
+        <a href="https://fosstodon.org/@pillow"><img
+            alt="Follow on https://fosstodon.org/@pillow"
+            src="https://img.shields.io/badge/publish-on%20Mastodon-595aff.svg"
+            rel="me"></a>
     </td>
 </tr>
 </table>
@@ -100,7 +101,7 @@ The core image library is designed for fast access to data stored in a few basic
 ## More Information

 - [Documentation](https://pillow.readthedocs.io/)
-- [Installation](https://pillow.readthedocs.io/en/latest/installation.html)
+- [Installation](https://pillow.readthedocs.io/en/latest/installation/basic-installation.html)
 - [Handbook](https://pillow.readthedocs.io/en/latest/handbook/index.html)
 - [Contribute](https://github.com/python-pillow/Pillow/blob/main/.github/CONTRIBUTING.md)
 - [Issues](https://github.com/python-pillow/Pillow/issues)
RELEASING.md (67 lines changed)
@@ -10,30 +10,25 @@ Released quarterly on January 2nd, April 1st, July 1st and October 15th.
 * [ ] Open a release ticket e.g. https://github.com/python-pillow/Pillow/issues/3154
 * [ ] Develop and prepare release in `main` branch.
 * [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) and [AppVeyor](https://ci.appveyor.com/project/python-pillow/Pillow) to confirm passing tests in `main` branch.
-* [ ] Check that all of the wheel builds [Pillow Wheel Builder](https://github.com/python-pillow/pillow-wheels) pass the tests in Travis CI and GitHub Actions.
-* [ ] In compliance with [PEP 440](https://www.python.org/dev/peps/pep-0440/), update version identifier in `src/PIL/_version.py`
+* [ ] Check that all the wheel builds pass the tests in the [GitHub Actions "Wheels" workflow](https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml) jobs by manually triggering them.
+* [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/), update version identifier in `src/PIL/_version.py`
 * [ ] Update `CHANGES.rst`.
 * [ ] Run pre-release check via `make release-test` in a freshly cloned repo.
 * [ ] Create branch and tag for release e.g.:
   ```bash
   git branch 5.2.x
   git tag 5.2.0
-  git push --all
   git push --tags
   ```
-* [ ] Create and check source distribution:
+* [ ] Check the [GitHub Actions "Wheels" workflow](https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml)
+  has passed, including the "Upload release to PyPI" job. This will have been triggered
+  by the new tag.
+* [ ] Publish the [release on GitHub](https://github.com/python-pillow/Pillow/releases).
+* [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/),
+  increment and append `.dev0` to version identifier in `src/PIL/_version.py` and then:
   ```bash
-  make sdist
+  git push --all
   ```
-* [ ] Create [binary distributions](https://github.com/python-pillow/Pillow/blob/main/RELEASING.md#binary-distributions)
-* [ ] Check and upload all binaries and source distributions e.g.:
-  ```bash
-  python3 -m twine check --strict dist/*
-  python3 -m twine upload dist/Pillow-5.2.0*
-  ```
-* [ ] Publish the [release on GitHub](https://github.com/python-pillow/Pillow/releases)
-* [ ] In compliance with [PEP 440](https://www.python.org/dev/peps/pep-0440/), increment and append `.dev0` to version identifier in `src/PIL/_version.py`

 ## Point Release

 Released as needed for security, installation or critical bug fixes.
@@ -45,29 +40,25 @@ Released as needed for security, installation or critical bug fixes.
   git checkout -t remotes/origin/5.2.x
   ```
 * [ ] Cherry pick individual commits from `main` branch to release branch e.g. `5.2.x`, then `git push`.
* [ ] Cherry pick individual commits from `main` branch to release branch e.g. `5.2.x`, then `git push`.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) and [AppVeyor](https://ci.appveyor.com/project/python-pillow/Pillow) to confirm passing tests in release branch e.g. `5.2.x`.
|
* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) and [AppVeyor](https://ci.appveyor.com/project/python-pillow/Pillow) to confirm passing tests in release branch e.g. `5.2.x`.
|
||||||
* [ ] In compliance with [PEP 440](https://www.python.org/dev/peps/pep-0440/), update version identifier in `src/PIL/_version.py`
|
* [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/), update version identifier in `src/PIL/_version.py`
|
||||||
* [ ] Run pre-release check via `make release-test`.
|
* [ ] Run pre-release check via `make release-test`.
|
||||||
* [ ] Create tag for release e.g.:
|
* [ ] Create tag for release e.g.:
|
||||||
```bash
|
```bash
|
||||||
git tag 5.2.1
|
git tag 5.2.1
|
||||||
git push
|
|
||||||
git push --tags
|
git push --tags
|
||||||
```
|
```
|
||||||
* [ ] Create and check source distribution:
|
* [ ] Create and check source distribution:
|
||||||
```bash
|
```bash
|
||||||
make sdist
|
make sdist
|
||||||
```
|
```
|
||||||
* [ ] Create [binary distributions](https://github.com/python-pillow/Pillow/blob/main/RELEASING.md#binary-distributions)
|
* [ ] Check the [GitHub Actions "Wheels" workflow](https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml)
|
||||||
* [ ] Check and upload all binaries and source distributions e.g.:
|
has passed, including the "Upload release to PyPI" job. This will have been triggered
|
||||||
|
by the new tag.
|
||||||
|
* [ ] Publish the [release on GitHub](https://github.com/python-pillow/Pillow/releases) and then:
|
||||||
```bash
|
```bash
|
||||||
python3 -m twine check --strict dist/*
|
git push
|
||||||
python3 -m twine upload dist/Pillow-5.2.1*
|
|
||||||
```
|
```
|
||||||
* [ ] Publish the [release on GitHub](https://github.com/python-pillow/Pillow/releases)
|
|
||||||
|
|
||||||
## Embargoed Release
|
## Embargoed Release
|
||||||
|
|
||||||
|
@ -83,35 +74,19 @@ Released as needed privately to individual vendors for critical security-related
|
||||||
```bash
|
```bash
|
||||||
git checkout 2.5.x
|
git checkout 2.5.x
|
||||||
git tag 2.5.3
|
git tag 2.5.3
|
||||||
git push origin 2.5.x
|
|
||||||
git push origin --tags
|
git push origin --tags
|
||||||
```
|
```
|
||||||
* [ ] Create and check source distribution:
|
* [ ] Check the [GitHub Actions "Wheels" workflow](https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml)
|
||||||
|
has passed, including the "Upload release to PyPI" job. This will have been triggered
|
||||||
|
by the new tag.
|
||||||
|
* [ ] Publish the [release on GitHub](https://github.com/python-pillow/Pillow/releases) and then:
|
||||||
```bash
|
```bash
|
||||||
make sdist
|
git push origin 2.5.x
|
||||||
```
|
```
|
||||||
* [ ] Create [binary distributions](https://github.com/python-pillow/Pillow/blob/main/RELEASING.md#binary-distributions)
|
|
||||||
* [ ] Publish the [release on GitHub](https://github.com/python-pillow/Pillow/releases)
|
|
||||||
|
|
||||||
## Binary Distributions
|
|
||||||
|
|
||||||
### Windows
|
|
||||||
* [ ] Contact `@cgohlke` for Windows binaries via release ticket e.g. https://github.com/python-pillow/Pillow/issues/1174.
|
|
||||||
* [ ] Download and extract tarball from `@cgohlke` and copy into `dist/`
|
|
||||||
|
|
||||||
### Mac and Linux
|
|
||||||
* [ ] Use the [Pillow Wheel Builder](https://github.com/python-pillow/pillow-wheels):
|
|
||||||
```bash
|
|
||||||
git clone https://github.com/python-pillow/pillow-wheels
|
|
||||||
cd pillow-wheels
|
|
||||||
./update-pillow-tag.sh [[release tag]]
|
|
||||||
```
|
|
||||||
* [ ] Download wheels from the [Pillow Wheel Builder release](https://github.com/python-pillow/pillow-wheels/releases)
|
|
||||||
and copy into `dist/`
|
|
||||||
|
|
||||||
## Publicize Release
|
## Publicize Release
|
||||||
|
|
||||||
* [ ] Announce release availability via [Twitter](https://twitter.com/pythonpillow) e.g. https://twitter.com/PythonPillow/status/1013789184354603010
|
* [ ] Announce release availability via [Mastodon](https://fosstodon.org/@pillow) e.g. https://fosstodon.org/@pillow/110639450470725321
|
||||||
|
|
||||||
## Documentation
|
## Documentation
|
||||||
|
|
||||||
|
|
|
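The version bump steps in the checklist above come down to editing one string in `src/PIL/_version.py`. A minimal sketch of what that edit typically looks like (the version numbers are illustrative, not tied to any actual release):

```python
# src/PIL/_version.py -- illustrative contents only (numbers are made up).
# At tag time the identifier is a plain PEP 440 release version:
#     __version__ = "5.2.0"
# Right after the release it is incremented and ".dev0" is appended, so
# builds from main are clearly marked as development versions:
__version__ = "5.3.0.dev0"
```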
@@ -1,4 +1,5 @@
 #!/usr/bin/env python3
+from __future__ import annotations

 import sys
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import time

 from PIL import PyAccess

@@ -7,51 +9,44 @@ from .helper import hopper
 # Not running this test by default. No DOS against CI.


-def iterate_get(size, access):
+def iterate_get(size, access) -> None:
     (w, h) = size
     for x in range(w):
         for y in range(h):
             access[(x, y)]


-def iterate_set(size, access):
+def iterate_set(size, access) -> None:
     (w, h) = size
     for x in range(w):
         for y in range(h):
             access[(x, y)] = (x % 256, y % 256, 0)


-def timer(func, label, *args):
+def timer(func, label, *args) -> None:
     iterations = 5000
     starttime = time.time()
     for x in range(iterations):
         func(*args)
         if time.time() - starttime > 10:
-            print(
-                "{}: breaking at {} iterations, {:.6f} per iteration".format(
-                    label, x + 1, (time.time() - starttime) / (x + 1.0)
-                )
-            )
             break
-        if x == iterations - 1:
-            endtime = time.time()
-            print(
-                "{}: {:.4f} s {:.6f} per iteration".format(
-                    label, endtime - starttime, (endtime - starttime) / (x + 1.0)
-                )
-            )
+    endtime = time.time()
+    print(
+        f"{label}: completed {x + 1} iterations in {endtime - starttime:.4f}s, "
+        f"{(endtime - starttime) / (x + 1.0):.6f}s per iteration"
+    )


-def test_direct():
+def test_direct() -> None:
     im = hopper()
     im.load()
-    # im = Image.new( "RGB", (2000, 2000), (1, 3, 2))
+    # im = Image.new("RGB", (2000, 2000), (1, 3, 2))
     caccess = im.im.pixel_access(False)
     access = PyAccess.new(im, False)

     assert caccess[(0, 0)] == access[(0, 0)]

-    print("Size: %sx%s" % im.size)
+    print(f"Size: {im.width}x{im.height}")
     timer(iterate_get, "PyAccess - get", im.size, access)
     timer(iterate_set, "PyAccess - set", im.size, access)
     timer(iterate_get, "C-api - get", im.size, caccess)
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+from __future__ import annotations

 from PIL import Image
@@ -1,10 +1,11 @@
+from __future__ import annotations
+
 from PIL import Image

 TEST_FILE = "Tests/images/fli_overflow.fli"


-def test_fli_overflow():
+def test_fli_overflow() -> None:

     # this should not crash with a malloc error or access violation
     with Image.open(TEST_FILE) as im:
         im.load()
@@ -1,5 +1,6 @@
 # Tests potential DOS of IcnsImagePlugin with 0 length block.
 # Run from anywhere that PIL is importable.
+from __future__ import annotations

 from io import BytesIO
@@ -1,4 +1,8 @@
 #!/usr/bin/env python3
+from __future__ import annotations
+
+from typing import Any, Callable
+
 import pytest

 from PIL import Image

@@ -11,31 +15,37 @@ max_iterations = 10000
 pytestmark = pytest.mark.skipif(is_win32(), reason="requires Unix or macOS")


-def _get_mem_usage():
+def _get_mem_usage() -> float:
     from resource import RUSAGE_SELF, getpagesize, getrusage

     mem = getrusage(RUSAGE_SELF).ru_maxrss
     return mem * getpagesize() / 1024 / 1024


-def _test_leak(min_iterations, max_iterations, fn, *args, **kwargs):
+def _test_leak(
+    min_iterations: int,
+    max_iterations: int,
+    fn: Callable[..., Image.Image | None],
+    *args: Any,
+) -> None:
     mem_limit = None
     for i in range(max_iterations):
-        fn(*args, **kwargs)
+        fn(*args)
         mem = _get_mem_usage()
         if i < min_iterations:
             mem_limit = mem + 1
             continue
         msg = f"memory usage limit exceeded after {i + 1} iterations"
+        assert mem_limit is not None
         assert mem <= mem_limit, msg


-def test_leak_putdata():
+def test_leak_putdata() -> None:
     im = Image.new("RGB", (25, 25))
     _test_leak(min_iterations, max_iterations, im.putdata, im.getdata())


-def test_leak_getlist():
+def test_leak_getlist() -> None:
     im = Image.new("P", (25, 25))
     _test_leak(
         min_iterations,
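The leak check above watches the process's peak resident set size via `resource.getrusage`. A standalone sketch of the same measurement, mirroring the page-size arithmetic used in the test (Unix-only, as the skip marker says):

```python
from resource import RUSAGE_SELF, getpagesize, getrusage


def mem_usage_mb() -> float:
    # ru_maxrss is the peak resident set size; multiplying by the page size
    # follows the test above, and the divisions convert the result to MB.
    return getrusage(RUSAGE_SELF).ru_maxrss * getpagesize() / 1024 / 1024


if __name__ == "__main__":
    before = mem_usage_mb()
    data = [bytes(1024) for _ in range(10_000)]  # allocate roughly 10 MB
    print(f"peak RSS grew by about {mem_usage_mb() - before:.1f} MB")
```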
@@ -1,5 +1,6 @@
 # Tests potential DOS of Jpeg2kImagePlugin with 0 length block.
 # Run from anywhere that PIL is importable.
+from __future__ import annotations

 from io import BytesIO
Tests/check_j2k_leaks.py (6 lines changed, executable file → normal file)
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from io import BytesIO

 import pytest

@@ -18,7 +20,7 @@ pytestmark = [
 ]


-def test_leak_load():
+def test_leak_load() -> None:
     from resource import RLIMIT_AS, RLIMIT_STACK, setrlimit

     setrlimit(RLIMIT_STACK, (stack_size, stack_size))

@@ -28,7 +30,7 @@ def test_leak_load():
         im.load()


-def test_leak_save():
+def test_leak_save() -> None:
     from resource import RLIMIT_AS, RLIMIT_STACK, setrlimit

     setrlimit(RLIMIT_STACK, (stack_size, stack_size))
@@ -1,9 +1,13 @@
+from __future__ import annotations
+
+from pathlib import Path
+
 import pytest

 from PIL import Image


-def test_j2k_overflow(tmp_path):
+def test_j2k_overflow(tmp_path: Path) -> None:
     im = Image.new("RGBA", (1024, 131584))
     target = str(tmp_path / "temp.jpc")
     with pytest.raises(OSError):
Tests/check_jp2_overflow.py (4 lines changed, executable file → normal file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python3
-
 # Reproductions/tests for OOB read errors in FliDecode.c

 # When run in python, all of these images should fail for

@@ -12,7 +10,7 @@
 # the output should be empty. There may be python issues
 # in the valgrind especially if run in a debug python
 # version.
+from __future__ import annotations

 from PIL import Image
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from io import BytesIO

 import pytest

@@ -75,49 +77,48 @@ post-patch:
 """


+standard_l_qtable = (
+    # fmt: off
+    16, 11, 10, 16, 24, 40, 51, 61,
+    12, 12, 14, 19, 26, 58, 60, 55,
+    14, 13, 16, 24, 40, 57, 69, 56,
+    14, 17, 22, 29, 51, 87, 80, 62,
+    18, 22, 37, 56, 68, 109, 103, 77,
+    24, 35, 55, 64, 81, 104, 113, 92,
+    49, 64, 78, 87, 103, 121, 120, 101,
+    72, 92, 95, 98, 112, 100, 103, 99,
+    # fmt: on
+)
+
+standard_chrominance_qtable = (
+    # fmt: off
+    17, 18, 24, 47, 99, 99, 99, 99,
+    18, 21, 26, 66, 99, 99, 99, 99,
+    24, 26, 56, 99, 99, 99, 99, 99,
+    47, 66, 99, 99, 99, 99, 99, 99,
+    99, 99, 99, 99, 99, 99, 99, 99,
+    99, 99, 99, 99, 99, 99, 99, 99,
+    99, 99, 99, 99, 99, 99, 99, 99,
+    99, 99, 99, 99, 99, 99, 99, 99,
+    # fmt: on
+)
+
+
+@pytest.mark.parametrize(
+    "qtables",
+    (
+        (standard_l_qtable, standard_chrominance_qtable),
+        [standard_l_qtable, standard_chrominance_qtable],
+    ),
+)
-def test_qtables_leak():
+def test_qtables_leak(qtables: tuple[tuple[int, ...]] | list[tuple[int, ...]]) -> None:
     im = hopper("RGB")
-
-    standard_l_qtable = [
-        int(s)
-        for s in """
-        16 11 10 16 24 40 51 61
-        12 12 14 19 26 58 60 55
-        14 13 16 24 40 57 69 56
-        14 17 22 29 51 87 80 62
-        18 22 37 56 68 109 103 77
-        24 35 55 64 81 104 113 92
-        49 64 78 87 103 121 120 101
-        72 92 95 98 112 100 103 99
-        """.split(
-            None
-        )
-    ]
-
-    standard_chrominance_qtable = [
-        int(s)
-        for s in """
-        17 18 24 47 99 99 99 99
-        18 21 26 66 99 99 99 99
-        24 26 56 99 99 99 99 99
-        47 66 99 99 99 99 99 99
-        99 99 99 99 99 99 99 99
-        99 99 99 99 99 99 99 99
-        99 99 99 99 99 99 99 99
-        99 99 99 99 99 99 99 99
-        """.split(
-            None
-        )
-    ]
-
-    qtables = [standard_l_qtable, standard_chrominance_qtable]
-
     for _ in range(iterations):
         test_output = BytesIO()
         im.save(test_output, "JPEG", qtables=qtables)


-def test_exif_leak():
+def test_exif_leak() -> None:
     """
     pre patch:

@@ -180,7 +181,7 @@ def test_exif_leak():
     im.save(test_output, "JPEG", exif=exif)


-def test_base_save():
+def test_base_save() -> None:
     """
     base case:
     MB
@@ -1,4 +1,8 @@
+from __future__ import annotations
+
 import sys
+from pathlib import Path
+from types import ModuleType

 import pytest

@@ -14,6 +18,7 @@ from PIL import Image
 # 2.7 and 3.2.


+numpy: ModuleType | None
 try:
     import numpy
 except ImportError:

@@ -26,23 +31,24 @@ XDIM = 48000
 pytestmark = pytest.mark.skipif(sys.maxsize <= 2**32, reason="requires 64-bit system")


-def _write_png(tmp_path, xdim, ydim):
+def _write_png(tmp_path: Path, xdim: int, ydim: int) -> None:
     f = str(tmp_path / "temp.png")
     im = Image.new("L", (xdim, ydim), 0)
     im.save(f)


-def test_large(tmp_path):
+def test_large(tmp_path: Path) -> None:
     """succeeded prepatch"""
     _write_png(tmp_path, XDIM, YDIM)


-def test_2gpx(tmp_path):
+def test_2gpx(tmp_path: Path) -> None:
     """failed prepatch"""
     _write_png(tmp_path, XDIM, XDIM)


 @pytest.mark.skipif(numpy is None, reason="Numpy is not installed")
-def test_size_greater_than_int():
+def test_size_greater_than_int() -> None:
+    assert numpy is not None
     arr = numpy.ndarray(shape=(16394, 16394))
     Image.fromarray(arr)
@@ -1,4 +1,7 @@
+from __future__ import annotations
+
 import sys
+from pathlib import Path

 import pytest

@@ -22,7 +25,7 @@ XDIM = 48000
 pytestmark = pytest.mark.skipif(sys.maxsize <= 2**32, reason="requires 64-bit system")


-def _write_png(tmp_path, xdim, ydim):
+def _write_png(tmp_path: Path, xdim: int, ydim: int) -> None:
     dtype = np.uint8
     a = np.zeros((xdim, ydim), dtype=dtype)
     f = str(tmp_path / "temp.png")

@@ -30,11 +33,11 @@ def _write_png(tmp_path, xdim, ydim):
     im.save(f)


-def test_large(tmp_path):
+def test_large(tmp_path: Path) -> None:
     """succeeded prepatch"""
     _write_png(tmp_path, XDIM, YDIM)


-def test_2gpx(tmp_path):
+def test_2gpx(tmp_path: Path) -> None:
     """failed prepatch"""
     _write_png(tmp_path, XDIM, XDIM)
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest

 from PIL import Image

@@ -5,7 +7,7 @@ from PIL import Image
 TEST_FILE = "Tests/images/libtiff_segfault.tif"


-def test_libtiff_segfault():
+def test_libtiff_segfault() -> None:
     """This test should not segfault. It will on Pillow <= 3.1.0 and
     libtiff >= 4.0.0
     """
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import zlib
 from io import BytesIO

@@ -6,7 +8,7 @@ from PIL import Image, ImageFile, PngImagePlugin
 TEST_FILE = "Tests/images/png_decompression_dos.png"


-def test_ignore_dos_text():
+def test_ignore_dos_text() -> None:
     ImageFile.LOAD_TRUNCATED_IMAGES = True

     try:

@@ -15,6 +17,7 @@ def test_ignore_dos_text():
     finally:
         ImageFile.LOAD_TRUNCATED_IMAGES = False

+    assert isinstance(im, PngImagePlugin.PngImageFile)
     for s in im.text.values():
         assert len(s) < 1024 * 1024, "Text chunk larger than 1M"

@@ -22,8 +25,7 @@ def test_ignore_dos_text():
         assert len(s) < 1024 * 1024, "Text chunk larger than 1M"


-def test_dos_text():
-
+def test_dos_text() -> None:
     try:
         im = Image.open(TEST_FILE)
         im.load()

@@ -31,11 +33,12 @@ def test_dos_text():
         assert msg, "Decompressed Data Too Large"
         return

+    assert isinstance(im, PngImagePlugin.PngImageFile)
     for s in im.text.values():
         assert len(s) < 1024 * 1024, "Text chunk larger than 1M"


-def test_dos_total_memory():
+def test_dos_total_memory() -> None:
     im = Image.new("L", (1, 1))
     compressed_data = zlib.compress(b"a" * 1024 * 1023)

@@ -52,10 +55,11 @@ def test_dos_total_memory():
     try:
         im2 = Image.open(b)
     except ValueError as msg:
-        assert "Too much memory" in msg
+        assert "Too much memory" in str(msg)
         return

     total_len = 0
+    assert isinstance(im2, PngImagePlugin.PngImageFile)
     for txt in im2.text.values():
         total_len += len(txt)
     assert total_len < 64 * 1024 * 1024, "Total text chunks greater than 64M"
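The first test above flips the global `ImageFile.LOAD_TRUNCATED_IMAGES` flag and restores it in a `finally` block. The same pattern as a small reusable sketch (the helper name is made up for illustration):

```python
from PIL import Image, ImageFile


def load_tolerantly(path: str) -> Image.Image:
    # Temporarily allow truncated files, then always restore the default so
    # other code is not affected by the module-level flag.
    ImageFile.LOAD_TRUNCATED_IMAGES = True
    try:
        with Image.open(path) as im:
            im.load()
            return im
    finally:
        ImageFile.LOAD_TRUNCATED_IMAGES = False
```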
Tests/check_release_notes.py (new file, 8 lines)
@@ -0,0 +1,8 @@
+from __future__ import annotations
+
+import sys
+from pathlib import Path
+
+for rst in Path("docs/releasenotes").glob("[1-9]*.rst"):
+    if "TODO" in open(rst).read():
+        sys.exit(f"Error: remove TODO from {rst}")
Tests/check_wheel.py (new file, 43 lines)
@@ -0,0 +1,43 @@
+from __future__ import annotations
+
+import sys
+
+from PIL import features
+
+
+def test_wheel_modules() -> None:
+    expected_modules = {"pil", "tkinter", "freetype2", "littlecms2", "webp"}
+
+    # tkinter is not available in cibuildwheel installed CPython on Windows
+    try:
+        import tkinter
+
+        assert tkinter
+    except ImportError:
+        expected_modules.remove("tkinter")
+
+    assert set(features.get_supported_modules()) == expected_modules
+
+
+def test_wheel_codecs() -> None:
+    expected_codecs = {"jpg", "jpg_2000", "zlib", "libtiff"}
+
+    assert set(features.get_supported_codecs()) == expected_codecs
+
+
+def test_wheel_features() -> None:
+    expected_features = {
+        "webp_anim",
+        "webp_mux",
+        "transp_webp",
+        "raqm",
+        "fribidi",
+        "harfbuzz",
+        "libjpeg_turbo",
+        "xcb",
+    }
+
+    if sys.platform == "win32":
+        expected_features.remove("xcb")
+
+    assert set(features.get_supported_features()) == expected_features
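Outside this test, the same `PIL.features` helpers can be used to see what a given installation actually supports. A minimal sketch:

```python
from PIL import features

# These are the same sets the wheel test above compares against.
print("modules: ", sorted(features.get_supported_modules()))
print("codecs:  ", sorted(features.get_supported_codecs()))
print("features:", sorted(features.get_supported_features()))
```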
@@ -1,7 +1,11 @@
+from __future__ import annotations
+
 import io

+import pytest
+

-def pytest_report_header(config):
+def pytest_report_header(config: pytest.Config) -> str:
     try:
         from PIL import features

@@ -12,7 +16,7 @@ def pytest_report_header(config):
         return f"pytest_report_header failed: {e}"


-def pytest_configure(config):
+def pytest_configure(config: pytest.Config) -> None:
     config.addinivalue_line(
         "markers",
         "pil_noop_mark: A conditional mark where nothing special happens",
@@ -1,4 +1,6 @@
 #!/usr/bin/env python3
+from __future__ import annotations
+
 import base64
 import os
Tests/fonts/CBDTTestFont.ttf (BIN, normal file)
Tests/fonts/EBDTTestFont.ttf (BIN, normal file)

@@ -2,12 +2,12 @@
 NotoNastaliqUrdu-Regular.ttf and NotoSansSymbols-Regular.ttf, from https://github.com/googlei18n/noto-fonts
 NotoSans-Regular.ttf, from https://www.google.com/get/noto/
 NotoSansJP-Thin.otf, from https://www.google.com/get/noto/help/cjk/
-NotoColorEmoji.ttf, from https://github.com/googlefonts/noto-emoji
 AdobeVFPrototype.ttf, from https://github.com/adobe-fonts/adobe-variable-font-prototype
 TINY5x3GX.ttf, from http://velvetyne.fr/fonts/tiny
 ArefRuqaa-Regular.ttf, from https://github.com/google/fonts/tree/master/ofl/arefruqaa
 ter-x20b.pcf, from http://terminus-font.sourceforge.net/
 BungeeColor-Regular_colr_Windows.ttf, from https://github.com/djrrb/bungee
+OpenSans.woff2, from https://fonts.googleapis.com/css?family=Open+Sans

 All of the above fonts are published under the SIL Open Font License (OFL) v1.1 (http://scripts.sil.org/cms/scripts/page.php?site_id=nrsi&id=OFL), which allows you to copy, modify, and redistribute them if you need to.

@@ -24,3 +24,5 @@ FreeMono.ttf is licensed under GPLv3.
 10x20-ISO8859-1.pcf, from https://packages.ubuntu.com/xenial/xfonts-base

 "Public domain font. Share and enjoy."
+
+CBDTTestFont.ttf and EBDTTestFont.ttf from https://github.com/nulano/font-tests are public domain.
Tests/fonts/OpenSans.woff2 (BIN, normal file)

Tests/fonts/fuzz_font-5203009437302784 (new file, 10 lines)
@@ -0,0 +1,10 @@
+STARTFONT
+FONT ÿ
+SIZE 10
+FONTBOUNDINGBOX
+CHARS
+STARTCHAR
+ENCODING
+BBX 2 5
+ENDCHAR
+ENDFONT

Tests/fonts/oom-4da0210eb7081b0bf15bf16cc4c52ce02c1e1bbc.ttf (BIN, normal file)

Tests/helper.py (214 lines changed)
@@ -2,13 +2,18 @@
 Helper functions.
 """
+
+from __future__ import annotations

 import logging
 import os
 import shutil
+import subprocess
 import sys
 import sysconfig
 import tempfile
+from functools import lru_cache
 from io import BytesIO
+from typing import Any, Callable, Sequence

 import pytest
 from packaging.version import parse as parse_version

@@ -17,42 +22,31 @@ from PIL import Image, ImageMath, features

 logger = logging.getLogger(__name__)

+uploader = None
+if os.environ.get("SHOW_ERRORS"):
+    uploader = "show"
+elif "GITHUB_ACTIONS" in os.environ:
+    uploader = "github_actions"

-HAS_UPLOADER = False
-
-if os.environ.get("SHOW_ERRORS", None):
-    # local img.show for errors.
-    HAS_UPLOADER = True
-
-    class test_image_results:
-        @staticmethod
-        def upload(a, b):
-            a.show()
-            b.show()
-
-elif "GITHUB_ACTIONS" in os.environ:
-    HAS_UPLOADER = True
-
-    class test_image_results:
-        @staticmethod
-        def upload(a, b):
-            dir_errors = os.path.join(os.path.dirname(__file__), "errors")
-            os.makedirs(dir_errors, exist_ok=True)
-            tmpdir = tempfile.mkdtemp(dir=dir_errors)
-            a.save(os.path.join(tmpdir, "a.png"))
-            b.save(os.path.join(tmpdir, "b.png"))
-            return tmpdir
-else:
-    try:
-        import test_image_results
-
-        HAS_UPLOADER = True
-    except ImportError:
-        pass
+
+def upload(a: Image.Image, b: Image.Image) -> str | None:
+    if uploader == "show":
+        # local img.show for errors.
+        a.show()
+        b.show()
+    elif uploader == "github_actions":
+        dir_errors = os.path.join(os.path.dirname(__file__), "errors")
+        os.makedirs(dir_errors, exist_ok=True)
+        tmpdir = tempfile.mkdtemp(dir=dir_errors)
+        a.save(os.path.join(tmpdir, "a.png"))
+        b.save(os.path.join(tmpdir, "b.png"))
+        return tmpdir
+    return None


-def convert_to_comparable(a, b):
+def convert_to_comparable(
+    a: Image.Image, b: Image.Image
+) -> tuple[Image.Image, Image.Image]:
     new_a, new_b = a, b
     if a.mode == "P":
         new_a = Image.new("L", a.size)

@@ -65,14 +59,18 @@ def convert_to_comparable(a, b):
     return new_a, new_b


-def assert_deep_equal(a, b, msg=None):
+def assert_deep_equal(
+    a: Sequence[Any], b: Sequence[Any], msg: str | None = None
+) -> None:
     try:
         assert len(a) == len(b), msg or f"got length {len(a)}, expected {len(b)}"
     except Exception:
         assert a == b, msg


-def assert_image(im, mode, size, msg=None):
+def assert_image(
+    im: Image.Image, mode: str, size: tuple[int, int], msg: str | None = None
+) -> None:
     if mode is not None:
         assert im.mode == mode, (
             msg or f"got mode {repr(im.mode)}, expected {repr(mode)}"

@@ -84,28 +82,32 @@ def assert_image(im, mode, size, msg=None):
         )


-def assert_image_equal(a, b, msg=None):
+def assert_image_equal(a: Image.Image, b: Image.Image, msg: str | None = None) -> None:
     assert a.mode == b.mode, msg or f"got mode {repr(a.mode)}, expected {repr(b.mode)}"
     assert a.size == b.size, msg or f"got size {repr(a.size)}, expected {repr(b.size)}"
     if a.tobytes() != b.tobytes():
-        if HAS_UPLOADER:
-            try:
-                url = test_image_results.upload(a, b)
-                logger.error(f"Url for test images: {url}")
-            except Exception:
-                pass
+        try:
+            url = upload(a, b)
+            if url:
+                logger.error("URL for test images: %s", url)
+        except Exception:
+            pass

-        assert False, msg or "got different content"
+        pytest.fail(msg or "got different content")


-def assert_image_equal_tofile(a, filename, msg=None, mode=None):
+def assert_image_equal_tofile(
+    a: Image.Image, filename: str, msg: str | None = None, mode: str | None = None
+) -> None:
     with Image.open(filename) as img:
         if mode:
             img = img.convert(mode)
         assert_image_equal(a, img, msg)


-def assert_image_similar(a, b, epsilon, msg=None):
+def assert_image_similar(
+    a: Image.Image, b: Image.Image, epsilon: float, msg: str | None = None
+) -> None:
     assert a.mode == b.mode, msg or f"got mode {repr(a.mode)}, expected {repr(b.mode)}"
     assert a.size == b.size, msg or f"got size {repr(a.size)}, expected {repr(b.size)}"

@@ -113,7 +115,9 @@ def assert_image_similar(a, b, epsilon, msg=None):

     diff = 0
     for ach, bch in zip(a.split(), b.split()):
-        chdiff = ImageMath.eval("abs(a - b)", a=ach, b=bch).convert("L")
+        chdiff = ImageMath.lambda_eval(
+            lambda args: abs(args["a"] - args["b"]), a=ach, b=bch
+        ).convert("L")
         diff += sum(i * num for i, num in enumerate(chdiff.histogram()))

     ave_diff = diff / (a.size[0] * a.size[1])

@@ -123,55 +127,68 @@ def assert_image_similar(a, b, epsilon, msg=None):
             + f" average pixel value difference {ave_diff:.4f} > epsilon {epsilon:.4f}"
         )
     except Exception as e:
-        if HAS_UPLOADER:
-            try:
-                url = test_image_results.upload(a, b)
-                logger.error(f"Url for test images: {url}")
-            except Exception:
-                pass
+        try:
+            url = upload(a, b)
+            if url:
+                logger.exception("URL for test images: %s", url)
+        except Exception:
+            pass
         raise e


-def assert_image_similar_tofile(a, filename, epsilon, msg=None, mode=None):
+def assert_image_similar_tofile(
+    a: Image.Image,
+    filename: str,
+    epsilon: float,
+    msg: str | None = None,
+    mode: str | None = None,
+) -> None:
     with Image.open(filename) as img:
         if mode:
             img = img.convert(mode)
         assert_image_similar(a, img, epsilon, msg)


-def assert_all_same(items, msg=None):
+def assert_all_same(items: Sequence[Any], msg: str | None = None) -> None:
     assert items.count(items[0]) == len(items), msg


-def assert_not_all_same(items, msg=None):
+def assert_not_all_same(items: Sequence[Any], msg: str | None = None) -> None:
     assert items.count(items[0]) != len(items), msg


-def assert_tuple_approx_equal(actuals, targets, threshold, msg):
+def assert_tuple_approx_equal(
+    actuals: Sequence[int], targets: tuple[int, ...], threshold: int, msg: str
+) -> None:
     """Tests if actuals has values within threshold from targets"""
-    value = True
     for i, target in enumerate(targets):
-        value *= target - threshold <= actuals[i] <= target + threshold
-
-    assert value, msg + ": " + repr(actuals) + " != " + repr(targets)
+        if not (target - threshold <= actuals[i] <= target + threshold):
+            pytest.fail(msg + ": " + repr(actuals) + " != " + repr(targets))


-def skip_unless_feature(feature):
+def skip_unless_feature(feature: str) -> pytest.MarkDecorator:
     reason = f"{feature} not available"
     return pytest.mark.skipif(not features.check(feature), reason=reason)


-def skip_unless_feature_version(feature, version_required, reason=None):
+def skip_unless_feature_version(
+    feature: str, required: str, reason: str | None = None
+) -> pytest.MarkDecorator:
     if not features.check(feature):
         return pytest.mark.skip(f"{feature} not available")
     if reason is None:
-        reason = f"{feature} is older than {version_required}"
-    version_required = parse_version(version_required)
+        reason = f"{feature} is older than {required}"
+    version_required = parse_version(required)
     version_available = parse_version(features.version(feature))
     return pytest.mark.skipif(version_available < version_required, reason=reason)


-def mark_if_feature_version(mark, feature, version_blacklist, reason=None):
+def mark_if_feature_version(
+    mark: pytest.MarkDecorator,
+    feature: str,
+    version_blacklist: str,
+    reason: str | None = None,
+) -> pytest.MarkDecorator:
     if not features.check(feature):
         return pytest.mark.pil_noop_mark()
     if reason is None:

@@ -192,7 +209,7 @@ class PillowLeakTestCase:
     iterations = 100  # count
     mem_limit = 512  # k

-    def _get_mem_usage(self):
+    def _get_mem_usage(self) -> float:
         """
         Gets the RUSAGE memory usage, returns in K. Encapsulates the difference
         between macOS and Linux rss reporting

@@ -208,14 +225,13 @@ class PillowLeakTestCase:
             # ru_maxrss
             # This is the maximum resident set size utilized (in bytes).
             return mem / 1024  # Kb
-        else:
-            # linux
-            # man 2 getrusage
-            # ru_maxrss (since Linux 2.6.32)
-            # This is the maximum resident set size used (in kilobytes).
-            return mem  # Kb
+        # linux
+        # man 2 getrusage
+        # ru_maxrss (since Linux 2.6.32)
+        # This is the maximum resident set size used (in kilobytes).
+        return mem  # Kb

-    def _test_leak(self, core):
+    def _test_leak(self, core: Callable[[], None]) -> None:
         start_mem = self._get_mem_usage()
         for cycle in range(self.iterations):
             core()

@@ -227,52 +243,77 @@ class PillowLeakTestCase:
 # helpers


-def fromstring(data):
+def fromstring(data: bytes) -> Image.Image:
     return Image.open(BytesIO(data))


-def tostring(im, string_format, **options):
+def tostring(im: Image.Image, string_format: str, **options: Any) -> bytes:
     out = BytesIO()
     im.save(out, string_format, **options)
     return out.getvalue()


-def hopper(mode=None, cache={}):
+def hopper(mode: str | None = None) -> Image.Image:
+    # Use caching to reduce reading from disk, but return a copy
+    # so that the cached image isn't modified by the tests
+    # (for fast, isolated, repeatable tests).
     if mode is None:
         # Always return fresh not-yet-loaded version of image.
-        # Operations on not-yet-loaded images is separate class of errors
-        # what we should catch.
+        # Operations on not-yet-loaded images are a separate class of errors
+        # that we should catch.
         return Image.open("Tests/images/hopper.ppm")
-    # Use caching to reduce reading from disk but so an original copy is
-    # returned each time and the cached image isn't modified by tests
-    # (for fast, isolated, repeatable tests).
-    im = cache.get(mode)
-    if im is None:
-        if mode == "F":
-            im = hopper("L").convert(mode)
-        elif mode[:4] == "I;16":
-            im = hopper("I").convert(mode)
-        else:
-            im = hopper().convert(mode)
-        cache[mode] = im
-    return im.copy()
+    return _cached_hopper(mode).copy()
+
+
+@lru_cache
+def _cached_hopper(mode: str) -> Image.Image:
+    if mode == "F":
+        im = hopper("L")
+    else:
+        im = hopper()
+    if mode.startswith("BGR;"):
+        with pytest.warns(DeprecationWarning):
+            im = im.convert(mode)
+    else:
+        try:
+            im = im.convert(mode)
+        except ImportError:
+            if mode == "LAB":
+                im = Image.open("Tests/images/hopper.Lab.tif")
+            else:
+                raise
+    return im


-def djpeg_available():
-    return bool(shutil.which("djpeg"))
+def djpeg_available() -> bool:
+    if shutil.which("djpeg"):
+        try:
+            subprocess.check_call(["djpeg", "-version"])
+            return True
+        except subprocess.CalledProcessError:  # pragma: no cover
+            return False
+    return False


-def cjpeg_available():
-    return bool(shutil.which("cjpeg"))
+def cjpeg_available() -> bool:
+    if shutil.which("cjpeg"):
+        try:
+            subprocess.check_call(["cjpeg", "-version"])
+            return True
+        except subprocess.CalledProcessError:  # pragma: no cover
+            return False
+    return False


-def netpbm_available():
+def netpbm_available() -> bool:
     return bool(shutil.which("ppmquant") and shutil.which("ppmtogif"))


-def magick_command():
+def magick_command() -> list[str] | None:
     if sys.platform == "win32":
-        magickhome = os.environ.get("MAGICK_HOME", "")
+        magickhome = os.environ.get("MAGICK_HOME")
         if magickhome:
             imagemagick = [os.path.join(magickhome, "convert.exe")]
             graphicsmagick = [os.path.join(magickhome, "gm.exe"), "convert"]

@@ -285,49 +326,50 @@ def magick_command():

     if imagemagick and shutil.which(imagemagick[0]):
         return imagemagick
-    elif graphicsmagick and shutil.which(graphicsmagick[0]):
+    if graphicsmagick and shutil.which(graphicsmagick[0]):
         return graphicsmagick
+    return None


-def on_appveyor():
+def on_appveyor() -> bool:
     return "APPVEYOR" in os.environ


-def on_github_actions():
+def on_github_actions() -> bool:
     return "GITHUB_ACTIONS" in os.environ


-def on_ci():
+def on_ci() -> bool:
     # GitHub Actions and AppVeyor have "CI"
     return "CI" in os.environ


-def is_big_endian():
+def is_big_endian() -> bool:
     return sys.byteorder == "big"


-def is_ppc64le():
+def is_ppc64le() -> bool:
     import platform

     return platform.machine() == "ppc64le"


-def is_win32():
+def is_win32() -> bool:
     return sys.platform.startswith("win32")


-def is_pypy():
+def is_pypy() -> bool:
     return hasattr(sys, "pypy_translation_info")


-def is_mingw():
+def is_mingw() -> bool:
     return sysconfig.get_platform() == "mingw"


 class CachedProperty:
-    def __init__(self, func):
+    def __init__(self, func: Callable[[Any], Any]) -> None:
         self.func = func

-    def __get__(self, instance, cls=None):
+    def __get__(self, instance: Any, cls: type[Any] | None = None) -> Any:
         result = instance.__dict__[self.func.__name__] = self.func(instance)
         return result
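The `hopper()` rewrite above uses a pattern worth noting: memoise the expensive disk read with `functools.lru_cache`, but hand every caller a `.copy()` so in-place edits never reach the cached original. A self-contained sketch of the same idea (the file path and helper names are illustrative, not Pillow's):

```python
from functools import lru_cache

from PIL import Image


@lru_cache
def _cached_image(path: str) -> Image.Image:
    # Read and decode once per path; the result stays in the cache.
    with Image.open(path) as im:
        im.load()
        return im


def fresh_image(path: str) -> Image.Image:
    # Each caller gets an independent copy, so mutating it cannot
    # contaminate later calls that hit the cache.
    return _cached_image(path).copy()
```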
|
@ -22,4 +22,3 @@ and that the name of ICC shall not be used in advertising or publicity
|
||||||
pertaining to distribution of the software without specific, written
|
pertaining to distribution of the software without specific, written
|
||||||
prior permission. ICC makes no representations about the suitability
|
prior permission. ICC makes no representations about the suitability
|
||||||
of this software for any purpose.
|
of this software for any purpose.
|
||||||
|
|
||||||
|
|
Tests/icc/sGrey-v2-nano.icc (BIN, normal file)
Tests/images/1.eps (BIN, normal file)
(one image shown only by the size widget: 578 B before)
Tests/images/16_bit_binary_pgm.tiff (BIN, normal file)
Tests/images/2422.flc (BIN, normal file)
Tests/images/8bit.s.tif (BIN, normal file)
Tests/images/9bit.j2k (BIN, normal file)
Tests/images/apng/different_durations.png (BIN, normal file, 233 B)
(two images shown only by the size widget: 331 B → 331 B and 668 B → 668 B)
Tests/images/ati1.dds (BIN, normal file)
Tests/images/ati1.png (BIN, normal file, 969 B)
Tests/images/ati2.dds (BIN, normal file)
Tests/images/background_outside_palette.gif (BIN, normal file, 82 B)
Tests/images/bc1.dds (BIN, executable file)
Tests/images/bc1_typeless.dds (BIN, executable file)
Tests/images/bc4_typeless.dds (BIN, normal file)
Tests/images/bc4_unorm.dds (BIN, normal file)
Tests/images/bc4_unorm.png (BIN, normal file, 982 B)
Tests/images/bc4u.dds (BIN, normal file)
(one image shown only by the size widget: 82 KiB → 95 KiB)
Tests/images/bc5u.dds (BIN, normal file)
Tests/images/bc6h.dds (BIN, normal file)
Tests/images/bc6h.png (BIN, normal file, 25 KiB)
Tests/images/bc6h_sf.dds (BIN, normal file)
Tests/images/bc6h_sf.png (BIN, normal file, 25 KiB)