Mirror of https://github.com/python-pillow/Pillow.git (synced 2025-07-10 16:22:22 +03:00)

Commit 63cbebe3bc: Merge branch 'main' into image_equals
.appveyor.yml (deleted)
@@ -1,99 +0,0 @@
-skip_commits:
-  files:
-    - ".github/**/*"
-    - ".gitmodules"
-    - "docs/**/*"
-    - "wheels/**/*"
-
-version: '{build}'
-clone_folder: c:\pillow
-init:
-  - ECHO %PYTHON%
-  #- ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
-  # Uncomment previous line to get RDP access during the build.
-
-environment:
-  COVERAGE_CORE: sysmon
-  EXECUTABLE: python.exe
-  TEST_OPTIONS:
-  DEPLOY: YES
-  matrix:
-    - PYTHON: C:/Python313
-      ARCHITECTURE: x86
-      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2022
-    - PYTHON: C:/Python39-x64
-      ARCHITECTURE: AMD64
-      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
-
-
-install:
-  - '%PYTHON%\%EXECUTABLE% --version'
-  - '%PYTHON%\%EXECUTABLE% -m pip install --upgrade pip'
-  - curl -fsSL -o pillow-test-images.zip https://github.com/python-pillow/test-images/archive/main.zip
-  - 7z x pillow-test-images.zip -oc:\
-  - xcopy /S /Y c:\test-images-main\* c:\pillow\tests\images
-  - curl -fsSL -o nasm-win64.zip https://raw.githubusercontent.com/python-pillow/pillow-depends/main/nasm-2.16.03-win64.zip
-  - 7z x nasm-win64.zip -oc:\
-  - choco install ghostscript --version=10.4.0
-  - path c:\nasm-2.16.03;C:\Program Files\gs\gs10.04.0\bin;%PATH%
-  - cd c:\pillow\winbuild\
-  - ps: |
-      c:\python39\python.exe c:\pillow\winbuild\build_prepare.py -v --depends=C:\pillow-depends\
-      c:\pillow\winbuild\build\build_dep_all.cmd
-      $host.SetShouldExit(0)
-  - path C:\pillow\winbuild\build\bin;%PATH%
-
-build_script:
-  - cd c:\pillow
-  - winbuild\build\build_env.cmd
-  - '%PYTHON%\%EXECUTABLE% -m pip install -v -C raqm=vendor -C fribidi=vendor .'
-  - '%PYTHON%\%EXECUTABLE% selftest.py --installed'
-
-test_script:
-  - cd c:\pillow
-  - '%PYTHON%\%EXECUTABLE% -m pip install pytest pytest-cov pytest-timeout defusedxml ipython numpy olefile pyroma'
-  - c:\"Program Files (x86)"\"Windows Kits"\10\Debuggers\x86\gflags.exe /p /enable %PYTHON%\%EXECUTABLE%
-  - path %PYTHON%;%PATH%
-  - .ci\test.cmd
-
-after_test:
-  - curl -Os https://uploader.codecov.io/latest/windows/codecov.exe
-  - .\codecov.exe --file coverage.xml --name %PYTHON% --flags AppVeyor
-
-matrix:
-  fast_finish: true
-
-cache:
-  - '%LOCALAPPDATA%\pip\Cache'
-
-artifacts:
-  - path: pillow\*.egg
-    name: egg
-  - path: pillow\*.whl
-    name: wheel
-
-before_deploy:
-  - cd c:\pillow
-  - '%PYTHON%\%EXECUTABLE% -m pip wheel -v -C raqm=vendor -C fribidi=vendor .'
-  - ps: Get-ChildItem .\*.whl | % { Push-AppveyorArtifact $_.FullName -FileName $_.Name }
-
-deploy:
-  provider: S3
-  region: us-west-2
-  access_key_id: AKIAIRAXC62ZNTVQJMOQ
-  secret_access_key:
-    secure: Hwb6klTqtBeMgxAjRoDltiiqpuH8xbwD4UooDzBSiCWXjuFj1lyl4kHgHwTCCGqi
-  bucket: pillow-nightly
-  folder: win/$(APPVEYOR_BUILD_NUMBER)/
-  artifact: /.*egg|wheel/
-  on:
-    APPVEYOR_REPO_NAME: python-pillow/Pillow
-    branch: main
-    deploy: YES
-
-
-# Uncomment the following lines to get RDP access after the build/test and block for
-# up to the timeout limit (~1hr)
-#
-#on_finish:
-#- ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
.ci/after_success.sh
@@ -2,8 +2,4 @@

 # gather the coverage data
 python3 -m pip install coverage
-if [[ $MATRIX_DOCKER ]]; then
-    python3 -m coverage xml --ignore-errors
-else
-    python3 -m coverage xml
-fi
+python3 -m coverage xml
.ci/build.sh
@@ -3,8 +3,5 @@
 set -e

 python3 -m coverage erase
-if [ $(uname) == "Darwin" ]; then
-    export CPPFLAGS="-I/usr/local/miniconda/include";
-fi
 make clean
 make install-coverage
.ci/install.sh
@@ -2,12 +2,12 @@

 aptget_update()
 {
-    if [ ! -z $1 ]; then
+    if [ -n "$1" ]; then
         echo ""
         echo "Retrying apt-get update..."
         echo ""
     fi
-    output=`sudo apt-get update 2>&1`
+    output=$(sudo apt-get update 2>&1)
     echo "$output"
     if [[ $output == *[WE]:\ * ]]; then
         return 1
@@ -21,7 +21,7 @@ set -e

 if [[ $(uname) != CYGWIN* ]]; then
     sudo apt-get -qq install libfreetype6-dev liblcms2-dev python3-tk\
-        ghostscript libjpeg-turbo-progs libopenjp2-7-dev\
+        ghostscript libjpeg-turbo8-dev libopenjp2-7-dev\
         cmake meson imagemagick libharfbuzz-dev libfribidi-dev\
         sway wl-clipboard libopenblas-dev
 fi
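Two of the shell fixes in the hunk above are general Bash hygiene rather than anything Pillow-specific: quoting the parameter in the emptiness test, and preferring $(...) over backticks for command substitution. A minimal illustrative sketch (the helper names are hypothetical, not part of the Pillow scripts):

    #!/bin/bash
    # Hypothetical helpers, for illustration only.
    is_set_unquoted() { [ ! -z $1 ] && echo "non-empty" || echo "empty"; }
    is_set_quoted()   { [ -n "$1" ] && echo "non-empty" || echo "empty"; }

    is_set_quoted ""        # prints "empty"
    is_set_quoted "retry"   # prints "non-empty"
    is_set_unquoted "a b"   # unquoted $1 is word-split: "[: too many arguments"

    # $(...) nests cleanly and is easier to read than backticks:
    output=$(uname -a 2>&1)
    echo "$output"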
.ci/requirements-cibw.txt
@@ -1 +1 @@
-cibuildwheel==2.21.3
+cibuildwheel==2.23.0
.ci/requirements-mypy.txt
@@ -1,4 +1,4 @@
-mypy==1.11.2
+mypy==1.15.0
 IceSpringPySideStubs-PyQt6
 IceSpringPySideStubs-PySide6
 ipython
.github/CONTRIBUTING.md (5 lines changed)
@@ -9,7 +9,7 @@ Please send a pull request to the `main` branch. Please include [documentation](
 - Fork the Pillow repository.
 - Create a branch from `main`.
 - Develop bug fixes, features, tests, etc.
-- Run the test suite. You can enable GitHub Actions (https://github.com/MY-USERNAME/Pillow/actions) and [AppVeyor](https://ci.appveyor.com/projects/new) on your repo to catch test failures prior to the pull request, and [Codecov](https://codecov.io/gh) to see if the changed code is covered by tests.
+- Run the test suite. You can enable GitHub Actions (https://github.com/MY-USERNAME/Pillow/actions) on your repo to catch test failures prior to the pull request, and [Codecov](https://codecov.io/gh) to see if the changed code is covered by tests.
 - Create a pull request to pull the changes from your branch to the Pillow `main`.

 ### Guidelines
@@ -17,9 +17,8 @@ Please send a pull request to the `main` branch. Please include [documentation](
 - Separate code commits from reformatting commits.
 - Provide tests for any newly added code.
 - Follow PEP 8.
-- When committing only documentation changes please include `[ci skip]` in the commit message to avoid running tests on AppVeyor.
+- When committing only documentation changes please include `[ci skip]` in the commit message to avoid running extra tests.
 - Include [release notes](https://github.com/python-pillow/Pillow/tree/main/docs/releasenotes) as needed or appropriate with your bug fixes, feature additions and tests.
-- Do not add to the [changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst) for proposed changes, as that is updated after changes are merged.

 ## Reporting Issues

.github/mergify.yml (1 line changed)
@@ -9,7 +9,6 @@ pull_request_rules:
       - status-success=Windows Test Successful
       - status-success=MinGW
       - status-success=Cygwin Test Successful
-      - status-success=continuous-integration/appveyor/pr
     actions:
       merge:
         method: merge
.github/release-drafter.yml (11 lines changed)
@@ -3,18 +3,19 @@ tag-template: "$NEXT_MINOR_VERSION"
 change-template: '- $TITLE #$NUMBER [@$AUTHOR]'

 categories:
-  - title: "Dependencies"
-    label: "Dependency"
+  - title: "Removals"
+    label: "Removal"
   - title: "Deprecations"
     label: "Deprecation"
   - title: "Documentation"
     label: "Documentation"
-  - title: "Removals"
-    label: "Removal"
+  - title: "Dependencies"
+    label: "Dependency"
   - title: "Testing"
     label: "Testing"
   - title: "Type hints"
     label: "Type hints"
+  - title: "Other changes"

 exclude-labels:
   - "changelog: skip"
@@ -23,6 +24,4 @@ template: |

   https://pillow.readthedocs.io/en/stable/releasenotes/$NEXT_MINOR_VERSION.html

-  ## Changes
-
   $CHANGES
.github/renovate.json (12 lines changed)
@@ -1,7 +1,7 @@
 {
   "$schema": "https://docs.renovatebot.com/renovate-schema.json",
   "extends": [
-    "config:base"
+    "config:recommended"
   ],
   "labels": [
     "Dependency"
@@ -9,9 +9,13 @@
   "packageRules": [
     {
       "groupName": "github-actions",
-      "matchManagers": ["github-actions"],
-      "separateMajorMinor": "false"
+      "matchManagers": [
+        "github-actions"
+      ],
+      "separateMajorMinor": false
     }
   ],
-  "schedule": ["on the 3rd day of the month"]
+  "schedule": [
+    "on the 3rd day of the month"
+  ]
 }
.github/workflows/docs.yml (2 lines changed)
@@ -33,6 +33,8 @@ jobs:

     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: Set up Python
       uses: actions/setup-python@v5
.github/workflows/lint.yml (2 lines changed)
@@ -21,6 +21,8 @@ jobs:

     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: pre-commit cache
       uses: actions/cache@v4
.github/workflows/macos-install.sh (8 lines changed)
@@ -8,17 +8,13 @@ fi
 brew install \
     freetype \
     ghostscript \
+    jpeg-turbo \
     libimagequant \
-    libjpeg \
+    libraqm \
     libtiff \
     little-cms2 \
     openjpeg \
     webp
-if [[ "$ImageOS" == "macos13" ]]; then
-    brew install --ignore-dependencies libraqm
-else
-    brew install libraqm
-fi
 export PKG_CONFIG_PATH="/usr/local/opt/openblas/lib/pkgconfig"

 python3 -m pip install coverage
.github/workflows/stale.yml (4 lines changed)
@@ -6,7 +6,7 @@ on:
   workflow_dispatch:

 permissions:
-  issues: write
+  contents: read

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
@@ -15,6 +15,8 @@ concurrency:
 jobs:
   stale:
     if: github.repository_owner == 'python-pillow'
+    permissions:
+      issues: write

     runs-on: ubuntu-latest

.github/workflows/test-cygwin.yml (9 lines changed)
@@ -48,9 +48,11 @@ jobs:

     - name: Checkout Pillow
       uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: Install Cygwin
-      uses: cygwin/cygwin-install-action@v4
+      uses: cygwin/cygwin-install-action@v5
       with:
         packages: >
           gcc-g++
@@ -131,11 +133,12 @@ jobs:
     - name: After success
       run: |
         bash.exe .ci/after_success.sh
+        rm C:\cygwin\bin\bash.EXE

     - name: Upload coverage
-      uses: codecov/codecov-action@v4
+      uses: codecov/codecov-action@v5
       with:
-        file: ./coverage.xml
+        files: ./coverage.xml
         flags: GHA_Cygwin
         name: Cygwin Python 3.${{ matrix.python-minor-version }}
         token: ${{ secrets.CODECOV_ORG_TOKEN }}
.github/workflows/test-docker.yml (31 lines changed)
@@ -29,42 +29,46 @@ concurrency:
 jobs:
   build:

-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: false
       matrix:
+        os: ["ubuntu-latest"]
         docker: [
-          # Run slower jobs first to give them a headstart and reduce waiting time
-          ubuntu-22.04-jammy-arm64v8,
-          ubuntu-24.04-noble-ppc64le,
-          ubuntu-24.04-noble-s390x,
-          # Then run the remainder
           alpine,
           amazon-2-amd64,
           amazon-2023-amd64,
           arch,
           centos-stream-9-amd64,
+          centos-stream-10-amd64,
           debian-12-bookworm-x86,
           debian-12-bookworm-amd64,
-          fedora-39-amd64,
           fedora-40-amd64,
+          fedora-41-amd64,
           gentoo,
           ubuntu-22.04-jammy-amd64,
           ubuntu-24.04-noble-amd64,
         ]
         dockerTag: [main]
         include:
-          - docker: "ubuntu-22.04-jammy-arm64v8"
-            qemu-arch: "aarch64"
           - docker: "ubuntu-24.04-noble-ppc64le"
+            os: "ubuntu-22.04"
             qemu-arch: "ppc64le"
+            dockerTag: main
           - docker: "ubuntu-24.04-noble-s390x"
+            os: "ubuntu-22.04"
             qemu-arch: "s390x"
+            dockerTag: main
+          - docker: "ubuntu-24.04-noble-arm64v8"
+            os: "ubuntu-24.04-arm"
+            dockerTag: main

     name: ${{ matrix.docker }}

     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: Build system information
       run: python3 .github/workflows/system-info.py
@@ -87,22 +91,21 @@ jobs:

     - name: After success
       run: |
-        PATH="$PATH:~/.local/bin"
         docker start pillow_container
+        sudo docker cp pillow_container:/Pillow /Pillow
+        sudo chown -R runner /Pillow
         pil_path=`docker exec pillow_container /vpy3/bin/python -c 'import os, PIL;print(os.path.realpath(os.path.dirname(PIL.__file__)))'`
         docker stop pillow_container
         sudo mkdir -p $pil_path
         sudo cp src/PIL/*.py $pil_path
+        cd /Pillow
         .ci/after_success.sh
-      env:
-        MATRIX_DOCKER: ${{ matrix.docker }}

     - name: Upload coverage
-      uses: codecov/codecov-action@v4
+      uses: codecov/codecov-action@v5
       with:
         flags: GHA_Docker
         name: ${{ matrix.docker }}
-        gcov: true
         token: ${{ secrets.CODECOV_ORG_TOKEN }}

   success:
.github/workflows/test-mingw.yml (20 lines changed)
@@ -46,6 +46,8 @@ jobs:
     steps:
     - name: Checkout Pillow
       uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: Set up shell
       run: echo "C:\msys64\usr\bin\" >> $env:GITHUB_PATH
@@ -64,18 +66,18 @@ jobs:
           mingw-w64-x86_64-libtiff \
           mingw-w64-x86_64-libwebp \
           mingw-w64-x86_64-openjpeg2 \
-          mingw-w64-x86_64-python3-numpy \
-          mingw-w64-x86_64-python3-olefile \
-          mingw-w64-x86_64-python3-setuptools \
+          mingw-w64-x86_64-python-numpy \
+          mingw-w64-x86_64-python-olefile \
+          mingw-w64-x86_64-python-pip \
+          mingw-w64-x86_64-python-pytest \
+          mingw-w64-x86_64-python-pytest-cov \
+          mingw-w64-x86_64-python-pytest-timeout \
           mingw-w64-x86_64-python-pyqt6

-        python3 -m ensurepip
-        python3 -m pip install pyroma pytest pytest-cov pytest-timeout
-
         pushd depends && ./install_extra_test_images.sh && popd

     - name: Build Pillow
-      run: SETUPTOOLS_USE_DISTUTILS="stdlib" CFLAGS="-coverage" python3 -m pip install .
+      run: CFLAGS="-coverage" python3 -m pip install .

     - name: Test Pillow
       run: |
@@ -83,9 +85,9 @@ jobs:
         .ci/test.sh

     - name: Upload coverage
-      uses: codecov/codecov-action@v4
+      uses: codecov/codecov-action@v5
       with:
-        file: ./coverage.xml
+        files: ./coverage.xml
         flags: GHA_Windows
         name: "MSYS2 MinGW"
         token: ${{ secrets.CODECOV_ORG_TOKEN }}
.github/workflows/test-valgrind.yml (2 lines changed)
@@ -40,6 +40,8 @@ jobs:

     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: Build system information
       run: python3 .github/workflows/system-info.py
.github/workflows/test-windows.yml (40 lines changed)
@@ -31,29 +31,38 @@ env:

 jobs:
   build:
-    runs-on: windows-latest
+    runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["pypy3.10", "3.9", "3.10", "3.11", "3.12", "3.13"]
+        python-version: ["pypy3.11", "pypy3.10", "3.10", "3.11", "3.12", "3.13", "3.14"]
+        architecture: ["x64"]
+        os: ["windows-latest"]
+        include:
+          # Test the oldest Python on 32-bit
+          - { python-version: "3.9", architecture: "x86", os: "windows-2019" }

     timeout-minutes: 30

-    name: Python ${{ matrix.python-version }}
+    name: Python ${{ matrix.python-version }} (${{ matrix.architecture }})

     steps:
     - name: Checkout Pillow
       uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: Checkout cached dependencies
       uses: actions/checkout@v4
       with:
+        persist-credentials: false
         repository: python-pillow/pillow-depends
         path: winbuild\depends

     - name: Checkout extra test images
       uses: actions/checkout@v4
       with:
+        persist-credentials: false
         repository: python-pillow/test-images
         path: Tests\test-images

@@ -63,22 +72,21 @@ jobs:
       with:
         python-version: ${{ matrix.python-version }}
         allow-prereleases: true
+        architecture: ${{ matrix.architecture }}
         cache: pip
         cache-dependency-path: ".github/workflows/test-windows.yml"

     - name: Print build system information
       run: python3 .github/workflows/system-info.py

-    - name: Install Python dependencies
-      run: >
-        python3 -m pip install
-        coverage>=7.4.2
-        defusedxml
-        olefile
-        pyroma
-        pytest
-        pytest-cov
-        pytest-timeout
+    - name: Upgrade pip
+      run: |
+        python3 -m pip install --upgrade pip
+
+    - name: Install CPython dependencies
+      if: "!contains(matrix.python-version, 'pypy') && matrix.architecture != 'x86'"
+      run: |
+        python3 -m pip install PyQt6

     - name: Install dependencies
       id: install
@@ -178,7 +186,7 @@ jobs:
     - name: Build Pillow
       run: |
         $FLAGS="-C raqm=vendor -C fribidi=vendor"
-        cmd /c "winbuild\build\build_env.cmd && $env:pythonLocation\python.exe -m pip install -v $FLAGS ."
+        cmd /c "winbuild\build\build_env.cmd && $env:pythonLocation\python.exe -m pip install -v $FLAGS .[tests]"
         & $env:pythonLocation\python.exe selftest.py --installed
       shell: pwsh

@@ -213,9 +221,9 @@ jobs:
       shell: pwsh

     - name: Upload coverage
-      uses: codecov/codecov-action@v4
+      uses: codecov/codecov-action@v5
       with:
-        file: ./coverage.xml
+        files: ./coverage.xml
         flags: GHA_Windows
         name: ${{ runner.os }} Python ${{ matrix.python-version }}
         token: ${{ secrets.CODECOV_ORG_TOKEN }}
.github/workflows/test.yml (24 lines changed)
@@ -41,7 +41,10 @@ jobs:
           "ubuntu-latest",
         ]
         python-version: [
+          "pypy3.11",
           "pypy3.10",
+          "3.14",
+          "3.13t",
           "3.13",
           "3.12",
           "3.11",
@@ -52,21 +55,22 @@ jobs:
          - { python-version: "3.11", PYTHONOPTIMIZE: 1, REVERSE: "--reverse" }
          - { python-version: "3.10", PYTHONOPTIMIZE: 2 }
          # Free-threaded
-         - { os: "ubuntu-latest", python-version: "3.13-dev", disable-gil: true }
+         - { python-version: "3.13t", disable-gil: true }
          # M1 only available for 3.10+
          - { os: "macos-13", python-version: "3.9" }
        exclude:
          - { os: "macos-latest", python-version: "3.9" }

     runs-on: ${{ matrix.os }}
-    name: ${{ matrix.os }} Python ${{ matrix.python-version }} ${{ matrix.disable-gil && 'free-threaded' || '' }}
+    name: ${{ matrix.os }} Python ${{ matrix.python-version }}

     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v5
-      if: "${{ !matrix.disable-gil }}"
+      uses: Quansight-Labs/setup-python@v5
       with:
         python-version: ${{ matrix.python-version }}
         allow-prereleases: true
@@ -75,13 +79,6 @@ jobs:
           ".ci/*.sh"
           "pyproject.toml"

-    - name: Set up Python ${{ matrix.python-version }} (free-threaded)
-      uses: deadsnakes/action@v3.2.0
-      if: "${{ matrix.disable-gil }}"
-      with:
-        python-version: ${{ matrix.python-version }}
-        nogil: ${{ matrix.disable-gil }}
-
     - name: Set PYTHON_GIL
       if: "${{ matrix.disable-gil }}"
       run: |
@@ -114,7 +111,7 @@ jobs:
         GHA_PYTHON_VERSION: ${{ matrix.python-version }}

     - name: Register gcc problem matcher
-      if: "matrix.os == 'ubuntu-latest' && matrix.python-version == '3.12'"
+      if: "matrix.os == 'ubuntu-latest' && matrix.python-version == '3.13'"
       run: echo "::add-matcher::.github/problem-matchers/gcc.json"

     - name: Build
@@ -154,11 +151,10 @@ jobs:
         .ci/after_success.sh

     - name: Upload coverage
-      uses: codecov/codecov-action@v4
+      uses: codecov/codecov-action@v5
       with:
         flags: ${{ matrix.os == 'ubuntu-latest' && 'GHA_Ubuntu' || 'GHA_macOS' }}
         name: ${{ matrix.os }} Python ${{ matrix.python-version }}
-        gcov: true
         token: ${{ secrets.CODECOV_ORG_TOKEN }}

   success:
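The new "3.13t" matrix entries above exercise CPython's free-threaded build, with the "Set PYTHON_GIL" step controlling whether the GIL is re-enabled at runtime. A quick local sanity check, assuming a free-threaded interpreter is installed under the conventional python3.13t name (that executable name is an assumption, not taken from the workflow):

    # Expect "False" when the GIL is disabled; PYTHON_GIL=1 would force it back on.
    PYTHON_GIL=0 python3.13t -c "import sys; print(sys._is_gil_enabled())"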
.github/workflows/wheels-dependencies.sh (196 lines changed)
@@ -1,11 +1,33 @@
 #!/bin/bash
-# Define custom utilities
-# Test for macOS with [ -n "$IS_MACOS" ]
-if [ -z "$IS_MACOS" ]; then
-    export MB_ML_LIBC=${AUDITWHEEL_POLICY::9}
-    export MB_ML_VER=${AUDITWHEEL_POLICY:9}
+
+# Setup that needs to be done before multibuild utils are invoked
+PROJECTDIR=$(pwd)
+if [[ "$(uname -s)" == "Darwin" ]]; then
+    # Safety check - macOS builds require that CIBW_ARCHS is set, and that it
+    # only contains a single value (even though cibuildwheel allows multiple
+    # values in CIBW_ARCHS).
+    if [[ -z "$CIBW_ARCHS" ]]; then
+        echo "ERROR: Pillow macOS builds require CIBW_ARCHS be defined."
+        exit 1
+    fi
+    if [[ "$CIBW_ARCHS" == *" "* ]]; then
+        echo "ERROR: Pillow macOS builds only support a single architecture in CIBW_ARCHS."
+        exit 1
+    fi
+
+    # Build macOS dependencies in `build/darwin`
+    # Install them into `build/deps/darwin`
+    WORKDIR=$(pwd)/build/darwin
+    BUILD_PREFIX=$(pwd)/build/deps/darwin
+else
+    # Build prefix will default to /usr/local
+    WORKDIR=$(pwd)/build
+    MB_ML_LIBC=${AUDITWHEEL_POLICY::9}
+    MB_ML_VER=${AUDITWHEEL_POLICY:9}
 fi
-export PLAT=$CIBW_ARCHS
+PLAT=$CIBW_ARCHS
+
+# Define custom utilities
 source wheels/multibuild/common_utils.sh
 source wheels/multibuild/library_builders.sh
 if [ -z "$IS_MACOS" ]; then
@@ -15,93 +37,111 @@ fi
 ARCHIVE_SDIR=pillow-depends-main

 # Package versions for fresh source builds
-FREETYPE_VERSION=2.13.2
-HARFBUZZ_VERSION=10.0.1
-LIBPNG_VERSION=1.6.44
-JPEGTURBO_VERSION=3.0.4
-OPENJPEG_VERSION=2.5.2
-XZ_VERSION=5.6.3
+FREETYPE_VERSION=2.13.3
+HARFBUZZ_VERSION=10.4.0
+LIBPNG_VERSION=1.6.47
+JPEGTURBO_VERSION=3.1.0
+OPENJPEG_VERSION=2.5.3
+XZ_VERSION=5.6.4
 TIFF_VERSION=4.6.0
-LCMS2_VERSION=2.16
-if [[ -n "$IS_MACOS" ]]; then
-    GIFLIB_VERSION=5.2.2
-else
-    GIFLIB_VERSION=5.2.1
-fi
-if [[ -n "$IS_MACOS" ]] || [[ "$MB_ML_VER" != 2014 ]]; then
-    ZLIB_VERSION=1.3.1
-else
-    ZLIB_VERSION=1.2.8
-fi
-LIBWEBP_VERSION=1.4.0
+LCMS2_VERSION=2.17
+ZLIB_NG_VERSION=2.2.4
+LIBWEBP_VERSION=1.5.0
 BZIP2_VERSION=1.0.8
 LIBXCB_VERSION=1.17.0
 BROTLI_VERSION=1.1.0

+function build_pkg_config {
+    if [ -e pkg-config-stamp ]; then return; fi
+    # This essentially duplicates the Homebrew recipe
+    CFLAGS="$CFLAGS -Wno-int-conversion" build_simple pkg-config 0.29.2 https://pkg-config.freedesktop.org/releases tar.gz \
+        --disable-debug --disable-host-tool --with-internal-glib \
+        --with-pc-path=$BUILD_PREFIX/share/pkgconfig:$BUILD_PREFIX/lib/pkgconfig \
+        --with-system-include-path=$(xcrun --show-sdk-path --sdk macosx)/usr/include
+    export PKG_CONFIG=$BUILD_PREFIX/bin/pkg-config
+    touch pkg-config-stamp
+}
+
+function build_zlib_ng {
+    if [ -e zlib-stamp ]; then return; fi
+    fetch_unpack https://github.com/zlib-ng/zlib-ng/archive/$ZLIB_NG_VERSION.tar.gz zlib-ng-$ZLIB_NG_VERSION.tar.gz
+    (cd zlib-ng-$ZLIB_NG_VERSION \
+        && ./configure --prefix=$BUILD_PREFIX --zlib-compat \
+        && make -j4 \
+        && make install)
+
+    if [ -n "$IS_MACOS" ]; then
+        # Ensure that on macOS, the library name is an absolute path, not an
+        # @rpath, so that delocate picks up the right library (and doesn't need
+        # DYLD_LIBRARY_PATH to be set). The default Makefile doesn't have an
+        # option to control the install_name.
+        install_name_tool -id $BUILD_PREFIX/lib/libz.1.dylib $BUILD_PREFIX/lib/libz.1.dylib
+    fi
+    touch zlib-stamp
+}
+
 function build_brotli {
-    local cmake=$(get_modern_cmake)
+    if [ -e brotli-stamp ]; then return; fi
     local out_dir=$(fetch_unpack https://github.com/google/brotli/archive/v$BROTLI_VERSION.tar.gz brotli-$BROTLI_VERSION.tar.gz)
     (cd $out_dir \
-        && $cmake -DCMAKE_INSTALL_PREFIX=$BUILD_PREFIX -DCMAKE_INSTALL_NAME_DIR=$BUILD_PREFIX/lib . \
+        && cmake -DCMAKE_INSTALL_PREFIX=$BUILD_PREFIX -DCMAKE_INSTALL_LIBDIR=$BUILD_PREFIX/lib -DCMAKE_INSTALL_NAME_DIR=$BUILD_PREFIX/lib . \
         && make install)
-    if [[ "$MB_ML_LIBC" == "manylinux" ]]; then
-        cp /usr/local/lib64/libbrotli* /usr/local/lib
-        cp /usr/local/lib64/pkgconfig/libbrotli* /usr/local/lib/pkgconfig
-    fi
+    touch brotli-stamp
 }

 function build_harfbuzz {
+    if [ -e harfbuzz-stamp ]; then return; fi
     python3 -m pip install meson ninja

-    local out_dir=$(fetch_unpack https://github.com/harfbuzz/harfbuzz/releases/download/$HARFBUZZ_VERSION/$HARFBUZZ_VERSION.tar.xz harfbuzz-$HARFBUZZ_VERSION.tar.xz)
+    local out_dir=$(fetch_unpack https://github.com/harfbuzz/harfbuzz/releases/download/$HARFBUZZ_VERSION/harfbuzz-$HARFBUZZ_VERSION.tar.xz harfbuzz-$HARFBUZZ_VERSION.tar.xz)
     (cd $out_dir \
-        && meson setup build --buildtype=release -Dfreetype=enabled -Dglib=disabled)
+        && meson setup build --prefix=$BUILD_PREFIX --libdir=$BUILD_PREFIX/lib --buildtype=release -Dfreetype=enabled -Dglib=disabled)
     (cd $out_dir/build \
         && meson install)
-    if [[ "$MB_ML_LIBC" == "manylinux" ]]; then
-        cp /usr/local/lib64/libharfbuzz* /usr/local/lib
-    fi
+    touch harfbuzz-stamp
 }

 function build {
-    if [[ -n "$IS_MACOS" ]] && [[ "$CIBW_ARCHS" == "arm64" ]]; then
-        sudo chown -R runner /usr/local
-    fi
     build_xz
-    if [ -z "$IS_ALPINE" ] && [ -z "$IS_MACOS" ]; then
+    if [ -z "$IS_ALPINE" ] && [ -z "$SANITIZER" ] && [ -z "$IS_MACOS" ]; then
         yum remove -y zlib-devel
     fi
-    build_new_zlib
+    build_zlib_ng

     build_simple xcb-proto 1.17.0 https://xorg.freedesktop.org/archive/individual/proto
     if [ -n "$IS_MACOS" ]; then
         build_simple xorgproto 2024.1 https://www.x.org/pub/individual/proto
-        build_simple libXau 1.0.11 https://www.x.org/pub/individual/lib
+        build_simple libXau 1.0.12 https://www.x.org/pub/individual/lib
         build_simple libpthread-stubs 0.5 https://xcb.freedesktop.org/dist
-        if [[ "$CIBW_ARCHS" == "arm64" ]]; then
-            cp /usr/local/share/pkgconfig/xcb-proto.pc /usr/local/lib/pkgconfig
-        fi
     else
-        sed s/\${pc_sysrootdir\}// /usr/local/share/pkgconfig/xcb-proto.pc > /usr/local/lib/pkgconfig/xcb-proto.pc
+        sed s/\${pc_sysrootdir\}// $BUILD_PREFIX/share/pkgconfig/xcb-proto.pc > $BUILD_PREFIX/lib/pkgconfig/xcb-proto.pc
     fi
     build_simple libxcb $LIBXCB_VERSION https://www.x.org/releases/individual/lib

     build_libjpeg_turbo
-    build_tiff
+    if [ -n "$IS_MACOS" ]; then
+        # Custom tiff build to include jpeg; by default, configure won't include
+        # headers/libs in the custom macOS prefix. Explicitly disable webp,
+        # libdeflate and zstd, because on x86_64 macs, it will pick up the
+        # Homebrew versions of those libraries from /usr/local.
+        build_simple tiff $TIFF_VERSION https://download.osgeo.org/libtiff tar.gz \
+            --with-jpeg-include-dir=$BUILD_PREFIX/include --with-jpeg-lib-dir=$BUILD_PREFIX/lib \
+            --disable-webp --disable-libdeflate --disable-zstd
+    else
+        build_tiff
+    fi
+
     build_libpng
     build_lcms2
     build_openjpeg
-    if [ -f /usr/local/lib64/libopenjp2.so ]; then
-        cp /usr/local/lib64/libopenjp2.so /usr/local/lib
-    fi

-    ORIGINAL_CFLAGS=$CFLAGS
-    CFLAGS="$CFLAGS -O3 -DNDEBUG"
+    webp_cflags="-O3 -DNDEBUG"
     if [[ -n "$IS_MACOS" ]]; then
-        CFLAGS="$CFLAGS -Wl,-headerpad_max_install_names"
+        webp_cflags="$webp_cflags -Wl,-headerpad_max_install_names"
     fi
-    build_libwebp
-    CFLAGS=$ORIGINAL_CFLAGS
+    CFLAGS="$CFLAGS $webp_cflags" build_simple libwebp $LIBWEBP_VERSION \
+        https://storage.googleapis.com/downloads.webmproject.org/releases/webp tar.gz \
+        --enable-libwebpmux --enable-libwebpdemux

     build_brotli

@@ -115,31 +155,47 @@ function build {
     build_harfbuzz
 }

+# Perform all dependency builds in the build subfolder.
+mkdir -p $WORKDIR
+pushd $WORKDIR > /dev/null
+
 # Any stuff that you need to do before you start building the wheels
 # Runs in the root directory of this repository.
-curl -fsSL -o pillow-depends-main.zip https://github.com/python-pillow/pillow-depends/archive/main.zip
-untar pillow-depends-main.zip
+if [[ ! -d $WORKDIR/pillow-depends-main ]]; then
+    if [[ ! -f $PROJECTDIR/pillow-depends-main.zip ]]; then
+        echo "Download pillow dependency sources..."
+        curl -fSL -o $PROJECTDIR/pillow-depends-main.zip https://github.com/python-pillow/pillow-depends/archive/main.zip
+    fi
+    echo "Unpacking pillow dependency sources..."
+    untar $PROJECTDIR/pillow-depends-main.zip
+fi

 if [[ -n "$IS_MACOS" ]]; then
-    # libtiff and libxcb cause a conflict with building libtiff and libxcb
-    # libxau and libxdmcp cause an issue on macOS < 11
-    # remove cairo to fix building harfbuzz on arm64
-    # remove lcms2 and libpng to fix building openjpeg on arm64
-    # remove jpeg-turbo to avoid inclusion on arm64
-    # remove webp and zstd to avoid inclusion on x86_64
-    # curl from brew requires zstd, use system curl
-    brew remove --ignore-dependencies libpng libtiff libxcb libxau libxdmcp curl cairo lcms2 zstd
-    if [[ "$CIBW_ARCHS" == "arm64" ]]; then
-        brew remove --ignore-dependencies jpeg-turbo
-    else
-        brew remove --ignore-dependencies webp
-    fi
+    # Homebrew (or similar packaging environments) install can contain some of
+    # the libraries that we're going to build. However, they may be compiled
+    # with a MACOSX_DEPLOYMENT_TARGET that doesn't match what we want to use,
+    # and they may bring in other dependencies that we don't want. The same will
+    # be true of any other locations on the path. To avoid conflicts, strip the
+    # path down to the bare minimum (which, on macOS, won't include any
+    # development dependencies).
+    export PATH="$BUILD_PREFIX/bin:$(dirname $(which python3)):/usr/bin:/bin:/usr/sbin:/sbin:/Library/Apple/usr/bin"
+    export CMAKE_PREFIX_PATH=$BUILD_PREFIX

-    brew install pkg-config
+    # Ensure the basic structure of the build prefix directory exists.
+    mkdir -p "$BUILD_PREFIX/bin"
+    mkdir -p "$BUILD_PREFIX/lib"
+
+    # Ensure pkg-config is available
+    build_pkg_config
+    # Ensure cmake is available
+    python3 -m pip install cmake
 fi

 wrap_wheel_builder build

+# Return to the project root to finish the build
+popd > /dev/null
+
 # Append licenses
 for filename in wheels/dependency_licenses/*; do
     echo -e "\n\n----\n\n$(basename $filename | cut -f 1 -d '.')\n" | cat >> LICENSE
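The rewritten dependency script above guards each build function with a stamp file ("if [ -e pkg-config-stamp ]; then return; fi" ... "touch pkg-config-stamp"), so a re-run skips work that already succeeded while a failed build is retried. A generic sketch of that pattern with a placeholder build step (the expensive_build name and mylib-stamp file are hypothetical):

    #!/bin/bash
    set -e

    expensive_build() {
        # Skip the whole function if a previous run already finished it.
        if [ -e mylib-stamp ]; then return; fi

        echo "building mylib..."   # stand-in for fetch/configure/make/install
        sleep 1

        # Only reached on success, so a failed build is retried next time.
        touch mylib-stamp
    }

    expensive_build   # does the work
    expensive_build   # returns immediately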
.github/workflows/wheels-test.ps1 (3 lines changed)
@@ -11,6 +11,9 @@ if ("$venv" -like "*\cibw-run-*\pp*-win_amd64\*") {
     $env:path += ";$pillow\winbuild\build\bin\"
 }
 & "$venv\Scripts\activate.ps1"
 & reg add "HKLM\SOFTWARE\Microsoft\Windows NT\CurrentVersion\Image File Execution Options\python.exe" /v "GlobalFlag" /t REG_SZ /d "0x02000000" /f
+if ("$venv" -like "*\cibw-run-*-win_amd64\*") {
+    & python -m pip install numpy
+}
 cd $pillow
 & python -VV
 if (!$?) { exit $LASTEXITCODE }
.github/workflows/wheels-test.sh (20 lines changed)
@@ -1,12 +1,24 @@
 #!/bin/bash
 set -e

+# Ensure fribidi is installed by the system.
 if [[ "$OSTYPE" == "darwin"* ]]; then
-    brew install fribidi
-    export PKG_CONFIG_PATH="/usr/local/opt/openblas/lib/pkgconfig"
-    if [ -f /opt/homebrew/lib/libfribidi.dylib ]; then
-        sudo cp /opt/homebrew/lib/libfribidi.dylib /usr/local/lib
+    # If Homebrew is on the path during the build, it may leak into the wheels.
+    # However, we *do* need Homebrew to provide a copy of fribidi for
+    # testing purposes so that we can verify the fribidi shim works as expected.
+    if [[ "$(uname -m)" == "x86_64" ]]; then
+        HOMEBREW_PREFIX=/usr/local
+    else
+        HOMEBREW_PREFIX=/opt/homebrew
     fi
+    $HOMEBREW_PREFIX/bin/brew install fribidi
+
+    # Add the lib folder for fribidi so that the vendored library can be found.
+    # Don't use $HOMEWBREW_PREFIX/lib directly - use the lib folder where the
+    # installed copy of fribidi is cellared. This ensures we don't pick up the
+    # Homebrew version of any other library that we're dependent on (most notably,
+    # freetype).
+    export DYLD_LIBRARY_PATH=$(dirname $(realpath $HOMEBREW_PREFIX/lib/libfribidi.dylib))
 elif [ "${AUDITWHEEL_POLICY::9}" == "musllinux" ]; then
     apk add curl fribidi
 else
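The DYLD_LIBRARY_PATH line added above relies on realpath resolving the Homebrew symlink into the versioned Cellar directory, so only fribidi's own lib folder is exposed to the dynamic loader. Roughly what that resolution looks like (the Cellar path and version number here are examples, not output from the CI run):

    lib=/opt/homebrew/lib/libfribidi.dylib   # Homebrew symlink
    realpath "$lib"               # e.g. /opt/homebrew/Cellar/fribidi/1.0.16/lib/libfribidi.dylib
    dirname "$(realpath "$lib")"  # e.g. /opt/homebrew/Cellar/fribidi/1.0.16/lib
    export DYLD_LIBRARY_PATH=$(dirname $(realpath "$lib"))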
.github/workflows/wheels.yml (90 lines changed)
@@ -13,6 +13,7 @@ on:
     paths:
       - ".ci/requirements-cibw.txt"
       - ".github/workflows/wheel*"
+      - "pyproject.toml"
       - "setup.py"
       - "wheels/*"
       - "winbuild/build_prepare.py"
@@ -23,6 +24,7 @@ on:
     paths:
       - ".ci/requirements-cibw.txt"
       - ".github/workflows/wheel*"
+      - "pyproject.toml"
       - "setup.py"
       - "wheels/*"
       - "winbuild/build_prepare.py"
@@ -40,61 +42,7 @@ env:
   FORCE_COLOR: 1

 jobs:
-  build-1-QEMU-emulated-wheels:
-    if: github.event_name != 'schedule'
-    name: aarch64 ${{ matrix.python-version }} ${{ matrix.spec }}
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version:
-          - pp310
-          - cp3{9,10,11}
-          - cp3{12,13}
-        spec:
-          - manylinux2014
-          - manylinux_2_28
-          - musllinux
-        exclude:
-          - { python-version: pp310, spec: musllinux }
-
-    steps:
-    - uses: actions/checkout@v4
-      with:
-        submodules: true
-
-    - uses: actions/setup-python@v5
-      with:
-        python-version: "3.x"
-
-    # https://github.com/docker/setup-qemu-action
-    - name: Set up QEMU
-      uses: docker/setup-qemu-action@v3
-
-    - name: Install cibuildwheel
-      run: |
-        python3 -m pip install -r .ci/requirements-cibw.txt
-
-    - name: Build wheels
-      run: |
-        python3 -m cibuildwheel --output-dir wheelhouse
-      env:
-        # Build only the currently selected Linux architecture (so we can
-        # parallelise for speed).
-        CIBW_ARCHS: "aarch64"
-        # Likewise, select only one Python version per job to speed this up.
-        CIBW_BUILD: "${{ matrix.python-version }}-${{ matrix.spec == 'musllinux' && 'musllinux' || 'manylinux' }}*"
-        CIBW_PRERELEASE_PYTHONS: True
-        # Extra options for manylinux.
-        CIBW_MANYLINUX_AARCH64_IMAGE: ${{ matrix.spec }}
-        CIBW_MANYLINUX_PYPY_AARCH64_IMAGE: ${{ matrix.spec }}
-
-    - uses: actions/upload-artifact@v4
-      with:
-        name: dist-qemu-${{ matrix.python-version }}-${{ matrix.spec }}
-        path: ./wheelhouse/*.whl
-
-  build-2-native-wheels:
+  build-native-wheels:
     if: github.event_name != 'schedule' || github.repository_owner == 'python-pillow'
     name: ${{ matrix.name }}
     runs-on: ${{ matrix.os }}
@@ -115,7 +63,7 @@ jobs:
        - name: "macOS 10.15 x86_64"
          os: macos-13
          cibw_arch: x86_64
-         build: "pp310*"
+         build: "pp3*"
          macosx_deployment_target: "10.15"
        - name: "macOS arm64"
          os: macos-latest
@@ -129,9 +77,18 @@ jobs:
          cibw_arch: x86_64
          build: "*manylinux*"
          manylinux: "manylinux_2_28"
+       - name: "manylinux2014 and musllinux aarch64"
+         os: ubuntu-24.04-arm
+         cibw_arch: aarch64
+       - name: "manylinux_2_28 aarch64"
+         os: ubuntu-24.04-arm
+         cibw_arch: aarch64
+         build: "*manylinux*"
+         manylinux: "manylinux_2_28"
     steps:
     - uses: actions/checkout@v4
       with:
+        persist-credentials: false
        submodules: true

     - uses: actions/setup-python@v5
@@ -148,10 +105,12 @@ jobs:
       env:
         CIBW_ARCHS: ${{ matrix.cibw_arch }}
         CIBW_BUILD: ${{ matrix.build }}
-        CIBW_FREE_THREADED_SUPPORT: True
+        CIBW_ENABLE: cpython-prerelease cpython-freethreading pypy
+        CIBW_MANYLINUX_AARCH64_IMAGE: ${{ matrix.manylinux }}
+        CIBW_MANYLINUX_PYPY_AARCH64_IMAGE: ${{ matrix.manylinux }}
         CIBW_MANYLINUX_PYPY_X86_64_IMAGE: ${{ matrix.manylinux }}
         CIBW_MANYLINUX_X86_64_IMAGE: ${{ matrix.manylinux }}
-        CIBW_PRERELEASE_PYTHONS: True
+        CIBW_SKIP: pp39-*
         MACOSX_DEPLOYMENT_TARGET: ${{ matrix.macosx_deployment_target }}

     - uses: actions/upload-artifact@v4
@@ -172,10 +131,13 @@ jobs:
        - cibw_arch: ARM64
     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: Checkout extra test images
       uses: actions/checkout@v4
       with:
+        persist-credentials: false
         repository: python-pillow/test-images
         path: Tests\test-images

@@ -222,8 +184,8 @@ jobs:
         CIBW_ARCHS: ${{ matrix.cibw_arch }}
         CIBW_BEFORE_ALL: "{package}\\winbuild\\build\\build_dep_all.cmd"
         CIBW_CACHE_PATH: "C:\\cibw"
-        CIBW_FREE_THREADED_SUPPORT: True
-        CIBW_PRERELEASE_PYTHONS: True
+        CIBW_ENABLE: cpython-prerelease cpython-freethreading pypy
+        CIBW_SKIP: pp39-*
         CIBW_TEST_SKIP: "*-win_arm64"
         CIBW_TEST_COMMAND: 'docker run --rm
           -v {project}:C:\pillow
@@ -251,13 +213,13 @@ jobs:
     runs-on: ubuntu-latest
     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: Set up Python
       uses: actions/setup-python@v5
       with:
         python-version: "3.x"
-        cache: pip
-        cache-dependency-path: "Makefile"

     - run: make sdist

@@ -268,7 +230,7 @@ jobs:

   scientific-python-nightly-wheels-publish:
     if: github.repository_owner == 'python-pillow' && (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch')
-    needs: [build-2-native-wheels, windows]
+    needs: [build-native-wheels, windows]
     runs-on: ubuntu-latest
     name: Upload wheels to scientific-python-nightly-wheels
     steps:
@@ -285,7 +247,7 @@ jobs:

   pypi-publish:
     if: github.repository_owner == 'python-pillow' && github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
-    needs: [build-1-QEMU-emulated-wheels, build-2-native-wheels, windows, sdist]
+    needs: [build-native-wheels, windows, sdist]
     runs-on: ubuntu-latest
     name: Upload release to PyPI
     environment:
.gitignore (5 lines changed)
@@ -19,6 +19,7 @@ lib64/
 parts/
 sdist/
 var/
+wheelhouse/
 *.egg-info/
 .installed.cfg
 *.egg
@@ -90,5 +91,9 @@ Tests/images/msp
 Tests/images/picins
 Tests/images/sunraster

+# Test and dependency downloads
+pillow-depends-main.zip
+pillow-test-images.zip
+
 # pyinstaller
 *.spec
.pre-commit-config.yaml
@@ -1,17 +1,17 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.6.9
+    rev: v0.9.9
     hooks:
       - id: ruff
         args: [--exit-non-zero-on-fix]

   - repo: https://github.com/psf/black-pre-commit-mirror
-    rev: 24.8.0
+    rev: 25.1.0
     hooks:
       - id: black

   - repo: https://github.com/PyCQA/bandit
-    rev: 1.7.10
+    rev: 1.8.3
     hooks:
       - id: bandit
         args: [--severity-level=high]
@@ -24,7 +24,7 @@ repos:
         exclude: (Makefile$|\.bat$|\.cmake$|\.eps$|\.fits$|\.gd$|\.opt$)

   - repo: https://github.com/pre-commit/mirrors-clang-format
-    rev: v19.1.1
+    rev: v19.1.7
     hooks:
       - id: clang-format
         types: [c]
@@ -50,30 +50,35 @@ repos:
         exclude: ^.github/.*TEMPLATE|^Tests/(fonts|images)/

   - repo: https://github.com/python-jsonschema/check-jsonschema
-    rev: 0.29.3
+    rev: 0.31.2
     hooks:
       - id: check-github-workflows
       - id: check-readthedocs
       - id: check-renovate

+  - repo: https://github.com/woodruffw/zizmor-pre-commit
+    rev: v1.4.1
+    hooks:
+      - id: zizmor
+
   - repo: https://github.com/sphinx-contrib/sphinx-lint
     rev: v1.0.0
     hooks:
       - id: sphinx-lint

   - repo: https://github.com/tox-dev/pyproject-fmt
-    rev: 2.2.4
+    rev: v2.5.1
     hooks:
       - id: pyproject-fmt

   - repo: https://github.com/abravalheri/validate-pyproject
-    rev: v0.20.2
+    rev: v0.23
     hooks:
       - id: validate-pyproject
         additional_dependencies: [trove-classifiers>=2024.10.12]

   - repo: https://github.com/tox-dev/tox-ini-fmt
-    rev: 1.4.1
+    rev: 1.5.0
     hooks:
       - id: tox-ini-fmt

@ -1,5 +1,8 @@
 version: 2

+sphinx:
+  configuration: docs/conf.py
+
 formats: [pdf]

 build:
CHANGES.rst: 12 changes

@ -2,9 +2,19 @@
 Changelog (Pillow)
 ==================

-11.0.0 (unreleased)
+11.1.0 and newer
+----------------
+
+See GitHub Releases:
+
+- https://github.com/python-pillow/Pillow/releases
+
+11.0.0 (2024-10-15)
 -------------------

+- Update licence to MIT-CMU #8460
+  [hugovk]
+
 - Conditionally define ImageCms type hint to avoid requiring core #8197
   [radarhere]
LICENSE: 4 changes

@ -5,9 +5,9 @@ The Python Imaging Library (PIL) is

 Pillow is the friendly PIL fork. It is

-Copyright © 2010-2024 by Jeffrey A. Clark and contributors
+Copyright © 2010 by Jeffrey A. Clark and contributors

-Like PIL, Pillow is licensed under the open source HPND License:
+Like PIL, Pillow is licensed under the open source MIT-CMU License:

 By obtaining, using, and/or copying this software and/or its associated
 documentation, you agree that you have read, understood, and will comply
@ -20,7 +20,6 @@ graft docs
 graft _custom_build

 # build/src control detritus
-exclude .appveyor.yml
 exclude .clang-format
 exclude .coveragerc
 exclude .editorconfig
@ -42,9 +42,6 @@ As of 2019, Pillow development is
     <a href="https://github.com/python-pillow/Pillow/actions/workflows/test-docker.yml"><img
         alt="GitHub Actions build status (Test Docker)"
         src="https://github.com/python-pillow/Pillow/workflows/Test%20Docker/badge.svg"></a>
-    <a href="https://ci.appveyor.com/project/python-pillow/Pillow"><img
-        alt="AppVeyor CI build status (Windows)"
-        src="https://img.shields.io/appveyor/build/python-pillow/Pillow/main.svg?label=Windows%20build"></a>
     <a href="https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml"><img
         alt="GitHub Actions build status (Wheels)"
         src="https://github.com/python-pillow/Pillow/workflows/Wheels/badge.svg"></a>

@ -107,7 +104,7 @@ The core image library is designed for fast access to data stored in a few basic
 - [Issues](https://github.com/python-pillow/Pillow/issues)
 - [Pull requests](https://github.com/python-pillow/Pillow/pulls)
 - [Release notes](https://pillow.readthedocs.io/en/stable/releasenotes/index.html)
-- [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst)
+- [Changelog](https://github.com/python-pillow/Pillow/releases)
 - [Pre-fork](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst#pre-fork)

 ## Report a Vulnerability
@ -9,10 +9,9 @@ Released quarterly on January 2nd, April 1st, July 1st and October 15th.

 * [ ] Open a release ticket e.g. https://github.com/python-pillow/Pillow/issues/3154
 * [ ] Develop and prepare release in `main` branch.
-* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) and [AppVeyor](https://ci.appveyor.com/project/python-pillow/Pillow) to confirm passing tests in `main` branch.
+* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) to confirm passing tests in `main` branch.
 * [ ] Check that all the wheel builds pass the tests in the [GitHub Actions "Wheels" workflow](https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml) jobs by manually triggering them.
 * [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/), update version identifier in `src/PIL/_version.py`
-* [ ] Update `CHANGES.rst`.
 * [ ] Run pre-release check via `make release-test` in a freshly cloned repo.
 * [ ] Create branch and tag for release e.g.:
   ```bash

@ -34,13 +33,12 @@ Released quarterly on January 2nd, April 1st, July 1st and October 15th.
 Released as needed for security, installation or critical bug fixes.

 * [ ] Make necessary changes in `main` branch.
-* [ ] Update `CHANGES.rst`.
 * [ ] Check out release branch e.g.:
   ```bash
   git checkout -t remotes/origin/5.2.x
   ```
 * [ ] Cherry pick individual commits from `main` branch to release branch e.g. `5.2.x`, then `git push`.
-* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) and [AppVeyor](https://ci.appveyor.com/project/python-pillow/Pillow) to confirm passing tests in release branch e.g. `5.2.x`.
+* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) to confirm passing tests in release branch e.g. `5.2.x`.
 * [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/), update version identifier in `src/PIL/_version.py`
 * [ ] Run pre-release check via `make release-test`.
 * [ ] Create tag for release e.g.:
@ -3,26 +3,25 @@ from __future__ import annotations
 import zlib
 from io import BytesIO

+import pytest
+
 from PIL import Image, ImageFile, PngImagePlugin

 TEST_FILE = "Tests/images/png_decompression_dos.png"


-def test_ignore_dos_text() -> None:
-    ImageFile.LOAD_TRUNCATED_IMAGES = True
+def test_ignore_dos_text(monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)

-    try:
-        im = Image.open(TEST_FILE)
+    with Image.open(TEST_FILE) as im:
         im.load()
-    finally:
-        ImageFile.LOAD_TRUNCATED_IMAGES = False

     assert isinstance(im, PngImagePlugin.PngImageFile)
     for s in im.text.values():
         assert len(s) < 1024 * 1024, "Text chunk larger than 1M"

     for s in im.info.values():
         assert len(s) < 1024 * 1024, "Text chunk larger than 1M"


 def test_dos_text() -> None:
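The change above swaps manual save and restore of `ImageFile.LOAD_TRUNCATED_IMAGES` for pytest's `monkeypatch` fixture, which undoes the attribute change automatically at teardown. A minimal self-contained sketch of the same pattern, using a stand-in namespace object rather than Pillow itself (the `settings` object below is hypothetical):

```python
import types

import pytest

# Hypothetical module-level flag, standing in for ImageFile.LOAD_TRUNCATED_IMAGES.
settings = types.SimpleNamespace(LOAD_TRUNCATED_IMAGES=False)


def test_flag_is_patched(monkeypatch: pytest.MonkeyPatch) -> None:
    # setattr() here applies only for the duration of this test;
    # pytest restores the original value afterwards, even on failure.
    monkeypatch.setattr(settings, "LOAD_TRUNCATED_IMAGES", True)
    assert settings.LOAD_TRUNCATED_IMAGES is True


def test_flag_back_to_default() -> None:
    # Runs after the test above: the patched value did not leak.
    assert settings.LOAD_TRUNCATED_IMAGES is False
```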
@ -34,6 +34,7 @@ def test_wheel_features() -> None:
         "fribidi",
         "harfbuzz",
         "libjpeg_turbo",
+        "zlib_ng",
         "xcb",
     }
@ -9,7 +9,6 @@ import os
 import shutil
 import subprocess
 import sys
-import sysconfig
 import tempfile
 from collections.abc import Sequence
 from functools import lru_cache

@ -140,18 +139,11 @@ def assert_image_similar_tofile(
     filename: str,
     epsilon: float,
     msg: str | None = None,
-    mode: str | None = None,
 ) -> None:
     with Image.open(filename) as img:
-        if mode:
-            img = img.convert(mode)
         assert_image_similar(a, img, epsilon, msg)


-def assert_all_same(items: Sequence[Any], msg: str | None = None) -> None:
-    assert items.count(items[0]) == len(items), msg
-
-
 def assert_not_all_same(items: Sequence[Any], msg: str | None = None) -> None:
     assert items.count(items[0]) != len(items), msg

@ -327,16 +319,7 @@ def magick_command() -> list[str] | None:
     return None


-def on_appveyor() -> bool:
-    return "APPVEYOR" in os.environ
-
-
-def on_github_actions() -> bool:
-    return "GITHUB_ACTIONS" in os.environ
-
-
 def on_ci() -> bool:
-    # GitHub Actions and AppVeyor have "CI"
     return "CI" in os.environ

@ -358,10 +341,6 @@ def is_pypy() -> bool:
     return hasattr(sys, "pypy_translation_info")


-def is_mingw() -> bool:
-    return sysconfig.get_platform() == "mingw"
-
-
 class CachedProperty:
     def __init__(self, func: Callable[[Any], Any]) -> None:
         self.func = func
Binary file not shown. (Before: 486 B, After: 533 B)

BIN  Tests/images/jfif_unit_cm.jpg (new file). Binary file not shown. (After: 391 B)

BIN  Tests/images/multiline_text_justify.png (new file). Binary file not shown. (After: 3.2 KiB)
@ -7,7 +7,7 @@ import fuzzers
 import packaging
 import pytest

-from PIL import Image, UnidentifiedImageError, features
+from PIL import Image, features
 from Tests.helper import skip_unless_feature

 if sys.platform.startswith("win32"):

@ -32,21 +32,17 @@ def test_fuzz_images(path: str) -> None:
             fuzzers.fuzz_image(f.read())
             assert True
         except (
+            # Known exceptions from Pillow
             OSError,
             SyntaxError,
             MemoryError,
             ValueError,
             NotImplementedError,
             OverflowError,
-        ):
-            # Known exceptions that are through from Pillow
-            assert True
-        except (
+            # Known Image.* exceptions
             Image.DecompressionBombError,
             Image.DecompressionBombWarning,
-            UnidentifiedImageError,
         ):
-            # Known Image.* exceptions
             assert True
         finally:
             fuzzers.disable_decompressionbomb_error()
@ -22,6 +22,8 @@ def test_bad() -> None:
     for f in get_files("b"):
         # Assert that there is no unclosed file warning
         with warnings.catch_warnings():
+            warnings.simplefilter("error")
+
             try:
                 with Image.open(f) as im:
                     im.load()
@ -19,7 +19,7 @@ except ImportError:
 class TestColorLut3DCoreAPI:
     def generate_identity_table(
         self, channels: int, size: int | tuple[int, int, int]
-    ) -> tuple[int, int, int, int, list[float]]:
+    ) -> tuple[int, tuple[int, int, int], list[float]]:
         if isinstance(size, tuple):
             size_1d, size_2d, size_3d = size
         else:

@ -39,9 +39,7 @@ class TestColorLut3DCoreAPI:
         ]
         return (
             channels,
-            size_1d,
-            size_2d,
-            size_3d,
+            (size_1d, size_2d, size_3d),
             [item for sublist in table for item in sublist],
         )

@ -89,21 +87,21 @@ class TestColorLut3DCoreAPI:

         with pytest.raises(ValueError, match=r"size1D \* size2D \* size3D"):
             im.im.color_lut_3d(
-                "RGB", Image.Resampling.BILINEAR, 3, 2, 2, 2, [0, 0, 0] * 7
+                "RGB", Image.Resampling.BILINEAR, 3, (2, 2, 2), [0, 0, 0] * 7
             )

         with pytest.raises(ValueError, match=r"size1D \* size2D \* size3D"):
             im.im.color_lut_3d(
-                "RGB", Image.Resampling.BILINEAR, 3, 2, 2, 2, [0, 0, 0] * 9
+                "RGB", Image.Resampling.BILINEAR, 3, (2, 2, 2), [0, 0, 0] * 9
             )

         with pytest.raises(TypeError):
             im.im.color_lut_3d(
-                "RGB", Image.Resampling.BILINEAR, 3, 2, 2, 2, [0, 0, "0"] * 8
+                "RGB", Image.Resampling.BILINEAR, 3, (2, 2, 2), [0, 0, "0"] * 8
             )

         with pytest.raises(TypeError):
-            im.im.color_lut_3d("RGB", Image.Resampling.BILINEAR, 3, 2, 2, 2, 16)
+            im.im.color_lut_3d("RGB", Image.Resampling.BILINEAR, 3, (2, 2, 2), 16)

     @pytest.mark.parametrize(
         "lut_mode, table_channels, table_size",

@ -264,7 +262,7 @@ class TestColorLut3DCoreAPI:
         assert_image_equal(
             Image.merge('RGB', im.split()[::-1]),
             im._new(im.im.color_lut_3d('RGB', Image.Resampling.BILINEAR,
-                    3, 2, 2, 2, [
+                    3, (2, 2, 2), [
                         0, 0, 0, 0, 0, 1,
                         0, 1, 0, 0, 1, 1,

@ -286,7 +284,7 @@ class TestColorLut3DCoreAPI:
         # fmt: off
         transformed = im._new(im.im.color_lut_3d('RGB', Image.Resampling.BILINEAR,
-                              3, 2, 2, 2,
+                              3, (2, 2, 2),
                               [
                                   -1, -1, -1, 2, -1, -1,
                                   -1, 2, -1, 2, 2, -1,

@ -307,7 +305,7 @@ class TestColorLut3DCoreAPI:
         # fmt: off
         transformed = im._new(im.im.color_lut_3d('RGB', Image.Resampling.BILINEAR,
-                              3, 2, 2, 2,
+                              3, (2, 2, 2),
                               [
                                   -3, -3, -3, 5, -3, -3,
                                   -3, 5, -3, 5, 5, -3,

@ -388,10 +386,12 @@ class TestColorLut3DFilter:
         table = numpy.ones((7 * 6 * 5, 3), dtype=numpy.float16)
         lut = ImageFilter.Color3DLUT((5, 6, 7), table)
+        assert isinstance(lut.table, numpy.ndarray)
         assert lut.table.shape == (table.size,)

         table = numpy.ones((7 * 6 * 5 * 3), dtype=numpy.float16)
         lut = ImageFilter.Color3DLUT((5, 6, 7), table)
+        assert isinstance(lut.table, numpy.ndarray)
         assert lut.table.shape == (table.size,)

         # Check application
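These LUT changes pass the table size to the private `im.im.color_lut_3d` call as a single `(size1D, size2D, size3D)` tuple instead of three separate ints. The public wrapper is `ImageFilter.Color3DLUT`, which also accepts either an int or a tuple for the size; a short sketch of applying a 2x2x2 identity table through that public API (the sizes, colors and tolerance below are illustrative):

```python
from PIL import Image, ImageFilter

# Build a 2x2x2 identity lookup table: each grid point maps to itself.
size = 2
table = [
    (r / (size - 1), g / (size - 1), b / (size - 1))
    for b in range(size)
    for g in range(size)
    for r in range(size)
]

lut = ImageFilter.Color3DLUT(size, table)  # size may also be a (s1, s2, s3) tuple
im = Image.new("RGB", (16, 16), (200, 100, 50))
out = im.filter(lut)

# An identity LUT should leave pixels unchanged, up to rounding.
expected = (200, 100, 50)
actual = out.getpixel((0, 0))
assert all(abs(a - e) <= 1 for a, e in zip(actual, expected))
```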
@ -12,19 +12,16 @@ ORIGINAL_LIMIT = Image.MAX_IMAGE_PIXELS

 class TestDecompressionBomb:
-    def teardown_method(self) -> None:
-        Image.MAX_IMAGE_PIXELS = ORIGINAL_LIMIT
-
     def test_no_warning_small_file(self) -> None:
         # Implicit assert: no warning.
         # A warning would cause a failure.
         with Image.open(TEST_FILE):
             pass

-    def test_no_warning_no_limit(self) -> None:
+    def test_no_warning_no_limit(self, monkeypatch: pytest.MonkeyPatch) -> None:
         # Arrange
         # Turn limit off
-        Image.MAX_IMAGE_PIXELS = None
+        monkeypatch.setattr(Image, "MAX_IMAGE_PIXELS", None)
         assert Image.MAX_IMAGE_PIXELS is None

         # Act / Assert

@ -33,18 +30,18 @@ class TestDecompressionBomb:
         with Image.open(TEST_FILE):
             pass

-    def test_warning(self) -> None:
+    def test_warning(self, monkeypatch: pytest.MonkeyPatch) -> None:
         # Set limit to trigger warning on the test file
-        Image.MAX_IMAGE_PIXELS = 128 * 128 - 1
+        monkeypatch.setattr(Image, "MAX_IMAGE_PIXELS", 128 * 128 - 1)
         assert Image.MAX_IMAGE_PIXELS == 128 * 128 - 1

         with pytest.warns(Image.DecompressionBombWarning):
             with Image.open(TEST_FILE):
                 pass

-    def test_exception(self) -> None:
+    def test_exception(self, monkeypatch: pytest.MonkeyPatch) -> None:
         # Set limit to trigger exception on the test file
-        Image.MAX_IMAGE_PIXELS = 64 * 128 - 1
+        monkeypatch.setattr(Image, "MAX_IMAGE_PIXELS", 64 * 128 - 1)
         assert Image.MAX_IMAGE_PIXELS == 64 * 128 - 1

         with pytest.raises(Image.DecompressionBombError):

@ -66,9 +63,9 @@ class TestDecompressionBomb:
         with pytest.raises(Image.DecompressionBombError):
             im.seek(1)

-    def test_exception_gif_zero_width(self) -> None:
+    def test_exception_gif_zero_width(self, monkeypatch: pytest.MonkeyPatch) -> None:
         # Set limit to trigger exception on the test file
-        Image.MAX_IMAGE_PIXELS = 4 * 64 * 128
+        monkeypatch.setattr(Image, "MAX_IMAGE_PIXELS", 4 * 64 * 128)
         assert Image.MAX_IMAGE_PIXELS == 4 * 64 * 128

         with pytest.raises(Image.DecompressionBombError):
@ -36,10 +36,11 @@ def test_version() -> None:
         else:
             assert function(name) == version
         if name != "PIL":
-            if name == "zlib" and version is not None:
-                version = re.sub(".zlib-ng$", "", version)
-            elif name == "libtiff" and version is not None:
-                version = re.sub("t$", "", version)
+            if version is not None:
+                if name == "zlib" and features.check_feature("zlib_ng"):
+                    version = re.sub(".zlib-ng$", "", version)
+                elif name == "libtiff":
+                    version = re.sub("t$", "", version)
             assert version is None or re.search(r"\d+(\.\d+)*$", version)

     for module in features.modules:
@ -307,13 +307,8 @@ def test_apng_syntax_errors() -> None:
             im.load()

     # we can handle this case gracefully
-    exception = None
     with Image.open("Tests/images/apng/syntax_num_frames_low.png") as im:
-        try:
-            im.seek(im.n_frames - 1)
-        except Exception as e:
-            exception = e
-        assert exception is None
+        im.seek(im.n_frames - 1)

     with pytest.raises(OSError):
         with Image.open("Tests/images/apng/syntax_num_frames_high.png") as im:

@ -405,13 +400,8 @@ def test_apng_save_split_fdat(tmp_path: Path) -> None:
         append_images=frames,
     )
     with Image.open(test_file) as im:
-        exception = None
-        try:
-            im.seek(im.n_frames - 1)
-            im.load()
-        except Exception as e:
-            exception = e
-        assert exception is None
+        im.seek(im.n_frames - 1)
+        im.load()


 def test_apng_save_duration_loop(tmp_path: Path) -> None:
@ -4,7 +4,7 @@ from pathlib import Path

 import pytest

-from PIL import Image
+from PIL import BlpImagePlugin, Image

 from .helper import (
     assert_image_equal,

@ -19,6 +19,7 @@ def test_load_blp1() -> None:
     assert_image_equal_tofile(im, "Tests/images/blp/blp1_jpeg.png")

     with Image.open("Tests/images/blp/blp1_jpeg2.blp") as im:
+        assert im.mode == "RGBA"
         im.load()


@ -37,6 +38,13 @@ def test_load_blp2_dxt1a() -> None:
     assert_image_equal_tofile(im, "Tests/images/blp/blp2_dxt1a.png")


+def test_invalid_file() -> None:
+    invalid_file = "Tests/images/flower.jpg"
+
+    with pytest.raises(BlpImagePlugin.BLPFormatError):
+        BlpImagePlugin.BlpImageFile(invalid_file)
+
+
 def test_save(tmp_path: Path) -> None:
     f = str(tmp_path / "temp.blp")
@ -83,4 +83,4 @@ def test_handler(tmp_path: Path) -> None:
         im.save(temp_file)
     assert handler.saved

-    BufrStubImagePlugin._handler = None
+    BufrStubImagePlugin.register_handler(None)
@ -4,8 +4,6 @@ import pytest

 from PIL import ContainerIO, Image

-from .helper import hopper
-
 TEST_FILE = "Tests/images/dummy.container"

@ -15,15 +13,15 @@ def test_sanity() -> None:

 def test_isatty() -> None:
-    with hopper() as im:
-        container = ContainerIO.ContainerIO(im, 0, 0)
+    with open(TEST_FILE, "rb") as fh:
+        container = ContainerIO.ContainerIO(fh, 0, 0)

         assert container.isatty() is False


 def test_seekable() -> None:
-    with hopper() as im:
-        container = ContainerIO.ContainerIO(im, 0, 0)
+    with open(TEST_FILE, "rb") as fh:
+        container = ContainerIO.ContainerIO(fh, 0, 0)

         assert container.seekable() is True
@ -26,16 +26,18 @@ def test_sanity() -> None:

 @pytest.mark.skipif(is_pypy(), reason="Requires CPython")
 def test_unclosed_file() -> None:
-    def open() -> None:
+    def open_test_image() -> None:
         im = Image.open(TEST_FILE)
         im.load()

     with pytest.warns(ResourceWarning):
-        open()
+        open_test_image()


 def test_closed_file() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         im = Image.open(TEST_FILE)
         im.load()
         im.close()

@ -43,6 +45,8 @@ def test_closed_file() -> None:

 def test_context_manager() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         with Image.open(TEST_FILE) as im:
             im.load()
@ -331,11 +331,13 @@ def test_dxt5_colorblock_alpha_issue_4142() -> None:

     with Image.open("Tests/images/dxt5-colorblock-alpha-issue-4142.dds") as im:
         px = im.getpixel((0, 0))
+        assert isinstance(px, tuple)
         assert px[0] != 0
         assert px[1] != 0
         assert px[2] != 0

         px = im.getpixel((1, 0))
+        assert isinstance(px, tuple)
         assert px[0] != 0
         assert px[1] != 0
         assert px[2] != 0
@ -95,10 +95,14 @@ def test_sanity(filename: str, size: tuple[int, int], scale: int) -> None:
 @pytest.mark.skipif(not HAS_GHOSTSCRIPT, reason="Ghostscript not available")
 def test_load() -> None:
     with Image.open(FILE1) as im:
-        assert im.load()[0, 0] == (255, 255, 255)
+        px = im.load()
+        assert px is not None
+        assert px[0, 0] == (255, 255, 255)

         # Test again now that it has already been loaded once
-        assert im.load()[0, 0] == (255, 255, 255)
+        px = im.load()
+        assert px is not None
+        assert px[0, 0] == (255, 255, 255)


 def test_binary() -> None:
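Several of these test updates split `im.load()[x, y]` into separate statements because `Image.Image.load` is typed as returning an optional pixel-access object, so a `None` check is needed before subscripting. A small sketch of the pattern on a freshly created image (no test files assumed):

```python
from PIL import Image

im = Image.new("RGB", (8, 8), (255, 255, 255))

px = im.load()           # pixel-access object, or None per the type hints
assert px is not None    # satisfies strict type checking before indexing
assert px[0, 0] == (255, 255, 255)

# getpixel() is the alternative used elsewhere in these tests;
# it needs no None check and returns the same tuple here.
assert im.getpixel((0, 0)) == (255, 255, 255)
```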
@ -35,36 +35,35 @@ def test_sanity() -> None:
     assert im.is_animated


-def test_prefix_chunk() -> None:
-    ImageFile.LOAD_TRUNCATED_IMAGES = True
-    try:
-        with Image.open(animated_test_file_with_prefix_chunk) as im:
+def test_prefix_chunk(monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
+    with Image.open(animated_test_file_with_prefix_chunk) as im:
         assert im.mode == "P"
         assert im.size == (320, 200)
         assert im.format == "FLI"
         assert im.info["duration"] == 171
         assert im.is_animated

         palette = im.getpalette()
         assert palette[3:6] == [255, 255, 255]
         assert palette[381:384] == [204, 204, 12]
         assert palette[765:] == [252, 0, 0]
-    finally:
-        ImageFile.LOAD_TRUNCATED_IMAGES = False


 @pytest.mark.skipif(is_pypy(), reason="Requires CPython")
 def test_unclosed_file() -> None:
-    def open() -> None:
+    def open_test_image() -> None:
         im = Image.open(static_test_file)
         im.load()

     with pytest.warns(ResourceWarning):
-        open()
+        open_test_image()


 def test_closed_file() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         im = Image.open(static_test_file)
         im.load()
         im.close()

@ -81,6 +80,8 @@ def test_seek_after_close() -> None:

 def test_context_manager() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         with Image.open(static_test_file) as im:
             im.load()
@ -1,5 +1,8 @@
 from __future__ import annotations

+import io
+import struct
+
 import pytest

 from PIL import FtexImagePlugin, Image

@ -23,3 +26,15 @@ def test_invalid_file() -> None:

     with pytest.raises(SyntaxError):
         FtexImagePlugin.FtexImageFile(invalid_file)
+
+
+def test_invalid_texture() -> None:
+    with open("Tests/images/ftex_dxt1.ftc", "rb") as fp:
+        data = fp.read()
+
+    # Change texture compression format
+    data = data[:24] + struct.pack("<i", 2) + data[28:]
+
+    with pytest.raises(ValueError, match="Invalid texture compression format: 2"):
+        with Image.open(io.BytesIO(data)):
+            pass
@ -14,10 +14,14 @@ def test_gbr_file() -> None:

 def test_load() -> None:
     with Image.open("Tests/images/gbr.gbr") as im:
-        assert im.load()[0, 0] == (0, 0, 0, 0)
+        px = im.load()
+        assert px is not None
+        assert px[0, 0] == (0, 0, 0, 0)

         # Test again now that it has already been loaded once
-        assert im.load()[0, 0] == (0, 0, 0, 0)
+        px = im.load()
+        assert px is not None
+        assert px[0, 0] == (0, 0, 0, 0)


 def test_multiple_load_operations() -> None:
@ -4,6 +4,8 @@ import pytest

 from PIL import GdImageFile, UnidentifiedImageError

+from .helper import assert_image_similar_tofile
+
 TEST_GD_FILE = "Tests/images/hopper.gd"

@ -11,6 +13,7 @@ def test_sanity() -> None:
     with GdImageFile.open(TEST_GD_FILE) as im:
         assert im.size == (128, 128)
         assert im.format == "GD"
+        assert_image_similar_tofile(im.convert("RGB"), "Tests/images/hopper.jpg", 14)


 def test_bad_mode() -> None:
@ -4,6 +4,7 @@ import warnings
 from collections.abc import Generator
 from io import BytesIO
 from pathlib import Path
+from typing import Any

 import pytest

@ -21,9 +22,6 @@ from .helper import (
 # sample gif stream
 TEST_GIF = "Tests/images/hopper.gif"

-with open(TEST_GIF, "rb") as f:
-    data = f.read()
-

 def test_sanity() -> None:
     with Image.open(TEST_GIF) as im:
@ -36,16 +34,18 @@ def test_sanity() -> None:

 @pytest.mark.skipif(is_pypy(), reason="Requires CPython")
 def test_unclosed_file() -> None:
-    def open() -> None:
+    def open_test_image() -> None:
         im = Image.open(TEST_GIF)
         im.load()

     with pytest.warns(ResourceWarning):
-        open()
+        open_test_image()


 def test_closed_file() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         im = Image.open(TEST_GIF)
         im.load()
         im.close()

@ -67,6 +67,8 @@ def test_seek_after_close() -> None:

 def test_context_manager() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         with Image.open(TEST_GIF) as im:
             im.load()
@ -81,12 +83,12 @@ def test_invalid_file() -> None:
 def test_l_mode_transparency() -> None:
     with Image.open("Tests/images/no_palette_with_transparency.gif") as im:
         assert im.mode == "L"
-        assert im.load()[0, 0] == 128
+        assert im.getpixel((0, 0)) == 128
         assert im.info["transparency"] == 255

         im.seek(1)
         assert im.mode == "L"
-        assert im.load()[0, 0] == 128
+        assert im.getpixel((0, 0)) == 128


 def test_l_mode_after_rgb() -> None:

@ -104,7 +106,7 @@ def test_palette_not_needed_for_second_frame() -> None:
     assert_image_similar(im, hopper("L").convert("RGB"), 8)


-def test_strategy() -> None:
+def test_strategy(monkeypatch: pytest.MonkeyPatch) -> None:
     with Image.open("Tests/images/iss634.gif") as im:
         expected_rgb_always = im.convert("RGB")
@ -114,35 +116,36 @@ def test_strategy() -> None:
         im.seek(1)
         expected_different = im.convert("RGB")

-    try:
-        GifImagePlugin.LOADING_STRATEGY = GifImagePlugin.LoadingStrategy.RGB_ALWAYS
-        with Image.open("Tests/images/iss634.gif") as im:
-            assert im.mode == "RGB"
-            assert_image_equal(im, expected_rgb_always)
+    monkeypatch.setattr(
+        GifImagePlugin, "LOADING_STRATEGY", GifImagePlugin.LoadingStrategy.RGB_ALWAYS
+    )
+    with Image.open("Tests/images/iss634.gif") as im:
+        assert im.mode == "RGB"
+        assert_image_equal(im, expected_rgb_always)

-        with Image.open("Tests/images/chi.gif") as im:
-            assert im.mode == "RGBA"
-            assert_image_equal(im, expected_rgb_always_rgba)
+    with Image.open("Tests/images/chi.gif") as im:
+        assert im.mode == "RGBA"
+        assert_image_equal(im, expected_rgb_always_rgba)

-        GifImagePlugin.LOADING_STRATEGY = (
-            GifImagePlugin.LoadingStrategy.RGB_AFTER_DIFFERENT_PALETTE_ONLY
-        )
-        # Stay in P mode with only a global palette
-        with Image.open("Tests/images/chi.gif") as im:
-            assert im.mode == "P"
+    monkeypatch.setattr(
+        GifImagePlugin,
+        "LOADING_STRATEGY",
+        GifImagePlugin.LoadingStrategy.RGB_AFTER_DIFFERENT_PALETTE_ONLY,
+    )
+    # Stay in P mode with only a global palette
+    with Image.open("Tests/images/chi.gif") as im:
+        assert im.mode == "P"

-            im.seek(1)
-            assert im.mode == "P"
-            assert_image_equal(im.convert("RGB"), expected_different)
+        im.seek(1)
+        assert im.mode == "P"
+        assert_image_equal(im.convert("RGB"), expected_different)

-        # Change to RGB mode when a frame has an individual palette
-        with Image.open("Tests/images/iss634.gif") as im:
-            assert im.mode == "P"
+    # Change to RGB mode when a frame has an individual palette
+    with Image.open("Tests/images/iss634.gif") as im:
+        assert im.mode == "P"

-            im.seek(1)
-            assert im.mode == "RGB"
-    finally:
-        GifImagePlugin.LOADING_STRATEGY = GifImagePlugin.LoadingStrategy.RGB_AFTER_FIRST
+        im.seek(1)
+        assert im.mode == "RGB"


 def test_optimize() -> None:
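These strategy tests now scope changes to `GifImagePlugin.LOADING_STRATEGY` with `monkeypatch` rather than a try/finally reset. Outside the test suite the flag is an ordinary module attribute; a brief sketch of toggling it around a single decode, assuming the sample GIF that ships in this repository:

```python
from PIL import GifImagePlugin, Image

# Decode every GIF frame as RGB instead of staying in palette ("P") mode.
GifImagePlugin.LOADING_STRATEGY = GifImagePlugin.LoadingStrategy.RGB_ALWAYS
try:
    with Image.open("Tests/images/iss634.gif") as im:
        assert im.mode == "RGB"
finally:
    # Restore the default so later code sees the usual behaviour.
    GifImagePlugin.LOADING_STRATEGY = GifImagePlugin.LoadingStrategy.RGB_AFTER_FIRST
```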
@ -304,8 +307,9 @@ def test_roundtrip_save_all_1(tmp_path: Path) -> None:
 def test_loading_multiple_palettes(path: str, mode: str) -> None:
     with Image.open(path) as im:
         assert im.mode == "P"
+        assert im.palette is not None
         first_frame_colors = im.palette.colors.keys()
-        original_color = im.convert("RGB").load()[0, 0]
+        original_color = im.convert("RGB").getpixel((0, 0))

         im.seek(1)
         assert im.mode == mode

@ -313,10 +317,10 @@ def test_loading_multiple_palettes(path: str, mode: str) -> None:
         im = im.convert("RGB")

         # Check a color only from the old palette
-        assert im.load()[0, 0] == original_color
+        assert im.getpixel((0, 0)) == original_color

         # Check a color from the new palette
-        assert im.load()[24, 24] not in first_frame_colors
+        assert im.getpixel((24, 24)) not in first_frame_colors


 def test_headers_saving_for_animated_gifs(tmp_path: Path) -> None:
@ -482,8 +486,7 @@ def test_eoferror() -> None:
 def test_first_frame_transparency() -> None:
     with Image.open("Tests/images/first_frame_transparency.gif") as im:
-        px = im.load()
-        assert px[0, 0] == im.info["transparency"]
+        assert im.getpixel((0, 0)) == im.info["transparency"]


 def test_dispose_none() -> None:

@ -523,6 +526,7 @@ def test_dispose_background_transparency() -> None:
     with Image.open("Tests/images/dispose_bgnd_transparency.gif") as img:
         img.seek(2)
         px = img.load()
+        assert px is not None
         assert px[35, 30][3] == 0
@ -550,17 +554,15 @@ def test_dispose_background_transparency() -> None:
 def test_transparent_dispose(
     loading_strategy: GifImagePlugin.LoadingStrategy,
     expected_colors: tuple[tuple[int | tuple[int, int, int, int], ...]],
+    monkeypatch: pytest.MonkeyPatch,
 ) -> None:
-    GifImagePlugin.LOADING_STRATEGY = loading_strategy
-    try:
-        with Image.open("Tests/images/transparent_dispose.gif") as img:
+    monkeypatch.setattr(GifImagePlugin, "LOADING_STRATEGY", loading_strategy)
+    with Image.open("Tests/images/transparent_dispose.gif") as img:
         for frame in range(3):
             img.seek(frame)
             for x in range(3):
                 color = img.getpixel((x, 0))
                 assert color == expected_colors[frame][x]
-    finally:
-        GifImagePlugin.LOADING_STRATEGY = GifImagePlugin.LoadingStrategy.RGB_AFTER_FIRST


 def test_dispose_previous() -> None:
@ -759,6 +761,21 @@ def test_dispose2_previous_frame(tmp_path: Path) -> None:
     assert im.getpixel((0, 0)) == (0, 0, 0, 255)


+def test_dispose2_without_transparency(tmp_path: Path) -> None:
+    out = str(tmp_path / "temp.gif")
+
+    im = Image.new("P", (100, 100))
+
+    im2 = Image.new("P", (100, 100), (0, 0, 0))
+    im2.putpixel((50, 50), (255, 0, 0))
+
+    im.save(out, save_all=True, append_images=[im2], disposal=2)
+
+    with Image.open(out) as reloaded:
+        reloaded.seek(1)
+        assert reloaded.tile[0].extents == (0, 0, 100, 100)
+
+
 def test_transparency_in_second_frame(tmp_path: Path) -> None:
     out = str(tmp_path / "temp.gif")
     with Image.open("Tests/images/different_transparency.gif") as im:
@ -1308,6 +1325,7 @@ def test_palette_save_all_P(tmp_path: Path) -> None:
     with Image.open(out) as im:
         # Assert that the frames are correct, and each frame has the same palette
         assert_image_equal(im.convert("RGB"), frames[0].convert("RGB"))
+        assert im.palette is not None
         assert im.palette.palette == im.global_palette.palette

         im.seek(1)
@ -1342,32 +1360,30 @@ def test_save_I(tmp_path: Path) -> None:
     assert_image_equal(reloaded.convert("L"), im.convert("L"))


-def test_getdata() -> None:
+def test_getdata(monkeypatch: pytest.MonkeyPatch) -> None:
     # Test getheader/getdata against legacy values.
     # Create a 'P' image with holes in the palette.
-    im = Image._wedge().resize((16, 16), Image.Resampling.NEAREST)
+    im = Image.linear_gradient(mode="L").resize((16, 16), Image.Resampling.NEAREST)
     im.putpalette(ImagePalette.ImagePalette("RGB"))
     im.info = {"background": 0}

     passed_palette = bytes(255 - i // 3 for i in range(768))

-    GifImagePlugin._FORCE_OPTIMIZE = True
-    try:
+    monkeypatch.setattr(GifImagePlugin, "_FORCE_OPTIMIZE", True)
+
     h = GifImagePlugin.getheader(im, passed_palette)
     d = GifImagePlugin.getdata(im)

     import pickle

     # Enable to get target values on pre-refactor version
     # with open('Tests/images/gif_header_data.pkl', 'wb') as f:
     #     pickle.dump((h, d), f, 1)
     with open("Tests/images/gif_header_data.pkl", "rb") as f:
         (h_target, d_target) = pickle.load(f)

     assert h == h_target
     assert d == d_target
-    finally:
-        GifImagePlugin._FORCE_OPTIMIZE = False


 def test_lzw_bits() -> None:
@ -1393,24 +1409,23 @@ def test_lzw_bits() -> None:
     ),
 )
 def test_extents(
-    test_file: str, loading_strategy: GifImagePlugin.LoadingStrategy
+    test_file: str,
+    loading_strategy: GifImagePlugin.LoadingStrategy,
+    monkeypatch: pytest.MonkeyPatch,
 ) -> None:
-    GifImagePlugin.LOADING_STRATEGY = loading_strategy
-    try:
+    monkeypatch.setattr(GifImagePlugin, "LOADING_STRATEGY", loading_strategy)
     with Image.open("Tests/images/" + test_file) as im:
         assert im.size == (100, 100)

         # Check that n_frames does not change the size
         assert im.n_frames == 2
         assert im.size == (100, 100)

         im.seek(1)
         assert im.size == (150, 150)

         im.load()
         assert im.im.size == (150, 150)
-    finally:
-        GifImagePlugin.LOADING_STRATEGY = GifImagePlugin.LoadingStrategy.RGB_AFTER_FIRST


 def test_missing_background() -> None:
@ -1431,7 +1446,8 @@ def test_saving_rgba(tmp_path: Path) -> None:
     assert reloaded_rgba.load()[0, 0][3] == 0


-def test_optimizing_p_rgba(tmp_path: Path) -> None:
+@pytest.mark.parametrize("params", ({}, {"disposal": 2, "optimize": False}))
+def test_p_rgba(tmp_path: Path, params: dict[str, Any]) -> None:
     out = str(tmp_path / "temp.gif")

     im1 = Image.new("P", (100, 100))

@ -1443,7 +1459,7 @@ def test_optimizing_p_rgba(tmp_path: Path) -> None:
     im2 = Image.new("P", (100, 100))
     im2.putpalette(data, "RGBA")

-    im1.save(out, save_all=True, append_images=[im2])
+    im1.save(out, save_all=True, append_images=[im2], **params)

     with Image.open(out) as reloaded:
         assert reloaded.n_frames == 2
@ -83,4 +83,4 @@ def test_handler(tmp_path: Path) -> None:
         im.save(temp_file)
     assert handler.saved

-    GribStubImagePlugin._handler = None
+    GribStubImagePlugin.register_handler(None)
@ -85,4 +85,4 @@ def test_handler(tmp_path: Path) -> None:
         im.save(temp_file)
     assert handler.saved

-    Hdf5StubImagePlugin._handler = None
+    Hdf5StubImagePlugin.register_handler(None)
@ -21,6 +21,8 @@ def test_sanity() -> None:
     with Image.open(TEST_FILE) as im:
         # Assert that there is no unclosed file warning
         with warnings.catch_warnings():
+            warnings.simplefilter("error")
+
             im.load()

         assert im.mode == "RGBA"

@ -30,10 +32,14 @@ def test_sanity() -> None:

 def test_load() -> None:
     with Image.open(TEST_FILE) as im:
-        assert im.load()[0, 0] == (0, 0, 0, 0)
+        px = im.load()
+        assert px is not None
+        assert px[0, 0] == (0, 0, 0, 0)

         # Test again now that it has already been loaded once
-        assert im.load()[0, 0] == (0, 0, 0, 0)
+        px = im.load()
+        assert px is not None
+        assert px[0, 0] == (0, 0, 0, 0)


 def test_save(tmp_path: Path) -> None:
@ -24,7 +24,9 @@ def test_sanity() -> None:

 def test_load() -> None:
     with Image.open(TEST_ICO_FILE) as im:
-        assert im.load()[0, 0] == (1, 1, 9, 255)
+        px = im.load()
+        assert px is not None
+        assert px[0, 0] == (1, 1, 9, 255)


 def test_mask() -> None:

@ -243,27 +245,23 @@ def test_draw_reloaded(tmp_path: Path) -> None:
     assert_image_equal_tofile(im, "Tests/images/hopper_draw.ico")


-def test_truncated_mask() -> None:
+def test_truncated_mask(monkeypatch: pytest.MonkeyPatch) -> None:
     # 1 bpp
     with open("Tests/images/hopper_mask.ico", "rb") as fp:
         data = fp.read()

-    ImageFile.LOAD_TRUNCATED_IMAGES = True
+    monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
     data = data[:-3]

-    try:
-        with Image.open(io.BytesIO(data)) as im:
-            with Image.open("Tests/images/hopper_mask.png") as expected:
-                assert im.mode == "1"
+    with Image.open(io.BytesIO(data)) as im:
+        assert im.mode == "1"

     # 32 bpp
     output = io.BytesIO()
     expected = hopper("RGBA")
     expected.save(output, "ico", bitmap_format="bmp")

     data = output.getvalue()[:-1]

     with Image.open(io.BytesIO(data)) as im:
         assert im.mode == "RGB"
-    finally:
-        ImageFile.LOAD_TRUNCATED_IMAGES = False
@@ -31,16 +31,18 @@ def test_name_limit(tmp_path: Path) -> None:

 @pytest.mark.skipif(is_pypy(), reason="Requires CPython")
 def test_unclosed_file() -> None:
-    def open() -> None:
+    def open_test_image() -> None:
         im = Image.open(TEST_IM)
         im.load()

     with pytest.warns(ResourceWarning):
-        open()
+        open_test_image()


 def test_closed_file() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         im = Image.open(TEST_IM)
         im.load()
         im.close()
@@ -48,6 +50,8 @@ def test_closed_file() -> None:

 def test_context_manager() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         with Image.open(TEST_IM) as im:
             im.load()

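The `warnings.simplefilter("error")` line added inside the existing `warnings.catch_warnings()` blocks turns any warning emitted while the image is opened and loaded into an exception, so a stray `ResourceWarning` now fails the test instead of being silently swallowed. A short illustration of that escalation (the test name is hypothetical):

    import warnings

    import pytest

    def test_simplefilter_error_escalates() -> None:
        with warnings.catch_warnings():
            warnings.simplefilter("error")
            # with the "error" filter active, warn() raises instead of warning
            with pytest.raises(UserWarning):
                warnings.warn("boom")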
@@ -58,10 +58,7 @@ def test_getiptcinfo_fotostation() -> None:

     # Assert
     assert iptc is not None
-    for tag in iptc.keys():
-        if tag[0] == 240:
-            return
-    pytest.fail("FotoStation tag not found")
+    assert 240 in (tag[0] for tag in iptc.keys()), "FotoStation tag not found"


 def test_getiptcinfo_zero_padding() -> None:
@@ -181,6 +181,10 @@ class TestFileJpeg:
         assert test(100, 200) == (100, 200)
         assert test(0) is None  # square pixels

+    def test_dpi_jfif_cm(self) -> None:
+        with Image.open("Tests/images/jfif_unit_cm.jpg") as im:
+            assert im.info["dpi"] == (2.54, 5.08)
+
     @mark_if_feature_version(
         pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
     )
@@ -277,7 +281,10 @@ class TestFileJpeg:
         assert not im2.info.get("progressive")
         assert im3.info.get("progressive")

-        assert_image_equal(im1, im3)
+        if features.check_feature("mozjpeg"):
+            assert_image_similar(im1, im3, 9.39)
+        else:
+            assert_image_equal(im1, im3)
         assert im1_bytes >= im3_bytes

     def test_progressive_large_buffer(self, tmp_path: Path) -> None:
@@ -349,7 +356,6 @@ class TestFileJpeg:
         assert exif.get_ifd(0x8825) == {}

         transposed = ImageOps.exif_transpose(im)
-        assert transposed is not None
         exif = transposed.getexif()
         assert exif.get_ifd(0x8825) == {}

@@ -420,8 +426,12 @@ class TestFileJpeg:

         im2 = self.roundtrip(hopper(), progressive=1)
         im3 = self.roundtrip(hopper(), progression=1)  # compatibility
-        assert_image_equal(im1, im2)
-        assert_image_equal(im1, im3)
+        if features.check_feature("mozjpeg"):
+            assert_image_similar(im1, im2, 9.39)
+            assert_image_similar(im1, im3, 9.39)
+        else:
+            assert_image_equal(im1, im2)
+            assert_image_equal(im1, im3)
         assert im2.info.get("progressive")
         assert im2.info.get("progression")
         assert im3.info.get("progressive")
@@ -520,12 +530,13 @@ class TestFileJpeg:
     @mark_if_feature_version(
         pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
     )
-    def test_truncated_jpeg_should_read_all_the_data(self) -> None:
+    def test_truncated_jpeg_should_read_all_the_data(
+        self, monkeypatch: pytest.MonkeyPatch
+    ) -> None:
         filename = "Tests/images/truncated_jpeg.jpg"
-        ImageFile.LOAD_TRUNCATED_IMAGES = True
+        monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
         with Image.open(filename) as im:
             im.load()
-            ImageFile.LOAD_TRUNCATED_IMAGES = False
             assert im.getbbox() is not None

     def test_truncated_jpeg_throws_oserror(self) -> None:
@@ -541,12 +552,12 @@ class TestFileJpeg:
     @mark_if_feature_version(
         pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
     )
-    def test_qtables(self, tmp_path: Path) -> None:
+    def test_qtables(self) -> None:
         def _n_qtables_helper(n: int, test_file: str) -> None:
+            b = BytesIO()
             with Image.open(test_file) as im:
-                f = str(tmp_path / "temp.jpg")
-                im.save(f, qtables=[[n] * 64] * n)
-            with Image.open(f) as im:
+                im.save(b, "JPEG", qtables=[[n] * 64] * n)
+            with Image.open(b) as im:
                 assert len(im.quantization) == n
                 reloaded = self.roundtrip(im, qtables="keep")
                 assert im.quantization == reloaded.quantization
@@ -850,6 +861,8 @@ class TestFileJpeg:

         out = str(tmp_path / "out.jpg")
         with warnings.catch_warnings():
+            warnings.simplefilter("error")
+
             im.save(out, exif=exif)

         with Image.open(out) as reloaded:
@@ -921,7 +934,7 @@ class TestFileJpeg:

     def test_jpeg_magic_number(self, monkeypatch: pytest.MonkeyPatch) -> None:
         size = 4097
-        buffer = BytesIO(b"\xFF" * size)  # Many xFF bytes
+        buffer = BytesIO(b"\xff" * size)  # Many xff bytes
         max_pos = 0
         orig_read = buffer.read

@@ -998,8 +1011,13 @@ class TestFileJpeg:
         with Image.open(f) as reloaded:
             assert reloaded.info["xmp"] == b"XMP test"

-        im.info["xmp"] = b"1" * 65504
-        im.save(f)
+            # Check that XMP is not saved from image info
+            reloaded.save(f)
+
+        with Image.open(f) as reloaded:
+            assert "xmp" not in reloaded.info
+
+        im.save(f, xmp=b"1" * 65504)
         with Image.open(f) as reloaded:
             assert reloaded.info["xmp"] == b"1" * 65504

@@ -1007,7 +1025,7 @@ class TestFileJpeg:
             im.save(f, xmp=b"1" * 65505)

     @pytest.mark.timeout(timeout=1)
-    def test_eof(self) -> None:
+    def test_eof(self, monkeypatch: pytest.MonkeyPatch) -> None:
         # Even though this decoder never says that it is finished
         # the image should still end when there is no new data
         class InfiniteMockPyDecoder(ImageFile.PyDecoder):
@@ -1020,11 +1038,10 @@ class TestFileJpeg:

         with Image.open(TEST_FILE) as im:
             im.tile = [
-                ("INFINITE", (0, 0, 128, 128), 0, ("RGB", 0, 1)),
+                ImageFile._Tile("INFINITE", (0, 0, 128, 128), 0, ("RGB", 0, 1)),
             ]
-            ImageFile.LOAD_TRUNCATED_IMAGES = True
+            monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
             im.load()
-            ImageFile.LOAD_TRUNCATED_IMAGES = False

     def test_separate_tables(self) -> None:
         im = hopper()
@@ -2,6 +2,7 @@ from __future__ import annotations

 import os
 import re
+from collections.abc import Generator
 from io import BytesIO
 from pathlib import Path
 from typing import Any
@@ -29,8 +30,16 @@ EXTRA_DIR = "Tests/images/jpeg2000"

 pytestmark = skip_unless_feature("jpg_2000")

-test_card = Image.open("Tests/images/test-card.png")
-test_card.load()
+
+@pytest.fixture
+def card() -> Generator[ImageFile.ImageFile, None, None]:
+    with Image.open("Tests/images/test-card.png") as im:
+        im.load()
+    try:
+        yield im
+    finally:
+        im.close()
+

 # OpenJPEG 2.0.0 outputs this debugging message sometimes; we should
 # ignore it---it doesn't represent a test failure.
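The `card` fixture introduced above replaces the module-level `test_card` global, so each JPEG 2000 test receives a freshly opened, fully loaded image and the handle is released at teardown. A sketch of how a test consumes it (the consuming test is hypothetical; the fixture assumes the repository's `Tests/images/test-card.png`):

    from collections.abc import Generator

    import pytest
    from PIL import Image, ImageFile

    @pytest.fixture
    def card() -> Generator[ImageFile.ImageFile, None, None]:
        with Image.open("Tests/images/test-card.png") as im:
            im.load()
        try:
            yield im
        finally:
            im.close()

    def test_card_is_loaded(card: ImageFile.ImageFile) -> None:
        # the fixture argument is resolved by name, one image per test
        assert card.size == (640, 480)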
@@ -54,6 +63,7 @@ def test_sanity() -> None:

     with Image.open("Tests/images/test-card-lossless.jp2") as im:
         px = im.load()
+        assert px is not None
         assert px[0, 0] == (0, 0, 0)
         assert im.mode == "RGB"
         assert im.size == (640, 480)
@@ -74,76 +84,76 @@ def test_invalid_file() -> None:
         Jpeg2KImagePlugin.Jpeg2KImageFile(invalid_file)


-def test_bytesio() -> None:
+def test_bytesio(card: ImageFile.ImageFile) -> None:
     with open("Tests/images/test-card-lossless.jp2", "rb") as f:
         data = BytesIO(f.read())
     with Image.open(data) as im:
         im.load()
-        assert_image_similar(im, test_card, 1.0e-3)
+        assert_image_similar(im, card, 1.0e-3)


 # These two test pre-written JPEG 2000 files that were not written with
 # PIL (they were made using Adobe Photoshop)


-def test_lossless(tmp_path: Path) -> None:
+def test_lossless(card: ImageFile.ImageFile, tmp_path: Path) -> None:
     with Image.open("Tests/images/test-card-lossless.jp2") as im:
         im.load()
         outfile = str(tmp_path / "temp_test-card.png")
         im.save(outfile)
-    assert_image_similar(im, test_card, 1.0e-3)
+    assert_image_similar(im, card, 1.0e-3)


-def test_lossy_tiled() -> None:
-    assert_image_similar_tofile(
-        test_card, "Tests/images/test-card-lossy-tiled.jp2", 2.0
-    )
+def test_lossy_tiled(card: ImageFile.ImageFile) -> None:
+    assert_image_similar_tofile(card, "Tests/images/test-card-lossy-tiled.jp2", 2.0)


-def test_lossless_rt() -> None:
-    im = roundtrip(test_card)
-    assert_image_equal(im, test_card)
+def test_lossless_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card)
+    assert_image_equal(im, card)


-def test_lossy_rt() -> None:
-    im = roundtrip(test_card, quality_layers=[20])
-    assert_image_similar(im, test_card, 2.0)
+def test_lossy_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card, quality_layers=[20])
+    assert_image_similar(im, card, 2.0)


-def test_tiled_rt() -> None:
-    im = roundtrip(test_card, tile_size=(128, 128))
-    assert_image_equal(im, test_card)
+def test_tiled_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card, tile_size=(128, 128))
+    assert_image_equal(im, card)


-def test_tiled_offset_rt() -> None:
-    im = roundtrip(test_card, tile_size=(128, 128), tile_offset=(0, 0), offset=(32, 32))
-    assert_image_equal(im, test_card)
+def test_tiled_offset_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card, tile_size=(128, 128), tile_offset=(0, 0), offset=(32, 32))
+    assert_image_equal(im, card)


-def test_tiled_offset_too_small() -> None:
+def test_tiled_offset_too_small(card: ImageFile.ImageFile) -> None:
     with pytest.raises(ValueError):
-        roundtrip(test_card, tile_size=(128, 128), tile_offset=(0, 0), offset=(128, 32))
+        roundtrip(card, tile_size=(128, 128), tile_offset=(0, 0), offset=(128, 32))


-def test_irreversible_rt() -> None:
-    im = roundtrip(test_card, irreversible=True, quality_layers=[20])
-    assert_image_similar(im, test_card, 2.0)
+def test_irreversible_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card, irreversible=True, quality_layers=[20])
+    assert_image_similar(im, card, 2.0)


-def test_prog_qual_rt() -> None:
-    im = roundtrip(test_card, quality_layers=[60, 40, 20], progression="LRCP")
-    assert_image_similar(im, test_card, 2.0)
+def test_prog_qual_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card, quality_layers=[60, 40, 20], progression="LRCP")
+    assert_image_similar(im, card, 2.0)


-def test_prog_res_rt() -> None:
-    im = roundtrip(test_card, num_resolutions=8, progression="RLCP")
-    assert_image_equal(im, test_card)
+def test_prog_res_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card, num_resolutions=8, progression="RLCP")
+    assert_image_equal(im, card)


 @pytest.mark.parametrize("num_resolutions", range(2, 6))
-def test_default_num_resolutions(num_resolutions: int) -> None:
+def test_default_num_resolutions(
+    card: ImageFile.ImageFile, num_resolutions: int
+) -> None:
     d = 1 << (num_resolutions - 1)
-    im = test_card.resize((d - 1, d - 1))
+    im = card.resize((d - 1, d - 1))
     with pytest.raises(OSError):
         roundtrip(im, num_resolutions=num_resolutions)
     reloaded = roundtrip(im)
@@ -172,14 +182,11 @@ def test_load_dpi() -> None:
         assert "dpi" not in im.info


-def test_restricted_icc_profile() -> None:
-    ImageFile.LOAD_TRUNCATED_IMAGES = True
-    try:
-        # JPEG2000 image with a restricted ICC profile and a known colorspace
-        with Image.open("Tests/images/balloon_eciRGBv2_aware.jp2") as im:
-            assert im.mode == "RGB"
-    finally:
-        ImageFile.LOAD_TRUNCATED_IMAGES = False
+def test_restricted_icc_profile(monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
+    # JPEG2000 image with a restricted ICC profile and a known colorspace
+    with Image.open("Tests/images/balloon_eciRGBv2_aware.jp2") as im:
+        assert im.mode == "RGB"


 @pytest.mark.skipif(
@@ -205,31 +212,31 @@ def test_header_errors() -> None:
             pass


-def test_layers_type(tmp_path: Path) -> None:
+def test_layers_type(card: ImageFile.ImageFile, tmp_path: Path) -> None:
     outfile = str(tmp_path / "temp_layers.jp2")
     for quality_layers in [[100, 50, 10], (100, 50, 10), None]:
-        test_card.save(outfile, quality_layers=quality_layers)
+        card.save(outfile, quality_layers=quality_layers)

     for quality_layers_str in ["quality_layers", ("100", "50", "10")]:
         with pytest.raises(ValueError):
-            test_card.save(outfile, quality_layers=quality_layers_str)
+            card.save(outfile, quality_layers=quality_layers_str)


-def test_layers() -> None:
+def test_layers(card: ImageFile.ImageFile) -> None:
     out = BytesIO()
-    test_card.save(out, "JPEG2000", quality_layers=[100, 50, 10], progression="LRCP")
+    card.save(out, "JPEG2000", quality_layers=[100, 50, 10], progression="LRCP")
     out.seek(0)

     with Image.open(out) as im:
         im.layers = 1
         im.load()
-        assert_image_similar(im, test_card, 13)
+        assert_image_similar(im, card, 13)

     out.seek(0)
     with Image.open(out) as im:
         im.layers = 3
         im.load()
-        assert_image_similar(im, test_card, 0.4)
+        assert_image_similar(im, card, 0.4)


 @pytest.mark.parametrize(
@@ -245,24 +252,30 @@ def test_layers() -> None:
         (None, {"no_jp2": False}, 4, b"jP"),
     ),
 )
-def test_no_jp2(name: str, args: dict[str, bool], offset: int, data: bytes) -> None:
+def test_no_jp2(
+    card: ImageFile.ImageFile,
+    name: str,
+    args: dict[str, bool],
+    offset: int,
+    data: bytes,
+) -> None:
     out = BytesIO()
     if name:
         out.name = name
-    test_card.save(out, "JPEG2000", **args)
+    card.save(out, "JPEG2000", **args)
     out.seek(offset)
     assert out.read(2) == data


-def test_mct() -> None:
+def test_mct(card: ImageFile.ImageFile) -> None:
     # Three component
     for val in (0, 1):
         out = BytesIO()
-        test_card.save(out, "JPEG2000", mct=val, no_jp2=True)
+        card.save(out, "JPEG2000", mct=val, no_jp2=True)

         assert out.getvalue()[59] == val
         with Image.open(out) as im:
-            assert_image_similar(im, test_card, 1.0e-3)
+            assert_image_similar(im, card, 1.0e-3)

     # Single component should have MCT disabled
     for val in (0, 1):
@@ -310,6 +323,18 @@ def test_cmyk() -> None:
             assert im.getpixel((0, 0)) == (185, 134, 0, 0)


+@pytest.mark.skipif(
+    not os.path.exists(EXTRA_DIR), reason="Extra image files not installed"
+)
+@skip_unless_feature_version("jpg_2000", "2.5.3")
+def test_cmyk_save() -> None:
+    with Image.open(f"{EXTRA_DIR}/issue205.jp2") as jp2:
+        assert jp2.mode == "CMYK"
+
+        im = roundtrip(jp2)
+        assert_image_equal(im, jp2)
+
+
 @pytest.mark.parametrize("ext", (".j2k", ".jp2"))
 def test_16bit_monochrome_has_correct_mode(ext: str) -> None:
     with Image.open("Tests/images/16bit.cropped" + ext) as im:
@@ -397,6 +422,7 @@ def test_subsampling_decode(name: str) -> None:
 def test_pclr() -> None:
     with Image.open(f"{EXTRA_DIR}/issue104_jpxstream.jp2") as im:
         assert im.mode == "P"
+        assert im.palette is not None
         assert len(im.palette.colors) == 256
         assert im.palette.colors[(255, 255, 255)] == 0

@@ -404,13 +430,15 @@ def test_pclr() -> None:
         f"{EXTRA_DIR}/147af3f1083de4393666b7d99b01b58b_signal_sigsegv_130c531_6155_5136.jp2"
     ) as im:
         assert im.mode == "P"
+        assert im.palette is not None
         assert len(im.palette.colors) == 139
         assert im.palette.colors[(0, 0, 0, 0)] == 0


 def test_comment() -> None:
-    with Image.open("Tests/images/comment.jp2") as im:
-        assert im.info["comment"] == b"Created by OpenJPEG version 2.5.0"
+    for path in ("Tests/images/9bit.j2k", "Tests/images/comment.jp2"):
+        with Image.open(path) as im:
+            assert im.info["comment"] == b"Created by OpenJPEG version 2.5.0"

     # Test an image that is truncated partway through a codestream
     with open("Tests/images/comment.jp2", "rb") as fp:
@@ -419,22 +447,22 @@ def test_comment() -> None:
             pass


-def test_save_comment() -> None:
+def test_save_comment(card: ImageFile.ImageFile) -> None:
     for comment in ("Created by Pillow", b"Created by Pillow"):
         out = BytesIO()
-        test_card.save(out, "JPEG2000", comment=comment)
+        card.save(out, "JPEG2000", comment=comment)

         with Image.open(out) as im:
             assert im.info["comment"] == b"Created by Pillow"

     out = BytesIO()
     long_comment = b" " * 65531
-    test_card.save(out, "JPEG2000", comment=long_comment)
+    card.save(out, "JPEG2000", comment=long_comment)
     with Image.open(out) as im:
         assert im.info["comment"] == long_comment

     with pytest.raises(ValueError):
-        test_card.save(out, "JPEG2000", comment=long_comment + b" ")
+        card.save(out, "JPEG2000", comment=long_comment + b" ")


 @pytest.mark.parametrize(
@@ -457,15 +485,14 @@ def test_crashes(test_file: str) -> None:


 @skip_unless_feature_version("jpg_2000", "2.4.0")
-def test_plt_marker() -> None:
+def test_plt_marker(card: ImageFile.ImageFile) -> None:
     # Search the start of the codesteam for PLT
     out = BytesIO()
-    test_card.save(out, "JPEG2000", no_jp2=True, plt=True)
+    card.save(out, "JPEG2000", no_jp2=True, plt=True)
     out.seek(0)
     while True:
         marker = out.read(2)
-        if not marker:
-            pytest.fail("End of stream without PLT")
+        assert marker, "End of stream without PLT"

         jp2_boxid = _binary.i16be(marker)
         if jp2_boxid == 0xFF4F:
@@ -36,11 +36,7 @@ class LibTiffTestCase:
         im.load()
         im.getdata()

-        try:
-            assert im._compression == "group4"
-        except AttributeError:
-            print("No _compression")
-            print(dir(im))
+        assert im._compression == "group4"

         # can we write it back out, in a different form.
         out = str(tmp_path / "temp.png")
@@ -313,7 +309,7 @@ class TestFileLibTiff(LibTiffTestCase):
         }

         def check_tags(
-            tiffinfo: TiffImagePlugin.ImageFileDirectory_v2 | dict[int, str]
+            tiffinfo: TiffImagePlugin.ImageFileDirectory_v2 | dict[int, str],
         ) -> None:
             im = hopper()

@@ -1098,6 +1094,27 @@ class TestFileLibTiff(LibTiffTestCase):

             assert_image_similar(base_im, im, 0.7)

+    @pytest.mark.parametrize(
+        "test_file",
+        [
+            "Tests/images/old-style-jpeg-compression-no-samplesperpixel.tif",
+            "Tests/images/old-style-jpeg-compression.tif",
+        ],
+    )
+    def test_buffering(self, test_file: str) -> None:
+        # load exif first
+        with open(test_file, "rb", buffering=1048576) as f:
+            with Image.open(f) as im:
+                exif = dict(im.getexif())
+
+        # load image before exif
+        with open(test_file, "rb", buffering=1048576) as f:
+            with Image.open(f) as im2:
+                im2.load()
+                exif_after_load = dict(im2.getexif())
+
+        assert exif == exif_after_load
+
     @pytest.mark.valgrind_known_error(reason="Backtrace in Python Core")
     def test_sampleformat_not_corrupted(self) -> None:
         # Assert that a TIFF image with SampleFormat=UINT tag is not corrupted
@@ -1127,7 +1144,7 @@ class TestFileLibTiff(LibTiffTestCase):
                 im.load()

         # Assert that the error code is IMAGING_CODEC_MEMORY
-        assert str(e.value) == "-9"
+        assert str(e.value) == "decoder error -9"

     @pytest.mark.parametrize("compression", ("tiff_adobe_deflate", "jpeg"))
     def test_save_multistrip(self, compression: str, tmp_path: Path) -> None:
@@ -1141,23 +1158,22 @@ class TestFileLibTiff(LibTiffTestCase):
             assert len(im.tag_v2[STRIPOFFSETS]) > 1

     @pytest.mark.parametrize("argument", (True, False))
-    def test_save_single_strip(self, argument: bool, tmp_path: Path) -> None:
+    def test_save_single_strip(
+        self, argument: bool, tmp_path: Path, monkeypatch: pytest.MonkeyPatch
+    ) -> None:
         im = hopper("RGB").resize((256, 256))
         out = str(tmp_path / "temp.tif")

         if not argument:
-            TiffImagePlugin.STRIP_SIZE = 2**18
-        try:
-            arguments: dict[str, str | int] = {"compression": "tiff_adobe_deflate"}
-            if argument:
-                arguments["strip_size"] = 2**18
-            im.save(out, "TIFF", **arguments)
+            monkeypatch.setattr(TiffImagePlugin, "STRIP_SIZE", 2**18)
+        arguments: dict[str, str | int] = {"compression": "tiff_adobe_deflate"}
+        if argument:
+            arguments["strip_size"] = 2**18
+        im.save(out, "TIFF", **arguments)

-            with Image.open(out) as im:
-                assert isinstance(im, TiffImagePlugin.TiffImageFile)
-                assert len(im.tag_v2[STRIPOFFSETS]) == 1
-        finally:
-            TiffImagePlugin.STRIP_SIZE = 65536
+        with Image.open(out) as im:
+            assert isinstance(im, TiffImagePlugin.TiffImageFile)
+            assert len(im.tag_v2[STRIPOFFSETS]) == 1

     @pytest.mark.parametrize("compression", ("tiff_adobe_deflate", None))
     def test_save_zero(self, compression: str | None, tmp_path: Path) -> None:
@@ -29,25 +29,32 @@ def roundtrip(im: Image.Image, **options: Any) -> ImageFile.ImageFile:

 @pytest.mark.parametrize("test_file", test_files)
 def test_sanity(test_file: str) -> None:
-    with Image.open(test_file) as im:
+    def check(im: ImageFile.ImageFile) -> None:
         im.load()
         assert im.mode == "RGB"
         assert im.size == (640, 480)
         assert im.format == "MPO"

+    with Image.open(test_file) as im:
+        check(im)
+    with MpoImagePlugin.MpoImageFile(test_file) as im:
+        check(im)
+

 @pytest.mark.skipif(is_pypy(), reason="Requires CPython")
 def test_unclosed_file() -> None:
-    def open() -> None:
+    def open_test_image() -> None:
         im = Image.open(test_files[0])
         im.load()

     with pytest.warns(ResourceWarning):
-        open()
+        open_test_image()


 def test_closed_file() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         im = Image.open(test_files[0])
         im.load()
         im.close()
@@ -63,6 +70,8 @@ def test_seek_after_close() -> None:

 def test_context_manager() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         with Image.open(test_files[0]) as im:
             im.load()

@@ -73,8 +82,8 @@ def test_app(test_file: str) -> None:
     with Image.open(test_file) as im:
         assert im.applist[0][0] == "APP1"
         assert im.applist[1][0] == "APP2"
-        assert (
-            im.applist[1][1][:16] == b"MPF\x00MM\x00*\x00\x00\x00\x08\x00\x03\xb0\x00"
+        assert im.applist[1][1].startswith(
+            b"MPF\x00MM\x00*\x00\x00\x00\x08\x00\x03\xb0\x00"
         )
         assert len(im.applist) == 2

@@ -293,3 +302,15 @@ def test_save_all() -> None:
     # Test that a single frame image will not be saved as an MPO
     jpg = roundtrip(im, save_all=True)
     assert "mp" not in jpg.info
+
+
+def test_save_xmp() -> None:
+    im = Image.new("RGB", (1, 1))
+    im2 = Image.new("RGB", (1, 1), "#f00")
+    im2.encoderinfo = {"xmp": b"Second frame"}
+    im_reloaded = roundtrip(im, xmp=b"First frame", save_all=True, append_images=[im2])
+
+    assert im_reloaded.info["xmp"] == b"First frame"
+
+    im_reloaded.seek(1)
+    assert im_reloaded.info["xmp"] == b"Second frame"
@@ -264,7 +264,7 @@ def test_pdf_append(tmp_path: Path) -> None:
     # append some info
     pdf.info.Title = "abc"
     pdf.info.Author = "def"
-    pdf.info.Subject = "ghi\uABCD"
+    pdf.info.Subject = "ghi\uabcd"
     pdf.info.Keywords = "qw)e\\r(ty"
     pdf.info.Creator = "hopper()"
     pdf.start_writing()
@@ -292,7 +292,7 @@ def test_pdf_append(tmp_path: Path) -> None:
     assert pdf.info.Title == "abc"
     assert pdf.info.Producer == "PdfParser"
     assert pdf.info.Keywords == "qw)e\\r(ty"
-    assert pdf.info.Subject == "ghi\uABCD"
+    assert pdf.info.Subject == "ghi\uabcd"
     assert b"CreationDate" in pdf.info
     assert b"ModDate" in pdf.info
     check_pdf_pages_consistency(pdf)
@@ -338,6 +338,8 @@ class TestFilePng:
         with Image.open(TEST_PNG_FILE) as im:
             # Assert that there is no unclosed file warning
             with warnings.catch_warnings():
+                warnings.simplefilter("error")
+
                 im.verify()

         with Image.open(TEST_PNG_FILE) as im:
@@ -361,7 +363,7 @@ class TestFilePng:
             with pytest.raises((OSError, SyntaxError)):
                 im.verify()

-    def test_verify_ignores_crc_error(self) -> None:
+    def test_verify_ignores_crc_error(self, monkeypatch: pytest.MonkeyPatch) -> None:
         # check ignores crc errors in ancillary chunks

         chunk_data = chunk(b"tEXt", b"spam")
@@ -371,24 +373,20 @@ class TestFilePng:
         with pytest.raises(SyntaxError):
             PngImagePlugin.PngImageFile(BytesIO(image_data))

-        ImageFile.LOAD_TRUNCATED_IMAGES = True
-        try:
-            im = load(image_data)
-            assert im is not None
-        finally:
-            ImageFile.LOAD_TRUNCATED_IMAGES = False
+        monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
+        im = load(image_data)
+        assert im is not None

-    def test_verify_not_ignores_crc_error_in_required_chunk(self) -> None:
+    def test_verify_not_ignores_crc_error_in_required_chunk(
+        self, monkeypatch: pytest.MonkeyPatch
+    ) -> None:
         # check does not ignore crc errors in required chunks

         image_data = MAGIC + IHDR[:-1] + b"q" + TAIL

-        ImageFile.LOAD_TRUNCATED_IMAGES = True
-        try:
-            with pytest.raises(SyntaxError):
-                PngImagePlugin.PngImageFile(BytesIO(image_data))
-        finally:
-            ImageFile.LOAD_TRUNCATED_IMAGES = False
+        monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
+        with pytest.raises(SyntaxError):
+            PngImagePlugin.PngImageFile(BytesIO(image_data))

     def test_roundtrip_dpi(self) -> None:
         # Check dpi roundtripping
@@ -598,7 +596,7 @@ class TestFilePng:
             (b"prIV", b"VALUE3", True),
         ]

-    def test_textual_chunks_after_idat(self) -> None:
+    def test_textual_chunks_after_idat(self, monkeypatch: pytest.MonkeyPatch) -> None:
         with Image.open("Tests/images/hopper.png") as im:
             assert "comment" in im.text
             for k, v in {
@@ -612,18 +610,17 @@ class TestFilePng:
             with pytest.raises(OSError):
                 assert isinstance(im.text, dict)

+        # Raises an EOFError in load_end
+        with Image.open("Tests/images/hopper_idat_after_image_end.png") as im:
+            assert im.text == {"TXT": "VALUE", "ZIP": "VALUE"}
+
         # Raises a UnicodeDecodeError in load_end
         with Image.open("Tests/images/truncated_image.png") as im:
             # The file is truncated
             with pytest.raises(OSError):
-                im.text()
-            ImageFile.LOAD_TRUNCATED_IMAGES = True
+                im.text
+            monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
             assert isinstance(im.text, dict)
-            ImageFile.LOAD_TRUNCATED_IMAGES = False
-
-        # Raises an EOFError in load_end
-        with Image.open("Tests/images/hopper_idat_after_image_end.png") as im:
-            assert im.text == {"TXT": "VALUE", "ZIP": "VALUE"}

     def test_unknown_compression_method(self) -> None:
         with pytest.raises(SyntaxError, match="Unknown compression method"):
@@ -649,15 +646,16 @@ class TestFilePng:
     @pytest.mark.parametrize(
         "cid", (b"IHDR", b"sRGB", b"pHYs", b"acTL", b"fcTL", b"fdAT")
     )
-    def test_truncated_chunks(self, cid: bytes) -> None:
+    def test_truncated_chunks(
+        self, cid: bytes, monkeypatch: pytest.MonkeyPatch
+    ) -> None:
         fp = BytesIO()
         with PngImagePlugin.PngStream(fp) as png:
             with pytest.raises(ValueError):
                 png.call(cid, 0, 0)

-            ImageFile.LOAD_TRUNCATED_IMAGES = True
+            monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
             png.call(cid, 0, 0)
-            ImageFile.LOAD_TRUNCATED_IMAGES = False

     @pytest.mark.parametrize("save_all", (True, False))
     def test_specify_bits(self, save_all: bool, tmp_path: Path) -> None:
@@ -770,38 +768,31 @@ class TestFilePng:
             im.seek(1)

     @pytest.mark.parametrize("buffer", (True, False))
-    def test_save_stdout(self, buffer: bool) -> None:
-        old_stdout = sys.stdout
-
+    def test_save_stdout(self, buffer: bool, monkeypatch: pytest.MonkeyPatch) -> None:
         class MyStdOut:
             buffer = BytesIO()

         mystdout: MyStdOut | BytesIO = MyStdOut() if buffer else BytesIO()

-        sys.stdout = mystdout
+        monkeypatch.setattr(sys, "stdout", mystdout)

         with Image.open(TEST_PNG_FILE) as im:
             im.save(sys.stdout, "PNG")

-        # Reset stdout
-        sys.stdout = old_stdout
-
         if isinstance(mystdout, MyStdOut):
             mystdout = mystdout.buffer
         with Image.open(mystdout) as reloaded:
             assert_image_equal_tofile(reloaded, TEST_PNG_FILE)

-    def test_truncated_end_chunk(self) -> None:
+    def test_truncated_end_chunk(self, monkeypatch: pytest.MonkeyPatch) -> None:
         with Image.open("Tests/images/truncated_end_chunk.png") as im:
             with pytest.raises(OSError):
                 im.load()

-        ImageFile.LOAD_TRUNCATED_IMAGES = True
-        try:
-            with Image.open("Tests/images/truncated_end_chunk.png") as im:
-                assert_image_equal_tofile(im, "Tests/images/hopper.png")
-        finally:
-            ImageFile.LOAD_TRUNCATED_IMAGES = False
+        monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
+        with Image.open("Tests/images/truncated_end_chunk.png") as im:
+            assert_image_equal_tofile(im, "Tests/images/hopper.png")


 @pytest.mark.skipif(is_win32(), reason="Requires Unix or macOS")
@@ -810,11 +801,11 @@ class TestTruncatedPngPLeaks(PillowLeakTestCase):
     mem_limit = 2 * 1024  # max increase in K
     iterations = 100  # Leak is 56k/iteration, this will leak 5.6megs

-    def test_leak_load(self) -> None:
+    def test_leak_load(self, monkeypatch: pytest.MonkeyPatch) -> None:
        with open("Tests/images/hopper.png", "rb") as f:
            DATA = BytesIO(f.read(16 * 1024))

-        ImageFile.LOAD_TRUNCATED_IMAGES = True
+        monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
        with Image.open(DATA) as im:
            im.load()

@@ -822,7 +813,4 @@ class TestTruncatedPngPLeaks(PillowLeakTestCase):
            with Image.open(DATA) as im:
                im.load()

-        try:
-            self._test_leak(core)
-        finally:
-            ImageFile.LOAD_TRUNCATED_IMAGES = False
+        self._test_leak(core)
@@ -49,7 +49,7 @@ def test_sanity() -> None:
         (b"P5 3 1 257 \x00\x00\x00\x80\x01\x01", "I", (0, 32640, 65535)),
         # P6 with maxval < 255
         (
-            b"P6 3 1 17 \x00\x01\x02\x08\x09\x0A\x0F\x10\x11",
+            b"P6 3 1 17 \x00\x01\x02\x08\x09\x0a\x0f\x10\x11",
             "RGB",
             (
                 (0, 15, 30),
@@ -60,7 +60,7 @@ def test_sanity() -> None:
         # P6 with maxval > 255
         (
             b"P6 3 1 257 \x00\x00\x00\x01\x00\x02"
-            b"\x00\x80\x00\x81\x00\x82\x01\x00\x01\x01\xFF\xFF",
+            b"\x00\x80\x00\x81\x00\x82\x01\x00\x01\x01\xff\xff",
             "RGB",
             (
                 (0, 1, 2),
@@ -79,6 +79,7 @@ def test_arbitrary_maxval(
         assert im.mode == mode

         px = im.load()
+        assert px is not None
         assert tuple(px[x, 0] for x in range(3)) == pixels


@@ -367,22 +368,18 @@ def test_mimetypes(tmp_path: Path) -> None:


 @pytest.mark.parametrize("buffer", (True, False))
-def test_save_stdout(buffer: bool) -> None:
-    old_stdout = sys.stdout
-
+def test_save_stdout(buffer: bool, monkeypatch: pytest.MonkeyPatch) -> None:
     class MyStdOut:
         buffer = BytesIO()

     mystdout: MyStdOut | BytesIO = MyStdOut() if buffer else BytesIO()

-    sys.stdout = mystdout
+    monkeypatch.setattr(sys, "stdout", mystdout)

     with Image.open(TEST_FILE) as im:
         im.save(sys.stdout, "PPM")

-    # Reset stdout
-    sys.stdout = old_stdout
-
     if isinstance(mystdout, MyStdOut):
         mystdout = mystdout.buffer
     with Image.open(mystdout) as reloaded:
@@ -25,16 +25,18 @@ def test_sanity() -> None:

 @pytest.mark.skipif(is_pypy(), reason="Requires CPython")
 def test_unclosed_file() -> None:
-    def open() -> None:
+    def open_test_image() -> None:
         im = Image.open(test_file)
         im.load()

     with pytest.warns(ResourceWarning):
-        open()
+        open_test_image()


 def test_closed_file() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         im = Image.open(test_file)
         im.load()
         im.close()
@@ -42,6 +44,8 @@ def test_closed_file() -> None:

 def test_context_manager() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         with Image.open(test_file) as im:
             im.load()

@@ -7,7 +7,7 @@ from pathlib import Path

 import pytest

-from PIL import Image, ImageSequence, SpiderImagePlugin
+from PIL import Image, SpiderImagePlugin

 from .helper import assert_image_equal, hopper, is_pypy

@@ -24,16 +24,18 @@ def test_sanity() -> None:

 @pytest.mark.skipif(is_pypy(), reason="Requires CPython")
 def test_unclosed_file() -> None:
-    def open() -> None:
+    def open_test_image() -> None:
         im = Image.open(TEST_FILE)
         im.load()

     with pytest.warns(ResourceWarning):
-        open()
+        open_test_image()


 def test_closed_file() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         im = Image.open(TEST_FILE)
         im.load()
         im.close()
@@ -41,6 +43,8 @@ def test_closed_file() -> None:

 def test_context_manager() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         with Image.open(TEST_FILE) as im:
             im.load()

@@ -149,8 +153,8 @@ def test_nonstack_file() -> None:

 def test_nonstack_dos() -> None:
     with Image.open(TEST_FILE) as im:
-        for i, frame in enumerate(ImageSequence.Iterator(im)):
-            assert i <= 1, "Non-stack DOS file test failed"
+        with pytest.raises(EOFError):
+            im.seek(0)


 # for issue #4093
@@ -1,10 +1,11 @@
 from __future__ import annotations

+import io
 import os

 import pytest

-from PIL import Image, SunImagePlugin
+from PIL import Image, SunImagePlugin, _binary

 from .helper import assert_image_equal_tofile, assert_image_similar, hopper

@@ -33,6 +34,60 @@ def test_im1() -> None:
         assert_image_equal_tofile(im, "Tests/images/sunraster.im1.png")


+def _sun_header(
+    depth: int = 0, file_type: int = 0, palette_length: int = 0
+) -> io.BytesIO:
+    return io.BytesIO(
+        _binary.o32be(0x59A66A95)
+        + b"\x00" * 8
+        + _binary.o32be(depth)
+        + b"\x00" * 4
+        + _binary.o32be(file_type)
+        + b"\x00" * 4
+        + _binary.o32be(palette_length)
+    )
+
+
+def test_unsupported_mode_bit_depth() -> None:
+    with pytest.raises(SyntaxError, match="Unsupported Mode/Bit Depth"):
+        with SunImagePlugin.SunImageFile(_sun_header()):
+            pass
+
+
+def test_unsupported_color_palette_length() -> None:
+    with pytest.raises(SyntaxError, match="Unsupported Color Palette Length"):
+        with SunImagePlugin.SunImageFile(_sun_header(depth=1, palette_length=1025)):
+            pass
+
+
+def test_unsupported_palette_type() -> None:
+    with pytest.raises(SyntaxError, match="Unsupported Palette Type"):
+        with SunImagePlugin.SunImageFile(_sun_header(depth=1, palette_length=1)):
+            pass
+
+
+def test_unsupported_file_type() -> None:
+    with pytest.raises(SyntaxError, match="Unsupported Sun Raster file type"):
+        with SunImagePlugin.SunImageFile(_sun_header(depth=1, file_type=6)):
+            pass
+
+
+@pytest.mark.skipif(
+    not os.path.exists(EXTRA_DIR), reason="Extra image files not installed"
+)
+def test_rgbx() -> None:
+    with open(os.path.join(EXTRA_DIR, "32bpp.ras"), "rb") as fp:
+        data = fp.read()
+
+    # Set file type to 3
+    data = data[:20] + _binary.o32be(3) + data[24:]
+
+    with Image.open(io.BytesIO(data)) as im:
+        r, g, b = im.split()
+        im = Image.merge("RGB", (b, g, r))
+        assert_image_equal_tofile(im, os.path.join(EXTRA_DIR, "32bpp.png"))
+
+
 @pytest.mark.skipif(
     not os.path.exists(EXTRA_DIR), reason="Extra image files not installed"
 )
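The `_sun_header` helper added above packs just enough of a Sun Raster header, one big-endian 32-bit word per field, to steer `SunImagePlugin` into specific error branches. A sketch of the same layout with the standard library, assuming the conventional field order (magic, width, height, depth, length, type, maptype, maplength); `sun_raster_header` is an illustrative stand-in, not part of the change:

    import struct

    def sun_raster_header(depth: int = 0, file_type: int = 0, maplength: int = 0) -> bytes:
        # eight big-endian 32-bit words; 0x59A66A95 is the Sun Raster magic number
        return struct.pack(">8I", 0x59A66A95, 0, 0, depth, 0, file_type, 0, maplength)

    # the file-type word sits at byte offset 20, which is why test_rgbx above
    # rewrites data[20:24] to switch the raster to type 3
    assert len(sun_raster_header()) == 32
    assert sun_raster_header(file_type=3)[20:24] == struct.pack(">I", 3)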
@ -1,6 +1,7 @@
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import warnings
|
import warnings
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
@ -29,6 +30,22 @@ def test_sanity(codec: str, test_path: str, format: str) -> None:
|
||||||
assert im.format == format
|
assert im.format == format
|
||||||
|
|
||||||
|
|
||||||
|
def test_unexpected_end(tmp_path: Path) -> None:
|
||||||
|
tmpfile = str(tmp_path / "temp.tar")
|
||||||
|
with open(tmpfile, "w"):
|
||||||
|
pass
|
||||||
|
|
||||||
|
with pytest.raises(OSError, match="unexpected end of tar file"):
|
||||||
|
with TarIO.TarIO(tmpfile, "test"):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def test_cannot_find_subfile() -> None:
|
||||||
|
with pytest.raises(OSError, match="cannot find subfile"):
|
||||||
|
with TarIO.TarIO(TEST_TAR_FILE, "test"):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.skipif(is_pypy(), reason="Requires CPython")
|
@pytest.mark.skipif(is_pypy(), reason="Requires CPython")
|
||||||
def test_unclosed_file() -> None:
|
def test_unclosed_file() -> None:
|
||||||
with pytest.warns(ResourceWarning):
|
with pytest.warns(ResourceWarning):
|
||||||
|
@ -37,11 +54,15 @@ def test_unclosed_file() -> None:
|
||||||
|
|
||||||
def test_close() -> None:
|
def test_close() -> None:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
tar = TarIO.TarIO(TEST_TAR_FILE, "hopper.jpg")
|
tar = TarIO.TarIO(TEST_TAR_FILE, "hopper.jpg")
|
||||||
tar.close()
|
tar.close()
|
||||||
|
|
||||||
|
|
||||||
def test_contextmanager() -> None:
|
def test_contextmanager() -> None:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
with TarIO.TarIO(TEST_TAR_FILE, "hopper.jpg"):
|
with TarIO.TarIO(TEST_TAR_FILE, "hopper.jpg"):
|
||||||
pass
|
pass
|
||||||
|
|
|
@ -72,6 +72,7 @@ def test_palette_depth_8(tmp_path: Path) -> None:
|
||||||
|
|
||||||
def test_palette_depth_16(tmp_path: Path) -> None:
|
def test_palette_depth_16(tmp_path: Path) -> None:
|
||||||
with Image.open("Tests/images/p_16.tga") as im:
|
with Image.open("Tests/images/p_16.tga") as im:
|
||||||
|
assert im.palette is not None
|
||||||
assert im.palette.mode == "RGBA"
|
assert im.palette.mode == "RGBA"
|
||||||
assert_image_equal_tofile(im.convert("RGBA"), "Tests/images/p_16.png")
|
assert_image_equal_tofile(im.convert("RGBA"), "Tests/images/p_16.png")
|
||||||
|
|
||||||
|
@ -213,10 +214,14 @@ def test_save_orientation(tmp_path: Path) -> None:
|
||||||
def test_horizontal_orientations() -> None:
|
def test_horizontal_orientations() -> None:
|
||||||
# These images have been manually hexedited to have the relevant orientations
|
# These images have been manually hexedited to have the relevant orientations
|
||||||
with Image.open("Tests/images/rgb32rle_top_right.tga") as im:
|
with Image.open("Tests/images/rgb32rle_top_right.tga") as im:
|
||||||
assert im.load()[90, 90][:3] == (0, 0, 0)
|
px = im.load()
|
||||||
|
assert px is not None
|
||||||
|
assert px[90, 90][:3] == (0, 0, 0)
|
||||||
|
|
||||||
with Image.open("Tests/images/rgb32rle_bottom_right.tga") as im:
|
with Image.open("Tests/images/rgb32rle_bottom_right.tga") as im:
|
||||||
assert im.load()[90, 90][:3] == (0, 255, 0)
|
px = im.load()
|
||||||
|
assert px is not None
|
||||||
|
assert px[90, 90][:3] == (0, 255, 0)
|
||||||
|
|
||||||
|
|
||||||
def test_save_rle(tmp_path: Path) -> None:
|
def test_save_rle(tmp_path: Path) -> None:
|
||||||
|
|
|
@@ -63,15 +63,17 @@ class TestFileTiff:

     @pytest.mark.skipif(is_pypy(), reason="Requires CPython")
     def test_unclosed_file(self) -> None:
-        def open() -> None:
+        def open_test_image() -> None:
             im = Image.open("Tests/images/multipage.tiff")
             im.load()

         with pytest.warns(ResourceWarning):
-            open()
+            open_test_image()

     def test_closed_file(self) -> None:
         with warnings.catch_warnings():
+            warnings.simplefilter("error")
+
             im = Image.open("Tests/images/multipage.tiff")
             im.load()
             im.close()
@@ -88,6 +90,8 @@ class TestFileTiff:

     def test_context_manager(self) -> None:
         with warnings.catch_warnings():
+            warnings.simplefilter("error")
+
             with Image.open("Tests/images/multipage.tiff") as im:
                 im.load()

@@ -111,6 +115,19 @@ class TestFileTiff:
         outfile = str(tmp_path / "temp.tif")
         im.save(outfile, save_all=True, append_images=[im], tiffinfo=im.tag_v2)

+    def test_bigtiff_save(self, tmp_path: Path) -> None:
+        outfile = str(tmp_path / "temp.tif")
+        im = hopper()
+        im.save(outfile, big_tiff=True)
+
+        with Image.open(outfile) as reloaded:
+            assert reloaded.tag_v2._bigtiff is True
+
+        im.save(outfile, save_all=True, append_images=[im], big_tiff=True)
+
+        with Image.open(outfile) as reloaded:
+            assert reloaded.tag_v2._bigtiff is True
+
     def test_seek_too_large(self) -> None:
         with pytest.raises(ValueError, match="Unable to seek to frame"):
             Image.open("Tests/images/seek_too_large.tif")
@@ -742,6 +759,39 @@ class TestFileTiff:
         with pytest.raises(RuntimeError):
             a.fixOffsets(1)

+        b = BytesIO(b"II\x2a\x00\x00\x00\x00\x00")
+        with TiffImagePlugin.AppendingTiffWriter(b) as a:
+            a.offsetOfNewPage = 2**16
+
+            b.seek(0)
+            a.fixOffsets(1, isShort=True)
+
+        b = BytesIO(b"II\x2b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00")
+        with TiffImagePlugin.AppendingTiffWriter(b) as a:
+            a.offsetOfNewPage = 2**32
+
+            b.seek(0)
+            a.fixOffsets(1, isShort=True)
+
+            b.seek(0)
+            a.fixOffsets(1, isLong=True)
+
+    def test_appending_tiff_writer_writelong(self) -> None:
+        data = b"II\x2a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
+        b = BytesIO(data)
+        with TiffImagePlugin.AppendingTiffWriter(b) as a:
+            a.seek(-4, os.SEEK_CUR)
+            a.writeLong(2**32 - 1)
+        assert b.getvalue() == data[:-4] + b"\xff\xff\xff\xff"
+
+    def test_appending_tiff_writer_rewritelastshorttolong(self) -> None:
+        data = b"II\x2a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
+        b = BytesIO(data)
+        with TiffImagePlugin.AppendingTiffWriter(b) as a:
+            a.seek(-2, os.SEEK_CUR)
+            a.rewriteLastShortToLong(2**32 - 1)
+        assert b.getvalue() == data[:-4] + b"\xff\xff\xff\xff"
+
     def test_saving_icc_profile(self, tmp_path: Path) -> None:
         # Tests saving TIFF with icc_profile set.
         # At the time of writing this will only work for non-compressed tiffs
@@ -891,11 +941,10 @@ class TestFileTiff:

     @pytest.mark.timeout(6)
     @pytest.mark.filterwarnings("ignore:Truncated File Read")
-    def test_timeout(self) -> None:
+    def test_timeout(self, monkeypatch: pytest.MonkeyPatch) -> None:
         with Image.open("Tests/images/timeout-6646305047838720") as im:
-            ImageFile.LOAD_TRUNCATED_IMAGES = True
+            monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
             im.load()
-            ImageFile.LOAD_TRUNCATED_IMAGES = False

     @pytest.mark.parametrize(
         "test_file",
@@ -21,7 +21,11 @@ def test_open() -> None:

 def test_load() -> None:
     with WalImageFile.open(TEST_FILE) as im:
-        assert im.load()[0, 0] == 122
+        px = im.load()
+        assert px is not None
+        assert px[0, 0] == 122

         # Test again now that it has already been loaded once
-        assert im.load()[0, 0] == 122
+        px = im.load()
+        assert px is not None
+        assert px[0, 0] == 122
@@ -28,9 +28,9 @@ except ImportError:


 class TestUnsupportedWebp:
-    def test_unsupported(self) -> None:
+    def test_unsupported(self, monkeypatch: pytest.MonkeyPatch) -> None:
         if HAVE_WEBP:
-            WebPImagePlugin.SUPPORTED = False
+            monkeypatch.setattr(WebPImagePlugin, "SUPPORTED", False)

         file_path = "Tests/images/hopper.webp"
         with pytest.warns(UserWarning):
@@ -38,9 +38,6 @@ class TestUnsupportedWebp:
             with Image.open(file_path):
                 pass

-        if HAVE_WEBP:
-            WebPImagePlugin.SUPPORTED = True
-

 @skip_unless_feature("webp")
 class TestFileWebp:
@@ -191,6 +188,8 @@ class TestFileWebp:
         file_path = "Tests/images/hopper.webp"
         with Image.open(file_path) as image:
             with warnings.catch_warnings():
+                warnings.simplefilter("error")
+
                 image.save(tmp_path / "temp.webp")

     def test_file_pointer_could_be_reused(self) -> None:
@@ -40,7 +40,7 @@ def test_read_exif_metadata() -> None:
 def test_read_exif_metadata_without_prefix() -> None:
     with Image.open("Tests/images/flower2.webp") as im:
         # Assert prefix is not present
-        assert im.info["exif"][:6] != b"Exif\x00\x00"
+        assert not im.info["exif"].startswith(b"Exif\x00\x00")

         exif = im.getexif()
         assert exif[305] == "Adobe Photoshop CS6 (Macintosh)"
@@ -1,5 +1,6 @@
 from __future__ import annotations

+from io import BytesIO
 from pathlib import Path
 from typing import IO

@@ -31,7 +32,16 @@ def test_load_raw() -> None:
 def test_load() -> None:
     with Image.open("Tests/images/drawing.emf") as im:
         if hasattr(Image.core, "drawwmf"):
-            assert im.load()[0, 0] == (255, 255, 255)
+            px = im.load()
+            assert px is not None
+            assert px[0, 0] == (255, 255, 255)
+
+
+def test_load_zero_inch() -> None:
+    b = BytesIO(b"\xd7\xcd\xc6\x9a\x00\x00" + b"\x00" * 10)
+    with pytest.raises(ValueError):
+        with Image.open(b):
+            pass


 def test_register_handler(tmp_path: Path) -> None:
@@ -61,6 +71,12 @@ def test_load_float_dpi() -> None:
     with Image.open("Tests/images/drawing.emf") as im:
         assert im.info["dpi"] == 1423.7668161434979

+    with open("Tests/images/drawing.emf", "rb") as fp:
+        data = fp.read()
+    b = BytesIO(data[:8] + b"\x06\xfa" + data[10:])
+    with Image.open(b) as im:
+        assert im.info["dpi"][0] == 2540
+

 def test_load_set_dpi() -> None:
     with Image.open("Tests/images/drawing.wmf") as im:
@@ -1,5 +1,7 @@
 from __future__ import annotations

+import io
+
 import pytest

 from PIL import BdfFontFile, FontFile
@@ -8,13 +10,20 @@ filename = "Tests/images/courB08.bdf"


 def test_sanity() -> None:
-    with open(filename, "rb") as test_file:
-        font = BdfFontFile.BdfFontFile(test_file)
+    with open(filename, "rb") as fp:
+        font = BdfFontFile.BdfFontFile(fp)

     assert isinstance(font, FontFile.FontFile)
     assert len([_f for _f in font.glyph if _f]) == 190


+def test_zero_width_chars() -> None:
+    with open(filename, "rb") as fp:
+        data = fp.read()
+    data = data[:2650] + b"\x00\x00" + data[2652:]
+    BdfFontFile.BdfFontFile(io.BytesIO(data))
+
+
 def test_invalid_file() -> None:
     with open("Tests/images/flower.jpg", "rb") as fp:
         with pytest.raises(SyntaxError):
@@ -4,7 +4,20 @@ from pathlib import Path

 import pytest

-from PIL import FontFile
+from PIL import FontFile, Image


+def test_compile() -> None:
+    font = FontFile.FontFile()
+    font.glyph[0] = ((0, 0), (0, 0, 0, 0), (0, 0, 0, 1), Image.new("L", (0, 0)))
+    font.compile()
+    assert font.ysize == 1
+
+    font.ysize = 2
+    font.compile()
+
+    # Assert that compiling again did not change anything
+    assert font.ysize == 2
+
+
 def test_save(tmp_path: Path) -> None:
@@ -22,28 +22,26 @@ def test_sanity() -> None:
     Image.new("HSV", (100, 100))


-def wedge() -> Image.Image:
-    w = Image._wedge()
-    w90 = w.rotate(90)
+def linear_gradient() -> Image.Image:
+    im = Image.linear_gradient(mode="L")
+    im90 = im.rotate(90)

-    (px, h) = w.size
+    (px, h) = im.size

     r = Image.new("L", (px * 3, h))
     g = r.copy()
     b = r.copy()

-    r.paste(w, (0, 0))
-    r.paste(w90, (px, 0))
+    r.paste(im, (0, 0))
+    r.paste(im90, (px, 0))

-    g.paste(w90, (0, 0))
-    g.paste(w, (2 * px, 0))
+    g.paste(im90, (0, 0))
+    g.paste(im, (2 * px, 0))

-    b.paste(w, (px, 0))
-    b.paste(w90, (2 * px, 0))
+    b.paste(im, (px, 0))
+    b.paste(im90, (2 * px, 0))

-    img = Image.merge("RGB", (r, g, b))
-
-    return img
+    return Image.merge("RGB", (r, g, b))


 def to_xxx_colorsys(
@@ -79,8 +77,8 @@ def to_rgb_colorsys(im: Image.Image) -> Image.Image:
     return to_xxx_colorsys(im, colorsys.hsv_to_rgb, "RGB")


-def test_wedge() -> None:
-    src = wedge().resize((3 * 32, 32), Image.Resampling.BILINEAR)
+def test_linear_gradient() -> None:
+    src = linear_gradient().resize((3 * 32, 32), Image.Resampling.BILINEAR)
     im = src.convert("HSV")
     comparable = to_hsv_colorsys(src)
@@ -74,12 +74,12 @@ class TestImage:

     def test_sanity(self) -> None:
         im = Image.new("L", (100, 100))
-        assert repr(im)[:45] == "<PIL.Image.Image image mode=L size=100x100 at"
+        assert repr(im).startswith("<PIL.Image.Image image mode=L size=100x100 at")
         assert im.mode == "L"
         assert im.size == (100, 100)

         im = Image.new("RGB", (100, 100))
-        assert repr(im)[:45] == "<PIL.Image.Image image mode=RGB size=100x100 "
+        assert repr(im).startswith("<PIL.Image.Image image mode=RGB size=100x100 ")
         assert im.mode == "RGB"
         assert im.size == (100, 100)

@@ -189,8 +189,6 @@ class TestImage:
         if ext == ".jp2" and not features.check_codec("jpg_2000"):
             pytest.skip("jpg_2000 not available")
         temp_file = str(tmp_path / ("temp." + ext))
-        if os.path.exists(temp_file):
-            os.remove(temp_file)
         im.save(Path(temp_file))

     def test_fp_name(self, tmp_path: Path) -> None:
@@ -580,9 +578,7 @@ class TestImage:
     def test_one_item_tuple(self) -> None:
         for mode in ("I", "F", "L"):
             im = Image.new(mode, (100, 100), (5,))
-            px = im.load()
-            assert px is not None
-            assert px[0, 0] == 5
+            assert im.getpixel((0, 0)) == 5

     def test_linear_gradient_wrong_mode(self) -> None:
         # Arrange
@@ -662,12 +658,13 @@ class TestImage:
         im.putpalette(list(range(256)) * 4, "RGBA")
         im_remapped = im.remap_palette(list(range(256)))
         assert_image_equal(im, im_remapped)
+        assert im.palette is not None
         assert im.palette.palette == im_remapped.palette.palette

         # Test illegal image mode
         with hopper() as im:
             with pytest.raises(ValueError):
-                im.remap_palette(None)
+                im.remap_palette([])

     def test_remap_palette_transparency(self) -> None:
         im = Image.new("P", (1, 2), (0, 0, 0))
@@ -737,6 +734,8 @@ class TestImage:
         # Act/Assert
         with Image.open(test_file) as im:
             with warnings.catch_warnings():
+                warnings.simplefilter("error")
+
                 im.save(temp_file)

     def test_no_new_file_on_error(self, tmp_path: Path) -> None:
@@ -768,7 +767,7 @@ class TestImage:
         assert dict(exif)

         # Test that exif data is cleared after another load
-        exif.load(None)
+        exif.load(b"")
         assert not dict(exif)

         # Test loading just the EXIF header
@@ -791,6 +790,10 @@ class TestImage:
         ifd[36864] = b"0220"
         assert exif.get_ifd(0x8769) == {36864: b"0220"}

+        reloaded_exif = Image.Exif()
+        reloaded_exif.load(exif.tobytes())
+        assert reloaded_exif.get_ifd(0x8769) == {36864: b"0220"}
+
     @mark_if_feature_version(
         pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
     )
@@ -985,6 +988,11 @@ class TestImage:
         else:
             assert im.getxmp() == {"xmpmeta": None}

+    def test_get_child_images(self) -> None:
+        im = Image.new("RGB", (1, 1))
+        with pytest.warns(DeprecationWarning):
+            assert im.get_child_images() == []
+
     @pytest.mark.parametrize("size", ((1, 0), (0, 1), (0, 0)))
     def test_zero_tobytes(self, size: tuple[int, int]) -> None:
         im = Image.new("RGB", size)
@@ -271,13 +271,25 @@ class TestImagePutPixelError:


 class TestEmbeddable:
-    @pytest.mark.xfail(reason="failing test")
+    @pytest.mark.xfail(not (sys.version_info >= (3, 13)), reason="failing test")
     @pytest.mark.skipif(not is_win32(), reason="requires Windows")
     def test_embeddable(self) -> None:
         import ctypes

         from setuptools.command import build_ext

+        compiler = getattr(build_ext, "new_compiler")()
+        compiler.add_include_dir(sysconfig.get_config_var("INCLUDEPY"))
+
+        libdir = sysconfig.get_config_var("LIBDIR") or sysconfig.get_config_var(
+            "INCLUDEPY"
+        ).replace("include", "libs")
+        compiler.add_library_dir(libdir)
+        try:
+            compiler.initialize()
+        except Exception:
+            pytest.skip("Compiler could not be initialized")
+
         with open("embed_pil.c", "w", encoding="utf-8") as fh:
             home = sys.prefix.replace("\\", "\\\\")
             fh.write(
@@ -305,13 +317,6 @@ int main(int argc, char* argv[])
 """
             )

-        compiler = getattr(build_ext, "new_compiler")()
-        compiler.add_include_dir(sysconfig.get_config_var("INCLUDEPY"))
-
-        libdir = sysconfig.get_config_var("LIBDIR") or sysconfig.get_config_var(
-            "INCLUDEPY"
-        ).replace("include", "libs")
-        compiler.add_library_dir(libdir)
         objects = compiler.compile(["embed_pil.c"])
         compiler.link_executable(objects, "embed_pil")
@@ -222,9 +222,7 @@ def test_l_macro_rounding(convert_mode: str) -> None:
         im.palette.getcolor((0, 1, 2))

         converted_im = im.convert(convert_mode)
-        px = converted_im.load()
-        assert px is not None
-        converted_color = px[0, 0]
+        converted_color = converted_im.getpixel((0, 0))
         if convert_mode == "LA":
             assert isinstance(converted_color, tuple)
             converted_color = converted_color[0]
@@ -236,6 +234,7 @@ def test_gif_with_rgba_palette_to_p() -> None:
     with Image.open("Tests/images/hopper.gif") as im:
         im.info["transparency"] = 255
         im.load()
+        assert im.palette is not None
         assert im.palette.mode == "RGB"
         im_p = im.convert("P")
@@ -148,10 +148,8 @@ def test_palette(method: Image.Quantize, color: tuple[int, ...]) -> None:
     im = Image.new("RGBA" if len(color) == 4 else "RGB", (1, 1), color)

     converted = im.quantize(method=method)
-    converted_px = converted.load()
-    assert converted_px is not None
     assert converted.palette is not None
-    assert converted_px[0, 0] == converted.palette.colors[color]
+    assert converted.getpixel((0, 0)) == converted.palette.colors[color]


 def test_small_palette() -> None:
@@ -10,7 +10,7 @@ from pathlib import Path

 import pytest

-from PIL import Image
+from PIL import Image, ImageFile

 from .helper import (
     assert_image_equal,
@@ -179,7 +179,7 @@ class TestImagingCoreResize:


 @pytest.fixture
-def gradients_image() -> Generator[Image.Image, None, None]:
+def gradients_image() -> Generator[ImageFile.ImageFile, None, None]:
     with Image.open("Tests/images/radial_gradients.png") as im:
         im.load()
     try:
@@ -189,7 +189,7 @@ def gradients_image() -> Generator[Image.Image, None, None]:


 class TestReducingGapResize:
-    def test_reducing_gap_values(self, gradients_image: Image.Image) -> None:
+    def test_reducing_gap_values(self, gradients_image: ImageFile.ImageFile) -> None:
         ref = gradients_image.resize(
             (52, 34), Image.Resampling.BICUBIC, reducing_gap=None
         )
@@ -210,7 +210,7 @@ class TestReducingGapResize:
     )
     def test_reducing_gap_1(
         self,
-        gradients_image: Image.Image,
+        gradients_image: ImageFile.ImageFile,
         box: tuple[float, float, float, float],
         epsilon: float,
     ) -> None:
@@ -230,7 +230,7 @@ class TestReducingGapResize:
     )
     def test_reducing_gap_2(
         self,
-        gradients_image: Image.Image,
+        gradients_image: ImageFile.ImageFile,
         box: tuple[float, float, float, float],
         epsilon: float,
     ) -> None:
@@ -250,7 +250,7 @@ class TestReducingGapResize:
     )
     def test_reducing_gap_3(
         self,
-        gradients_image: Image.Image,
+        gradients_image: ImageFile.ImageFile,
         box: tuple[float, float, float, float],
         epsilon: float,
     ) -> None:
@@ -266,7 +266,9 @@ class TestReducingGapResize:

     @pytest.mark.parametrize("box", (None, (1.1, 2.2, 510.8, 510.9), (3, 10, 410, 256)))
     def test_reducing_gap_8(
-        self, gradients_image: Image.Image, box: tuple[float, float, float, float]
+        self,
+        gradients_image: ImageFile.ImageFile,
+        box: tuple[float, float, float, float],
     ) -> None:
         ref = gradients_image.resize((52, 34), Image.Resampling.BICUBIC, box=box)
         im = gradients_image.resize(
@@ -281,7 +283,7 @@ class TestReducingGapResize:
     )
     def test_box_filter(
         self,
-        gradients_image: Image.Image,
+        gradients_image: ImageFile.ImageFile,
         box: tuple[float, float, float, float],
         epsilon: float,
     ) -> None:
@@ -307,7 +309,7 @@ class TestImageResize:
         # Test unknown resampling filter
         with hopper() as im:
             with pytest.raises(ValueError):
-                im.resize((10, 10), "unknown")
+                im.resize((10, 10), -1)

     @skip_unless_feature("libtiff")
     def test_transposed(self) -> None:
@@ -104,20 +104,20 @@ def test_transposed() -> None:
     assert im.size == (590, 88)


-def test_load_first_unless_jpeg() -> None:
+def test_load_first_unless_jpeg(monkeypatch: pytest.MonkeyPatch) -> None:
     # Test that thumbnail() still uses draft() for JPEG
     with Image.open("Tests/images/hopper.jpg") as im:
-        draft = im.draft
+        original_draft = im.draft

         def im_draft(
-            mode: str, size: tuple[int, int]
+            mode: str | None, size: tuple[int, int] | None
         ) -> tuple[str, tuple[int, int, float, float]] | None:
-            result = draft(mode, size)
+            result = original_draft(mode, size)
             assert result is not None

             return result

-        im.draft = im_draft
+        monkeypatch.setattr(im, "draft", im_draft)

         im.thumbnail((64, 64))
@@ -47,6 +47,7 @@ class TestImageTransform:
         transformed = im.transform(
             im.size, Image.Transform.AFFINE, [1, 0, 0, 0, 1, 0]
         )
+        assert im.palette is not None
         assert im.palette.palette == transformed.palette.palette

     def test_extent(self) -> None:
@@ -448,7 +448,6 @@ def test_shape1() -> None:
     x3, y3 = 95, 5

     # Act
-    assert ImageDraw.Outline is not None
     s = ImageDraw.Outline()
     s.move(x0, y0)
     s.curve(x1, y1, x2, y2, x3, y3)
@@ -470,7 +469,6 @@ def test_shape2() -> None:
     x3, y3 = 5, 95

     # Act
-    assert ImageDraw.Outline is not None
     s = ImageDraw.Outline()
     s.move(x0, y0)
     s.curve(x1, y1, x2, y2, x3, y3)
@@ -489,7 +487,6 @@ def test_transform() -> None:
     draw = ImageDraw.Draw(im)

     # Act
-    assert ImageDraw.Outline is not None
     s = ImageDraw.Outline()
     s.line(0, 0)
     s.transform((0, 0, 0, 0, 0, 0))
@@ -812,7 +809,7 @@ def test_rounded_rectangle(
         tuple[int, int, int, int]
         | tuple[list[int]]
        | tuple[tuple[int, int], tuple[int, int]]
-    )
+    ),
 ) -> None:
     # Arrange
     im = Image.new("RGB", (200, 200))
@@ -1396,6 +1393,28 @@ def test_stroke_descender() -> None:
     assert_image_similar_tofile(im, "Tests/images/imagedraw_stroke_descender.png", 6.76)


+@skip_unless_feature("freetype2")
+def test_stroke_inside_gap() -> None:
+    # Arrange
+    im = Image.new("RGB", (120, 130))
+    draw = ImageDraw.Draw(im)
+    font = ImageFont.truetype("Tests/fonts/FreeMono.ttf", 120)
+
+    # Act
+    draw.text((12, 12), "i", "#f00", font, stroke_width=20)
+
+    # Assert
+    for y in range(im.height):
+        glyph = ""
+        for x in range(im.width):
+            if im.getpixel((x, y)) == (0, 0, 0):
+                if glyph == "started":
+                    glyph = "ended"
+            else:
+                assert glyph != "ended", "Gap inside stroked glyph"
+                glyph = "started"
+
+
 @skip_unless_feature("freetype2")
 def test_split_word() -> None:
     # Arrange
@@ -1504,7 +1523,6 @@ def test_same_color_outline(bbox: Coords) -> None:
     x2, y2 = 95, 50
     x3, y3 = 95, 5

-    assert ImageDraw.Outline is not None
     s = ImageDraw.Outline()
     s.move(x0, y0)
     s.curve(x1, y1, x2, y2, x3, y3)
@@ -1674,6 +1692,9 @@ def test_continuous_horizontal_edges_polygon() -> None:
 def test_discontiguous_corners_polygon() -> None:
     img, draw = create_base_image_draw((84, 68))
     draw.polygon(((1, 21), (34, 4), (71, 1), (38, 18)), BLACK)
+    draw.polygon(
+        ((82, 29), (82, 26), (82, 24), (67, 22), (52, 29), (52, 15), (67, 22)), BLACK
+    )
     draw.polygon(((71, 44), (38, 27), (1, 24)), BLACK)
     draw.polygon(
         ((38, 66), (5, 49), (77, 49), (47, 66), (82, 63), (82, 47), (1, 47), (1, 63)),
@@ -93,6 +93,19 @@ class TestImageFile:
             assert p.image is not None
             assert (48, 48) == p.image.size

+    @pytest.mark.filterwarnings("ignore:Corrupt EXIF data")
+    def test_incremental_tiff(self) -> None:
+        with ImageFile.Parser() as p:
+            with open("Tests/images/hopper.tif", "rb") as f:
+                p.feed(f.read(1024))
+
+                # Check that insufficient data was given in the first feed
+                assert not p.image
+
+                p.feed(f.read())
+            assert p.image is not None
+            assert (128, 128) == p.image.size
+
     @skip_unless_feature("webp")
     def test_incremental_webp(self) -> None:
         with ImageFile.Parser() as p:
@@ -178,13 +191,10 @@ class TestImageFile:
             im.load()

     @skip_unless_feature("zlib")
-    def test_truncated_without_errors(self) -> None:
+    def test_truncated_without_errors(self, monkeypatch: pytest.MonkeyPatch) -> None:
         with Image.open("Tests/images/truncated_image.png") as im:
-            ImageFile.LOAD_TRUNCATED_IMAGES = True
-            try:
-                im.load()
-            finally:
-                ImageFile.LOAD_TRUNCATED_IMAGES = False
+            monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
+            im.load()

     @skip_unless_feature("zlib")
     def test_broken_datastream_with_errors(self) -> None:
@@ -193,13 +203,12 @@ class TestImageFile:
             im.load()

     @skip_unless_feature("zlib")
-    def test_broken_datastream_without_errors(self) -> None:
+    def test_broken_datastream_without_errors(
+        self, monkeypatch: pytest.MonkeyPatch
+    ) -> None:
         with Image.open("Tests/images/broken_data_stream.png") as im:
-            ImageFile.LOAD_TRUNCATED_IMAGES = True
-            try:
-                im.load()
-            finally:
-                ImageFile.LOAD_TRUNCATED_IMAGES = False
+            monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
+            im.load()


 class MockPyDecoder(ImageFile.PyDecoder):
@@ -254,7 +254,8 @@ def test_render_multiline_text(font: ImageFont.FreeTypeFont) -> None:


 @pytest.mark.parametrize(
-    "align, ext", (("left", ""), ("center", "_center"), ("right", "_right"))
+    "align, ext",
+    (("left", ""), ("center", "_center"), ("right", "_right"), ("justify", "_justify")),
 )
 def test_render_multiline_text_align(
     font: ImageFont.FreeTypeFont, align: str, ext: str
@@ -461,6 +462,20 @@ def test_free_type_font_get_mask(font: ImageFont.FreeTypeFont) -> None:
     assert mask.size == (108, 13)


+def test_stroke_mask() -> None:
+    # Arrange
+    text = "i"
+
+    # Act
+    font = ImageFont.truetype(FONT_PATH, 128)
+    mask = font.getmask(text, stroke_width=2)
+
+    # Assert
+    assert mask.getpixel((34, 5)) == 255
+    assert mask.getpixel((38, 5)) == 0
+    assert mask.getpixel((42, 5)) == 255
+
+
 def test_load_when_image_not_found() -> None:
     with tempfile.NamedTemporaryFile(delete=False) as tmp:
         pass
@@ -543,7 +558,7 @@ def test_render_empty(font: ImageFont.FreeTypeFont) -> None:

 def test_unicode_extended(layout_engine: ImageFont.Layout) -> None:
     # issue #3777
-    text = "A\u278A\U0001F12B"
+    text = "A\u278a\U0001f12b"
     target = "Tests/images/unicode_extended.png"

     ttf = ImageFont.truetype(
@@ -1012,7 +1027,7 @@ def test_sbix(layout_engine: ImageFont.Layout) -> None:
         im = Image.new("RGB", (400, 400), "white")
         d = ImageDraw.Draw(im)

-        d.text((50, 50), "\uE901", font=font, embedded_color=True)
+        d.text((50, 50), "\ue901", font=font, embedded_color=True)

         assert_image_similar_tofile(im, "Tests/images/chromacheck-sbix.png", 1)
     except OSError as e:  # pragma: no cover
@@ -1029,7 +1044,7 @@ def test_sbix_mask(layout_engine: ImageFont.Layout) -> None:
         im = Image.new("RGB", (400, 400), "white")
         d = ImageDraw.Draw(im)

-        d.text((50, 50), "\uE901", (100, 0, 0), font=font)
+        d.text((50, 50), "\ue901", (100, 0, 0), font=font)

         assert_image_similar_tofile(im, "Tests/images/chromacheck-sbix_mask.png", 1)
     except OSError as e:  # pragma: no cover
@@ -229,7 +229,7 @@ def test_getlength(
 @pytest.mark.parametrize("direction", ("ltr", "ttb"))
 @pytest.mark.parametrize(
     "text",
-    ("i" + ("\u030C" * 15) + "i", "i" + "\u032C" * 15 + "i", "\u035Cii", "i\u0305i"),
+    ("i" + ("\u030c" * 15) + "i", "i" + "\u032c" * 15 + "i", "\u035cii", "i\u0305i"),
     ids=("caron-above", "caron-below", "double-breve", "overline"),
 )
 def test_getlength_combine(mode: str, direction: str, text: str) -> None:
@@ -272,27 +272,27 @@ def test_anchor_ttb(anchor: str) -> None:

 combine_tests = (
     # extends above (e.g. issue #4553)
-    ("caron", "a\u030C\u030C\u030C\u030C\u030Cb", None, None, 0.08),
-    ("caron_la", "a\u030C\u030C\u030C\u030C\u030Cb", "la", None, 0.08),
-    ("caron_lt", "a\u030C\u030C\u030C\u030C\u030Cb", "lt", None, 0.08),
-    ("caron_ls", "a\u030C\u030C\u030C\u030C\u030Cb", "ls", None, 0.08),
-    ("caron_ttb", "ca" + ("\u030C" * 15) + "b", None, "ttb", 0.3),
-    ("caron_ttb_lt", "ca" + ("\u030C" * 15) + "b", "lt", "ttb", 0.3),
+    ("caron", "a\u030c\u030c\u030c\u030c\u030cb", None, None, 0.08),
+    ("caron_la", "a\u030c\u030c\u030c\u030c\u030cb", "la", None, 0.08),
+    ("caron_lt", "a\u030c\u030c\u030c\u030c\u030cb", "lt", None, 0.08),
+    ("caron_ls", "a\u030c\u030c\u030c\u030c\u030cb", "ls", None, 0.08),
+    ("caron_ttb", "ca" + ("\u030c" * 15) + "b", None, "ttb", 0.3),
+    ("caron_ttb_lt", "ca" + ("\u030c" * 15) + "b", "lt", "ttb", 0.3),
     # extends below
-    ("caron_below", "a\u032C\u032C\u032C\u032C\u032Cb", None, None, 0.02),
-    ("caron_below_ld", "a\u032C\u032C\u032C\u032C\u032Cb", "ld", None, 0.02),
-    ("caron_below_lb", "a\u032C\u032C\u032C\u032C\u032Cb", "lb", None, 0.02),
-    ("caron_below_ls", "a\u032C\u032C\u032C\u032C\u032Cb", "ls", None, 0.02),
-    ("caron_below_ttb", "a" + ("\u032C" * 15) + "b", None, "ttb", 0.03),
-    ("caron_below_ttb_lb", "a" + ("\u032C" * 15) + "b", "lb", "ttb", 0.03),
+    ("caron_below", "a\u032c\u032c\u032c\u032c\u032cb", None, None, 0.02),
+    ("caron_below_ld", "a\u032c\u032c\u032c\u032c\u032cb", "ld", None, 0.02),
+    ("caron_below_lb", "a\u032c\u032c\u032c\u032c\u032cb", "lb", None, 0.02),
+    ("caron_below_ls", "a\u032c\u032c\u032c\u032c\u032cb", "ls", None, 0.02),
+    ("caron_below_ttb", "a" + ("\u032c" * 15) + "b", None, "ttb", 0.03),
+    ("caron_below_ttb_lb", "a" + ("\u032c" * 15) + "b", "lb", "ttb", 0.03),
     # extends to the right (e.g. issue #3745)
-    ("double_breve_below", "a\u035Ci", None, None, 0.02),
-    ("double_breve_below_ma", "a\u035Ci", "ma", None, 0.02),
-    ("double_breve_below_ra", "a\u035Ci", "ra", None, 0.02),
-    ("double_breve_below_ttb", "a\u035Cb", None, "ttb", 0.02),
-    ("double_breve_below_ttb_rt", "a\u035Cb", "rt", "ttb", 0.02),
-    ("double_breve_below_ttb_mt", "a\u035Cb", "mt", "ttb", 0.02),
-    ("double_breve_below_ttb_st", "a\u035Cb", "st", "ttb", 0.02),
+    ("double_breve_below", "a\u035ci", None, None, 0.02),
+    ("double_breve_below_ma", "a\u035ci", "ma", None, 0.02),
+    ("double_breve_below_ra", "a\u035ci", "ra", None, 0.02),
+    ("double_breve_below_ttb", "a\u035cb", None, "ttb", 0.02),
+    ("double_breve_below_ttb_rt", "a\u035cb", "rt", "ttb", 0.02),
+    ("double_breve_below_ttb_mt", "a\u035cb", "mt", "ttb", 0.02),
+    ("double_breve_below_ttb_st", "a\u035cb", "st", "ttb", 0.02),
     # extends to the left (fail=0.064)
     ("overline", "i\u0305", None, None, 0.02),
     ("overline_la", "i\u0305", "la", None, 0.02),
@@ -346,7 +346,7 @@ def test_combine_multiline(anchor: str, align: str) -> None:

     path = f"Tests/images/test_combine_multiline_{anchor}_{align}.png"
     f = ImageFont.truetype("Tests/fonts/NotoSans-Regular.ttf", 48)
-    text = "i\u0305\u035C\ntext"  # i with overline and double breve, and a word
+    text = "i\u0305\u035c\ntext"  # i with overline and double breve, and a word

     im = Image.new("RGB", (400, 400), "white")
     d = ImageDraw.Draw(im)
@@ -165,14 +165,10 @@ def test_pad() -> None:
 def test_pad_round() -> None:
     im = Image.new("1", (1, 1), 1)
     new_im = ImageOps.pad(im, (4, 1))
-    px = new_im.load()
-    assert px is not None
-    assert px[2, 0] == 1
+    assert new_im.getpixel((2, 0)) == 1

     new_im = ImageOps.pad(im, (1, 4))
-    px = new_im.load()
-    assert px is not None
-    assert px[0, 2] == 1
+    assert new_im.getpixel((0, 2)) == 1


 @pytest.mark.parametrize("mode", ("P", "PA"))
@@ -405,7 +401,6 @@ def test_exif_transpose() -> None:
         else:
             original_exif = im.info["exif"]
         transposed_im = ImageOps.exif_transpose(im)
-        assert transposed_im is not None
         assert_image_similar(base_im, transposed_im, 17)
         if orientation_im is base_im:
             assert "exif" not in im.info
@@ -417,7 +412,6 @@ def test_exif_transpose() -> None:

         # Repeat the operation to test that it does not keep transposing
         transposed_im2 = ImageOps.exif_transpose(transposed_im)
-        assert transposed_im2 is not None
         assert_image_equal(transposed_im2, transposed_im)

     check(base_im)
@@ -433,7 +427,6 @@ def test_exif_transpose() -> None:
         assert im.getexif()[0x0112] == 3

         transposed_im = ImageOps.exif_transpose(im)
-        assert transposed_im is not None
         assert 0x0112 not in transposed_im.getexif()

         transposed_im._reload_exif()
@@ -446,17 +439,24 @@ def test_exif_transpose() -> None:
         assert im.getexif()[0x0112] == 3

         transposed_im = ImageOps.exif_transpose(im)
-        assert transposed_im is not None
         assert 0x0112 not in transposed_im.getexif()

     # Orientation set directly on Image.Exif
     im = hopper()
     im.getexif()[0x0112] = 3
     transposed_im = ImageOps.exif_transpose(im)
-    assert transposed_im is not None
     assert 0x0112 not in transposed_im.getexif()


+def test_exif_transpose_with_xmp_tuple() -> None:
+    with Image.open("Tests/images/xmp_tags_orientation.png") as im:
+        assert im.getexif()[0x0112] == 3
+
+        im.info["xmp"] = (b"test",)
+        transposed_im = ImageOps.exif_transpose(im)
+        assert 0x0112 not in transposed_im.getexif()
+
+
 def test_exif_transpose_xml_without_xmp() -> None:
     with Image.open("Tests/images/xmp_tags_orientation.png") as im:
         assert im.getexif()[0x0112] == 3
@@ -464,7 +464,6 @@ def test_exif_transpose_xml_without_xmp() -> None:

         del im.info["xmp"]
         transposed_im = ImageOps.exif_transpose(im)
-        assert transposed_im is not None
         assert 0x0112 not in transposed_im.getexif()
@@ -17,6 +17,7 @@ def test_sanity() -> None:
 def test_reload() -> None:
     with Image.open("Tests/images/hopper.gif") as im:
         original = im.copy()
+        assert im.palette is not None
         im.palette.dirty = 1
         assert_image_equal(im.convert("RGB"), original.convert("RGB"))

@@ -189,7 +190,7 @@ def test_2bit_palette(tmp_path: Path) -> None:

     rgb = b"\x00" * 2 + b"\x01" * 2 + b"\x02" * 2
     img = Image.frombytes("P", (6, 1), rgb)
-    img.putpalette(b"\xFF\x00\x00\x00\xFF\x00\x00\x00\xFF")  # RGB
+    img.putpalette(b"\xff\x00\x00\x00\xff\x00\x00\x00\xff")  # RGB
     img.save(outfile, format="PNG")

     assert_image_equal_tofile(img, outfile)
@@ -79,7 +79,7 @@ def test_path_constructors(
     ),
 )
 def test_invalid_path_constructors(
-    coords: tuple[str, str] | Sequence[Sequence[int]]
+    coords: tuple[str, str] | Sequence[Sequence[int]],
 ) -> None:
     # Act
     with pytest.raises(ValueError) as e:
@@ -52,4 +52,6 @@ def test_image(mode: str) -> None:

 def test_closed_file() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         ImageQt.ImageQt("Tests/images/hopper.gif")
@@ -7,36 +7,30 @@ import pytest
 from PIL import Image


-def test_overflow() -> None:
+def test_overflow(monkeypatch: pytest.MonkeyPatch) -> None:
     # There is the potential to overflow comparisons in map.c
     # if there are > SIZE_MAX bytes in the image or if
     # the file encodes an offset that makes
     # (offset + size(bytes)) > SIZE_MAX

     # Note that this image triggers the decompression bomb warning:
-    max_pixels = Image.MAX_IMAGE_PIXELS
-    Image.MAX_IMAGE_PIXELS = None
+    monkeypatch.setattr(Image, "MAX_IMAGE_PIXELS", None)

     # This image hits the offset test.
     with Image.open("Tests/images/l2rgb_read.bmp") as im:
         with pytest.raises((ValueError, MemoryError, OSError)):
             im.load()

-    Image.MAX_IMAGE_PIXELS = max_pixels
-

-def test_tobytes() -> None:
+def test_tobytes(monkeypatch: pytest.MonkeyPatch) -> None:
     # Note that this image triggers the decompression bomb warning:
-    max_pixels = Image.MAX_IMAGE_PIXELS
-    Image.MAX_IMAGE_PIXELS = None
+    monkeypatch.setattr(Image, "MAX_IMAGE_PIXELS", None)

     # Previously raised an access violation on Windows
     with Image.open("Tests/images/l2rgb_read.bmp") as im:
         with pytest.raises((ValueError, MemoryError, OSError)):
             im.tobytes()

-    Image.MAX_IMAGE_PIXELS = max_pixels
-

 @pytest.mark.skipif(sys.maxsize <= 2**32, reason="Requires 64-bit system")
 def test_ysize() -> None:
@@ -141,9 +141,7 @@ def test_save_tiff_uint16() -> None:
     a.shape = TEST_IMAGE_SIZE
     img = Image.fromarray(a)

-    img_px = img.load()
-    assert img_px is not None
-    assert img_px[0, 0] == pixel_value
+    assert img.getpixel((0, 0)) == pixel_value


 @pytest.mark.parametrize(
@@ -264,4 +262,6 @@ def test_no_resource_warning_for_numpy_array() -> None:
     with Image.open(test_file) as im:
         # Act/Assert
         with warnings.catch_warnings():
+            warnings.simplefilter("error")
+
             array(im)
@@ -20,10 +20,10 @@ from PIL.PdfParser import (


 def test_text_encode_decode() -> None:
-    assert encode_text("abc") == b"\xFE\xFF\x00a\x00b\x00c"
-    assert decode_text(b"\xFE\xFF\x00a\x00b\x00c") == "abc"
+    assert encode_text("abc") == b"\xfe\xff\x00a\x00b\x00c"
+    assert decode_text(b"\xfe\xff\x00a\x00b\x00c") == "abc"
     assert decode_text(b"abc") == "abc"
-    assert decode_text(b"\x1B a \x1C") == "\u02D9 a \u02DD"
+    assert decode_text(b"\x1b a \x1c") == "\u02d9 a \u02dd"


 def test_indirect_refs() -> None:
@@ -45,8 +45,8 @@ def test_parsing() -> None:
     assert PdfParser.get_value(b"false%", 0) == (False, 5)
     assert PdfParser.get_value(b"null<", 0) == (None, 4)
     assert PdfParser.get_value(b"%cmt\n %cmt\n 123\n", 0) == (123, 15)
-    assert PdfParser.get_value(b"<901FA3>", 0) == (b"\x90\x1F\xA3", 8)
-    assert PdfParser.get_value(b"asd < 9 0 1 f A > qwe", 3) == (b"\x90\x1F\xA0", 17)
+    assert PdfParser.get_value(b"<901FA3>", 0) == (b"\x90\x1f\xa3", 8)
+    assert PdfParser.get_value(b"asd < 9 0 1 f A > qwe", 3) == (b"\x90\x1f\xa0", 17)
     assert PdfParser.get_value(b"(asd)", 0) == (b"asd", 5)
     assert PdfParser.get_value(b"(asd(qwe)zxc)zzz(aaa)", 0) == (b"asd(qwe)zxc", 13)
     assert PdfParser.get_value(b"(Two \\\nwords.)", 0) == (b"Two words.", 14)
@@ -56,9 +56,9 @@ def test_parsing() -> None:
     assert PdfParser.get_value(b"(One\\(paren).", 0) == (b"One(paren", 12)
     assert PdfParser.get_value(b"(One\\)paren).", 0) == (b"One)paren", 12)
     assert PdfParser.get_value(b"(\\0053)", 0) == (b"\x053", 7)
-    assert PdfParser.get_value(b"(\\053)", 0) == (b"\x2B", 6)
-    assert PdfParser.get_value(b"(\\53)", 0) == (b"\x2B", 5)
-    assert PdfParser.get_value(b"(\\53a)", 0) == (b"\x2Ba", 6)
+    assert PdfParser.get_value(b"(\\053)", 0) == (b"\x2b", 6)
+    assert PdfParser.get_value(b"(\\53)", 0) == (b"\x2b", 5)
+    assert PdfParser.get_value(b"(\\53a)", 0) == (b"\x2ba", 6)
     assert PdfParser.get_value(b"(\\1111)", 0) == (b"\x491", 7)
     assert PdfParser.get_value(b" 123 (", 0) == (123, 4)
     assert round(abs(PdfParser.get_value(b" 123.4 %", 0)[0] - 123.4), 7) == 0
@@ -118,7 +118,7 @@ def test_pdf_repr() -> None:
     assert pdf_repr(None) == b"null"
     assert pdf_repr(b"a)/b\\(c") == rb"(a\)/b\\\(c)"
     assert pdf_repr([123, True, {"a": PdfName(b"b")}]) == b"[ 123 true <<\n/a /b\n>> ]"
-    assert pdf_repr(PdfBinary(b"\x90\x1F\xA0")) == b"<901FA0>"
+    assert pdf_repr(PdfBinary(b"\x90\x1f\xa0")) == b"<901FA0>"


 def test_duplicate_xref_entry() -> None:
@@ -74,6 +74,17 @@ def test_pickle_image(
     helper_pickle_file(tmp_path, protocol, test_file, test_mode)


+def test_pickle_jpeg() -> None:
+    # Arrange
+    with Image.open("Tests/images/hopper.jpg") as image:
+        # Act: roundtrip
+        unpickled_image = pickle.loads(pickle.dumps(image))
+
+    # Assert
+    assert len(unpickled_image.layer) == 3
+    assert unpickled_image.layers == 3
+
+
 def test_pickle_la_mode_with_palette(tmp_path: Path) -> None:
     # Arrange
     filename = str(tmp_path / "temp.pkl")