Mirror of https://github.com/python-pillow/Pillow.git
Synced 2025-03-13 01:05:48 +03:00

Merge branch 'main' into progress

Commit 62786fd2c1
@@ -21,7 +21,7 @@ set -e
 if [[ $(uname) != CYGWIN* ]]; then
 sudo apt-get -qq install libfreetype6-dev liblcms2-dev python3-tk\
-ghostscript libjpeg-turbo-progs libopenjp2-7-dev\
+ghostscript libjpeg-turbo8-dev libopenjp2-7-dev\
 cmake meson imagemagick libharfbuzz-dev libfribidi-dev\
 sway wl-clipboard libopenblas-dev
 fi
@@ -1 +1 @@
-cibuildwheel==2.21.3
+cibuildwheel==2.22.0
@@ -1,4 +1,4 @@
-mypy==1.11.2
+mypy==1.14.0
 IceSpringPySideStubs-PyQt6
 IceSpringPySideStubs-PySide6
 ipython
.github/CONTRIBUTING.md
@@ -19,7 +19,6 @@ Please send a pull request to the `main` branch. Please include [documentation](
 - Follow PEP 8.
 - When committing only documentation changes please include `[ci skip]` in the commit message to avoid running tests on AppVeyor.
 - Include [release notes](https://github.com/python-pillow/Pillow/tree/main/docs/releasenotes) as needed or appropriate with your bug fixes, feature additions and tests.
-- Do not add to the [changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst) for proposed changes, as that is updated after changes are merged.

 ## Reporting Issues
.github/release-drafter.yml
@@ -3,18 +3,19 @@ tag-template: "$NEXT_MINOR_VERSION"
 change-template: '- $TITLE #$NUMBER [@$AUTHOR]'

 categories:
-- title: "Dependencies"
-label: "Dependency"
+- title: "Removals"
+label: "Removal"
 - title: "Deprecations"
 label: "Deprecation"
 - title: "Documentation"
 label: "Documentation"
-- title: "Removals"
-label: "Removal"
+- title: "Dependencies"
+label: "Dependency"
 - title: "Testing"
 label: "Testing"
 - title: "Type hints"
 label: "Type hints"
+- title: "Other changes"

 exclude-labels:
 - "changelog: skip"
@@ -23,6 +24,4 @@ template: |
 https://pillow.readthedocs.io/en/stable/releasenotes/$NEXT_MINOR_VERSION.html

-## Changes
-
 $CHANGES
.github/renovate.json
@@ -1,7 +1,7 @@
 {
 "$schema": "https://docs.renovatebot.com/renovate-schema.json",
 "extends": [
-"config:base"
+"config:recommended"
 ],
 "labels": [
 "Dependency"
@@ -9,9 +9,13 @@
 "packageRules": [
 {
 "groupName": "github-actions",
-"matchManagers": ["github-actions"],
-"separateMajorMinor": "false"
+"matchManagers": [
+"github-actions"
+],
+"separateMajorMinor": false
 }
 ],
-"schedule": ["on the 3rd day of the month"]
+"schedule": [
+"on the 3rd day of the month"
+]
 }
.github/workflows/docs.yml
@@ -33,6 +33,8 @@ jobs:

 steps:
 - uses: actions/checkout@v4
+with:
+persist-credentials: false

 - name: Set up Python
 uses: actions/setup-python@v5
.github/workflows/lint.yml
@@ -21,6 +21,8 @@ jobs:

 steps:
 - uses: actions/checkout@v4
+with:
+persist-credentials: false

 - name: pre-commit cache
 uses: actions/cache@v4
.github/workflows/macos-install.sh
@@ -8,8 +8,8 @@ fi
 brew install \
 freetype \
 ghostscript \
+jpeg-turbo \
 libimagequant \
-libjpeg \
 libtiff \
 little-cms2 \
 openjpeg \
.github/workflows/stale.yml
@@ -6,7 +6,7 @@ on:
 workflow_dispatch:

 permissions:
-issues: write
+contents: read

 concurrency:
 group: ${{ github.workflow }}-${{ github.ref }}
@@ -15,6 +15,8 @@ concurrency:
 jobs:
 stale:
 if: github.repository_owner == 'python-pillow'
+permissions:
+issues: write

 runs-on: ubuntu-latest
.github/workflows/test-cygwin.yml
@@ -48,6 +48,8 @@ jobs:

 - name: Checkout Pillow
 uses: actions/checkout@v4
+with:
+persist-credentials: false

 - name: Install Cygwin
 uses: cygwin/cygwin-install-action@v4
@@ -131,11 +133,12 @@ jobs:
 - name: After success
 run: |
 bash.exe .ci/after_success.sh
+rm C:\cygwin\bin\bash.EXE

 - name: Upload coverage
-uses: codecov/codecov-action@v4
+uses: codecov/codecov-action@v5
 with:
-file: ./coverage.xml
+files: ./coverage.xml
 flags: GHA_Cygwin
 name: Cygwin Python 3.${{ matrix.python-minor-version }}
 token: ${{ secrets.CODECOV_ORG_TOKEN }}
.github/workflows/test-docker.yml
@@ -46,8 +46,8 @@ jobs:
 centos-stream-9-amd64,
 debian-12-bookworm-x86,
 debian-12-bookworm-amd64,
-fedora-39-amd64,
 fedora-40-amd64,
+fedora-41-amd64,
 gentoo,
 ubuntu-22.04-jammy-amd64,
 ubuntu-24.04-noble-amd64,
@@ -65,6 +65,8 @@ jobs:

 steps:
 - uses: actions/checkout@v4
+with:
+persist-credentials: false

 - name: Build system information
 run: python3 .github/workflows/system-info.py
@@ -98,11 +100,10 @@ jobs:
 MATRIX_DOCKER: ${{ matrix.docker }}

 - name: Upload coverage
-uses: codecov/codecov-action@v4
+uses: codecov/codecov-action@v5
 with:
 flags: GHA_Docker
 name: ${{ matrix.docker }}
-gcov: true
 token: ${{ secrets.CODECOV_ORG_TOKEN }}

 success:
.github/workflows/test-mingw.yml
@@ -46,6 +46,8 @@ jobs:
 steps:
 - name: Checkout Pillow
 uses: actions/checkout@v4
+with:
+persist-credentials: false

 - name: Set up shell
 run: echo "C:\msys64\usr\bin\" >> $env:GITHUB_PATH
@@ -66,16 +68,16 @@ jobs:
 mingw-w64-x86_64-openjpeg2 \
 mingw-w64-x86_64-python3-numpy \
 mingw-w64-x86_64-python3-olefile \
-mingw-w64-x86_64-python3-setuptools \
+mingw-w64-x86_64-python3-pip \
+mingw-w64-x86_64-python-pytest \
+mingw-w64-x86_64-python-pytest-cov \
+mingw-w64-x86_64-python-pytest-timeout \
 mingw-w64-x86_64-python-pyqt6

-python3 -m ensurepip
-python3 -m pip install pyroma pytest pytest-cov pytest-timeout
-
 pushd depends && ./install_extra_test_images.sh && popd

 - name: Build Pillow
-run: SETUPTOOLS_USE_DISTUTILS="stdlib" CFLAGS="-coverage" python3 -m pip install .
+run: CFLAGS="-coverage" python3 -m pip install .

 - name: Test Pillow
 run: |
@@ -83,9 +85,9 @@ jobs:
 .ci/test.sh

 - name: Upload coverage
-uses: codecov/codecov-action@v4
+uses: codecov/codecov-action@v5
 with:
-file: ./coverage.xml
+files: ./coverage.xml
 flags: GHA_Windows
 name: "MSYS2 MinGW"
 token: ${{ secrets.CODECOV_ORG_TOKEN }}
.github/workflows/test-valgrind.yml
@@ -40,6 +40,8 @@ jobs:

 steps:
 - uses: actions/checkout@v4
+with:
+persist-credentials: false

 - name: Build system information
 run: python3 .github/workflows/system-info.py
.github/workflows/test-windows.yml
@@ -44,16 +44,20 @@ jobs:
 steps:
 - name: Checkout Pillow
 uses: actions/checkout@v4
+with:
+persist-credentials: false

 - name: Checkout cached dependencies
 uses: actions/checkout@v4
 with:
+persist-credentials: false
 repository: python-pillow/pillow-depends
 path: winbuild\depends

 - name: Checkout extra test images
 uses: actions/checkout@v4
 with:
+persist-credentials: false
 repository: python-pillow/test-images
 path: Tests\test-images

@@ -69,16 +73,14 @@ jobs:
 - name: Print build system information
 run: python3 .github/workflows/system-info.py

-- name: Install Python dependencies
-run: >
-python3 -m pip install
-coverage>=7.4.2
-defusedxml
-olefile
-pyroma
-pytest
-pytest-cov
-pytest-timeout
+- name: Upgrade pip
+run: |
+python3 -m pip install --upgrade pip
+
+- name: Install CPython dependencies
+if: "!contains(matrix.python-version, 'pypy')"
+run: |
+python3 -m pip install PyQt6

 - name: Install dependencies
 id: install
@@ -178,7 +180,7 @@ jobs:
 - name: Build Pillow
 run: |
 $FLAGS="-C raqm=vendor -C fribidi=vendor"
-cmd /c "winbuild\build\build_env.cmd && $env:pythonLocation\python.exe -m pip install -v $FLAGS ."
+cmd /c "winbuild\build\build_env.cmd && $env:pythonLocation\python.exe -m pip install -v $FLAGS .[tests]"
 & $env:pythonLocation\python.exe selftest.py --installed
 shell: pwsh

@@ -213,9 +215,9 @@ jobs:
 shell: pwsh

 - name: Upload coverage
-uses: codecov/codecov-action@v4
+uses: codecov/codecov-action@v5
 with:
-file: ./coverage.xml
+files: ./coverage.xml
 flags: GHA_Windows
 name: ${{ runner.os }} Python ${{ matrix.python-version }}
 token: ${{ secrets.CODECOV_ORG_TOKEN }}
.github/workflows/test.yml
@@ -42,6 +42,7 @@ jobs:
 ]
 python-version: [
 "pypy3.10",
+"3.13t",
 "3.13",
 "3.12",
 "3.11",
@@ -52,21 +53,22 @@ jobs:
 - { python-version: "3.11", PYTHONOPTIMIZE: 1, REVERSE: "--reverse" }
 - { python-version: "3.10", PYTHONOPTIMIZE: 2 }
 # Free-threaded
-- { os: "ubuntu-latest", python-version: "3.13-dev", disable-gil: true }
+- { python-version: "3.13t", disable-gil: true }
 # M1 only available for 3.10+
 - { os: "macos-13", python-version: "3.9" }
 exclude:
 - { os: "macos-latest", python-version: "3.9" }

 runs-on: ${{ matrix.os }}
-name: ${{ matrix.os }} Python ${{ matrix.python-version }} ${{ matrix.disable-gil && 'free-threaded' || '' }}
+name: ${{ matrix.os }} Python ${{ matrix.python-version }}

 steps:
 - uses: actions/checkout@v4
+with:
+persist-credentials: false

 - name: Set up Python ${{ matrix.python-version }}
-uses: actions/setup-python@v5
-if: "${{ !matrix.disable-gil }}"
+uses: Quansight-Labs/setup-python@v5
 with:
 python-version: ${{ matrix.python-version }}
 allow-prereleases: true
@@ -75,13 +77,6 @@ jobs:
 ".ci/*.sh"
 "pyproject.toml"

-- name: Set up Python ${{ matrix.python-version }} (free-threaded)
-uses: deadsnakes/action@v3.2.0
-if: "${{ matrix.disable-gil }}"
-with:
-python-version: ${{ matrix.python-version }}
-nogil: ${{ matrix.disable-gil }}
-
 - name: Set PYTHON_GIL
 if: "${{ matrix.disable-gil }}"
 run: |
@@ -114,7 +109,7 @@ jobs:
 GHA_PYTHON_VERSION: ${{ matrix.python-version }}

 - name: Register gcc problem matcher
-if: "matrix.os == 'ubuntu-latest' && matrix.python-version == '3.12'"
+if: "matrix.os == 'ubuntu-latest' && matrix.python-version == '3.13'"
 run: echo "::add-matcher::.github/problem-matchers/gcc.json"

 - name: Build
@@ -154,11 +149,10 @@ jobs:
 .ci/after_success.sh

 - name: Upload coverage
-uses: codecov/codecov-action@v4
+uses: codecov/codecov-action@v5
 with:
 flags: ${{ matrix.os == 'ubuntu-latest' && 'GHA_Ubuntu' || 'GHA_macOS' }}
 name: ${{ matrix.os }} Python ${{ matrix.python-version }}
-gcov: true
 token: ${{ secrets.CODECOV_ORG_TOKEN }}

 success:
.github/workflows/wheels-dependencies.sh
@@ -1,11 +1,33 @@
 #!/bin/bash
-# Define custom utilities
-# Test for macOS with [ -n "$IS_MACOS" ]
-if [ -z "$IS_MACOS" ]; then
-export MB_ML_LIBC=${AUDITWHEEL_POLICY::9}
-export MB_ML_VER=${AUDITWHEEL_POLICY:9}
+
+# Setup that needs to be done before multibuild utils are invoked
+PROJECTDIR=$(pwd)
+if [[ "$(uname -s)" == "Darwin" ]]; then
+# Safety check - macOS builds require that CIBW_ARCHS is set, and that it
+# only contains a single value (even though cibuildwheel allows multiple
+# values in CIBW_ARCHS).
+if [[ -z "$CIBW_ARCHS" ]]; then
+echo "ERROR: Pillow macOS builds require CIBW_ARCHS be defined."
+exit 1
+fi
+if [[ "$CIBW_ARCHS" == *" "* ]]; then
+echo "ERROR: Pillow macOS builds only support a single architecture in CIBW_ARCHS."
+exit 1
+fi
+
+# Build macOS dependencies in `build/darwin`
+# Install them into `build/deps/darwin`
+WORKDIR=$(pwd)/build/darwin
+BUILD_PREFIX=$(pwd)/build/deps/darwin
+else
+# Build prefix will default to /usr/local
+WORKDIR=$(pwd)/build
+MB_ML_LIBC=${AUDITWHEEL_POLICY::9}
+MB_ML_VER=${AUDITWHEEL_POLICY:9}
 fi
-export PLAT=$CIBW_ARCHS
+PLAT=$CIBW_ARCHS
+
+# Define custom utilities
 source wheels/multibuild/common_utils.sh
 source wheels/multibuild/library_builders.sh
 if [ -z "$IS_MACOS" ]; then
@@ -16,10 +38,10 @@ ARCHIVE_SDIR=pillow-depends-main

 # Package versions for fresh source builds
 FREETYPE_VERSION=2.13.2
-HARFBUZZ_VERSION=10.0.1
+HARFBUZZ_VERSION=10.1.0
 LIBPNG_VERSION=1.6.44
-JPEGTURBO_VERSION=3.0.4
-OPENJPEG_VERSION=2.5.2
+JPEGTURBO_VERSION=3.1.0
+OPENJPEG_VERSION=2.5.3
 XZ_VERSION=5.6.3
 TIFF_VERSION=4.6.0
 LCMS2_VERSION=2.16
@@ -28,82 +50,90 @@ if [[ -n "$IS_MACOS" ]]; then
 else
 GIFLIB_VERSION=5.2.1
 fi
-if [[ -n "$IS_MACOS" ]] || [[ "$MB_ML_VER" != 2014 ]]; then
-ZLIB_VERSION=1.3.1
-else
-ZLIB_VERSION=1.2.8
-fi
-LIBWEBP_VERSION=1.4.0
+ZLIB_NG_VERSION=2.2.2
+LIBWEBP_VERSION=1.5.0
 BZIP2_VERSION=1.0.8
 LIBXCB_VERSION=1.17.0
 BROTLI_VERSION=1.1.0

-if [[ -n "$IS_MACOS" ]] && [[ "$CIBW_ARCHS" == "x86_64" ]]; then
-function build_openjpeg {
-local out_dir=$(fetch_unpack https://github.com/uclouvain/openjpeg/archive/v$OPENJPEG_VERSION.tar.gz openjpeg-$OPENJPEG_VERSION.tar.gz)
-(cd $out_dir \
-&& cmake -DCMAKE_INSTALL_PREFIX=$BUILD_PREFIX -DCMAKE_INSTALL_NAME_DIR=$BUILD_PREFIX/lib . \
-&& make install)
-touch openjpeg-stamp
-}
-fi
+function build_pkg_config {
+if [ -e pkg-config-stamp ]; then return; fi
+# This essentially duplicates the Homebrew recipe
+ORIGINAL_CFLAGS=$CFLAGS
+CFLAGS="$CFLAGS -Wno-int-conversion"
+build_simple pkg-config 0.29.2 https://pkg-config.freedesktop.org/releases tar.gz \
+--disable-debug --disable-host-tool --with-internal-glib \
+--with-pc-path=$BUILD_PREFIX/share/pkgconfig:$BUILD_PREFIX/lib/pkgconfig \
+--with-system-include-path=$(xcrun --show-sdk-path --sdk macosx)/usr/include
+CFLAGS=$ORIGINAL_CFLAGS
+export PKG_CONFIG=$BUILD_PREFIX/bin/pkg-config
+touch pkg-config-stamp
+}
+
+function build_zlib_ng {
+if [ -e zlib-stamp ]; then return; fi
+fetch_unpack https://github.com/zlib-ng/zlib-ng/archive/$ZLIB_NG_VERSION.tar.gz zlib-ng-$ZLIB_NG_VERSION.tar.gz
+(cd zlib-ng-$ZLIB_NG_VERSION \
+&& ./configure --prefix=$BUILD_PREFIX --zlib-compat \
+&& make -j4 \
+&& make install)
+touch zlib-stamp
+}

 function build_brotli {
-local cmake=$(get_modern_cmake)
+if [ -e brotli-stamp ]; then return; fi
 local out_dir=$(fetch_unpack https://github.com/google/brotli/archive/v$BROTLI_VERSION.tar.gz brotli-$BROTLI_VERSION.tar.gz)
 (cd $out_dir \
-&& $cmake -DCMAKE_INSTALL_PREFIX=$BUILD_PREFIX -DCMAKE_INSTALL_NAME_DIR=$BUILD_PREFIX/lib . \
+&& cmake -DCMAKE_INSTALL_PREFIX=$BUILD_PREFIX -DCMAKE_INSTALL_LIBDIR=$BUILD_PREFIX/lib -DCMAKE_INSTALL_NAME_DIR=$BUILD_PREFIX/lib . \
 && make install)
-if [[ "$MB_ML_LIBC" == "manylinux" ]]; then
-cp /usr/local/lib64/libbrotli* /usr/local/lib
-cp /usr/local/lib64/pkgconfig/libbrotli* /usr/local/lib/pkgconfig
-fi
+touch brotli-stamp
 }

 function build_harfbuzz {
+if [ -e harfbuzz-stamp ]; then return; fi
 python3 -m pip install meson ninja

-local out_dir=$(fetch_unpack https://github.com/harfbuzz/harfbuzz/releases/download/$HARFBUZZ_VERSION/$HARFBUZZ_VERSION.tar.xz harfbuzz-$HARFBUZZ_VERSION.tar.xz)
+local out_dir=$(fetch_unpack https://github.com/harfbuzz/harfbuzz/releases/download/$HARFBUZZ_VERSION/harfbuzz-$HARFBUZZ_VERSION.tar.xz harfbuzz-$HARFBUZZ_VERSION.tar.xz)
 (cd $out_dir \
-&& meson setup build --buildtype=release -Dfreetype=enabled -Dglib=disabled)
+&& meson setup build --prefix=$BUILD_PREFIX --libdir=$BUILD_PREFIX/lib --buildtype=release -Dfreetype=enabled -Dglib=disabled)
 (cd $out_dir/build \
 && meson install)
-if [[ "$MB_ML_LIBC" == "manylinux" ]]; then
-cp /usr/local/lib64/libharfbuzz* /usr/local/lib
-fi
+touch harfbuzz-stamp
 }

 function build {
-if [[ -n "$IS_MACOS" ]] && [[ "$CIBW_ARCHS" == "arm64" ]]; then
-sudo chown -R runner /usr/local
-fi
 build_xz
 if [ -z "$IS_ALPINE" ] && [ -z "$IS_MACOS" ]; then
 yum remove -y zlib-devel
 fi
-build_new_zlib
+build_zlib_ng

 build_simple xcb-proto 1.17.0 https://xorg.freedesktop.org/archive/individual/proto
 if [ -n "$IS_MACOS" ]; then
 build_simple xorgproto 2024.1 https://www.x.org/pub/individual/proto
-build_simple libXau 1.0.11 https://www.x.org/pub/individual/lib
+build_simple libXau 1.0.12 https://www.x.org/pub/individual/lib
 build_simple libpthread-stubs 0.5 https://xcb.freedesktop.org/dist
-if [[ "$CIBW_ARCHS" == "arm64" ]]; then
-cp /usr/local/share/pkgconfig/xcb-proto.pc /usr/local/lib/pkgconfig
-fi
 else
-sed s/\${pc_sysrootdir\}// /usr/local/share/pkgconfig/xcb-proto.pc > /usr/local/lib/pkgconfig/xcb-proto.pc
+sed s/\${pc_sysrootdir\}// $BUILD_PREFIX/share/pkgconfig/xcb-proto.pc > $BUILD_PREFIX/lib/pkgconfig/xcb-proto.pc
 fi
 build_simple libxcb $LIBXCB_VERSION https://www.x.org/releases/individual/lib

 build_libjpeg_turbo
-build_tiff
+if [ -n "$IS_MACOS" ]; then
+# Custom tiff build to include jpeg; by default, configure won't include
+# headers/libs in the custom macOS prefix. Explicitly disable webp,
+# libdeflate and zstd, because on x86_64 macs, it will pick up the
+# Homebrew versions of those libraries from /usr/local.
+build_simple tiff $TIFF_VERSION https://download.osgeo.org/libtiff tar.gz \
+--with-jpeg-include-dir=$BUILD_PREFIX/include --with-jpeg-lib-dir=$BUILD_PREFIX/lib \
+--disable-webp --disable-libdeflate --disable-zstd
+else
+build_tiff
+fi
+
 build_libpng
 build_lcms2
 build_openjpeg
-if [ -f /usr/local/lib64/libopenjp2.so ]; then
-cp /usr/local/lib64/libopenjp2.so /usr/local/lib
-fi

 ORIGINAL_CFLAGS=$CFLAGS
 CFLAGS="$CFLAGS -O3 -DNDEBUG"
@@ -125,31 +155,47 @@ function build {
 build_harfbuzz
 }

+# Perform all dependency builds in the build subfolder.
+mkdir -p $WORKDIR
+pushd $WORKDIR > /dev/null
+
 # Any stuff that you need to do before you start building the wheels
 # Runs in the root directory of this repository.
-curl -fsSL -o pillow-depends-main.zip https://github.com/python-pillow/pillow-depends/archive/main.zip
-untar pillow-depends-main.zip
+if [[ ! -d $WORKDIR/pillow-depends-main ]]; then
+if [[ ! -f $PROJECTDIR/pillow-depends-main.zip ]]; then
+echo "Download pillow dependency sources..."
+curl -fSL -o $PROJECTDIR/pillow-depends-main.zip https://github.com/python-pillow/pillow-depends/archive/main.zip
+fi
+echo "Unpacking pillow dependency sources..."
+untar $PROJECTDIR/pillow-depends-main.zip
+fi

 if [[ -n "$IS_MACOS" ]]; then
-# libtiff and libxcb cause a conflict with building libtiff and libxcb
-# libxau and libxdmcp cause an issue on macOS < 11
-# remove cairo to fix building harfbuzz on arm64
-# remove lcms2 and libpng to fix building openjpeg on arm64
-# remove jpeg-turbo to avoid inclusion on arm64
-# remove webp and zstd to avoid inclusion on x86_64
-# curl from brew requires zstd, use system curl
-brew remove --ignore-dependencies libpng libtiff libxcb libxau libxdmcp curl cairo lcms2 zstd
-if [[ "$CIBW_ARCHS" == "arm64" ]]; then
-brew remove --ignore-dependencies jpeg-turbo
-else
-brew remove --ignore-dependencies webp
-fi
-
-brew install pkg-config
+# Homebrew (or similar packaging environments) install can contain some of
+# the libraries that we're going to build. However, they may be compiled
+# with a MACOSX_DEPLOYMENT_TARGET that doesn't match what we want to use,
+# and they may bring in other dependencies that we don't want. The same will
+# be true of any other locations on the path. To avoid conflicts, strip the
+# path down to the bare minimum (which, on macOS, won't include any
+# development dependencies).
+export PATH="$BUILD_PREFIX/bin:$(dirname $(which python3)):/usr/bin:/bin:/usr/sbin:/sbin:/Library/Apple/usr/bin"
+export CMAKE_PREFIX_PATH=$BUILD_PREFIX
+
+# Ensure the basic structure of the build prefix directory exists.
+mkdir -p "$BUILD_PREFIX/bin"
+mkdir -p "$BUILD_PREFIX/lib"
+
+# Ensure pkg-config is available
+build_pkg_config
+# Ensure cmake is available
+python3 -m pip install cmake
 fi

 wrap_wheel_builder build

+# Return to the project root to finish the build
+popd > /dev/null
+
 # Append licenses
 for filename in wheels/dependency_licenses/*; do
 echo -e "\n\n----\n\n$(basename $filename | cut -f 1 -d '.')\n" | cat >> LICENSE
.github/workflows/wheels-test.sh
@@ -1,12 +1,24 @@
 #!/bin/bash
 set -e

+# Ensure fribidi is installed by the system.
 if [[ "$OSTYPE" == "darwin"* ]]; then
-brew install fribidi
-export PKG_CONFIG_PATH="/usr/local/opt/openblas/lib/pkgconfig"
-if [ -f /opt/homebrew/lib/libfribidi.dylib ]; then
-sudo cp /opt/homebrew/lib/libfribidi.dylib /usr/local/lib
+# If Homebrew is on the path during the build, it may leak into the wheels.
+# However, we *do* need Homebrew to provide a copy of fribidi for
+# testing purposes so that we can verify the fribidi shim works as expected.
+if [[ "$(uname -m)" == "x86_64" ]]; then
+HOMEBREW_PREFIX=/usr/local
+else
+HOMEBREW_PREFIX=/opt/homebrew
 fi
+$HOMEBREW_PREFIX/bin/brew install fribidi
+
+# Add the lib folder for fribidi so that the vendored library can be found.
+# Don't use $HOMEWBREW_PREFIX/lib directly - use the lib folder where the
+# installed copy of fribidi is cellared. This ensures we don't pick up the
+# Homebrew version of any other library that we're dependent on (most notably,
+# freetype).
+export DYLD_LIBRARY_PATH=$(dirname $(realpath $HOMEBREW_PREFIX/lib/libfribidi.dylib))
 elif [ "${AUDITWHEEL_POLICY::9}" == "musllinux" ]; then
 apk add curl fribidi
 else
.github/workflows/wheels.yml
@@ -61,6 +61,7 @@ jobs:
 steps:
 - uses: actions/checkout@v4
 with:
+persist-credentials: false
 submodules: true

 - uses: actions/setup-python@v5
@@ -84,7 +85,7 @@ jobs:
 CIBW_ARCHS: "aarch64"
 # Likewise, select only one Python version per job to speed this up.
 CIBW_BUILD: "${{ matrix.python-version }}-${{ matrix.spec == 'musllinux' && 'musllinux' || 'manylinux' }}*"
-CIBW_PRERELEASE_PYTHONS: True
+CIBW_ENABLE: cpython-prerelease
 # Extra options for manylinux.
 CIBW_MANYLINUX_AARCH64_IMAGE: ${{ matrix.spec }}
 CIBW_MANYLINUX_PYPY_AARCH64_IMAGE: ${{ matrix.spec }}
@@ -132,6 +133,7 @@ jobs:
 steps:
 - uses: actions/checkout@v4
 with:
+persist-credentials: false
 submodules: true

 - uses: actions/setup-python@v5
@@ -148,10 +150,10 @@ jobs:
 env:
 CIBW_ARCHS: ${{ matrix.cibw_arch }}
 CIBW_BUILD: ${{ matrix.build }}
-CIBW_FREE_THREADED_SUPPORT: True
+CIBW_ENABLE: cpython-prerelease cpython-freethreading
 CIBW_MANYLINUX_PYPY_X86_64_IMAGE: ${{ matrix.manylinux }}
 CIBW_MANYLINUX_X86_64_IMAGE: ${{ matrix.manylinux }}
-CIBW_PRERELEASE_PYTHONS: True
+CIBW_SKIP: pp39-*
 MACOSX_DEPLOYMENT_TARGET: ${{ matrix.macosx_deployment_target }}

 - uses: actions/upload-artifact@v4
@@ -172,10 +174,13 @@ jobs:
 - cibw_arch: ARM64
 steps:
 - uses: actions/checkout@v4
+with:
+persist-credentials: false

 - name: Checkout extra test images
 uses: actions/checkout@v4
 with:
+persist-credentials: false
 repository: python-pillow/test-images
 path: Tests\test-images

@@ -222,8 +227,8 @@ jobs:
 CIBW_ARCHS: ${{ matrix.cibw_arch }}
 CIBW_BEFORE_ALL: "{package}\\winbuild\\build\\build_dep_all.cmd"
 CIBW_CACHE_PATH: "C:\\cibw"
-CIBW_FREE_THREADED_SUPPORT: True
-CIBW_PRERELEASE_PYTHONS: True
+CIBW_ENABLE: cpython-prerelease cpython-freethreading
+CIBW_SKIP: pp39-*
 CIBW_TEST_SKIP: "*-win_arm64"
 CIBW_TEST_COMMAND: 'docker run --rm
 -v {project}:C:\pillow
@@ -251,6 +256,8 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - uses: actions/checkout@v4
+with:
+persist-credentials: false

 - name: Set up Python
 uses: actions/setup-python@v5
.gitignore
@@ -19,6 +19,7 @@ lib64/
 parts/
 sdist/
 var/
+wheelhouse/
 *.egg-info/
 .installed.cfg
 *.egg
@@ -90,5 +91,9 @@ Tests/images/msp
 Tests/images/picins
 Tests/images/sunraster

+# Test and dependency downloads
+pillow-depends-main.zip
+pillow-test-images.zip
+
 # pyinstaller
 *.spec
@@ -1,17 +1,17 @@
 repos:
 - repo: https://github.com/astral-sh/ruff-pre-commit
-rev: v0.6.9
+rev: v0.8.1
 hooks:
 - id: ruff
 args: [--exit-non-zero-on-fix]

 - repo: https://github.com/psf/black-pre-commit-mirror
-rev: 24.8.0
+rev: 24.10.0
 hooks:
 - id: black

 - repo: https://github.com/PyCQA/bandit
-rev: 1.7.10
+rev: 1.8.0
 hooks:
 - id: bandit
 args: [--severity-level=high]
@@ -24,7 +24,7 @@ repos:
 exclude: (Makefile$|\.bat$|\.cmake$|\.eps$|\.fits$|\.gd$|\.opt$)

 - repo: https://github.com/pre-commit/mirrors-clang-format
-rev: v19.1.1
+rev: v19.1.4
 hooks:
 - id: clang-format
 types: [c]
@@ -50,7 +50,7 @@ repos:
 exclude: ^.github/.*TEMPLATE|^Tests/(fonts|images)/

 - repo: https://github.com/python-jsonschema/check-jsonschema
-rev: 0.29.3
+rev: 0.30.0
 hooks:
 - id: check-github-workflows
 - id: check-readthedocs
@@ -62,14 +62,15 @@ repos:
 - id: sphinx-lint

 - repo: https://github.com/tox-dev/pyproject-fmt
-rev: 2.2.4
+rev: v2.5.0
 hooks:
 - id: pyproject-fmt

 - repo: https://github.com/abravalheri/validate-pyproject
-rev: v0.20.2
+rev: v0.23
 hooks:
 - id: validate-pyproject
+additional_dependencies: [trove-classifiers>=2024.10.12]

 - repo: https://github.com/tox-dev/tox-ini-fmt
 rev: 1.4.1
CHANGES.rst
@@ -2,9 +2,28 @@
 Changelog (Pillow)
 ==================

-11.0.0 (unreleased)
+11.1.0 and newer
+----------------
+
+See GitHub Releases:
+
+- https://github.com/python-pillow/Pillow/releases
+
+11.0.0 (2024-10-15)
 -------------------

+- Update licence to MIT-CMU #8460
+[hugovk]
+
+- Conditionally define ImageCms type hint to avoid requiring core #8197
+[radarhere]
+
+- Support writing LONG8 offsets in AppendingTiffWriter #8417
+[radarhere]
+
+- Use ImageFile.MAXBLOCK when saving TIFF images #8461
+[radarhere]
+
 - Do not close provided file handles with libtiff when saving #8458
 [radarhere]
LICENSE
@@ -5,9 +5,9 @@ The Python Imaging Library (PIL) is

 Pillow is the friendly PIL fork. It is

-Copyright © 2010-2024 by Jeffrey A. Clark and contributors
+Copyright © 2010 by Jeffrey A. Clark and contributors

-Like PIL, Pillow is licensed under the open source HPND License:
+Like PIL, Pillow is licensed under the open source MIT-CMU License:

 By obtaining, using, and/or copying this software and/or its associated
 documentation, you agree that you have read, understood, and will comply
@@ -107,7 +107,7 @@ The core image library is designed for fast access to data stored in a few basic
 - [Issues](https://github.com/python-pillow/Pillow/issues)
 - [Pull requests](https://github.com/python-pillow/Pillow/pulls)
 - [Release notes](https://pillow.readthedocs.io/en/stable/releasenotes/index.html)
-- [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst)
+- [Changelog](https://github.com/python-pillow/Pillow/releases)
 - [Pre-fork](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst#pre-fork)

 ## Report a Vulnerability
@@ -12,7 +12,6 @@ Released quarterly on January 2nd, April 1st, July 1st and October 15th.
 * [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) and [AppVeyor](https://ci.appveyor.com/project/python-pillow/Pillow) to confirm passing tests in `main` branch.
 * [ ] Check that all the wheel builds pass the tests in the [GitHub Actions "Wheels" workflow](https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml) jobs by manually triggering them.
 * [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/), update version identifier in `src/PIL/_version.py`
-* [ ] Update `CHANGES.rst`.
 * [ ] Run pre-release check via `make release-test` in a freshly cloned repo.
 * [ ] Create branch and tag for release e.g.:
 ```bash
@@ -34,7 +33,6 @@ Released quarterly on January 2nd, April 1st, July 1st and October 15th.
 Released as needed for security, installation or critical bug fixes.

 * [ ] Make necessary changes in `main` branch.
-* [ ] Update `CHANGES.rst`.
 * [ ] Check out release branch e.g.:
 ```bash
 git checkout -t remotes/origin/5.2.x
@@ -34,6 +34,7 @@ def test_wheel_features() -> None:
 "fribidi",
 "harfbuzz",
 "libjpeg_turbo",
+"zlib_ng",
 "xcb",
 }

Binary file not shown (before: 486 B, after: 533 B).
@@ -22,6 +22,8 @@ def test_bad() -> None:
 for f in get_files("b"):
 # Assert that there is no unclosed file warning
 with warnings.catch_warnings():
+warnings.simplefilter("error")
+
 try:
 with Image.open(f) as im:
 im.load()
@@ -388,10 +388,12 @@ class TestColorLut3DFilter:

 table = numpy.ones((7 * 6 * 5, 3), dtype=numpy.float16)
 lut = ImageFilter.Color3DLUT((5, 6, 7), table)
+assert isinstance(lut.table, numpy.ndarray)
 assert lut.table.shape == (table.size,)

 table = numpy.ones((7 * 6 * 5 * 3), dtype=numpy.float16)
 lut = ImageFilter.Color3DLUT((5, 6, 7), table)
+assert isinstance(lut.table, numpy.ndarray)
 assert lut.table.shape == (table.size,)

 # Check application
@@ -36,10 +36,11 @@ def test_version() -> None:
 else:
 assert function(name) == version
 if name != "PIL":
-if name == "zlib" and version is not None:
-version = re.sub(".zlib-ng$", "", version)
-elif name == "libtiff" and version is not None:
-version = re.sub("t$", "", version)
+if version is not None:
+if name == "zlib" and features.check_feature("zlib_ng"):
+version = re.sub(".zlib-ng$", "", version)
+elif name == "libtiff":
+version = re.sub("t$", "", version)
 assert version is None or re.search(r"\d+(\.\d+)*$", version)

 for module in features.modules:
@@ -83,4 +83,4 @@ def test_handler(tmp_path: Path) -> None:
 im.save(temp_file)
 assert handler.saved

-BufrStubImagePlugin._handler = None
+BufrStubImagePlugin.register_handler(None)
@@ -4,8 +4,6 @@ import pytest

 from PIL import ContainerIO, Image

-from .helper import hopper
-
 TEST_FILE = "Tests/images/dummy.container"

@@ -15,15 +13,15 @@ def test_sanity() -> None:

 def test_isatty() -> None:
-with hopper() as im:
-container = ContainerIO.ContainerIO(im, 0, 0)
+with open(TEST_FILE, "rb") as fh:
+container = ContainerIO.ContainerIO(fh, 0, 0)

 assert container.isatty() is False


 def test_seekable() -> None:
-with hopper() as im:
-container = ContainerIO.ContainerIO(im, 0, 0)
+with open(TEST_FILE, "rb") as fh:
+container = ContainerIO.ContainerIO(fh, 0, 0)

 assert container.seekable() is True
|
|
|
@ -36,6 +36,8 @@ def test_unclosed_file() -> None:
|
||||||
|
|
||||||
def test_closed_file() -> None:
|
def test_closed_file() -> None:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
im = Image.open(TEST_FILE)
|
im = Image.open(TEST_FILE)
|
||||||
im.load()
|
im.load()
|
||||||
im.close()
|
im.close()
|
||||||
|
@ -43,6 +45,8 @@ def test_closed_file() -> None:
|
||||||
|
|
||||||
def test_context_manager() -> None:
|
def test_context_manager() -> None:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
with Image.open(TEST_FILE) as im:
|
with Image.open(TEST_FILE) as im:
|
||||||
im.load()
|
im.load()
|
||||||
|
|
||||||
|
|
|
@@ -65,6 +65,8 @@ def test_unclosed_file() -> None:

 def test_closed_file() -> None:
 with warnings.catch_warnings():
+warnings.simplefilter("error")
+
 im = Image.open(static_test_file)
 im.load()
 im.close()
@@ -81,6 +83,8 @@ def test_seek_after_close() -> None:

 def test_context_manager() -> None:
 with warnings.catch_warnings():
+warnings.simplefilter("error")
+
 with Image.open(static_test_file) as im:
 im.load()
@@ -4,6 +4,7 @@ import warnings
 from collections.abc import Generator
 from io import BytesIO
 from pathlib import Path
+from typing import Any

 import pytest

@@ -46,6 +47,8 @@ def test_unclosed_file() -> None:

 def test_closed_file() -> None:
 with warnings.catch_warnings():
+warnings.simplefilter("error")
+
 im = Image.open(TEST_GIF)
 im.load()
 im.close()
@@ -67,6 +70,8 @@ def test_seek_after_close() -> None:

 def test_context_manager() -> None:
 with warnings.catch_warnings():
+warnings.simplefilter("error")
+
 with Image.open(TEST_GIF) as im:
 im.load()

@@ -1479,7 +1484,8 @@ def test_saving_rgba(tmp_path: Path) -> None:
 assert reloaded_rgba.load()[0, 0][3] == 0


-def test_optimizing_p_rgba(tmp_path: Path) -> None:
+@pytest.mark.parametrize("params", ({}, {"disposal": 2, "optimize": False}))
+def test_p_rgba(tmp_path: Path, params: dict[str, Any]) -> None:
 out = str(tmp_path / "temp.gif")

 im1 = Image.new("P", (100, 100))
@@ -1491,7 +1497,7 @@ def test_optimizing_p_rgba(tmp_path: Path) -> None:
 im2 = Image.new("P", (100, 100))
 im2.putpalette(data, "RGBA")

-im1.save(out, save_all=True, append_images=[im2])
+im1.save(out, save_all=True, append_images=[im2], **params)

 with Image.open(out) as reloaded:
 assert reloaded.n_frames == 2
@@ -83,4 +83,4 @@ def test_handler(tmp_path: Path) -> None:
 im.save(temp_file)
 assert handler.saved

-GribStubImagePlugin._handler = None
+GribStubImagePlugin.register_handler(None)
@@ -85,4 +85,4 @@ def test_handler(tmp_path: Path) -> None:
 im.save(temp_file)
 assert handler.saved

-Hdf5StubImagePlugin._handler = None
+Hdf5StubImagePlugin.register_handler(None)
@@ -21,6 +21,8 @@ def test_sanity() -> None:
 with Image.open(TEST_FILE) as im:
 # Assert that there is no unclosed file warning
 with warnings.catch_warnings():
+warnings.simplefilter("error")
+
 im.load()

 assert im.mode == "RGBA"
@@ -41,6 +41,8 @@ def test_unclosed_file() -> None:

 def test_closed_file() -> None:
 with warnings.catch_warnings():
+warnings.simplefilter("error")
+
 im = Image.open(TEST_IM)
 im.load()
 im.close()
@@ -48,6 +50,8 @@ def test_closed_file() -> None:

 def test_context_manager() -> None:
 with warnings.catch_warnings():
+warnings.simplefilter("error")
+
 with Image.open(TEST_IM) as im:
 im.load()
@ -541,12 +541,12 @@ class TestFileJpeg:
|
||||||
@mark_if_feature_version(
|
@mark_if_feature_version(
|
||||||
pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
|
pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
|
||||||
)
|
)
|
||||||
def test_qtables(self, tmp_path: Path) -> None:
|
def test_qtables(self) -> None:
|
||||||
def _n_qtables_helper(n: int, test_file: str) -> None:
|
def _n_qtables_helper(n: int, test_file: str) -> None:
|
||||||
|
b = BytesIO()
|
||||||
with Image.open(test_file) as im:
|
with Image.open(test_file) as im:
|
||||||
f = str(tmp_path / "temp.jpg")
|
im.save(b, "JPEG", qtables=[[n] * 64] * n)
|
||||||
im.save(f, qtables=[[n] * 64] * n)
|
with Image.open(b) as im:
|
||||||
with Image.open(f) as im:
|
|
||||||
assert len(im.quantization) == n
|
assert len(im.quantization) == n
|
||||||
reloaded = self.roundtrip(im, qtables="keep")
|
reloaded = self.roundtrip(im, qtables="keep")
|
||||||
assert im.quantization == reloaded.quantization
|
assert im.quantization == reloaded.quantization
|
||||||
|
@ -850,6 +850,8 @@ class TestFileJpeg:
|
||||||
|
|
||||||
out = str(tmp_path / "out.jpg")
|
out = str(tmp_path / "out.jpg")
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
im.save(out, exif=exif)
|
im.save(out, exif=exif)
|
||||||
|
|
||||||
with Image.open(out) as reloaded:
|
with Image.open(out) as reloaded:
|
||||||
|
@@ -998,8 +1000,13 @@ class TestFileJpeg:
         with Image.open(f) as reloaded:
             assert reloaded.info["xmp"] == b"XMP test"
 
-        im.info["xmp"] = b"1" * 65504
-        im.save(f)
+        # Check that XMP is not saved from image info
+        reloaded.save(f)
 
+        with Image.open(f) as reloaded:
+            assert "xmp" not in reloaded.info
+
+        im.save(f, xmp=b"1" * 65504)
         with Image.open(f) as reloaded:
             assert reloaded.info["xmp"] == b"1" * 65504
|
|
||||||
|
|
|
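The updated test above captures a behaviour change worth spelling out: XMP metadata is now only written when it is passed explicitly at save time, and a value left in ``im.info["xmp"]`` is no longer saved automatically. A small sketch of the new expectation, using an in-memory buffer and a hypothetical payload::

    from io import BytesIO

    from PIL import Image

    im = Image.new("RGB", (8, 8))
    buf = BytesIO()
    # The xmp keyword argument is what gets written; im.info["xmp"] is ignored.
    im.save(buf, "JPEG", xmp=b"XMP test")

    buf.seek(0)
    with Image.open(buf) as reloaded:
        assert reloaded.info["xmp"] == b"XMP test"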
@ -2,6 +2,7 @@ from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
from collections.abc import Generator
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
@ -29,8 +30,16 @@ EXTRA_DIR = "Tests/images/jpeg2000"
|
||||||
|
|
||||||
pytestmark = skip_unless_feature("jpg_2000")
|
pytestmark = skip_unless_feature("jpg_2000")
|
||||||
|
|
||||||
test_card = Image.open("Tests/images/test-card.png")
|
|
||||||
test_card.load()
|
@pytest.fixture
|
||||||
|
def card() -> Generator[ImageFile.ImageFile, None, None]:
|
||||||
|
with Image.open("Tests/images/test-card.png") as im:
|
||||||
|
im.load()
|
||||||
|
try:
|
||||||
|
yield im
|
||||||
|
finally:
|
||||||
|
im.close()
|
||||||
|
|
||||||
|
|
||||||
# OpenJPEG 2.0.0 outputs this debugging message sometimes; we should
|
# OpenJPEG 2.0.0 outputs this debugging message sometimes; we should
|
||||||
# ignore it---it doesn't represent a test failure.
|
# ignore it---it doesn't represent a test failure.
|
||||||
|
@ -74,76 +83,76 @@ def test_invalid_file() -> None:
|
||||||
Jpeg2KImagePlugin.Jpeg2KImageFile(invalid_file)
|
Jpeg2KImagePlugin.Jpeg2KImageFile(invalid_file)
|
||||||
|
|
||||||
|
|
||||||
def test_bytesio() -> None:
|
def test_bytesio(card: ImageFile.ImageFile) -> None:
|
||||||
with open("Tests/images/test-card-lossless.jp2", "rb") as f:
|
with open("Tests/images/test-card-lossless.jp2", "rb") as f:
|
||||||
data = BytesIO(f.read())
|
data = BytesIO(f.read())
|
||||||
with Image.open(data) as im:
|
with Image.open(data) as im:
|
||||||
im.load()
|
im.load()
|
||||||
assert_image_similar(im, test_card, 1.0e-3)
|
assert_image_similar(im, card, 1.0e-3)
|
||||||
|
|
||||||
|
|
||||||
# These two test pre-written JPEG 2000 files that were not written with
|
# These two test pre-written JPEG 2000 files that were not written with
|
||||||
# PIL (they were made using Adobe Photoshop)
|
# PIL (they were made using Adobe Photoshop)
|
||||||
|
|
||||||
|
|
||||||
def test_lossless(tmp_path: Path) -> None:
|
def test_lossless(card: ImageFile.ImageFile, tmp_path: Path) -> None:
|
||||||
with Image.open("Tests/images/test-card-lossless.jp2") as im:
|
with Image.open("Tests/images/test-card-lossless.jp2") as im:
|
||||||
im.load()
|
im.load()
|
||||||
outfile = str(tmp_path / "temp_test-card.png")
|
outfile = str(tmp_path / "temp_test-card.png")
|
||||||
im.save(outfile)
|
im.save(outfile)
|
||||||
assert_image_similar(im, test_card, 1.0e-3)
|
assert_image_similar(im, card, 1.0e-3)
|
||||||
|
|
||||||
|
|
||||||
def test_lossy_tiled() -> None:
|
def test_lossy_tiled(card: ImageFile.ImageFile) -> None:
|
||||||
assert_image_similar_tofile(
|
assert_image_similar_tofile(card, "Tests/images/test-card-lossy-tiled.jp2", 2.0)
|
||||||
test_card, "Tests/images/test-card-lossy-tiled.jp2", 2.0
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_lossless_rt() -> None:
|
def test_lossless_rt(card: ImageFile.ImageFile) -> None:
|
||||||
im = roundtrip(test_card)
|
im = roundtrip(card)
|
||||||
assert_image_equal(im, test_card)
|
assert_image_equal(im, card)
|
||||||
|
|
||||||
|
|
||||||
def test_lossy_rt() -> None:
|
def test_lossy_rt(card: ImageFile.ImageFile) -> None:
|
||||||
im = roundtrip(test_card, quality_layers=[20])
|
im = roundtrip(card, quality_layers=[20])
|
||||||
assert_image_similar(im, test_card, 2.0)
|
assert_image_similar(im, card, 2.0)
|
||||||
|
|
||||||
|
|
||||||
def test_tiled_rt() -> None:
|
def test_tiled_rt(card: ImageFile.ImageFile) -> None:
|
||||||
im = roundtrip(test_card, tile_size=(128, 128))
|
im = roundtrip(card, tile_size=(128, 128))
|
||||||
assert_image_equal(im, test_card)
|
assert_image_equal(im, card)
|
||||||
|
|
||||||
|
|
||||||
def test_tiled_offset_rt() -> None:
|
def test_tiled_offset_rt(card: ImageFile.ImageFile) -> None:
|
||||||
im = roundtrip(test_card, tile_size=(128, 128), tile_offset=(0, 0), offset=(32, 32))
|
im = roundtrip(card, tile_size=(128, 128), tile_offset=(0, 0), offset=(32, 32))
|
||||||
assert_image_equal(im, test_card)
|
assert_image_equal(im, card)
|
||||||
|
|
||||||
|
|
||||||
def test_tiled_offset_too_small() -> None:
|
def test_tiled_offset_too_small(card: ImageFile.ImageFile) -> None:
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
roundtrip(test_card, tile_size=(128, 128), tile_offset=(0, 0), offset=(128, 32))
|
roundtrip(card, tile_size=(128, 128), tile_offset=(0, 0), offset=(128, 32))
|
||||||
|
|
||||||
|
|
||||||
def test_irreversible_rt() -> None:
|
def test_irreversible_rt(card: ImageFile.ImageFile) -> None:
|
||||||
im = roundtrip(test_card, irreversible=True, quality_layers=[20])
|
im = roundtrip(card, irreversible=True, quality_layers=[20])
|
||||||
assert_image_similar(im, test_card, 2.0)
|
assert_image_similar(im, card, 2.0)
|
||||||
|
|
||||||
|
|
||||||
def test_prog_qual_rt() -> None:
|
def test_prog_qual_rt(card: ImageFile.ImageFile) -> None:
|
||||||
im = roundtrip(test_card, quality_layers=[60, 40, 20], progression="LRCP")
|
im = roundtrip(card, quality_layers=[60, 40, 20], progression="LRCP")
|
||||||
assert_image_similar(im, test_card, 2.0)
|
assert_image_similar(im, card, 2.0)
|
||||||
|
|
||||||
|
|
||||||
def test_prog_res_rt() -> None:
|
def test_prog_res_rt(card: ImageFile.ImageFile) -> None:
|
||||||
im = roundtrip(test_card, num_resolutions=8, progression="RLCP")
|
im = roundtrip(card, num_resolutions=8, progression="RLCP")
|
||||||
assert_image_equal(im, test_card)
|
assert_image_equal(im, card)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("num_resolutions", range(2, 6))
|
@pytest.mark.parametrize("num_resolutions", range(2, 6))
|
||||||
def test_default_num_resolutions(num_resolutions: int) -> None:
|
def test_default_num_resolutions(
|
||||||
|
card: ImageFile.ImageFile, num_resolutions: int
|
||||||
|
) -> None:
|
||||||
d = 1 << (num_resolutions - 1)
|
d = 1 << (num_resolutions - 1)
|
||||||
im = test_card.resize((d - 1, d - 1))
|
im = card.resize((d - 1, d - 1))
|
||||||
with pytest.raises(OSError):
|
with pytest.raises(OSError):
|
||||||
roundtrip(im, num_resolutions=num_resolutions)
|
roundtrip(im, num_resolutions=num_resolutions)
|
||||||
reloaded = roundtrip(im)
|
reloaded = roundtrip(im)
|
||||||
|
@ -205,31 +214,31 @@ def test_header_errors() -> None:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
def test_layers_type(tmp_path: Path) -> None:
|
def test_layers_type(card: ImageFile.ImageFile, tmp_path: Path) -> None:
|
||||||
outfile = str(tmp_path / "temp_layers.jp2")
|
outfile = str(tmp_path / "temp_layers.jp2")
|
||||||
for quality_layers in [[100, 50, 10], (100, 50, 10), None]:
|
for quality_layers in [[100, 50, 10], (100, 50, 10), None]:
|
||||||
test_card.save(outfile, quality_layers=quality_layers)
|
card.save(outfile, quality_layers=quality_layers)
|
||||||
|
|
||||||
for quality_layers_str in ["quality_layers", ("100", "50", "10")]:
|
for quality_layers_str in ["quality_layers", ("100", "50", "10")]:
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
test_card.save(outfile, quality_layers=quality_layers_str)
|
card.save(outfile, quality_layers=quality_layers_str)
|
||||||
|
|
||||||
|
|
||||||
def test_layers() -> None:
|
def test_layers(card: ImageFile.ImageFile) -> None:
|
||||||
out = BytesIO()
|
out = BytesIO()
|
||||||
test_card.save(out, "JPEG2000", quality_layers=[100, 50, 10], progression="LRCP")
|
card.save(out, "JPEG2000", quality_layers=[100, 50, 10], progression="LRCP")
|
||||||
out.seek(0)
|
out.seek(0)
|
||||||
|
|
||||||
with Image.open(out) as im:
|
with Image.open(out) as im:
|
||||||
im.layers = 1
|
im.layers = 1
|
||||||
im.load()
|
im.load()
|
||||||
assert_image_similar(im, test_card, 13)
|
assert_image_similar(im, card, 13)
|
||||||
|
|
||||||
out.seek(0)
|
out.seek(0)
|
||||||
with Image.open(out) as im:
|
with Image.open(out) as im:
|
||||||
im.layers = 3
|
im.layers = 3
|
||||||
im.load()
|
im.load()
|
||||||
assert_image_similar(im, test_card, 0.4)
|
assert_image_similar(im, card, 0.4)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
|
@ -245,24 +254,30 @@ def test_layers() -> None:
|
||||||
(None, {"no_jp2": False}, 4, b"jP"),
|
(None, {"no_jp2": False}, 4, b"jP"),
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
def test_no_jp2(name: str, args: dict[str, bool], offset: int, data: bytes) -> None:
|
def test_no_jp2(
|
||||||
|
card: ImageFile.ImageFile,
|
||||||
|
name: str,
|
||||||
|
args: dict[str, bool],
|
||||||
|
offset: int,
|
||||||
|
data: bytes,
|
||||||
|
) -> None:
|
||||||
out = BytesIO()
|
out = BytesIO()
|
||||||
if name:
|
if name:
|
||||||
out.name = name
|
out.name = name
|
||||||
test_card.save(out, "JPEG2000", **args)
|
card.save(out, "JPEG2000", **args)
|
||||||
out.seek(offset)
|
out.seek(offset)
|
||||||
assert out.read(2) == data
|
assert out.read(2) == data
|
||||||
|
|
||||||
|
|
||||||
def test_mct() -> None:
|
def test_mct(card: ImageFile.ImageFile) -> None:
|
||||||
# Three component
|
# Three component
|
||||||
for val in (0, 1):
|
for val in (0, 1):
|
||||||
out = BytesIO()
|
out = BytesIO()
|
||||||
test_card.save(out, "JPEG2000", mct=val, no_jp2=True)
|
card.save(out, "JPEG2000", mct=val, no_jp2=True)
|
||||||
|
|
||||||
assert out.getvalue()[59] == val
|
assert out.getvalue()[59] == val
|
||||||
with Image.open(out) as im:
|
with Image.open(out) as im:
|
||||||
assert_image_similar(im, test_card, 1.0e-3)
|
assert_image_similar(im, card, 1.0e-3)
|
||||||
|
|
||||||
# Single component should have MCT disabled
|
# Single component should have MCT disabled
|
||||||
for val in (0, 1):
|
for val in (0, 1):
|
||||||
|
@ -419,22 +434,22 @@ def test_comment() -> None:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
def test_save_comment() -> None:
|
def test_save_comment(card: ImageFile.ImageFile) -> None:
|
||||||
for comment in ("Created by Pillow", b"Created by Pillow"):
|
for comment in ("Created by Pillow", b"Created by Pillow"):
|
||||||
out = BytesIO()
|
out = BytesIO()
|
||||||
test_card.save(out, "JPEG2000", comment=comment)
|
card.save(out, "JPEG2000", comment=comment)
|
||||||
|
|
||||||
with Image.open(out) as im:
|
with Image.open(out) as im:
|
||||||
assert im.info["comment"] == b"Created by Pillow"
|
assert im.info["comment"] == b"Created by Pillow"
|
||||||
|
|
||||||
out = BytesIO()
|
out = BytesIO()
|
||||||
long_comment = b" " * 65531
|
long_comment = b" " * 65531
|
||||||
test_card.save(out, "JPEG2000", comment=long_comment)
|
card.save(out, "JPEG2000", comment=long_comment)
|
||||||
with Image.open(out) as im:
|
with Image.open(out) as im:
|
||||||
assert im.info["comment"] == long_comment
|
assert im.info["comment"] == long_comment
|
||||||
|
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
test_card.save(out, "JPEG2000", comment=long_comment + b" ")
|
card.save(out, "JPEG2000", comment=long_comment + b" ")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
|
@ -457,10 +472,10 @@ def test_crashes(test_file: str) -> None:
|
||||||
|
|
||||||
|
|
||||||
@skip_unless_feature_version("jpg_2000", "2.4.0")
|
@skip_unless_feature_version("jpg_2000", "2.4.0")
|
||||||
def test_plt_marker() -> None:
|
def test_plt_marker(card: ImageFile.ImageFile) -> None:
|
||||||
# Search the start of the codestream for PLT
|
# Search the start of the codestream for PLT
|
||||||
out = BytesIO()
|
out = BytesIO()
|
||||||
test_card.save(out, "JPEG2000", no_jp2=True, plt=True)
|
card.save(out, "JPEG2000", no_jp2=True, plt=True)
|
||||||
out.seek(0)
|
out.seek(0)
|
||||||
while True:
|
while True:
|
||||||
marker = out.read(2)
|
marker = out.read(2)
|
||||||
|
|
|
@ -1098,6 +1098,25 @@ class TestFileLibTiff(LibTiffTestCase):
|
||||||
|
|
||||||
assert_image_similar(base_im, im, 0.7)
|
assert_image_similar(base_im, im, 0.7)
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"test_file",
|
||||||
|
[
|
||||||
|
"Tests/images/old-style-jpeg-compression-no-samplesperpixel.tif",
|
||||||
|
"Tests/images/old-style-jpeg-compression.tif",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_buffering(self, test_file: str) -> None:
|
||||||
|
# load exif first
|
||||||
|
with Image.open(open(test_file, "rb", buffering=1048576)) as im:
|
||||||
|
exif = dict(im.getexif())
|
||||||
|
|
||||||
|
# load image before exif
|
||||||
|
with Image.open(open(test_file, "rb", buffering=1048576)) as im2:
|
||||||
|
im2.load()
|
||||||
|
exif_after_load = dict(im2.getexif())
|
||||||
|
|
||||||
|
assert exif == exif_after_load
|
||||||
|
|
||||||
@pytest.mark.valgrind_known_error(reason="Backtrace in Python Core")
|
@pytest.mark.valgrind_known_error(reason="Backtrace in Python Core")
|
||||||
def test_sampleformat_not_corrupted(self) -> None:
|
def test_sampleformat_not_corrupted(self) -> None:
|
||||||
# Assert that a TIFF image with SampleFormat=UINT tag is not corrupted
|
# Assert that a TIFF image with SampleFormat=UINT tag is not corrupted
|
||||||
|
|
|
@ -48,6 +48,8 @@ def test_unclosed_file() -> None:
|
||||||
|
|
||||||
def test_closed_file() -> None:
|
def test_closed_file() -> None:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
im = Image.open(test_files[0])
|
im = Image.open(test_files[0])
|
||||||
im.load()
|
im.load()
|
||||||
im.close()
|
im.close()
|
||||||
|
@ -63,6 +65,8 @@ def test_seek_after_close() -> None:
|
||||||
|
|
||||||
def test_context_manager() -> None:
|
def test_context_manager() -> None:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
with Image.open(test_files[0]) as im:
|
with Image.open(test_files[0]) as im:
|
||||||
im.load()
|
im.load()
|
||||||
|
|
||||||
|
@ -295,11 +299,11 @@ def test_save_all() -> None:
|
||||||
assert "mp" not in jpg.info
|
assert "mp" not in jpg.info
|
||||||
|
|
||||||
|
|
||||||
def test_save_all_progress():
|
def test_save_all_progress() -> None:
|
||||||
out = BytesIO()
|
out = BytesIO()
|
||||||
progress = []
|
progress = []
|
||||||
|
|
||||||
def callback(state):
|
def callback(state) -> None:
|
||||||
if state["image_filename"]:
|
if state["image_filename"]:
|
||||||
state["image_filename"] = (
|
state["image_filename"] = (
|
||||||
state["image_filename"].replace("\\", "/").split("Tests/images/")[-1]
|
state["image_filename"].replace("\\", "/").split("Tests/images/")[-1]
|
||||||
|
@ -335,3 +339,15 @@ def test_save_all_progress():
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
assert progress == expected
|
assert progress == expected
|
||||||
|
|
||||||
|
|
||||||
|
def test_save_xmp() -> None:
|
||||||
|
im = Image.new("RGB", (1, 1))
|
||||||
|
im2 = Image.new("RGB", (1, 1), "#f00")
|
||||||
|
im2.encoderinfo = {"xmp": b"Second frame"}
|
||||||
|
im_reloaded = roundtrip(im, xmp=b"First frame", save_all=True, append_images=[im2])
|
||||||
|
|
||||||
|
assert im_reloaded.info["xmp"] == b"First frame"
|
||||||
|
|
||||||
|
im_reloaded.seek(1)
|
||||||
|
assert im_reloaded.info["xmp"] == b"Second frame"
|
||||||
|
|
|
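The new ``test_save_all_progress`` above only shows the callback half of the feature: the ``state`` dictionaries it receives carry at least an ``image_filename`` key, and the collected values are compared against an expected list. The save call itself is not part of the visible hunk, so the keyword name below is an assumption rather than a confirmed signature; a rough sketch::

    from PIL import Image

    def report(state) -> None:
        # Per the test above, state includes an "image_filename" entry.
        print(state["image_filename"])

    with Image.open("example.mpo") as im:  # hypothetical multiframe file
        # "progress" is inferred from the branch and test names; it is not
        # shown in this diff and may differ in the merged API.
        im.save("out.mpo", save_all=True, progress=report)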
@ -338,6 +338,8 @@ class TestFilePng:
|
||||||
with Image.open(TEST_PNG_FILE) as im:
|
with Image.open(TEST_PNG_FILE) as im:
|
||||||
# Assert that there is no unclosed file warning
|
# Assert that there is no unclosed file warning
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
im.verify()
|
im.verify()
|
||||||
|
|
||||||
with Image.open(TEST_PNG_FILE) as im:
|
with Image.open(TEST_PNG_FILE) as im:
|
||||||
|
|
|
@ -35,6 +35,8 @@ def test_unclosed_file() -> None:
|
||||||
|
|
||||||
def test_closed_file() -> None:
|
def test_closed_file() -> None:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
im = Image.open(test_file)
|
im = Image.open(test_file)
|
||||||
im.load()
|
im.load()
|
||||||
im.close()
|
im.close()
|
||||||
|
@ -42,6 +44,8 @@ def test_closed_file() -> None:
|
||||||
|
|
||||||
def test_context_manager() -> None:
|
def test_context_manager() -> None:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
with Image.open(test_file) as im:
|
with Image.open(test_file) as im:
|
||||||
im.load()
|
im.load()
|
||||||
|
|
||||||
|
|
|
@ -34,6 +34,8 @@ def test_unclosed_file() -> None:
|
||||||
|
|
||||||
def test_closed_file() -> None:
|
def test_closed_file() -> None:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
im = Image.open(TEST_FILE)
|
im = Image.open(TEST_FILE)
|
||||||
im.load()
|
im.load()
|
||||||
im.close()
|
im.close()
|
||||||
|
@ -41,6 +43,8 @@ def test_closed_file() -> None:
|
||||||
|
|
||||||
def test_context_manager() -> None:
|
def test_context_manager() -> None:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
with Image.open(TEST_FILE) as im:
|
with Image.open(TEST_FILE) as im:
|
||||||
im.load()
|
im.load()
|
||||||
|
|
||||||
|
|
|
@ -37,11 +37,15 @@ def test_unclosed_file() -> None:
|
||||||
|
|
||||||
def test_close() -> None:
|
def test_close() -> None:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
tar = TarIO.TarIO(TEST_TAR_FILE, "hopper.jpg")
|
tar = TarIO.TarIO(TEST_TAR_FILE, "hopper.jpg")
|
||||||
tar.close()
|
tar.close()
|
||||||
|
|
||||||
|
|
||||||
def test_contextmanager() -> None:
|
def test_contextmanager() -> None:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
with TarIO.TarIO(TEST_TAR_FILE, "hopper.jpg"):
|
with TarIO.TarIO(TEST_TAR_FILE, "hopper.jpg"):
|
||||||
pass
|
pass
|
||||||
|
|
|
@ -72,6 +72,8 @@ class TestFileTiff:
|
||||||
|
|
||||||
def test_closed_file(self) -> None:
|
def test_closed_file(self) -> None:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
im = Image.open("Tests/images/multipage.tiff")
|
im = Image.open("Tests/images/multipage.tiff")
|
||||||
im.load()
|
im.load()
|
||||||
im.close()
|
im.close()
|
||||||
|
@ -88,6 +90,8 @@ class TestFileTiff:
|
||||||
|
|
||||||
def test_context_manager(self) -> None:
|
def test_context_manager(self) -> None:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
with Image.open("Tests/images/multipage.tiff") as im:
|
with Image.open("Tests/images/multipage.tiff") as im:
|
||||||
im.load()
|
im.load()
|
||||||
|
|
||||||
|
|
|
@ -242,6 +242,8 @@ class TestFileWebp:
|
||||||
file_path = "Tests/images/hopper.webp"
|
file_path = "Tests/images/hopper.webp"
|
||||||
with Image.open(file_path) as image:
|
with Image.open(file_path) as image:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
image.save(tmp_path / "temp.webp")
|
image.save(tmp_path / "temp.webp")
|
||||||
|
|
||||||
def test_file_pointer_could_be_reused(self) -> None:
|
def test_file_pointer_could_be_reused(self) -> None:
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from io import BytesIO
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import IO
|
from typing import IO
|
||||||
|
|
||||||
|
@ -34,6 +35,13 @@ def test_load() -> None:
|
||||||
assert im.load()[0, 0] == (255, 255, 255)
|
assert im.load()[0, 0] == (255, 255, 255)
|
||||||
|
|
||||||
|
|
||||||
|
def test_load_zero_inch() -> None:
|
||||||
|
b = BytesIO(b"\xd7\xcd\xc6\x9a\x00\x00" + b"\x00" * 10)
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
with Image.open(b):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
def test_register_handler(tmp_path: Path) -> None:
|
def test_register_handler(tmp_path: Path) -> None:
|
||||||
class TestHandler(ImageFile.StubHandler):
|
class TestHandler(ImageFile.StubHandler):
|
||||||
methodCalled = False
|
methodCalled = False
|
||||||
|
@ -61,6 +69,12 @@ def test_load_float_dpi() -> None:
|
||||||
with Image.open("Tests/images/drawing.emf") as im:
|
with Image.open("Tests/images/drawing.emf") as im:
|
||||||
assert im.info["dpi"] == 1423.7668161434979
|
assert im.info["dpi"] == 1423.7668161434979
|
||||||
|
|
||||||
|
with open("Tests/images/drawing.emf", "rb") as fp:
|
||||||
|
data = fp.read()
|
||||||
|
b = BytesIO(data[:8] + b"\x06\xFA" + data[10:])
|
||||||
|
with Image.open(b) as im:
|
||||||
|
assert im.info["dpi"][0] == 2540
|
||||||
|
|
||||||
|
|
||||||
def test_load_set_dpi() -> None:
|
def test_load_set_dpi() -> None:
|
||||||
with Image.open("Tests/images/drawing.wmf") as im:
|
with Image.open("Tests/images/drawing.wmf") as im:
|
||||||
|
|
|
@ -737,6 +737,8 @@ class TestImage:
|
||||||
# Act/Assert
|
# Act/Assert
|
||||||
with Image.open(test_file) as im:
|
with Image.open(test_file) as im:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
im.save(temp_file)
|
im.save(temp_file)
|
||||||
|
|
||||||
def test_no_new_file_on_error(self, tmp_path: Path) -> None:
|
def test_no_new_file_on_error(self, tmp_path: Path) -> None:
|
||||||
|
|
|
@ -10,7 +10,7 @@ from pathlib import Path
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from PIL import Image
|
from PIL import Image, ImageFile
|
||||||
|
|
||||||
from .helper import (
|
from .helper import (
|
||||||
assert_image_equal,
|
assert_image_equal,
|
||||||
|
@ -179,7 +179,7 @@ class TestImagingCoreResize:
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def gradients_image() -> Generator[Image.Image, None, None]:
|
def gradients_image() -> Generator[ImageFile.ImageFile, None, None]:
|
||||||
with Image.open("Tests/images/radial_gradients.png") as im:
|
with Image.open("Tests/images/radial_gradients.png") as im:
|
||||||
im.load()
|
im.load()
|
||||||
try:
|
try:
|
||||||
|
@ -189,7 +189,7 @@ def gradients_image() -> Generator[Image.Image, None, None]:
|
||||||
|
|
||||||
|
|
||||||
class TestReducingGapResize:
|
class TestReducingGapResize:
|
||||||
def test_reducing_gap_values(self, gradients_image: Image.Image) -> None:
|
def test_reducing_gap_values(self, gradients_image: ImageFile.ImageFile) -> None:
|
||||||
ref = gradients_image.resize(
|
ref = gradients_image.resize(
|
||||||
(52, 34), Image.Resampling.BICUBIC, reducing_gap=None
|
(52, 34), Image.Resampling.BICUBIC, reducing_gap=None
|
||||||
)
|
)
|
||||||
|
@ -210,7 +210,7 @@ class TestReducingGapResize:
|
||||||
)
|
)
|
||||||
def test_reducing_gap_1(
|
def test_reducing_gap_1(
|
||||||
self,
|
self,
|
||||||
gradients_image: Image.Image,
|
gradients_image: ImageFile.ImageFile,
|
||||||
box: tuple[float, float, float, float],
|
box: tuple[float, float, float, float],
|
||||||
epsilon: float,
|
epsilon: float,
|
||||||
) -> None:
|
) -> None:
|
||||||
|
@ -230,7 +230,7 @@ class TestReducingGapResize:
|
||||||
)
|
)
|
||||||
def test_reducing_gap_2(
|
def test_reducing_gap_2(
|
||||||
self,
|
self,
|
||||||
gradients_image: Image.Image,
|
gradients_image: ImageFile.ImageFile,
|
||||||
box: tuple[float, float, float, float],
|
box: tuple[float, float, float, float],
|
||||||
epsilon: float,
|
epsilon: float,
|
||||||
) -> None:
|
) -> None:
|
||||||
|
@ -250,7 +250,7 @@ class TestReducingGapResize:
|
||||||
)
|
)
|
||||||
def test_reducing_gap_3(
|
def test_reducing_gap_3(
|
||||||
self,
|
self,
|
||||||
gradients_image: Image.Image,
|
gradients_image: ImageFile.ImageFile,
|
||||||
box: tuple[float, float, float, float],
|
box: tuple[float, float, float, float],
|
||||||
epsilon: float,
|
epsilon: float,
|
||||||
) -> None:
|
) -> None:
|
||||||
|
@ -266,7 +266,9 @@ class TestReducingGapResize:
|
||||||
|
|
||||||
@pytest.mark.parametrize("box", (None, (1.1, 2.2, 510.8, 510.9), (3, 10, 410, 256)))
|
@pytest.mark.parametrize("box", (None, (1.1, 2.2, 510.8, 510.9), (3, 10, 410, 256)))
|
||||||
def test_reducing_gap_8(
|
def test_reducing_gap_8(
|
||||||
self, gradients_image: Image.Image, box: tuple[float, float, float, float]
|
self,
|
||||||
|
gradients_image: ImageFile.ImageFile,
|
||||||
|
box: tuple[float, float, float, float],
|
||||||
) -> None:
|
) -> None:
|
||||||
ref = gradients_image.resize((52, 34), Image.Resampling.BICUBIC, box=box)
|
ref = gradients_image.resize((52, 34), Image.Resampling.BICUBIC, box=box)
|
||||||
im = gradients_image.resize(
|
im = gradients_image.resize(
|
||||||
|
@ -281,7 +283,7 @@ class TestReducingGapResize:
|
||||||
)
|
)
|
||||||
def test_box_filter(
|
def test_box_filter(
|
||||||
self,
|
self,
|
||||||
gradients_image: Image.Image,
|
gradients_image: ImageFile.ImageFile,
|
||||||
box: tuple[float, float, float, float],
|
box: tuple[float, float, float, float],
|
||||||
epsilon: float,
|
epsilon: float,
|
||||||
) -> None:
|
) -> None:
|
||||||
|
|
|
@ -104,20 +104,20 @@ def test_transposed() -> None:
|
||||||
assert im.size == (590, 88)
|
assert im.size == (590, 88)
|
||||||
|
|
||||||
|
|
||||||
def test_load_first_unless_jpeg() -> None:
|
def test_load_first_unless_jpeg(monkeypatch: pytest.MonkeyPatch) -> None:
|
||||||
# Test that thumbnail() still uses draft() for JPEG
|
# Test that thumbnail() still uses draft() for JPEG
|
||||||
with Image.open("Tests/images/hopper.jpg") as im:
|
with Image.open("Tests/images/hopper.jpg") as im:
|
||||||
draft = im.draft
|
original_draft = im.draft
|
||||||
|
|
||||||
def im_draft(
|
def im_draft(
|
||||||
mode: str, size: tuple[int, int]
|
mode: str | None, size: tuple[int, int] | None
|
||||||
) -> tuple[str, tuple[int, int, float, float]] | None:
|
) -> tuple[str, tuple[int, int, float, float]] | None:
|
||||||
result = draft(mode, size)
|
result = original_draft(mode, size)
|
||||||
assert result is not None
|
assert result is not None
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
im.draft = im_draft
|
monkeypatch.setattr(im, "draft", im_draft)
|
||||||
|
|
||||||
im.thumbnail((64, 64))
|
im.thumbnail((64, 64))
|
||||||
|
|
||||||
|
|
|
@ -1674,6 +1674,9 @@ def test_continuous_horizontal_edges_polygon() -> None:
|
||||||
def test_discontiguous_corners_polygon() -> None:
|
def test_discontiguous_corners_polygon() -> None:
|
||||||
img, draw = create_base_image_draw((84, 68))
|
img, draw = create_base_image_draw((84, 68))
|
||||||
draw.polygon(((1, 21), (34, 4), (71, 1), (38, 18)), BLACK)
|
draw.polygon(((1, 21), (34, 4), (71, 1), (38, 18)), BLACK)
|
||||||
|
draw.polygon(
|
||||||
|
((82, 29), (82, 26), (82, 24), (67, 22), (52, 29), (52, 15), (67, 22)), BLACK
|
||||||
|
)
|
||||||
draw.polygon(((71, 44), (38, 27), (1, 24)), BLACK)
|
draw.polygon(((71, 44), (38, 27), (1, 24)), BLACK)
|
||||||
draw.polygon(
|
draw.polygon(
|
||||||
((38, 66), (5, 49), (77, 49), (47, 66), (82, 63), (82, 47), (1, 47), (1, 63)),
|
((38, 66), (5, 49), (77, 49), (47, 66), (82, 63), (82, 47), (1, 47), (1, 63)),
|
||||||
|
|
|
@ -93,6 +93,19 @@ class TestImageFile:
|
||||||
assert p.image is not None
|
assert p.image is not None
|
||||||
assert (48, 48) == p.image.size
|
assert (48, 48) == p.image.size
|
||||||
|
|
||||||
|
@pytest.mark.filterwarnings("ignore:Corrupt EXIF data")
|
||||||
|
def test_incremental_tiff(self) -> None:
|
||||||
|
with ImageFile.Parser() as p:
|
||||||
|
with open("Tests/images/hopper.tif", "rb") as f:
|
||||||
|
p.feed(f.read(1024))
|
||||||
|
|
||||||
|
# Check that insufficient data was given in the first feed
|
||||||
|
assert not p.image
|
||||||
|
|
||||||
|
p.feed(f.read())
|
||||||
|
assert p.image is not None
|
||||||
|
assert (128, 128) == p.image.size
|
||||||
|
|
||||||
@skip_unless_feature("webp")
|
@skip_unless_feature("webp")
|
||||||
def test_incremental_webp(self) -> None:
|
def test_incremental_webp(self) -> None:
|
||||||
with ImageFile.Parser() as p:
|
with ImageFile.Parser() as p:
|
||||||
|
|
|
@ -52,4 +52,6 @@ def test_image(mode: str) -> None:
|
||||||
|
|
||||||
def test_closed_file() -> None:
|
def test_closed_file() -> None:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
ImageQt.ImageQt("Tests/images/hopper.gif")
|
ImageQt.ImageQt("Tests/images/hopper.gif")
|
||||||
|
|
|
@ -264,4 +264,6 @@ def test_no_resource_warning_for_numpy_array() -> None:
|
||||||
with Image.open(test_file) as im:
|
with Image.open(test_file) as im:
|
||||||
# Act/Assert
|
# Act/Assert
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
array(im)
|
array(im)
|
||||||
|
|
|
@ -74,6 +74,17 @@ def test_pickle_image(
|
||||||
helper_pickle_file(tmp_path, protocol, test_file, test_mode)
|
helper_pickle_file(tmp_path, protocol, test_file, test_mode)
|
||||||
|
|
||||||
|
|
||||||
|
def test_pickle_jpeg() -> None:
|
||||||
|
# Arrange
|
||||||
|
with Image.open("Tests/images/hopper.jpg") as image:
|
||||||
|
# Act: roundtrip
|
||||||
|
unpickled_image = pickle.loads(pickle.dumps(image))
|
||||||
|
|
||||||
|
# Assert
|
||||||
|
assert len(unpickled_image.layer) == 3
|
||||||
|
assert unpickled_image.layers == 3
|
||||||
|
|
||||||
|
|
||||||
def test_pickle_la_mode_with_palette(tmp_path: Path) -> None:
|
def test_pickle_la_mode_with_palette(tmp_path: Path) -> None:
|
||||||
# Arrange
|
# Arrange
|
||||||
filename = str(tmp_path / "temp.pkl")
|
filename = str(tmp_path / "temp.pkl")
|
||||||
|
|
|
@@ -1,7 +1,7 @@
 # Documentation: https://docs.codecov.com/docs/codecov-yaml
 
 codecov:
-  # Avoid "Missing base report" due to committing CHANGES.rst with "[CI skip]"
+  # Avoid "Missing base report" due to committing with "[CI skip]"
   # https://github.com/codecov/support/issues/363
   # https://docs.codecov.com/docs/comparing-commits
   allow_coverage_offsets: true
|
|
|
@@ -1,7 +1,7 @@
 #!/bin/bash
 # install openjpeg
 
-archive=openjpeg-2.5.2
+archive=openjpeg-2.5.3
 
 ./download-and-extract.sh $archive https://raw.githubusercontent.com/python-pillow/pillow-depends/main/$archive.tar.gz
|
|
||||||
|
|
|
@@ -1,7 +1,7 @@
 #!/bin/bash
 # install webp
 
-archive=libwebp-1.4.0
+archive=libwebp-1.5.0
 
 ./download-and-extract.sh $archive https://raw.githubusercontent.com/python-pillow/pillow-depends/main/$archive.tar.gz
|
|
||||||
|
|
|
@ -5,7 +5,7 @@ The Python Imaging Library (PIL) is
|
||||||
|
|
||||||
Pillow is the friendly PIL fork. It is
|
Pillow is the friendly PIL fork. It is
|
||||||
|
|
||||||
Copyright © 2010-2024 by Jeffrey A. Clark and contributors
|
Copyright © 2010 by Jeffrey A. Clark and contributors
|
||||||
|
|
||||||
Like PIL, Pillow is licensed under the open source PIL
|
Like PIL, Pillow is licensed under the open source PIL
|
||||||
Software License:
|
Software License:
|
||||||
|
|
|
@ -18,7 +18,7 @@ The fork author's goal is to foster and support active development of PIL throug
|
||||||
License
|
License
|
||||||
-------
|
-------
|
||||||
|
|
||||||
Like PIL, Pillow is `licensed under the open source HPND License <https://raw.githubusercontent.com/python-pillow/Pillow/main/LICENSE>`_
|
Like PIL, Pillow is `licensed under the open source MIT-CMU License <https://raw.githubusercontent.com/python-pillow/Pillow/main/LICENSE>`_
|
||||||
|
|
||||||
Why a fork?
|
Why a fork?
|
||||||
-----------
|
-----------
|
||||||
|
|
|
@ -55,7 +55,7 @@ master_doc = "index"
|
||||||
project = "Pillow (PIL Fork)"
|
project = "Pillow (PIL Fork)"
|
||||||
copyright = (
|
copyright = (
|
||||||
"1995-2011 Fredrik Lundh and contributors, "
|
"1995-2011 Fredrik Lundh and contributors, "
|
||||||
"2010-2024 Jeffrey A. Clark and contributors."
|
"2010 Jeffrey A. Clark and contributors."
|
||||||
)
|
)
|
||||||
author = "Fredrik Lundh (PIL), Jeffrey A. Clark (Pillow)"
|
author = "Fredrik Lundh (PIL), Jeffrey A. Clark (Pillow)"
|
||||||
|
|
||||||
|
|
|
@@ -692,6 +692,30 @@ The :py:meth:`~PIL.Image.Image.save` method supports the following options:
 you fail to do this, you will get errors about not being able to load the
 ``_imaging`` DLL).
 
+MPO
+^^^
+
+Pillow reads and writes Multi Picture Object (MPO) files. When first opened, it loads
+the primary image. The :py:meth:`~PIL.Image.Image.seek` and
+:py:meth:`~PIL.Image.Image.tell` methods may be used to read other pictures from the
+file. The pictures are zero-indexed and random access is supported.
+
+.. _mpo-saving:
+
+Saving
+~~~~~~
+
+When calling :py:meth:`~PIL.Image.Image.save` to write an MPO file, by default
+only the first frame of a multiframe image will be saved. If the ``save_all``
+argument is present and true, then all frames will be saved, and the following
+option will also be available.
+
+**append_images**
+    A list of images to append as additional pictures. Each of the
+    images in the list can be single or multiframe images.
+
+    .. versionadded:: 9.3.0
+
 MSP
 ^^^
 
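A minimal sketch of the workflow the new MPO section describes, using hypothetical file names and only the behaviour stated above (``seek``/``tell`` for reading, ``save_all`` with ``append_images`` for writing)::

    from PIL import Image

    # Reading: the primary picture loads first; seek() reaches the others.
    with Image.open("photo.mpo") as im:
        im.seek(1)  # pictures are zero-indexed, so this is the second one
        second = im.copy()

    # Saving: save_all=True writes every frame; append_images adds more pictures.
    first = Image.new("RGB", (64, 64), "red")
    first.save("out.mpo", save_all=True, append_images=[second])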
|
@ -1435,30 +1459,6 @@ Note that there may be an embedded gamma of 2.2 in MIC files.
|
||||||
|
|
||||||
To enable MIC support, you must install :pypi:`olefile`.
|
To enable MIC support, you must install :pypi:`olefile`.
|
||||||
|
|
||||||
MPO
|
|
||||||
^^^
|
|
||||||
|
|
||||||
Pillow identifies and reads Multi Picture Object (MPO) files, loading the primary
|
|
||||||
image when first opened. The :py:meth:`~PIL.Image.Image.seek` and :py:meth:`~PIL.Image.Image.tell`
|
|
||||||
methods may be used to read other pictures from the file. The pictures are
|
|
||||||
zero-indexed and random access is supported.
|
|
||||||
|
|
||||||
.. _mpo-saving:
|
|
||||||
|
|
||||||
Saving
|
|
||||||
~~~~~~
|
|
||||||
|
|
||||||
When calling :py:meth:`~PIL.Image.Image.save` to write an MPO file, by default
|
|
||||||
only the first frame of a multiframe image will be saved. If the ``save_all``
|
|
||||||
argument is present and true, then all frames will be saved, and the following
|
|
||||||
option will also be available.
|
|
||||||
|
|
||||||
**append_images**
|
|
||||||
A list of images to append as additional pictures. Each of the
|
|
||||||
images in the list can be single or multiframe images.
|
|
||||||
|
|
||||||
.. versionadded:: 9.3.0
|
|
||||||
|
|
||||||
PCD
|
PCD
|
||||||
^^^
|
^^^
|
||||||
|
|
||||||
|
|
|
@@ -678,7 +678,7 @@ Reading from URL
 
     from PIL import Image
     from urllib.request import urlopen
-    url = "https://python-pillow.org/assets/images/pillow-logo.png"
+    url = "https://python-pillow.github.io/assets/images/pillow-logo.png"
     img = Image.open(urlopen(url))
|
|
||||||
|
|
||||||
|
|
|
@ -58,7 +58,7 @@ Many of Pillow's features require external libraries:
|
||||||
* **openjpeg** provides JPEG 2000 functionality.
|
* **openjpeg** provides JPEG 2000 functionality.
|
||||||
|
|
||||||
* Pillow has been tested with openjpeg **2.0.0**, **2.1.0**, **2.3.1**,
|
* Pillow has been tested with openjpeg **2.0.0**, **2.1.0**, **2.3.1**,
|
||||||
**2.4.0**, **2.5.0** and **2.5.2**.
|
**2.4.0**, **2.5.0**, **2.5.2** and **2.5.3**.
|
||||||
* Pillow does **not** support the earlier **1.5** series which ships
|
* Pillow does **not** support the earlier **1.5** series which ships
|
||||||
with Debian Jessie.
|
with Debian Jessie.
|
||||||
|
|
||||||
|
@@ -148,13 +148,7 @@ Many of Pillow's features require external libraries:
 The easiest way to install external libraries is via `Homebrew
 <https://brew.sh/>`_. After you install Homebrew, run::
 
-    brew install libjpeg libtiff little-cms2 openjpeg webp
-
-To install libraqm on macOS use Homebrew to install its dependencies::
-
-    brew install freetype harfbuzz fribidi
-
-Then see ``depends/install_raqm_cmake.sh`` to install libraqm.
+    brew install libjpeg libraqm libtiff little-cms2 openjpeg webp
 
 .. tab:: Windows
|
|
||||||
|
@ -195,11 +189,6 @@ Many of Pillow's features require external libraries:
|
||||||
mingw-w64-x86_64-libimagequant \
|
mingw-w64-x86_64-libimagequant \
|
||||||
mingw-w64-x86_64-libraqm
|
mingw-w64-x86_64-libraqm
|
||||||
|
|
||||||
https://www.msys2.org/docs/python/ states that setuptools >= 60 does not work with
|
|
||||||
MSYS2. To workaround this, before installing Pillow you must run::
|
|
||||||
|
|
||||||
export SETUPTOOLS_USE_DISTUTILS=stdlib
|
|
||||||
|
|
||||||
.. tab:: FreeBSD
|
.. tab:: FreeBSD
|
||||||
|
|
||||||
.. Note:: Only FreeBSD 10 and 11 tested
|
.. Note:: Only FreeBSD 10 and 11 tested
|
||||||
|
|
|
@@ -29,10 +29,10 @@ These platforms are built and tested for every change.
 +----------------------------------+----------------------------+---------------------+
 | Debian 12 Bookworm | 3.11 | x86, x86-64 |
 +----------------------------------+----------------------------+---------------------+
-| Fedora 39 | 3.12 | x86-64 |
-+----------------------------------+----------------------------+---------------------+
 | Fedora 40 | 3.12 | x86-64 |
 +----------------------------------+----------------------------+---------------------+
+| Fedora 41 | 3.13 | x86-64 |
++----------------------------------+----------------------------+---------------------+
 | Gentoo | 3.12 | x86-64 |
 +----------------------------------+----------------------------+---------------------+
 | macOS 13 Ventura | 3.9 | x86-64 |
|
@ -55,7 +55,7 @@ These platforms are built and tested for every change.
|
||||||
| +----------------------------+---------------------+
|
| +----------------------------+---------------------+
|
||||||
| | 3.13 | x86 |
|
| | 3.13 | x86 |
|
||||||
| +----------------------------+---------------------+
|
| +----------------------------+---------------------+
|
||||||
| | 3.9 (MinGW) | x86-64 |
|
| | 3.12 (MinGW) | x86-64 |
|
||||||
| +----------------------------+---------------------+
|
| +----------------------------+---------------------+
|
||||||
| | 3.9 (Cygwin) | x86-64 |
|
| | 3.9 (Cygwin) | x86-64 |
|
||||||
+----------------------------------+----------------------------+---------------------+
|
+----------------------------------+----------------------------+---------------------+
|
||||||
|
@ -75,7 +75,9 @@ These platforms have been reported to work at the versions mentioned.
|
||||||
| Operating system | | Tested Python | | Latest tested | | Tested |
|
| Operating system | | Tested Python | | Latest tested | | Tested |
|
||||||
| | | versions | | Pillow version | | processors |
|
| | | versions | | Pillow version | | processors |
|
||||||
+==================================+============================+==================+==============+
|
+==================================+============================+==================+==============+
|
||||||
| macOS 15 Sequoia | 3.8, 3.9, 3.10, 3.11, 3.12 | 10.4.0 |arm |
|
| macOS 15 Sequoia | 3.9, 3.10, 3.11, 3.12, 3.13| 11.0.0 |arm |
|
||||||
|
| +----------------------------+------------------+ |
|
||||||
|
| | 3.8 | 10.4.0 | |
|
||||||
+----------------------------------+----------------------------+------------------+--------------+
|
+----------------------------------+----------------------------+------------------+--------------+
|
||||||
| macOS 14 Sonoma | 3.8, 3.9, 3.10, 3.11, 3.12 | 10.4.0 |arm |
|
| macOS 14 Sonoma | 3.8, 3.9, 3.10, 3.11, 3.12 | 10.4.0 |arm |
|
||||||
+----------------------------------+----------------------------+------------------+--------------+
|
+----------------------------------+----------------------------+------------------+--------------+
|
||||||
|
@ -148,7 +150,7 @@ These platforms have been reported to work at the versions mentioned.
|
||||||
+----------------------------------+----------------------------+------------------+--------------+
|
+----------------------------------+----------------------------+------------------+--------------+
|
||||||
| FreeBSD 10.2 | 2.7, 3.4 | 3.1.0 |x86-64 |
|
| FreeBSD 10.2 | 2.7, 3.4 | 3.1.0 |x86-64 |
|
||||||
+----------------------------------+----------------------------+------------------+--------------+
|
+----------------------------------+----------------------------+------------------+--------------+
|
||||||
| Windows 11 | 3.9, 3.10, 3.11, 3.12 | 10.2.0 |arm64 |
|
| Windows 11 23H2 | 3.9, 3.10, 3.11, 3.12, 3.13| 11.0.0 |arm64 |
|
||||||
+----------------------------------+----------------------------+------------------+--------------+
|
+----------------------------------+----------------------------+------------------+--------------+
|
||||||
| Windows 11 Pro | 3.11, 3.12 | 10.2.0 |x86-64 |
|
| Windows 11 Pro | 3.11, 3.12 | 10.2.0 |x86-64 |
|
||||||
+----------------------------------+----------------------------+------------------+--------------+
|
+----------------------------------+----------------------------+------------------+--------------+
|
||||||
|
|
|
@ -19,7 +19,7 @@ Example: Parse an image
|
||||||
|
|
||||||
from PIL import ImageFile
|
from PIL import ImageFile
|
||||||
|
|
||||||
fp = open("hopper.pgm", "rb")
|
fp = open("hopper.ppm", "rb")
|
||||||
|
|
||||||
p = ImageFile.Parser()
|
p = ImageFile.Parser()
|
||||||
|
|
||||||
|
|
|
@ -54,6 +54,7 @@ Feature version numbers are available only where stated.
|
||||||
Support for the following features can be checked:
|
Support for the following features can be checked:
|
||||||
|
|
||||||
* ``libjpeg_turbo``: (compile time) Whether Pillow was compiled against the libjpeg-turbo version of libjpeg. Compile-time version number is available.
|
* ``libjpeg_turbo``: (compile time) Whether Pillow was compiled against the libjpeg-turbo version of libjpeg. Compile-time version number is available.
|
||||||
|
* ``zlib_ng``: (compile time) Whether Pillow was compiled against the zlib-ng version of zlib. Compile-time version number is available.
|
||||||
* ``raqm``: Raqm library, required for ``ImageFont.Layout.RAQM`` in :py:func:`PIL.ImageFont.truetype`. Run-time version number is available for Raqm 0.7.0 or newer.
|
* ``raqm``: Raqm library, required for ``ImageFont.Layout.RAQM`` in :py:func:`PIL.ImageFont.truetype`. Run-time version number is available for Raqm 0.7.0 or newer.
|
||||||
* ``libimagequant``: (compile time) ImageQuant quantization support in :py:func:`PIL.Image.Image.quantize`. Run-time version number is available.
|
* ``libimagequant``: (compile time) ImageQuant quantization support in :py:func:`PIL.Image.Image.quantize`. Run-time version number is available.
|
||||||
* ``xcb``: (compile time) Support for X11 in :py:func:`PIL.ImageGrab.grab` via the XCB library.
|
* ``xcb``: (compile time) Support for X11 in :py:func:`PIL.ImageGrab.grab` via the XCB library.
|
||||||
|
|
|
@ -1,19 +1,6 @@
|
||||||
11.0.0
|
11.0.0
|
||||||
------
|
------
|
||||||
|
|
||||||
Security
|
|
||||||
========
|
|
||||||
|
|
||||||
TODO
|
|
||||||
^^^^
|
|
||||||
|
|
||||||
TODO
|
|
||||||
|
|
||||||
:cve:`YYYY-XXXXX`: TODO
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
TODO
|
|
||||||
|
|
||||||
Backwards Incompatible Changes
|
Backwards Incompatible Changes
|
||||||
==============================
|
==============================
|
||||||
|
|
||||||
|
@ -159,7 +146,7 @@ Python 3.13
|
||||||
|
|
||||||
Pillow 10.4.0 had wheels built against Python 3.13 beta, available as a preview to help
|
Pillow 10.4.0 had wheels built against Python 3.13 beta, available as a preview to help
|
||||||
others prepare for 3.13, and to ensure Pillow could be used immediately at the release
|
others prepare for 3.13, and to ensure Pillow could be used immediately at the release
|
||||||
of 3.13.0 final (2024-10-01, :pep:`719`).
|
of 3.13.0 final (2024-10-07, :pep:`719`).
|
||||||
|
|
||||||
Pillow 11.0.0 now officially supports Python 3.13.
|
Pillow 11.0.0 now officially supports Python 3.13.
|
||||||
|
|
||||||
|
|
59
docs/releasenotes/11.1.0.rst
Normal file
|
@ -0,0 +1,59 @@
|
||||||
|
11.1.0
|
||||||
|
------
|
||||||
|
|
||||||
|
Security
|
||||||
|
========
|
||||||
|
|
||||||
|
TODO
|
||||||
|
^^^^
|
||||||
|
|
||||||
|
TODO
|
||||||
|
|
||||||
|
:cve:`YYYY-XXXXX`: TODO
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
TODO
|
||||||
|
|
||||||
|
Backwards Incompatible Changes
|
||||||
|
==============================
|
||||||
|
|
||||||
|
TODO
|
||||||
|
^^^^
|
||||||
|
|
||||||
|
Deprecations
|
||||||
|
============
|
||||||
|
|
||||||
|
TODO
|
||||||
|
^^^^
|
||||||
|
|
||||||
|
TODO
|
||||||
|
|
||||||
|
API Changes
|
||||||
|
===========
|
||||||
|
|
||||||
|
TODO
|
||||||
|
^^^^
|
||||||
|
|
||||||
|
TODO
|
||||||
|
|
||||||
|
API Additions
|
||||||
|
=============
|
||||||
|
|
||||||
|
Check for zlib-ng
|
||||||
|
^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
You can check if Pillow has been built against the zlib-ng version of the
|
||||||
|
zlib library, and what version of zlib-ng is being used::
|
||||||
|
|
||||||
|
from PIL import features
|
||||||
|
features.check_feature("zlib_ng") # True or False
|
||||||
|
features.version_feature("zlib_ng") # "2.2.2" for example, or None
|
||||||
|
|
||||||
|
Other Changes
|
||||||
|
=============
|
||||||
|
|
||||||
|
zlib-ng in wheels
|
||||||
|
^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
Wheels are now built against zlib-ng for improved speed. In tests, saving a PNG
|
||||||
|
was found to be more than twice as fast at higher compression levels.
|
|
@ -14,6 +14,7 @@ expected to be backported to earlier versions.
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 2
|
:maxdepth: 2
|
||||||
|
|
||||||
|
11.1.0
|
||||||
11.0.0
|
11.0.0
|
||||||
10.4.0
|
10.4.0
|
||||||
10.3.0
|
10.3.0
|
||||||
|
|
|
@ -14,14 +14,14 @@ readme = "README.md"
|
||||||
keywords = [
|
keywords = [
|
||||||
"Imaging",
|
"Imaging",
|
||||||
]
|
]
|
||||||
license = { text = "HPND" }
|
license = { text = "MIT-CMU" }
|
||||||
authors = [
|
authors = [
|
||||||
{ name = "Jeffrey A. Clark", email = "aclark@aclark.net" },
|
{ name = "Jeffrey A. Clark", email = "aclark@aclark.net" },
|
||||||
]
|
]
|
||||||
requires-python = ">=3.9"
|
requires-python = ">=3.9"
|
||||||
classifiers = [
|
classifiers = [
|
||||||
"Development Status :: 6 - Mature",
|
"Development Status :: 6 - Mature",
|
||||||
"License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND)",
|
"License :: OSI Approved :: CMU License (MIT-CMU)",
|
||||||
"Programming Language :: Python :: 3 :: Only",
|
"Programming Language :: Python :: 3 :: Only",
|
||||||
"Programming Language :: Python :: 3.9",
|
"Programming Language :: Python :: 3.9",
|
||||||
"Programming Language :: Python :: 3.10",
|
"Programming Language :: Python :: 3.10",
|
||||||
|
@ -56,7 +56,7 @@ optional-dependencies.mic = [
|
||||||
]
|
]
|
||||||
optional-dependencies.tests = [
|
optional-dependencies.tests = [
|
||||||
"check-manifest",
|
"check-manifest",
|
||||||
"coverage",
|
"coverage>=7.4.2",
|
||||||
"defusedxml",
|
"defusedxml",
|
||||||
"markdown2",
|
"markdown2",
|
||||||
"olefile",
|
"olefile",
|
||||||
|
@ -65,6 +65,7 @@ optional-dependencies.tests = [
|
||||||
"pytest",
|
"pytest",
|
||||||
"pytest-cov",
|
"pytest-cov",
|
||||||
"pytest-timeout",
|
"pytest-timeout",
|
||||||
|
"trove-classifiers>=2024.10.12",
|
||||||
]
|
]
|
||||||
optional-dependencies.typing = [
|
optional-dependencies.typing = [
|
||||||
"typing-extensions; python_version<'3.10'",
|
"typing-extensions; python_version<'3.10'",
|
||||||
|
@ -72,10 +73,10 @@ optional-dependencies.typing = [
|
||||||
optional-dependencies.xmp = [
|
optional-dependencies.xmp = [
|
||||||
"defusedxml",
|
"defusedxml",
|
||||||
]
|
]
|
||||||
urls.Changelog = "https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst"
|
urls.Changelog = "https://github.com/python-pillow/Pillow/releases"
|
||||||
urls.Documentation = "https://pillow.readthedocs.io"
|
urls.Documentation = "https://pillow.readthedocs.io"
|
||||||
urls.Funding = "https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=pypi"
|
urls.Funding = "https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=pypi"
|
||||||
urls.Homepage = "https://python-pillow.org"
|
urls.Homepage = "https://python-pillow.github.io"
|
||||||
urls.Mastodon = "https://fosstodon.org/@pillow"
|
urls.Mastodon = "https://fosstodon.org/@pillow"
|
||||||
urls."Release notes" = "https://pillow.readthedocs.io/en/stable/releasenotes/index.html"
|
urls."Release notes" = "https://pillow.readthedocs.io/en/stable/releasenotes/index.html"
|
||||||
urls.Source = "https://github.com/python-pillow/Pillow"
|
urls.Source = "https://github.com/python-pillow/Pillow"
|
||||||
|
@ -93,10 +94,18 @@ version = { attr = "PIL.__version__" }
|
||||||
[tool.cibuildwheel]
|
[tool.cibuildwheel]
|
||||||
before-all = ".github/workflows/wheels-dependencies.sh"
|
before-all = ".github/workflows/wheels-dependencies.sh"
|
||||||
build-verbosity = 1
|
build-verbosity = 1
|
||||||
|
|
||||||
config-settings = "raqm=enable raqm=vendor fribidi=vendor imagequant=disable"
|
config-settings = "raqm=enable raqm=vendor fribidi=vendor imagequant=disable"
|
||||||
|
# Disable platform guessing on macOS
|
||||||
|
macos.config-settings = "raqm=enable raqm=vendor fribidi=vendor imagequant=disable platform-guessing=disable"
|
||||||
|
|
||||||
test-command = "cd {project} && .github/workflows/wheels-test.sh"
|
test-command = "cd {project} && .github/workflows/wheels-test.sh"
|
||||||
test-extras = "tests"
|
test-extras = "tests"
|
||||||
|
|
||||||
|
[tool.cibuildwheel.macos.environment]
|
||||||
|
PATH = "$(pwd)/build/deps/darwin/bin:$(dirname $(which python3)):/usr/bin:/bin:/usr/sbin:/sbin:/Library/Apple/usr/bin"
|
||||||
|
DYLD_LIBRARY_PATH = "$(pwd)/build/deps/darwin/lib"
|
||||||
|
|
||||||
[tool.black]
|
[tool.black]
|
||||||
exclude = "wheels/multibuild"
|
exclude = "wheels/multibuild"
|
||||||
|
|
||||||
|
|
28
setup.py
|
@ -344,7 +344,7 @@ class pil_build_ext(build_ext):
|
||||||
for x in ("raqm", "fribidi")
|
for x in ("raqm", "fribidi")
|
||||||
]
|
]
|
||||||
+ [
|
+ [
|
||||||
("disable-platform-guessing", None, "Disable platform guessing on Linux"),
|
("disable-platform-guessing", None, "Disable platform guessing"),
|
||||||
("debug", None, "Debug logging"),
|
("debug", None, "Debug logging"),
|
||||||
]
|
]
|
||||||
+ [("add-imaging-libs=", None, "Add libs to _imaging build")]
|
+ [("add-imaging-libs=", None, "Add libs to _imaging build")]
|
||||||
|
@ -389,17 +389,18 @@ class pil_build_ext(build_ext):
|
||||||
pass
|
pass
|
||||||
for x in self.feature:
|
for x in self.feature:
|
||||||
if getattr(self, f"disable_{x}"):
|
if getattr(self, f"disable_{x}"):
|
||||||
setattr(self.feature, x, False)
|
self.feature.set(x, False)
|
||||||
 self.feature.required.discard(x)
 _dbg("Disabling %s", x)
 if getattr(self, f"enable_{x}"):
-msg = f"Conflicting options: --enable-{x} and --disable-{x}"
+msg = f"Conflicting options: '-C {x}=enable' and '-C {x}=disable'"
 raise ValueError(msg)
 if x == "freetype":
-_dbg("--disable-freetype implies --disable-raqm")
+_dbg("'-C freetype=disable' implies '-C raqm=disable'")
 if getattr(self, "enable_raqm"):
 msg = (
-"Conflicting options: --enable-raqm and --disable-freetype"
+"Conflicting options: "
+"'-C raqm=enable' and '-C freetype=disable'"
 )
 raise ValueError(msg)
 setattr(self, "disable_raqm", True)
@@ -407,15 +408,17 @@ class pil_build_ext(build_ext):
 _dbg("Requiring %s", x)
 self.feature.required.add(x)
 if x == "raqm":
-_dbg("--enable-raqm implies --enable-freetype")
+_dbg("'-C raqm=enable' implies '-C freetype=enable'")
 self.feature.required.add("freetype")
 for x in ("raqm", "fribidi"):
 if getattr(self, f"vendor_{x}"):
 if getattr(self, "disable_raqm"):
-msg = f"Conflicting options: --vendor-{x} and --disable-raqm"
+msg = f"Conflicting options: '-C {x}=vendor' and '-C raqm=disable'"
 raise ValueError(msg)
 if x == "fribidi" and not getattr(self, "vendor_raqm"):
-msg = f"Conflicting options: --vendor-{x} and not --vendor-raqm"
+msg = (
+f"Conflicting options: '-C {x}=vendor' and not '-C raqm=vendor'"
+)
 raise ValueError(msg)
 _dbg("Using vendored version of %s", x)
 self.feature.vendor.add(x)
@@ -448,7 +451,7 @@ class pil_build_ext(build_ext):
 def get_macos_sdk_path(self) -> str | None:
 try:
 sdk_path = (
-subprocess.check_output(["xcrun", "--show-sdk-path"])
+subprocess.check_output(["xcrun", "--show-sdk-path", "--sdk", "macosx"])
 .strip()
 .decode("latin1")
 )
@@ -606,6 +609,7 @@ class pil_build_ext(build_ext):
 _add_directory(library_dirs, "/usr/X11/lib")
 _add_directory(include_dirs, "/usr/X11/include")

+# Add the macOS SDK path.
 sdk_path = self.get_macos_sdk_path()
 if sdk_path:
 _add_directory(library_dirs, os.path.join(sdk_path, "usr", "lib"))
@@ -690,6 +694,8 @@ class pil_build_ext(build_ext):
 feature.set("zlib", "z")
 elif sys.platform == "win32" and _find_library_file(self, "zlib"):
 feature.set("zlib", "zlib") # alternative name
+elif sys.platform == "win32" and _find_library_file(self, "zdll"):
+feature.set("zlib", "zdll") # dll import library

 if feature.want("jpeg"):
 _dbg("Looking for jpeg")
@@ -1001,7 +1007,7 @@ def debug_build() -> bool:
 return hasattr(sys, "gettotalrefcount") or FUZZING_BUILD


-files = ["src/_imaging.c"]
+files: list[str | os.PathLike[str]] = ["src/_imaging.c"]
 for src_file in _IMAGING:
 files.append("src/" + src_file + ".c")
 for src_file in _LIB_IMAGING:
@@ -1044,7 +1050,7 @@ except DependencyException as err:
 msg = f"""

 The headers or library files could not be found for {str(err)},
-which was requested by the option flag --enable-{str(err)}
+which was requested by the option flag '-C {str(err)}=enable'

 """
 sys.stderr.write(msg)
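Note: the hunks above replace the old global --enable-*/--disable-*/--vendor-* build flags with PEP 517 config settings. A minimal sketch of driving a source build with the new spelling, assuming a pip recent enough to accept -C/--config-settings and a Pillow checkout in the current directory; the feature choices are only illustrative:

import subprocess
import sys

# Build Pillow from a local checkout, passing feature selections as
# config settings instead of the removed --enable-*/--disable-* flags.
subprocess.check_call(
    [
        sys.executable, "-m", "pip", "install", ".",
        "-C", "raqm=enable",         # was --enable-raqm
        "-C", "imagequant=disable",  # was --disable-imagequant
    ]
)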
@@ -273,7 +273,7 @@ class BlpImageFile(ImageFile.ImageFile):
 raise BLPFormatError(msg)

 self._mode = "RGBA" if self._blp_alpha_depth else "RGB"
-self.tile = [ImageFile._Tile(decoder, (0, 0) + self.size, 0, (self.mode, 0, 1))]
+self.tile = [ImageFile._Tile(decoder, (0, 0) + self.size, 0, self.mode)]


 class _BLPBaseDecoder(ImageFile.PyDecoder):
@@ -560,9 +560,7 @@ def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
 + struct.pack("<4I", *rgba_mask) # dwRGBABitMask
 + struct.pack("<5I", DDSCAPS.TEXTURE, 0, 0, 0, 0)
 )
-ImageFile._save(
-im, fp, [ImageFile._Tile("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))]
-)
+ImageFile._save(im, fp, [ImageFile._Tile("raw", (0, 0) + im.size, 0, rawmode)])


 def _accept(prefix: bytes) -> bool:
@@ -454,7 +454,7 @@ def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes, eps: int = 1) -
 if hasattr(fp, "flush"):
 fp.flush()

-ImageFile._save(im, fp, [ImageFile._Tile("eps", (0, 0) + im.size, 0, None)])
+ImageFile._save(im, fp, [ImageFile._Tile("eps", (0, 0) + im.size)])

 fp.write(b"\n%%%%EndBinary\n")
 fp.write(b"grestore end\n")
@@ -303,38 +303,38 @@ TAGS = {


 class GPS(IntEnum):
-GPSVersionID = 0
+GPSVersionID = 0x00
-GPSLatitudeRef = 1
+GPSLatitudeRef = 0x01
-GPSLatitude = 2
+GPSLatitude = 0x02
-GPSLongitudeRef = 3
+GPSLongitudeRef = 0x03
-GPSLongitude = 4
+GPSLongitude = 0x04
-GPSAltitudeRef = 5
+GPSAltitudeRef = 0x05
-GPSAltitude = 6
+GPSAltitude = 0x06
-GPSTimeStamp = 7
+GPSTimeStamp = 0x07
-GPSSatellites = 8
+GPSSatellites = 0x08
-GPSStatus = 9
+GPSStatus = 0x09
-GPSMeasureMode = 10
+GPSMeasureMode = 0x0A
-GPSDOP = 11
+GPSDOP = 0x0B
-GPSSpeedRef = 12
+GPSSpeedRef = 0x0C
-GPSSpeed = 13
+GPSSpeed = 0x0D
-GPSTrackRef = 14
+GPSTrackRef = 0x0E
-GPSTrack = 15
+GPSTrack = 0x0F
-GPSImgDirectionRef = 16
+GPSImgDirectionRef = 0x10
-GPSImgDirection = 17
+GPSImgDirection = 0x11
-GPSMapDatum = 18
+GPSMapDatum = 0x12
-GPSDestLatitudeRef = 19
+GPSDestLatitudeRef = 0x13
-GPSDestLatitude = 20
+GPSDestLatitude = 0x14
-GPSDestLongitudeRef = 21
+GPSDestLongitudeRef = 0x15
-GPSDestLongitude = 22
+GPSDestLongitude = 0x16
-GPSDestBearingRef = 23
+GPSDestBearingRef = 0x17
-GPSDestBearing = 24
+GPSDestBearing = 0x18
-GPSDestDistanceRef = 25
+GPSDestDistanceRef = 0x19
-GPSDestDistance = 26
+GPSDestDistance = 0x1A
-GPSProcessingMethod = 27
+GPSProcessingMethod = 0x1B
-GPSAreaInformation = 28
+GPSAreaInformation = 0x1C
-GPSDateStamp = 29
+GPSDateStamp = 0x1D
-GPSDifferential = 30
+GPSDifferential = 0x1E
-GPSHPositioningError = 31
+GPSHPositioningError = 0x1F


 """Maps EXIF GPS tags to tag names."""
@@ -342,40 +342,40 @@ GPSTAGS = {i.value: i.name for i in GPS}


 class Interop(IntEnum):
-InteropIndex = 1
+InteropIndex = 0x0001
-InteropVersion = 2
+InteropVersion = 0x0002
-RelatedImageFileFormat = 4096
+RelatedImageFileFormat = 0x1000
-RelatedImageWidth = 4097
+RelatedImageWidth = 0x1001
-RelatedImageHeight = 4098
+RelatedImageHeight = 0x1002


 class IFD(IntEnum):
-Exif = 34665
+Exif = 0x8769
-GPSInfo = 34853
+GPSInfo = 0x8825
-Makernote = 37500
+MakerNote = 0x927C
-Interop = 40965
+Interop = 0xA005
 IFD1 = -1


 class LightSource(IntEnum):
-Unknown = 0
+Unknown = 0x00
-Daylight = 1
+Daylight = 0x01
-Fluorescent = 2
+Fluorescent = 0x02
-Tungsten = 3
+Tungsten = 0x03
-Flash = 4
+Flash = 0x04
-Fine = 9
+Fine = 0x09
-Cloudy = 10
+Cloudy = 0x0A
-Shade = 11
+Shade = 0x0B
-DaylightFluorescent = 12
+DaylightFluorescent = 0x0C
-DayWhiteFluorescent = 13
+DayWhiteFluorescent = 0x0D
-CoolWhiteFluorescent = 14
+CoolWhiteFluorescent = 0x0E
-WhiteFluorescent = 15
+WhiteFluorescent = 0x0F
-StandardLightA = 17
+StandardLightA = 0x11
-StandardLightB = 18
+StandardLightB = 0x12
-StandardLightC = 19
+StandardLightC = 0x13
-D55 = 20
+D55 = 0x14
-D65 = 21
+D65 = 0x15
-D75 = 22
+D75 = 0x16
-D50 = 23
+D50 = 0x17
-ISO = 24
+ISO = 0x18
-Other = 255
+Other = 0xFF
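The enum values above only change spelling from decimal to hexadecimal (and IFD.Makernote becomes IFD.MakerNote); the numbers themselves are unchanged. A minimal sketch of reading GPS data through these enums, where "photo.jpg" is a placeholder for a file that carries EXIF GPS tags:

from PIL import ExifTags, Image

with Image.open("photo.jpg") as im:
    exif = im.getexif()
    gps_ifd = exif.get_ifd(ExifTags.IFD.GPSInfo)
    # GPS.GPSLatitude is 0x02, which compares equal to the old literal 2.
    print(gps_ifd.get(ExifTags.GPS.GPSLatitude))
    print(gps_ifd.get(ExifTags.GPS.GPSLatitudeRef))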
@@ -159,7 +159,7 @@ class FliImageFile(ImageFile.ImageFile):
 framesize = i32(s)

 self.decodermaxblock = framesize
-self.tile = [ImageFile._Tile("fli", (0, 0) + self.size, self.__offset, None)]
+self.tile = [ImageFile._Tile("fli", (0, 0) + self.size, self.__offset)]

 self.__offset += framesize

@@ -170,7 +170,7 @@ class FpxImageFile(ImageFile.ImageFile):
 "raw",
 (x, y, x1, y1),
 i32(s, i) + 28,
-(self.rawmode,),
+self.rawmode,
 )
 )

@@ -95,7 +95,7 @@ class FtexImageFile(ImageFile.ImageFile):
 self._mode = "RGBA"
 self.tile = [ImageFile._Tile("bcn", (0, 0) + self.size, 0, (1,))]
 elif format == Format.UNCOMPRESSED:
-self.tile = [ImageFile._Tile("raw", (0, 0) + self.size, 0, ("RGB", 0, 1))]
+self.tile = [ImageFile._Tile("raw", (0, 0) + self.size, 0, "RGB")]
 else:
 msg = f"Invalid texture compression format: {repr(format)}"
 raise ValueError(msg)

@@ -76,7 +76,7 @@ class GdImageFile(ImageFile.ImageFile):
 "raw",
 (0, 0) + self.size,
 7 + true_color_offset + 4 + 256 * 4,
-("L", 0, 1),
+"L",
 )
 ]

@@ -102,7 +102,6 @@ class GifImageFile(ImageFile.ImageFile):

 self.info["version"] = s[:6]
 self._size = i16(s, 6), i16(s, 8)
-self.tile = []
 flags = s[10]
 bits = (flags & 7) + 1

@@ -695,7 +694,9 @@ def _write_multiple_frames(
 if encoderinfo.get("duration"):
 im_frames[-1].encoderinfo["duration"] += encoderinfo["duration"]
 if progress:
-im._save_all_progress(imSequence, i, frame_count, total)
+im._save_all_progress(
+progress, imSequence, i, frame_count, total
+)
 continue
 if im_frames[-1].encoderinfo.get("disposal") == 2:
 if background_im is None:
@@ -704,8 +705,9 @@ def _write_multiple_frames(
 )
 background = _get_background(im_frame, color)
 background_im = Image.new("P", im_frame.size, background)
-assert im_frames[0].im.palette is not None
+first_palette = im_frames[0].im.palette
-background_im.putpalette(im_frames[0].im.palette)
+assert first_palette is not None
+background_im.putpalette(first_palette, first_palette.mode)
 bbox = _getbbox(background_im, im_frame)[1]
 elif encoderinfo.get("optimize") and im_frame.mode != "1":
 if "transparency" not in encoderinfo:
@@ -754,7 +756,7 @@ def _write_multiple_frames(
 previous_im = im_frame
 im_frames.append(_Frame(diff_frame or im_frame, bbox, encoderinfo))
 if progress:
-im._save_all_progress(imSequence, i, frame_count, total)
+im._save_all_progress(progress, imSequence, i, frame_count, total)

 if len(im_frames) == 1:
 if "duration" in im.encoderinfo:

@@ -357,7 +357,7 @@ def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
 name = "".join([name[: 92 - len(ext)], ext])

 fp.write(f"Name: {name}\r\n".encode("ascii"))
-fp.write(("Image size (x*y): %d*%d\r\n" % im.size).encode("ascii"))
+fp.write(f"Image size (x*y): {im.size[0]}*{im.size[1]}\r\n".encode("ascii"))
 fp.write(f"File size (no of images): {frames}\r\n".encode("ascii"))
 if im.mode in ["P", "PA"]:
 fp.write(b"Lut: 1\r\n")

@@ -692,13 +692,10 @@ class Image:
 )

 def __repr__(self) -> str:
-return "<%s.%s image mode=%s size=%dx%d at 0x%X>" % (
-self.__class__.__module__,
-self.__class__.__name__,
-self.mode,
-self.size[0],
-self.size[1],
-id(self),
+return (
+f"<{self.__class__.__module__}.{self.__class__.__name__} "
+f"image mode={self.mode} size={self.size[0]}x{self.size[1]} "
+f"at 0x{id(self):X}>"
 )

 def _repr_pretty_(self, p: PrettyPrinter, cycle: bool) -> None:
@@ -707,14 +704,8 @@ class Image:
 # Same as __repr__ but without unpredictable id(self),
 # to keep Jupyter notebook `text/plain` output stable.
 p.text(
-"<%s.%s image mode=%s size=%dx%d>"
-% (
-self.__class__.__module__,
-self.__class__.__name__,
-self.mode,
-self.size[0],
-self.size[1],
-)
+f"<{self.__class__.__module__}.{self.__class__.__name__} "
+f"image mode={self.mode} size={self.size[0]}x{self.size[1]}>"
 )

 def _repr_image(self, image_format: str, **kwargs: Any) -> bytes | None:
@@ -763,7 +754,7 @@ class Image:

 def __setstate__(self, state: list[Any]) -> None:
 Image.__init__(self)
-info, mode, size, palette, data = state
+info, mode, size, palette, data = state[:5]
 self.info = info
 self._mode = mode
 self._size = size
@@ -1574,7 +1565,7 @@ class Image:
 for subifd_offset in subifd_offsets:
 ifds.append((exif._get_ifd_dict(subifd_offset), subifd_offset))
 ifd1 = exif.get_ifd(ExifTags.IFD.IFD1)
-if ifd1 and ifd1.get(513):
+if ifd1 and ifd1.get(ExifTags.Base.JpegIFOffset):
 assert exif._info is not None
 ifds.append((ifd1, exif._info.next))

@@ -1586,11 +1577,11 @@ class Image:

 fp = self.fp
 if ifd is not None:
-thumbnail_offset = ifd.get(513)
+thumbnail_offset = ifd.get(ExifTags.Base.JpegIFOffset)
 if thumbnail_offset is not None:
 thumbnail_offset += getattr(self, "_exif_offset", 0)
 self.fp.seek(thumbnail_offset)
-data = self.fp.read(ifd.get(514))
+data = self.fp.read(ifd.get(ExifTags.Base.JpegIFByteCount))
 fp = io.BytesIO(data)

 with open(fp) as im:
@@ -2550,7 +2541,7 @@ class Image:
 filename: str | bytes = ""
 open_fp = False
 if is_path(fp):
-filename = os.path.realpath(os.fspath(fp))
+filename = os.fspath(fp)
 open_fp = True
 elif fp == sys.stdout:
 try:
@@ -2559,13 +2550,13 @@ class Image:
 pass
 if not filename and hasattr(fp, "name") and is_path(fp.name):
 # only set the name for metadata purposes
-filename = os.path.realpath(os.fspath(fp.name))
+filename = os.fspath(fp.name)

 # may mutate self!
 self._ensure_mutable()

 save_all = params.pop("save_all", False)
-self.encoderinfo = params
+self.encoderinfo = {**getattr(self, "encoderinfo", {}), **params}
 self.encoderconfig: tuple[Any, ...] = ()

 preinit()
@@ -2612,14 +2603,22 @@ class Image:
 except PermissionError:
 pass
 raise
+finally:
+try:
+del self.encoderinfo
+except AttributeError:
+pass
 if open_fp:
 fp.close()

 def _save_all_progress(
-self, im=None, im_index=0, completed=1, total=1, progress=None
-):
-if not progress:
-progress = self.encoderinfo.get("progress")
+self,
+progress,
+im: Image | None = None,
+im_index: int = 0,
+completed: int = 1,
+total: int = 1,
+) -> None:
 if not progress:
 return

@@ -3480,7 +3479,7 @@ def open(
 exclusive_fp = False
 filename: str | bytes = ""
 if is_path(fp):
-filename = os.path.realpath(os.fspath(fp))
+filename = os.fspath(fp)

 if filename:
 fp = builtins.open(filename, "rb")
@@ -3910,7 +3909,7 @@ class Exif(_ExifBase):
 gps_ifd = exif.get_ifd(ExifTags.IFD.GPSInfo)
 print(gps_ifd)

-Other IFDs include ``ExifTags.IFD.Exif``, ``ExifTags.IFD.Makernote``,
+Other IFDs include ``ExifTags.IFD.Exif``, ``ExifTags.IFD.MakerNote``,
 ``ExifTags.IFD.Interop`` and ``ExifTags.IFD.IFD1``.

 :py:mod:`~PIL.ExifTags` also has enum classes to provide names for data::
@@ -4073,11 +4072,11 @@ class Exif(_ExifBase):
 ifd = self._get_ifd_dict(offset, tag)
 if ifd is not None:
 self._ifds[tag] = ifd
-elif tag in [ExifTags.IFD.Interop, ExifTags.IFD.Makernote]:
+elif tag in [ExifTags.IFD.Interop, ExifTags.IFD.MakerNote]:
 if ExifTags.IFD.Exif not in self._ifds:
 self.get_ifd(ExifTags.IFD.Exif)
 tag_data = self._ifds[ExifTags.IFD.Exif][tag]
-if tag == ExifTags.IFD.Makernote:
+if tag == ExifTags.IFD.MakerNote:
 from .TiffImagePlugin import ImageFileDirectory_v2

 if tag_data[:8] == b"FUJIFILM":
@@ -4164,7 +4163,7 @@ class Exif(_ExifBase):
 ifd = {
 k: v
 for (k, v) in ifd.items()
-if k not in (ExifTags.IFD.Interop, ExifTags.IFD.Makernote)
+if k not in (ExifTags.IFD.Interop, ExifTags.IFD.MakerNote)
 }
 return ifd

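_save_all_progress now takes the callback as an explicit first argument instead of reading it from self.encoderinfo, and the plugin hunks elsewhere in this diff pass it through. A sketch of how a caller might supply the callback this branch introduces; the exact payload handed to the callable is defined by code outside these hunks, so the *args signature here is an assumption:

from PIL import Image

def report(*args):
    # Placeholder: whatever progress information the encoder forwards.
    print("progress:", args)

frames = [Image.new("RGB", (64, 64), c) for c in ("red", "green", "blue")]
frames[0].save(
    "out.gif",
    save_all=True,
    append_images=frames[1:],
    progress=report,  # picked up via encoderinfo by the save plugins shown here
)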
@@ -98,8 +98,8 @@ def _tilesort(t: _Tile) -> int:
 class _Tile(NamedTuple):
 codec_name: str
 extents: tuple[int, int, int, int] | None
-offset: int
+offset: int = 0
-args: tuple[Any, ...] | str | None
+args: tuple[Any, ...] | str | None = None


 #
@@ -120,7 +120,7 @@ class ImageFile(Image.Image):
 self.custom_mimetype: str | None = None

 self.tile: list[_Tile] = []
-""" A list of tile descriptors, or ``None`` """
+""" A list of tile descriptors """

 self.readonly = 1 # until we know better

@@ -130,7 +130,7 @@ class ImageFile(Image.Image):
 if is_path(fp):
 # filename
 self.fp = open(fp, "rb")
-self.filename = os.path.realpath(os.fspath(fp))
+self.filename = os.fspath(fp)
 self._exclusive_fp = True
 else:
 # stream

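_Tile now defaults offset to 0 and args to None, which is what lets the plugin hunks in this diff drop trailing None and (mode, 0, 1) arguments. A small sketch of the internal helper (not public API), with illustrative extents:

from PIL import ImageFile

extents = (0, 0, 64, 64)  # illustrative
t1 = ImageFile._Tile("raw", extents, 32, "1")  # bare rawmode string as args
t2 = ImageFile._Tile("MSP", extents, 32)       # args defaults to None
t3 = ImageFile._Tile("fli", extents)           # offset defaults to 0
print(t1, t2, t3)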
@@ -553,7 +553,7 @@ class Color3DLUT(MultibandFilter):
 ch_out = channels or ch_in
 size_1d, size_2d, size_3d = self.size

-table = [0] * (size_1d * size_2d * size_3d * ch_out)
+table: list[float] = [0] * (size_1d * size_2d * size_3d * ch_out)
 idx_in = 0
 idx_out = 0
 for b in range(size_3d):

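For context, the table annotated list[float] above is the buffer that Color3DLUT.transform() fills with callback results. A short usage sketch of generating a LUT, transforming it, and applying it; the colors and sizes are only illustrative:

from PIL import Image, ImageFilter

# Identity LUT, then a transform that inverts each channel; the callbacks
# return floats, hence the list[float] annotation above.
lut = ImageFilter.Color3DLUT.generate(9, lambda r, g, b: (r, g, b))
inverted = lut.transform(lambda r, g, b: (1 - r, 1 - g, 1 - b))
im = Image.new("RGB", (8, 8), "#3366cc").filter(inverted)
print(im.getpixel((0, 0)))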
@@ -270,7 +270,7 @@ class FreeTypeFont:
 )

 if is_path(font):
-font = os.path.realpath(os.fspath(font))
+font = os.fspath(font)
 if sys.platform == "win32":
 font_bytes_path = font if isinstance(font, bytes) else font.encode()
 try:

@@ -173,10 +173,10 @@ class _Operand:
 return self.apply("rshift", self, other)

 # logical
-def __eq__(self, other):
+def __eq__(self, other: _Operand | float) -> _Operand: # type: ignore[override]
 return self.apply("eq", self, other)

-def __ne__(self, other):
+def __ne__(self, other: _Operand | float) -> _Operand: # type: ignore[override]
 return self.apply("ne", self, other)

 def __lt__(self, other: _Operand | float) -> _Operand:

@@ -698,10 +698,11 @@ def exif_transpose(image: Image.Image, *, in_place: bool = False) -> Image.Image
 8: Image.Transpose.ROTATE_90,
 }.get(orientation)
 if method is not None:
-transposed_image = image.transpose(method)
 if in_place:
-image.im = transposed_image.im
+image.im = image.im.transpose(method)
-image._size = transposed_image._size
+image._size = image.im.size
+else:
+transposed_image = image.transpose(method)
 exif_image = image if in_place else transposed_image

 exif = exif_image.getexif()

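With in_place=True, exif_transpose() now transposes the core image directly instead of building a throwaway copy first. A usage sketch, where "rotated.jpg" is a placeholder for a file whose EXIF orientation tag is set:

from PIL import Image, ImageOps

with Image.open("rotated.jpg") as im:
    upright = ImageOps.exif_transpose(im)       # returns a new, corrected image
    ImageOps.exif_transpose(im, in_place=True)  # mutates im and returns None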
@@ -213,4 +213,7 @@ def toqimage(im: Image.Image | str | QByteArray) -> ImageQt:

 def toqpixmap(im: Image.Image | str | QByteArray) -> QPixmap:
 qimage = toqimage(im)
-return getattr(QPixmap, "fromImage")(qimage)
+pixmap = getattr(QPixmap, "fromImage")(qimage)
+if qt_version == "6":
+pixmap.detach()
+return pixmap

@@ -62,7 +62,7 @@ class ImtImageFile(ImageFile.ImageFile):
 "raw",
 (0, 0) + self.size,
 self.fp.tell() - len(buffer),
-(self.mode, 0, 1),
+self.mode,
 )
 ]


@@ -72,7 +72,7 @@ def APP(self: JpegImageFile, marker: int) -> None:
 n = i16(self.fp.read(2)) - 2
 s = ImageFile._safe_read(self.fp, n)

-app = "APP%d" % (marker & 15)
+app = f"APP{marker & 15}"

 self.app[app] = s # compatibility
 self.applist.append((app, s))
@@ -395,6 +395,13 @@ class JpegImageFile(ImageFile.ImageFile):
 return getattr(self, "_" + name)
 raise AttributeError(name)

+def __getstate__(self) -> list[Any]:
+return super().__getstate__() + [self.layers, self.layer]
+
+def __setstate__(self, state: list[Any]) -> None:
+super().__setstate__(state)
+self.layers, self.layer = state[5:]
+
 def load_read(self, read_bytes: int) -> bytes:
 """
 internal: read more image data
@@ -751,7 +758,7 @@ def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
 extra = info.get("extra", b"")

 MAX_BYTES_IN_MARKER = 65533
-xmp = info.get("xmp", im.info.get("xmp"))
+xmp = info.get("xmp")
 if xmp:
 overhead_len = 29 # b"http://ns.adobe.com/xap/1.0/\x00"
 max_data_bytes_in_marker = MAX_BYTES_IN_MARKER - overhead_len

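The new __getstate__/__setstate__ pair appends layers and layer to the pickled state list so they survive a round trip (note the matching state[:5] change in Image.__setstate__ earlier in this diff). A hedged round-trip sketch, with "photo.jpg" as a placeholder path:

import pickle

from PIL import Image

with Image.open("photo.jpg") as im:
    im.load()  # make sure pixel data is read before pickling
    payload = pickle.dumps(im)

restored = pickle.loads(payload)
print(restored.mode, restored.size)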
@@ -39,15 +39,15 @@ def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:

 def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
 append_images = im.encoderinfo.get("append_images", [])
+progress = im.encoderinfo.get("progress")
 if not append_images and not getattr(im, "is_animated", False):
 _save(im, fp, filename)
-im._save_all_progress()
+im._save_all_progress(progress)
 return

 mpf_offset = 28
 offsets: list[int] = []
 imSequences = [im] + list(append_images)
-progress = im.encoderinfo.get("progress")
 if progress:
 completed = 0
 total = 0
@@ -74,7 +74,7 @@ def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
 offsets.append(fp.tell() - offsets[-1])
 if progress:
 completed += 1
-im._save_all_progress(imSequence, i, completed, total, progress)
+im._save_all_progress(progress, imSequence, i, completed, total)

 ifd = TiffImagePlugin.ImageFileDirectory_v2()
 ifd[0xB000] = b"0100"

@@ -70,9 +70,9 @@ class MspImageFile(ImageFile.ImageFile):
 self._size = i16(s, 4), i16(s, 6)

 if s[:4] == b"DanM":
-self.tile = [ImageFile._Tile("raw", (0, 0) + self.size, 32, ("1", 0, 1))]
+self.tile = [ImageFile._Tile("raw", (0, 0) + self.size, 32, "1")]
 else:
-self.tile = [ImageFile._Tile("MSP", (0, 0) + self.size, 32, None)]
+self.tile = [ImageFile._Tile("MSP", (0, 0) + self.size, 32)]


 class MspDecoder(ImageFile.PyDecoder):
@@ -188,7 +188,7 @@ def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
 fp.write(o16(h))

 # image body
-ImageFile._save(im, fp, [ImageFile._Tile("raw", (0, 0) + im.size, 32, ("1", 0, 1))])
+ImageFile._save(im, fp, [ImageFile._Tile("raw", (0, 0) + im.size, 32, "1")])


 #

@@ -47,7 +47,7 @@ class PcdImageFile(ImageFile.ImageFile):

 self._mode = "RGB"
 self._size = 768, 512 # FIXME: not correct for rotated images!
-self.tile = [ImageFile._Tile("pcd", (0, 0) + self.size, 96 * 2048, None)]
+self.tile = [ImageFile._Tile("pcd", (0, 0) + self.size, 96 * 2048)]

 def load_end(self) -> None:
 if self.tile_post_rotate:

@@ -86,7 +86,7 @@ class PcxImageFile(ImageFile.ImageFile):

 elif bits == 1 and planes in (2, 4):
 mode = "P"
-rawmode = "P;%dL" % planes
+rawmode = f"P;{planes}L"
 self.palette = ImagePalette.raw("RGB", s[16:64])

 elif version == 5 and bits == 8 and planes == 1:

@@ -254,6 +254,7 @@ def _save(
 existing_pdf.write_catalog()

 page_number = 0
+progress = im.encoderinfo.get("progress")
 for i, im_sequence in enumerate(ims):
 im_pages: ImageSequence.Iterator | list[Image.Image] = (
 ImageSequence.Iterator(im_sequence) if save_all else [im_sequence]
@@ -290,7 +291,9 @@ def _save(
 existing_pdf.write_obj(contents_refs[page_number], stream=page_contents)

 page_number += 1
-im._save_all_progress(im_sequence, i, page_number, number_of_pages)
+im._save_all_progress(
+progress, im_sequence, i, page_number, number_of_pages
+)

 #
 # trailer

@@ -61,9 +61,7 @@ class PixarImageFile(ImageFile.ImageFile):
 # FIXME: to be continued...

 # create tile descriptor (assuming "dumped")
-self.tile = [
-ImageFile._Tile("raw", (0, 0) + self.size, 1024, (self.mode, 0, 1))
-]
+self.tile = [ImageFile._Tile("raw", (0, 0) + self.size, 1024, self.mode)]


 #
Some files were not shown because too many files have changed in this diff.