Mirror of https://github.com/python-pillow/Pillow.git, synced 2025-03-12 08:45:47 +03:00

Commit d6a0a15f53: Merge branch 'main' into libavif-plugin
@@ -1,4 +1,4 @@
-mypy==1.11.2
+mypy==1.13.0
 IceSpringPySideStubs-PyQt6
 IceSpringPySideStubs-PySide6
 ipython
.github/renovate.json (vendored): 12 lines changed
@@ -1,7 +1,7 @@
 {
   "$schema": "https://docs.renovatebot.com/renovate-schema.json",
   "extends": [
-    "config:base"
+    "config:recommended"
   ],
   "labels": [
     "Dependency"
@@ -9,9 +9,13 @@
   "packageRules": [
     {
       "groupName": "github-actions",
-      "matchManagers": ["github-actions"],
-      "separateMajorMinor": "false"
+      "matchManagers": [
+        "github-actions"
+      ],
+      "separateMajorMinor": false
     }
   ],
-  "schedule": ["on the 3rd day of the month"]
+  "schedule": [
+    "on the 3rd day of the month"
+  ]
 }
.github/workflows/docs.yml (vendored): 2 lines changed
@@ -33,6 +33,8 @@ jobs:

     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: Set up Python
       uses: actions/setup-python@v5
.github/workflows/lint.yml (vendored): 2 lines changed
@@ -21,6 +21,8 @@ jobs:

     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: pre-commit cache
       uses: actions/cache@v4
.github/workflows/stale.yml (vendored): 4 lines changed
@@ -6,7 +6,7 @@ on:
   workflow_dispatch:

 permissions:
-  issues: write
+  contents: read

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
@@ -15,6 +15,8 @@ concurrency:
 jobs:
   stale:
     if: github.repository_owner == 'python-pillow'
+    permissions:
+      issues: write

     runs-on: ubuntu-latest

.github/workflows/test-cygwin.yml (vendored): 2 lines changed
@@ -48,6 +48,8 @@ jobs:

     - name: Checkout Pillow
       uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: Install Cygwin
       uses: cygwin/cygwin-install-action@v4
.github/workflows/test-docker.yml (vendored): 4 lines changed
@@ -47,6 +47,7 @@ jobs:
          debian-12-bookworm-x86,
          debian-12-bookworm-amd64,
          fedora-40-amd64,
+         fedora-41-amd64,
          gentoo,
          ubuntu-22.04-jammy-amd64,
          ubuntu-24.04-noble-amd64,
@@ -64,6 +65,8 @@ jobs:

     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: Build system information
       run: python3 .github/workflows/system-info.py
@@ -101,7 +104,6 @@ jobs:
       with:
         flags: GHA_Docker
         name: ${{ matrix.docker }}
-        gcov: true
         token: ${{ secrets.CODECOV_ORG_TOKEN }}

   success:
.github/workflows/test-mingw.yml (vendored): 2 lines changed
@@ -46,6 +46,8 @@ jobs:
     steps:
     - name: Checkout Pillow
       uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: Set up shell
       run: echo "C:\msys64\usr\bin\" >> $env:GITHUB_PATH
.github/workflows/test-valgrind.yml (vendored): 2 lines changed
@@ -40,6 +40,8 @@ jobs:

     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: Build system information
       run: python3 .github/workflows/system-info.py
.github/workflows/test-windows.yml (vendored): 24 lines changed
@@ -44,16 +44,20 @@ jobs:
     steps:
     - name: Checkout Pillow
       uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: Checkout cached dependencies
       uses: actions/checkout@v4
       with:
+        persist-credentials: false
        repository: python-pillow/pillow-depends
        path: winbuild\depends

     - name: Checkout extra test images
       uses: actions/checkout@v4
       with:
+        persist-credentials: false
        repository: python-pillow/test-images
        path: Tests\test-images

@@ -69,16 +73,14 @@ jobs:
     - name: Print build system information
       run: python3 .github/workflows/system-info.py

-    - name: Install Python dependencies
-      run: >
-        python3 -m pip install
-        coverage>=7.4.2
-        defusedxml
-        olefile
-        pyroma
-        pytest
-        pytest-cov
-        pytest-timeout
+    - name: Upgrade pip
+      run: |
+        python3 -m pip install --upgrade pip
+
+    - name: Install CPython dependencies
+      if: "!contains(matrix.python-version, 'pypy')"
+      run: |
+        python3 -m pip install PyQt6

     - name: Install dependencies
       id: install
@@ -190,7 +192,7 @@ jobs:
     - name: Build Pillow
       run: |
        $FLAGS="-C raqm=vendor -C fribidi=vendor"
-        cmd /c "winbuild\build\build_env.cmd && $env:pythonLocation\python.exe -m pip install -v $FLAGS ."
+        cmd /c "winbuild\build\build_env.cmd && $env:pythonLocation\python.exe -m pip install -v $FLAGS .[tests]"
        & $env:pythonLocation\python.exe selftest.py --installed
       shell: pwsh

.github/workflows/test.yml (vendored): 3 lines changed
@@ -63,6 +63,8 @@ jobs:

     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: Set up Python ${{ matrix.python-version }}
       uses: actions/setup-python@v5
@@ -158,7 +160,6 @@ jobs:
       with:
         flags: ${{ matrix.os == 'ubuntu-latest' && 'GHA_Ubuntu' || 'GHA_macOS' }}
         name: ${{ matrix.os }} Python ${{ matrix.python-version }}
-        gcov: true
         token: ${{ secrets.CODECOV_ORG_TOKEN }}

   success:
.github/workflows/wheels-dependencies.sh (vendored): 6 lines changed
@@ -169,9 +169,6 @@ function build {
     build_libpng
     build_lcms2
     build_openjpeg
-    if [ -f /usr/local/lib64/libopenjp2.so ]; then
-        cp /usr/local/lib64/libopenjp2.so /usr/local/lib
-    fi

     ORIGINAL_CFLAGS=$CFLAGS
     CFLAGS="$CFLAGS -O3 -DNDEBUG"
@@ -199,6 +196,7 @@ curl -fsSL -o pillow-depends-main.zip https://github.com/python-pillow/pillow-de
 untar pillow-depends-main.zip

 if [[ -n "$IS_MACOS" ]]; then
+    # libdeflate may cause a minimum target error when repairing the wheel
     # libtiff and libxcb cause a conflict with building libtiff and libxcb
     # libxau and libxdmcp cause an issue on macOS < 11
     # remove cairo to fix building harfbuzz on arm64
@@ -211,7 +209,7 @@ if [[ -n "$IS_MACOS" ]]; then
     if [[ "$CIBW_ARCHS" == "arm64" ]]; then
         brew remove --ignore-dependencies jpeg-turbo
     else
-        brew remove --ignore-dependencies webp aom libavif
+        brew remove --ignore-dependencies libdeflate webp aom libavif
     fi

     brew install pkg-config
.github/workflows/wheels.yml (vendored): 7 lines changed
@@ -61,6 +61,7 @@ jobs:
     steps:
     - uses: actions/checkout@v4
       with:
+        persist-credentials: false
         submodules: true

     - uses: actions/setup-python@v5
@@ -132,6 +133,7 @@ jobs:
     steps:
     - uses: actions/checkout@v4
       with:
+        persist-credentials: false
         submodules: true

     - uses: actions/setup-python@v5
@@ -173,10 +175,13 @@ jobs:
          - cibw_arch: ARM64
     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: Checkout extra test images
       uses: actions/checkout@v4
       with:
+        persist-credentials: false
         repository: python-pillow/test-images
         path: Tests\test-images

@@ -253,6 +258,8 @@ jobs:
     runs-on: ubuntu-latest
     steps:
     - uses: actions/checkout@v4
+      with:
+        persist-credentials: false

     - name: Set up Python
       uses: actions/setup-python@v5
@@ -1,12 +1,12 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.6.9
+    rev: v0.7.2
     hooks:
       - id: ruff
        args: [--exit-non-zero-on-fix]

   - repo: https://github.com/psf/black-pre-commit-mirror
-    rev: 24.8.0
+    rev: 24.10.0
     hooks:
       - id: black

@@ -24,7 +24,7 @@ repos:
     exclude: (Makefile$|\.bat$|\.cmake$|\.eps$|\.fits$|\.gd$|\.opt$)

   - repo: https://github.com/pre-commit/mirrors-clang-format
-    rev: v19.1.1
+    rev: v19.1.3
     hooks:
       - id: clang-format
        types: [c]
@@ -50,7 +50,7 @@ repos:
     exclude: ^.github/.*TEMPLATE|^Tests/(fonts|images)/

   - repo: https://github.com/python-jsonschema/check-jsonschema
-    rev: 0.29.3
+    rev: 0.29.4
     hooks:
       - id: check-github-workflows
       - id: check-readthedocs
@@ -62,12 +62,12 @@ repos:
       - id: sphinx-lint

   - repo: https://github.com/tox-dev/pyproject-fmt
-    rev: 2.2.4
+    rev: v2.5.0
     hooks:
       - id: pyproject-fmt

   - repo: https://github.com/abravalheri/validate-pyproject
-    rev: v0.20.2
+    rev: v0.22
     hooks:
       - id: validate-pyproject
        additional_dependencies: [trove-classifiers>=2024.10.12]
@@ -5,6 +5,15 @@ Changelog (Pillow)
 11.1.0 (unreleased)
 -------------------

+- Detach PyQt6 QPixmap instance before returning #8509
+  [radarhere]
+
+- Corrected EMF DPI #8485
+  [radarhere]
+
+- Fix IFDRational with a zero denominator #8474
+  [radarhere]
+
 - Fixed disabling a feature during install #8469
   [radarhere]

LICENSE: 2 lines changed
@@ -7,7 +7,7 @@ Pillow is the friendly PIL fork. It is

 Copyright © 2010-2024 by Jeffrey A. Clark and contributors

-Like PIL, Pillow is licensed under the open source HPND License:
+Like PIL, Pillow is licensed under the open source MIT-CMU License:

 By obtaining, using, and/or copying this software and/or its associated
 documentation, you agree that you have read, understood, and will comply
@@ -22,6 +22,8 @@ def test_bad() -> None:
     for f in get_files("b"):
         # Assert that there is no unclosed file warning
         with warnings.catch_warnings():
+            warnings.simplefilter("error")
+
             try:
                 with Image.open(f) as im:
                     im.load()
@@ -36,6 +36,8 @@ def test_unclosed_file() -> None:

 def test_closed_file() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         im = Image.open(TEST_FILE)
         im.load()
         im.close()
@@ -43,6 +45,8 @@ def test_closed_file() -> None:

 def test_context_manager() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         with Image.open(TEST_FILE) as im:
             im.load()

@@ -65,6 +65,8 @@ def test_unclosed_file() -> None:

 def test_closed_file() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         im = Image.open(static_test_file)
         im.load()
         im.close()
@@ -81,6 +83,8 @@ def test_seek_after_close() -> None:

 def test_context_manager() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         with Image.open(static_test_file) as im:
             im.load()

@@ -46,6 +46,8 @@ def test_unclosed_file() -> None:

 def test_closed_file() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         im = Image.open(TEST_GIF)
         im.load()
         im.close()
@@ -67,6 +69,8 @@ def test_seek_after_close() -> None:

 def test_context_manager() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         with Image.open(TEST_GIF) as im:
             im.load()

@@ -21,6 +21,8 @@ def test_sanity() -> None:
     with Image.open(TEST_FILE) as im:
         # Assert that there is no unclosed file warning
         with warnings.catch_warnings():
+            warnings.simplefilter("error")
+
             im.load()

         assert im.mode == "RGBA"
@@ -41,6 +41,8 @@ def test_unclosed_file() -> None:

 def test_closed_file() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         im = Image.open(TEST_IM)
         im.load()
         im.close()
@@ -48,6 +50,8 @@ def test_closed_file() -> None:

 def test_context_manager() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         with Image.open(TEST_IM) as im:
             im.load()

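
The hunks above, and several later ones, add the same two lines to existing tests: inside warnings.catch_warnings(), warnings.simplefilter("error") escalates any warning emitted while the image is opened and loaded into an exception, so an unclosed-file ResourceWarning fails the test instead of being printed and ignored. A minimal self-contained sketch of that pattern, assuming Pillow is installed (the path is a placeholder):

    import warnings

    from PIL import Image


    def open_and_load_strictly(path: str) -> None:
        # Any warning raised in this block, such as an unclosed-file
        # ResourceWarning, becomes an exception rather than console noise.
        with warnings.catch_warnings():
            warnings.simplefilter("error")
            with Image.open(path) as im:
                im.load()


    open_and_load_strictly("Tests/images/hopper.gif")  # placeholder path
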
@@ -541,12 +541,12 @@ class TestFileJpeg:
     @mark_if_feature_version(
         pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
     )
-    def test_qtables(self, tmp_path: Path) -> None:
+    def test_qtables(self) -> None:
         def _n_qtables_helper(n: int, test_file: str) -> None:
+            b = BytesIO()
             with Image.open(test_file) as im:
-                f = str(tmp_path / "temp.jpg")
-                im.save(f, qtables=[[n] * 64] * n)
-            with Image.open(f) as im:
+                im.save(b, "JPEG", qtables=[[n] * 64] * n)
+            with Image.open(b) as im:
                 assert len(im.quantization) == n
                 reloaded = self.roundtrip(im, qtables="keep")
                 assert im.quantization == reloaded.quantization
@@ -850,6 +850,8 @@ class TestFileJpeg:

         out = str(tmp_path / "out.jpg")
         with warnings.catch_warnings():
+            warnings.simplefilter("error")
+
             im.save(out, exif=exif)

         with Image.open(out) as reloaded:
@@ -2,6 +2,7 @@ from __future__ import annotations

 import os
 import re
+from collections.abc import Generator
 from io import BytesIO
 from pathlib import Path
 from typing import Any
@@ -29,8 +30,16 @@ EXTRA_DIR = "Tests/images/jpeg2000"

 pytestmark = skip_unless_feature("jpg_2000")

-test_card = Image.open("Tests/images/test-card.png")
-test_card.load()
+
+@pytest.fixture
+def card() -> Generator[ImageFile.ImageFile, None, None]:
+    with Image.open("Tests/images/test-card.png") as im:
+        im.load()
+        try:
+            yield im
+        finally:
+            im.close()
+

 # OpenJPEG 2.0.0 outputs this debugging message sometimes; we should
 # ignore it---it doesn't represent a test failure.
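
The hunk above replaces a module-level test_card image, opened once at import time and never closed, with a function-scoped pytest fixture; the hunks that follow convert each test to take that fixture as an argument. A hedged sketch of how a test in the same module consumes the fixture (the test name and assertion are hypothetical):

    from PIL import ImageFile


    def test_card_fixture_usage(card: ImageFile.ImageFile) -> None:
        # Hypothetical test: pytest injects the "card" fixture defined above,
        # so the image is opened for this test only and closed afterwards.
        width, height = card.size
        assert width > 0 and height > 0
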
@@ -74,76 +83,76 @@ def test_invalid_file() -> None:
         Jpeg2KImagePlugin.Jpeg2KImageFile(invalid_file)


-def test_bytesio() -> None:
+def test_bytesio(card: ImageFile.ImageFile) -> None:
     with open("Tests/images/test-card-lossless.jp2", "rb") as f:
         data = BytesIO(f.read())
     with Image.open(data) as im:
         im.load()
-        assert_image_similar(im, test_card, 1.0e-3)
+        assert_image_similar(im, card, 1.0e-3)


 # These two test pre-written JPEG 2000 files that were not written with
 # PIL (they were made using Adobe Photoshop)


-def test_lossless(tmp_path: Path) -> None:
+def test_lossless(card: ImageFile.ImageFile, tmp_path: Path) -> None:
     with Image.open("Tests/images/test-card-lossless.jp2") as im:
         im.load()
         outfile = str(tmp_path / "temp_test-card.png")
         im.save(outfile)
-        assert_image_similar(im, test_card, 1.0e-3)
+        assert_image_similar(im, card, 1.0e-3)


-def test_lossy_tiled() -> None:
-    assert_image_similar_tofile(
-        test_card, "Tests/images/test-card-lossy-tiled.jp2", 2.0
-    )
+def test_lossy_tiled(card: ImageFile.ImageFile) -> None:
+    assert_image_similar_tofile(card, "Tests/images/test-card-lossy-tiled.jp2", 2.0)


-def test_lossless_rt() -> None:
-    im = roundtrip(test_card)
-    assert_image_equal(im, test_card)
+def test_lossless_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card)
+    assert_image_equal(im, card)


-def test_lossy_rt() -> None:
-    im = roundtrip(test_card, quality_layers=[20])
-    assert_image_similar(im, test_card, 2.0)
+def test_lossy_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card, quality_layers=[20])
+    assert_image_similar(im, card, 2.0)


-def test_tiled_rt() -> None:
-    im = roundtrip(test_card, tile_size=(128, 128))
-    assert_image_equal(im, test_card)
+def test_tiled_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card, tile_size=(128, 128))
+    assert_image_equal(im, card)


-def test_tiled_offset_rt() -> None:
-    im = roundtrip(test_card, tile_size=(128, 128), tile_offset=(0, 0), offset=(32, 32))
-    assert_image_equal(im, test_card)
+def test_tiled_offset_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card, tile_size=(128, 128), tile_offset=(0, 0), offset=(32, 32))
+    assert_image_equal(im, card)


-def test_tiled_offset_too_small() -> None:
+def test_tiled_offset_too_small(card: ImageFile.ImageFile) -> None:
     with pytest.raises(ValueError):
-        roundtrip(test_card, tile_size=(128, 128), tile_offset=(0, 0), offset=(128, 32))
+        roundtrip(card, tile_size=(128, 128), tile_offset=(0, 0), offset=(128, 32))


-def test_irreversible_rt() -> None:
-    im = roundtrip(test_card, irreversible=True, quality_layers=[20])
-    assert_image_similar(im, test_card, 2.0)
+def test_irreversible_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card, irreversible=True, quality_layers=[20])
+    assert_image_similar(im, card, 2.0)


-def test_prog_qual_rt() -> None:
-    im = roundtrip(test_card, quality_layers=[60, 40, 20], progression="LRCP")
-    assert_image_similar(im, test_card, 2.0)
+def test_prog_qual_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card, quality_layers=[60, 40, 20], progression="LRCP")
+    assert_image_similar(im, card, 2.0)


-def test_prog_res_rt() -> None:
-    im = roundtrip(test_card, num_resolutions=8, progression="RLCP")
-    assert_image_equal(im, test_card)
+def test_prog_res_rt(card: ImageFile.ImageFile) -> None:
+    im = roundtrip(card, num_resolutions=8, progression="RLCP")
+    assert_image_equal(im, card)


 @pytest.mark.parametrize("num_resolutions", range(2, 6))
-def test_default_num_resolutions(num_resolutions: int) -> None:
+def test_default_num_resolutions(
+    card: ImageFile.ImageFile, num_resolutions: int
+) -> None:
     d = 1 << (num_resolutions - 1)
-    im = test_card.resize((d - 1, d - 1))
+    im = card.resize((d - 1, d - 1))
     with pytest.raises(OSError):
         roundtrip(im, num_resolutions=num_resolutions)
     reloaded = roundtrip(im)
@@ -205,31 +214,31 @@ def test_header_errors() -> None:
         pass


-def test_layers_type(tmp_path: Path) -> None:
+def test_layers_type(card: ImageFile.ImageFile, tmp_path: Path) -> None:
     outfile = str(tmp_path / "temp_layers.jp2")
     for quality_layers in [[100, 50, 10], (100, 50, 10), None]:
-        test_card.save(outfile, quality_layers=quality_layers)
+        card.save(outfile, quality_layers=quality_layers)

     for quality_layers_str in ["quality_layers", ("100", "50", "10")]:
         with pytest.raises(ValueError):
-            test_card.save(outfile, quality_layers=quality_layers_str)
+            card.save(outfile, quality_layers=quality_layers_str)


-def test_layers() -> None:
+def test_layers(card: ImageFile.ImageFile) -> None:
     out = BytesIO()
-    test_card.save(out, "JPEG2000", quality_layers=[100, 50, 10], progression="LRCP")
+    card.save(out, "JPEG2000", quality_layers=[100, 50, 10], progression="LRCP")
     out.seek(0)

     with Image.open(out) as im:
         im.layers = 1
         im.load()
-        assert_image_similar(im, test_card, 13)
+        assert_image_similar(im, card, 13)

     out.seek(0)
     with Image.open(out) as im:
         im.layers = 3
         im.load()
-        assert_image_similar(im, test_card, 0.4)
+        assert_image_similar(im, card, 0.4)


 @pytest.mark.parametrize(
@@ -245,24 +254,30 @@ def test_layers() -> None:
         (None, {"no_jp2": False}, 4, b"jP"),
     ),
 )
-def test_no_jp2(name: str, args: dict[str, bool], offset: int, data: bytes) -> None:
+def test_no_jp2(
+    card: ImageFile.ImageFile,
+    name: str,
+    args: dict[str, bool],
+    offset: int,
+    data: bytes,
+) -> None:
     out = BytesIO()
     if name:
         out.name = name
-    test_card.save(out, "JPEG2000", **args)
+    card.save(out, "JPEG2000", **args)
     out.seek(offset)
     assert out.read(2) == data


-def test_mct() -> None:
+def test_mct(card: ImageFile.ImageFile) -> None:
     # Three component
     for val in (0, 1):
         out = BytesIO()
-        test_card.save(out, "JPEG2000", mct=val, no_jp2=True)
+        card.save(out, "JPEG2000", mct=val, no_jp2=True)

         assert out.getvalue()[59] == val
         with Image.open(out) as im:
-            assert_image_similar(im, test_card, 1.0e-3)
+            assert_image_similar(im, card, 1.0e-3)

     # Single component should have MCT disabled
     for val in (0, 1):
@@ -419,22 +434,22 @@ def test_comment() -> None:
         pass


-def test_save_comment() -> None:
+def test_save_comment(card: ImageFile.ImageFile) -> None:
     for comment in ("Created by Pillow", b"Created by Pillow"):
         out = BytesIO()
-        test_card.save(out, "JPEG2000", comment=comment)
+        card.save(out, "JPEG2000", comment=comment)

         with Image.open(out) as im:
             assert im.info["comment"] == b"Created by Pillow"

     out = BytesIO()
     long_comment = b" " * 65531
-    test_card.save(out, "JPEG2000", comment=long_comment)
+    card.save(out, "JPEG2000", comment=long_comment)
     with Image.open(out) as im:
         assert im.info["comment"] == long_comment

     with pytest.raises(ValueError):
-        test_card.save(out, "JPEG2000", comment=long_comment + b" ")
+        card.save(out, "JPEG2000", comment=long_comment + b" ")


 @pytest.mark.parametrize(
@@ -457,10 +472,10 @@ def test_crashes(test_file: str) -> None:


 @skip_unless_feature_version("jpg_2000", "2.4.0")
-def test_plt_marker() -> None:
+def test_plt_marker(card: ImageFile.ImageFile) -> None:
     # Search the start of the codesteam for PLT
     out = BytesIO()
-    test_card.save(out, "JPEG2000", no_jp2=True, plt=True)
+    card.save(out, "JPEG2000", no_jp2=True, plt=True)
     out.seek(0)
     while True:
         marker = out.read(2)
@@ -48,6 +48,8 @@ def test_unclosed_file() -> None:

 def test_closed_file() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         im = Image.open(test_files[0])
         im.load()
         im.close()
@@ -63,6 +65,8 @@ def test_seek_after_close() -> None:

 def test_context_manager() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         with Image.open(test_files[0]) as im:
             im.load()

@@ -338,6 +338,8 @@ class TestFilePng:
         with Image.open(TEST_PNG_FILE) as im:
             # Assert that there is no unclosed file warning
             with warnings.catch_warnings():
+                warnings.simplefilter("error")
+
                 im.verify()

         with Image.open(TEST_PNG_FILE) as im:
@@ -35,6 +35,8 @@ def test_unclosed_file() -> None:

 def test_closed_file() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         im = Image.open(test_file)
         im.load()
         im.close()
@@ -42,6 +44,8 @@ def test_closed_file() -> None:

 def test_context_manager() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         with Image.open(test_file) as im:
             im.load()

@@ -34,6 +34,8 @@ def test_unclosed_file() -> None:

 def test_closed_file() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         im = Image.open(TEST_FILE)
         im.load()
         im.close()
@@ -41,6 +43,8 @@ def test_closed_file() -> None:

 def test_context_manager() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         with Image.open(TEST_FILE) as im:
             im.load()

@@ -37,11 +37,15 @@ def test_unclosed_file() -> None:

 def test_close() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         tar = TarIO.TarIO(TEST_TAR_FILE, "hopper.jpg")
         tar.close()


 def test_contextmanager() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         with TarIO.TarIO(TEST_TAR_FILE, "hopper.jpg"):
             pass
@@ -72,6 +72,8 @@ class TestFileTiff:

     def test_closed_file(self) -> None:
         with warnings.catch_warnings():
+            warnings.simplefilter("error")
+
             im = Image.open("Tests/images/multipage.tiff")
             im.load()
             im.close()
@@ -88,6 +90,8 @@ class TestFileTiff:

     def test_context_manager(self) -> None:
         with warnings.catch_warnings():
+            warnings.simplefilter("error")
+
             with Image.open("Tests/images/multipage.tiff") as im:
                 im.load()

@@ -191,6 +191,8 @@ class TestFileWebp:
         file_path = "Tests/images/hopper.webp"
         with Image.open(file_path) as image:
             with warnings.catch_warnings():
+                warnings.simplefilter("error")
+
                 image.save(tmp_path / "temp.webp")

     def test_file_pointer_could_be_reused(self) -> None:
@@ -1,5 +1,6 @@
 from __future__ import annotations

+from io import BytesIO
 from pathlib import Path
 from typing import IO

@@ -61,6 +62,12 @@ def test_load_float_dpi() -> None:
     with Image.open("Tests/images/drawing.emf") as im:
         assert im.info["dpi"] == 1423.7668161434979

+    with open("Tests/images/drawing.emf", "rb") as fp:
+        data = fp.read()
+    b = BytesIO(data[:8] + b"\x06\xFA" + data[10:])
+    with Image.open(b) as im:
+        assert im.info["dpi"][0] == 2540
+

 def test_load_set_dpi() -> None:
     with Image.open("Tests/images/drawing.wmf") as im:
@@ -737,6 +737,8 @@ class TestImage:
         # Act/Assert
         with Image.open(test_file) as im:
             with warnings.catch_warnings():
+                warnings.simplefilter("error")
+
                 im.save(temp_file)

     def test_no_new_file_on_error(self, tmp_path: Path) -> None:
@@ -10,7 +10,7 @@ from pathlib import Path

 import pytest

-from PIL import Image
+from PIL import Image, ImageFile

 from .helper import (
     assert_image_equal,
@@ -179,7 +179,7 @@ class TestImagingCoreResize:


 @pytest.fixture
-def gradients_image() -> Generator[Image.Image, None, None]:
+def gradients_image() -> Generator[ImageFile.ImageFile, None, None]:
     with Image.open("Tests/images/radial_gradients.png") as im:
         im.load()
     try:
@@ -189,7 +189,7 @@ def gradients_image() -> Generator[Image.Image, None, None]:


 class TestReducingGapResize:
-    def test_reducing_gap_values(self, gradients_image: Image.Image) -> None:
+    def test_reducing_gap_values(self, gradients_image: ImageFile.ImageFile) -> None:
         ref = gradients_image.resize(
             (52, 34), Image.Resampling.BICUBIC, reducing_gap=None
         )
@@ -210,7 +210,7 @@ class TestReducingGapResize:
     )
     def test_reducing_gap_1(
         self,
-        gradients_image: Image.Image,
+        gradients_image: ImageFile.ImageFile,
         box: tuple[float, float, float, float],
         epsilon: float,
     ) -> None:
@@ -230,7 +230,7 @@ class TestReducingGapResize:
     )
     def test_reducing_gap_2(
         self,
-        gradients_image: Image.Image,
+        gradients_image: ImageFile.ImageFile,
         box: tuple[float, float, float, float],
         epsilon: float,
     ) -> None:
@@ -250,7 +250,7 @@ class TestReducingGapResize:
     )
     def test_reducing_gap_3(
         self,
-        gradients_image: Image.Image,
+        gradients_image: ImageFile.ImageFile,
         box: tuple[float, float, float, float],
         epsilon: float,
     ) -> None:
@@ -266,7 +266,9 @@ class TestReducingGapResize:

     @pytest.mark.parametrize("box", (None, (1.1, 2.2, 510.8, 510.9), (3, 10, 410, 256)))
     def test_reducing_gap_8(
-        self, gradients_image: Image.Image, box: tuple[float, float, float, float]
+        self,
+        gradients_image: ImageFile.ImageFile,
+        box: tuple[float, float, float, float],
     ) -> None:
         ref = gradients_image.resize((52, 34), Image.Resampling.BICUBIC, box=box)
         im = gradients_image.resize(
@@ -281,7 +283,7 @@ class TestReducingGapResize:
     )
     def test_box_filter(
         self,
-        gradients_image: Image.Image,
+        gradients_image: ImageFile.ImageFile,
         box: tuple[float, float, float, float],
         epsilon: float,
     ) -> None:
@@ -52,4 +52,6 @@ def test_image(mode: str) -> None:

 def test_closed_file() -> None:
     with warnings.catch_warnings():
+        warnings.simplefilter("error")
+
         ImageQt.ImageQt("Tests/images/hopper.gif")
@@ -264,4 +264,6 @@ def test_no_resource_warning_for_numpy_array() -> None:
     with Image.open(test_file) as im:
         # Act/Assert
         with warnings.catch_warnings():
+            warnings.simplefilter("error")
+
             array(im)
@@ -18,7 +18,7 @@ The fork author's goal is to foster and support active development of PIL throug
 License
 -------

-Like PIL, Pillow is `licensed under the open source HPND License <https://raw.githubusercontent.com/python-pillow/Pillow/main/LICENSE>`_
+Like PIL, Pillow is `licensed under the open source MIT-CMU License <https://raw.githubusercontent.com/python-pillow/Pillow/main/LICENSE>`_

 Why a fork?
 -----------
@@ -692,6 +692,30 @@ The :py:meth:`~PIL.Image.Image.save` method supports the following options:
    you fail to do this, you will get errors about not being able to load the
    ``_imaging`` DLL).

+MPO
+^^^
+
+Pillow reads and writes Multi Picture Object (MPO) files. When first opened, it loads
+the primary image. The :py:meth:`~PIL.Image.Image.seek` and
+:py:meth:`~PIL.Image.Image.tell` methods may be used to read other pictures from the
+file. The pictures are zero-indexed and random access is supported.
+
+.. _mpo-saving:
+
+Saving
+~~~~~~
+
+When calling :py:meth:`~PIL.Image.Image.save` to write an MPO file, by default
+only the first frame of a multiframe image will be saved. If the ``save_all``
+argument is present and true, then all frames will be saved, and the following
+option will also be available.
+
+**append_images**
+    A list of images to append as additional pictures. Each of the
+    images in the list can be single or multiframe images.
+
+    .. versionadded:: 9.3.0
+
 MSP
 ^^^

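
The relocated MPO section above describes the read and write workflow in prose, and the next hunk removes the old copy of the same section further down the document. A short hedged sketch of the flow it describes (the file names are placeholders):

    from PIL import Image

    # Reading: the primary picture loads first; seek()/tell() move between
    # the zero-indexed pictures in the file.
    with Image.open("multi_picture.mpo") as im:
        im.seek(1)  # jump to the second picture
        second = im.copy()

    # Writing: save_all=True writes every frame; append_images adds pictures.
    frames = [Image.new("RGB", (64, 64), color) for color in ("red", "green", "blue")]
    frames[0].save("out.mpo", save_all=True, append_images=frames[1:])
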
@@ -1508,30 +1532,6 @@ Note that there may be an embedded gamma of 2.2 in MIC files.

 To enable MIC support, you must install :pypi:`olefile`.

-MPO
-^^^
-
-Pillow identifies and reads Multi Picture Object (MPO) files, loading the primary
-image when first opened. The :py:meth:`~PIL.Image.Image.seek` and :py:meth:`~PIL.Image.Image.tell`
-methods may be used to read other pictures from the file. The pictures are
-zero-indexed and random access is supported.
-
-.. _mpo-saving:
-
-Saving
-~~~~~~
-
-When calling :py:meth:`~PIL.Image.Image.save` to write an MPO file, by default
-only the first frame of a multiframe image will be saved. If the ``save_all``
-argument is present and true, then all frames will be saved, and the following
-option will also be available.
-
-**append_images**
-    A list of images to append as additional pictures. Each of the
-    images in the list can be single or multiframe images.
-
-    .. versionadded:: 9.3.0
-
 PCD
 ^^^

@@ -31,6 +31,8 @@ These platforms are built and tested for every change.
 +----------------------------------+----------------------------+---------------------+
 | Fedora 40                        | 3.12                       | x86-64              |
 +----------------------------------+----------------------------+---------------------+
+| Fedora 41                        | 3.13                       | x86-64              |
++----------------------------------+----------------------------+---------------------+
 | Gentoo                           | 3.12                       | x86-64              |
 +----------------------------------+----------------------------+---------------------+
 | macOS 13 Ventura                 | 3.9                        | x86-64              |
@@ -73,7 +75,9 @@ These platforms have been reported to work at the versions mentioned.
 | Operating system                 | | Tested Python            | | Latest tested  | | Tested     |
 |                                  | | versions                 | | Pillow version | | processors |
 +==================================+============================+==================+==============+
-| macOS 15 Sequoia                 | 3.8, 3.9, 3.10, 3.11, 3.12 | 10.4.0           |arm           |
+| macOS 15 Sequoia                 | 3.9, 3.10, 3.11, 3.12, 3.13| 11.0.0           |arm           |
+|                                  +----------------------------+------------------+              |
+|                                  | 3.8                        | 10.4.0           |              |
 +----------------------------------+----------------------------+------------------+--------------+
 | macOS 14 Sonoma                  | 3.8, 3.9, 3.10, 3.11, 3.12 | 10.4.0           |arm           |
 +----------------------------------+----------------------------+------------------+--------------+
@@ -146,7 +150,7 @@ These platforms have been reported to work at the versions mentioned.
 +----------------------------------+----------------------------+------------------+--------------+
 | FreeBSD 10.2                     | 2.7, 3.4                   | 3.1.0            |x86-64        |
 +----------------------------------+----------------------------+------------------+--------------+
-| Windows 11                       | 3.9, 3.10, 3.11, 3.12      | 10.2.0           |arm64         |
+| Windows 11 23H2                  | 3.9, 3.10, 3.11, 3.12, 3.13| 11.0.0           |arm64         |
 +----------------------------------+----------------------------+------------------+--------------+
 | Windows 11 Pro                   | 3.11, 3.12                 | 10.2.0           |x86-64        |
 +----------------------------------+----------------------------+------------------+--------------+
@@ -19,7 +19,7 @@ Example: Parse an image

     from PIL import ImageFile

-    fp = open("hopper.pgm", "rb")
+    fp = open("hopper.ppm", "rb")

     p = ImageFile.Parser()

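
For context, a hedged sketch of the incremental-parsing flow that documentation example belongs to, feeding ImageFile.Parser one chunk at a time (the file name and chunk size are arbitrary):

    from PIL import ImageFile

    parser = ImageFile.Parser()
    with open("hopper.ppm", "rb") as fp:
        while True:
            chunk = fp.read(1024)
            if not chunk:
                break
            parser.feed(chunk)  # hand the parser data as it arrives

    image = parser.close()  # returns the parsed image object
    image.load()
    print(image.mode, image.size)
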
@@ -56,7 +56,7 @@ optional-dependencies.mic = [
 ]
 optional-dependencies.tests = [
   "check-manifest",
-  "coverage",
+  "coverage>=7.4.2",
   "defusedxml",
   "markdown2",
   "olefile",
@@ -65,6 +65,7 @@ optional-dependencies.tests = [
   "pytest",
   "pytest-cov",
   "pytest-timeout",
+  "trove-classifiers>=2024.10.12",
 ]
 optional-dependencies.typing = [
   "typing-extensions; python_version<'3.10'",
setup.py: 2 lines changed
@@ -1017,7 +1017,7 @@ def debug_build() -> bool:
     return hasattr(sys, "gettotalrefcount") or FUZZING_BUILD


-files = ["src/_imaging.c"]
+files: list[str | os.PathLike[str]] = ["src/_imaging.c"]
 for src_file in _IMAGING:
     files.append("src/" + src_file + ".c")
 for src_file in _LIB_IMAGING:
@@ -213,4 +213,7 @@ def toqimage(im: Image.Image | str | QByteArray) -> ImageQt:

 def toqpixmap(im: Image.Image | str | QByteArray) -> QPixmap:
     qimage = toqimage(im)
-    return getattr(QPixmap, "fromImage")(qimage)
+    pixmap = getattr(QPixmap, "fromImage")(qimage)
+    if qt_version == "6":
+        pixmap.detach()
+    return pixmap
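
A hedged usage sketch for the helper changed above; building a QPixmap requires a running Qt application object, and this assumes PyQt6 is the installed binding:

    from PyQt6.QtGui import QGuiApplication

    from PIL import Image, ImageQt

    app = QGuiApplication([])  # QPixmap needs an application instance
    with Image.open("Tests/images/hopper.png") as im:
        pixmap = ImageQt.toqpixmap(im)  # on Qt 6 the pixmap is now detached
    print(pixmap.width(), pixmap.height())
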
@@ -294,7 +294,7 @@ def _accept(prefix: bytes) -> bool:
 def _limit_rational(
     val: float | Fraction | IFDRational, max_val: int
 ) -> tuple[IntegralLike, IntegralLike]:
-    inv = abs(float(val)) > 1
+    inv = abs(val) > 1
     n_d = IFDRational(1 / val if inv else val).limit_rational(max_val)
     return n_d[::-1] if inv else n_d

@@ -685,22 +685,33 @@ class ImageFileDirectory_v2(_IFDv2Base):
             else:
                 self.tagtype[tag] = TiffTags.UNDEFINED
                 if all(isinstance(v, IFDRational) for v in values):
-                    self.tagtype[tag] = (
-                        TiffTags.RATIONAL
-                        if all(v >= 0 for v in values)
-                        else TiffTags.SIGNED_RATIONAL
-                    )
-                elif all(isinstance(v, int) for v in values):
-                    if all(0 <= v < 2**16 for v in values):
-                        self.tagtype[tag] = TiffTags.SHORT
-                    elif all(-(2**15) < v < 2**15 for v in values):
-                        self.tagtype[tag] = TiffTags.SIGNED_SHORT
+                    for v in values:
+                        assert isinstance(v, IFDRational)
+                        if v < 0:
+                            self.tagtype[tag] = TiffTags.SIGNED_RATIONAL
+                            break
                     else:
-                        self.tagtype[tag] = (
-                            TiffTags.LONG
-                            if all(v >= 0 for v in values)
-                            else TiffTags.SIGNED_LONG
-                        )
+                        self.tagtype[tag] = TiffTags.RATIONAL
+                elif all(isinstance(v, int) for v in values):
+                    short = True
+                    signed_short = True
+                    long = True
+                    for v in values:
+                        assert isinstance(v, int)
+                        if short and not (0 <= v < 2**16):
+                            short = False
+                        if signed_short and not (-(2**15) < v < 2**15):
+                            signed_short = False
+                        if long and v < 0:
+                            long = False
+                    if short:
+                        self.tagtype[tag] = TiffTags.SHORT
+                    elif signed_short:
+                        self.tagtype[tag] = TiffTags.SIGNED_SHORT
+                    elif long:
+                        self.tagtype[tag] = TiffTags.LONG
+                    else:
+                        self.tagtype[tag] = TiffTags.SIGNED_LONG
                 elif all(isinstance(v, float) for v in values):
                     self.tagtype[tag] = TiffTags.DOUBLE
                 elif all(isinstance(v, str) for v in values):
@@ -718,7 +729,10 @@ class ImageFileDirectory_v2(_IFDv2Base):

         is_ifd = self.tagtype[tag] == TiffTags.LONG and isinstance(values, dict)
         if not is_ifd:
-            values = tuple(info.cvt_enum(value) for value in values)
+            values = tuple(
+                info.cvt_enum(value) if isinstance(value, str) else value
+                for value in values
+            )

         dest = self._tags_v1 if legacy_api else self._tags_v2

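
The tag-type hunk above replaces repeated all(...) scans over the values with a single pass that tracks which integer tag types are still possible. A standalone sketch of that selection logic, using plain strings instead of the TiffTags constants so it runs on its own:

    def pick_int_tag_type(values: list[int]) -> str:
        # Mirror of the single-pass logic added above: assume every candidate
        # type fits, then rule candidates out while walking the values once.
        short = signed_short = long_ = True
        for v in values:
            if short and not (0 <= v < 2**16):
                short = False
            if signed_short and not (-(2**15) < v < 2**15):
                signed_short = False
            if long_ and v < 0:
                long_ = False
        if short:
            return "SHORT"
        if signed_short:
            return "SIGNED_SHORT"
        return "LONG" if long_ else "SIGNED_LONG"


    assert pick_int_tag_type([0, 65535]) == "SHORT"
    assert pick_int_tag_type([-1, 100]) == "SIGNED_SHORT"
    assert pick_int_tag_type([70000]) == "LONG"
    assert pick_int_tag_type([-70000]) == "SIGNED_LONG"
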
@@ -128,7 +128,7 @@ class WmfStubImageFile(ImageFile.StubImageFile):
             size = x1 - x0, y1 - y0

             # calculate dots per inch from bbox and frame
-            xdpi = 2540.0 * (x1 - y0) / (frame[2] - frame[0])
+            xdpi = 2540.0 * (x1 - x0) / (frame[2] - frame[0])
             ydpi = 2540.0 * (y1 - y0) / (frame[3] - frame[1])

             self.info["wmf_bbox"] = x0, y0, x1, y1
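
The one-character fix above corrects the horizontal DPI computation in the WMF/EMF stub plugin: the width of the bounding box is x1 - x0, not x1 - y0. In this branch the frame rectangle is expressed in hundredths of a millimetre, so 2540 of those units equal one inch. A worked example with made-up numbers:

    # Illustrative values only: a 1000 x 500 unit bounding box over a 10 x 5 inch frame.
    x0, y0, x1, y1 = 0, 0, 1000, 500
    frame = (0, 0, 25400, 12700)          # hundredths of a millimetre

    xdpi = 2540.0 * (x1 - x0) / (frame[2] - frame[0])   # 2540 * 1000 / 25400 = 100.0
    ydpi = 2540.0 * (y1 - y0) / (frame[3] - frame[1])   # 2540 * 500 / 12700 = 100.0
    print(xdpi, ydpi)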
@@ -82,6 +82,9 @@ struct {
 /* font objects */

 static FT_Library library;
+#ifdef Py_GIL_DISABLED
+static PyMutex ft_library_mutex;
+#endif

 typedef struct {
     PyObject_HEAD FT_Face face;
@@ -187,7 +190,9 @@ getfont(PyObject *self_, PyObject *args, PyObject *kw) {

     if (filename && font_bytes_size <= 0) {
         self->font_bytes = NULL;
+        MUTEX_LOCK(&ft_library_mutex);
         error = FT_New_Face(library, filename, index, &self->face);
+        MUTEX_UNLOCK(&ft_library_mutex);
     } else {
         /* need to have allocated storage for font_bytes for the life of the object.*/
         /* Don't free this before FT_Done_Face */
@@ -197,6 +202,7 @@ getfont(PyObject *self_, PyObject *args, PyObject *kw) {
         }
         if (!error) {
             memcpy(self->font_bytes, font_bytes, (size_t)font_bytes_size);
+            MUTEX_LOCK(&ft_library_mutex);
             error = FT_New_Memory_Face(
                 library,
                 (FT_Byte *)self->font_bytes,
@@ -204,6 +210,7 @@ getfont(PyObject *self_, PyObject *args, PyObject *kw) {
                 index,
                 &self->face
             );
+            MUTEX_UNLOCK(&ft_library_mutex);
         }
     }

@@ -1433,7 +1440,9 @@ font_setvaraxes(FontObject *self, PyObject *args) {
 static void
 font_dealloc(FontObject *self) {
     if (self->face) {
+        MUTEX_LOCK(&ft_library_mutex);
         FT_Done_Face(self->face);
+        MUTEX_UNLOCK(&ft_library_mutex);
     }
     if (self->font_bytes) {
         PyMem_Free(self->font_bytes);
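
These hunks appear to come from src/_imagingft.c: on free-threaded (Py_GIL_DISABLED) builds a PyMutex, ft_library_mutex, now serializes FT_New_Face, FT_New_Memory_Face and FT_Done_Face, since every font object shares one FT_Library handle and FreeType does not guarantee thread safety for concurrent face creation and destruction on a single library. As a loose Python analogy of the pattern (not the real implementation; the names below are invented), only the calls that touch the shared handle are guarded, not the whole workload:

    import threading
    from concurrent.futures import ThreadPoolExecutor

    _library_lock = threading.Lock()      # plays the role of ft_library_mutex
    _faces = []                           # stands in for state hanging off the shared library

    def open_face(name: str) -> str:
        with _library_lock:               # serialize creation on the shared handle
            _faces.append(name)
            return name

    def close_face(name: str) -> None:
        with _library_lock:               # serialize destruction as well
            _faces.remove(name)

    with ThreadPoolExecutor() as pool:
        names = list(pool.map(open_face, [f"font-{i}" for i in range(8)]))
    for name in names:
        close_face(name)
    print(_faces)  # []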
src/thirdparty/pythoncapi_compat.h (vendored, 341 lines changed)
@@ -7,7 +7,10 @@
 // https://github.com/python/pythoncapi_compat
 //
 // Latest version:
-// https://raw.githubusercontent.com/python/pythoncapi_compat/master/pythoncapi_compat.h
+// https://raw.githubusercontent.com/python/pythoncapi-compat/main/pythoncapi_compat.h
+//
+// This file was vendored from the following commit:
+// https://github.com/python/pythoncapi-compat/commit/0041177c4f348c8952b4c8980b2c90856e61c7c7
 //
 // SPDX-License-Identifier: 0BSD

@@ -45,6 +48,13 @@ extern "C" {
 #  define _PyObject_CAST(op) _Py_CAST(PyObject*, op)
 #endif

+#ifndef Py_BUILD_ASSERT
+#  define Py_BUILD_ASSERT(cond) \
+        do { \
+            (void)sizeof(char [1 - 2 * !(cond)]); \
+        } while(0)
+#endif
+

 // bpo-42262 added Py_NewRef() to Python 3.10.0a3
 #if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_NewRef)
@@ -1338,6 +1348,166 @@ PyDict_SetDefaultRef(PyObject *d, PyObject *key, PyObject *default_value,
 }
 #endif

+#if PY_VERSION_HEX < 0x030D00B3
+#  define Py_BEGIN_CRITICAL_SECTION(op) {
+#  define Py_END_CRITICAL_SECTION() }
+#  define Py_BEGIN_CRITICAL_SECTION2(a, b) {
+#  define Py_END_CRITICAL_SECTION2() }
+#endif
+
+#if PY_VERSION_HEX < 0x030E0000 && PY_VERSION_HEX >= 0x03060000 && !defined(PYPY_VERSION)
+typedef struct PyUnicodeWriter PyUnicodeWriter;
+
+static inline void PyUnicodeWriter_Discard(PyUnicodeWriter *writer)
+{
+    _PyUnicodeWriter_Dealloc((_PyUnicodeWriter*)writer);
+    PyMem_Free(writer);
+}
+
+static inline PyUnicodeWriter* PyUnicodeWriter_Create(Py_ssize_t length)
+{
+    if (length < 0) {
+        PyErr_SetString(PyExc_ValueError,
+                        "length must be positive");
+        return NULL;
+    }
+
+    const size_t size = sizeof(_PyUnicodeWriter);
+    PyUnicodeWriter *pub_writer = (PyUnicodeWriter *)PyMem_Malloc(size);
+    if (pub_writer == _Py_NULL) {
+        PyErr_NoMemory();
+        return _Py_NULL;
+    }
+    _PyUnicodeWriter *writer = (_PyUnicodeWriter *)pub_writer;
+
+    _PyUnicodeWriter_Init(writer);
+    if (_PyUnicodeWriter_Prepare(writer, length, 127) < 0) {
+        PyUnicodeWriter_Discard(pub_writer);
+        return NULL;
+    }
+    writer->overallocate = 1;
+    return pub_writer;
+}
+
+static inline PyObject* PyUnicodeWriter_Finish(PyUnicodeWriter *writer)
+{
+    PyObject *str = _PyUnicodeWriter_Finish((_PyUnicodeWriter*)writer);
+    assert(((_PyUnicodeWriter*)writer)->buffer == NULL);
+    PyMem_Free(writer);
+    return str;
+}
+
+static inline int
+PyUnicodeWriter_WriteChar(PyUnicodeWriter *writer, Py_UCS4 ch)
+{
+    if (ch > 0x10ffff) {
+        PyErr_SetString(PyExc_ValueError,
+                        "character must be in range(0x110000)");
+        return -1;
+    }
+
+    return _PyUnicodeWriter_WriteChar((_PyUnicodeWriter*)writer, ch);
+}
+
+static inline int
+PyUnicodeWriter_WriteStr(PyUnicodeWriter *writer, PyObject *obj)
+{
+    PyObject *str = PyObject_Str(obj);
+    if (str == NULL) {
+        return -1;
+    }
+
+    int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str);
+    Py_DECREF(str);
+    return res;
+}
+
+static inline int
+PyUnicodeWriter_WriteRepr(PyUnicodeWriter *writer, PyObject *obj)
+{
+    PyObject *str = PyObject_Repr(obj);
+    if (str == NULL) {
+        return -1;
+    }
+
+    int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str);
+    Py_DECREF(str);
+    return res;
+}
+
+static inline int
+PyUnicodeWriter_WriteUTF8(PyUnicodeWriter *writer,
+                          const char *str, Py_ssize_t size)
+{
+    if (size < 0) {
+        size = (Py_ssize_t)strlen(str);
+    }
+
+    PyObject *str_obj = PyUnicode_FromStringAndSize(str, size);
+    if (str_obj == _Py_NULL) {
+        return -1;
+    }
+
+    int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str_obj);
+    Py_DECREF(str_obj);
+    return res;
+}
+
+static inline int
+PyUnicodeWriter_WriteWideChar(PyUnicodeWriter *writer,
+                              const wchar_t *str, Py_ssize_t size)
+{
+    if (size < 0) {
+        size = (Py_ssize_t)wcslen(str);
+    }
+
+    PyObject *str_obj = PyUnicode_FromWideChar(str, size);
+    if (str_obj == _Py_NULL) {
+        return -1;
+    }
+
+    int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str_obj);
+    Py_DECREF(str_obj);
+    return res;
+}
+
+static inline int
+PyUnicodeWriter_WriteSubstring(PyUnicodeWriter *writer, PyObject *str,
+                               Py_ssize_t start, Py_ssize_t end)
+{
+    if (!PyUnicode_Check(str)) {
+        PyErr_Format(PyExc_TypeError, "expect str, not %T", str);
+        return -1;
+    }
+    if (start < 0 || start > end) {
+        PyErr_Format(PyExc_ValueError, "invalid start argument");
+        return -1;
+    }
+    if (end > PyUnicode_GET_LENGTH(str)) {
+        PyErr_Format(PyExc_ValueError, "invalid end argument");
+        return -1;
+    }
+
+    return _PyUnicodeWriter_WriteSubstring((_PyUnicodeWriter*)writer, str,
+                                           start, end);
+}
+
+static inline int
+PyUnicodeWriter_Format(PyUnicodeWriter *writer, const char *format, ...)
+{
+    va_list vargs;
+    va_start(vargs, format);
+    PyObject *str = PyUnicode_FromFormatV(format, vargs);
+    va_end(vargs);
+    if (str == _Py_NULL) {
+        return -1;
+    }
+
+    int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str);
+    Py_DECREF(str);
+    return res;
+}
+#endif // PY_VERSION_HEX < 0x030E0000

 // gh-116560 added PyLong_GetSign() to Python 3.14.0a0
 #if PY_VERSION_HEX < 0x030E00A0
@@ -1354,6 +1524,175 @@ static inline int PyLong_GetSign(PyObject *obj, int *sign)
 #endif

+
+// gh-124502 added PyUnicode_Equal() to Python 3.14.0a0
+#if PY_VERSION_HEX < 0x030E00A0
+static inline int PyUnicode_Equal(PyObject *str1, PyObject *str2)
+{
+    if (!PyUnicode_Check(str1)) {
+        PyErr_Format(PyExc_TypeError, "first argument must be str, not %s",
+                     Py_TYPE(str1)->tp_name);
+        return -1;
+    }
+    if (!PyUnicode_Check(str2)) {
+        PyErr_Format(PyExc_TypeError, "second argument must be str, not %s",
+                     Py_TYPE(str2)->tp_name);
+        return -1;
+    }
+
+#if PY_VERSION_HEX >= 0x030d0000 && !defined(PYPY_VERSION)
+    PyAPI_FUNC(int) _PyUnicode_Equal(PyObject *str1, PyObject *str2);
+
+    return _PyUnicode_Equal(str1, str2);
+#elif PY_VERSION_HEX >= 0x03060000 && !defined(PYPY_VERSION)
+    return _PyUnicode_EQ(str1, str2);
+#elif PY_VERSION_HEX >= 0x03090000 && defined(PYPY_VERSION)
+    return _PyUnicode_EQ(str1, str2);
+#else
+    return (PyUnicode_Compare(str1, str2) == 0);
+#endif
+}
+#endif
+
+
+// gh-121645 added PyBytes_Join() to Python 3.14.0a0
+#if PY_VERSION_HEX < 0x030E00A0
+static inline PyObject* PyBytes_Join(PyObject *sep, PyObject *iterable)
+{
+    return _PyBytes_Join(sep, iterable);
+}
+#endif
+
+
+#if PY_VERSION_HEX < 0x030E00A0
+static inline Py_hash_t Py_HashBuffer(const void *ptr, Py_ssize_t len)
+{
+#if PY_VERSION_HEX >= 0x03000000 && !defined(PYPY_VERSION)
+    PyAPI_FUNC(Py_hash_t) _Py_HashBytes(const void *src, Py_ssize_t len);
+
+    return _Py_HashBytes(ptr, len);
+#else
+    Py_hash_t hash;
+    PyObject *bytes = PyBytes_FromStringAndSize((const char*)ptr, len);
+    if (bytes == NULL) {
+        return -1;
+    }
+    hash = PyObject_Hash(bytes);
+    Py_DECREF(bytes);
+    return hash;
+#endif
+}
+#endif
+
+
+#if PY_VERSION_HEX < 0x030E00A0
+static inline int PyIter_NextItem(PyObject *iter, PyObject **item)
+{
+    iternextfunc tp_iternext;
+
+    assert(iter != NULL);
+    assert(item != NULL);
+
+    tp_iternext = Py_TYPE(iter)->tp_iternext;
+    if (tp_iternext == NULL) {
+        *item = NULL;
+        PyErr_Format(PyExc_TypeError, "expected an iterator, got '%s'",
+                     Py_TYPE(iter)->tp_name);
+        return -1;
+    }
+
+    if ((*item = tp_iternext(iter))) {
+        return 1;
+    }
+    if (!PyErr_Occurred()) {
+        return 0;
+    }
+    if (PyErr_ExceptionMatches(PyExc_StopIteration)) {
+        PyErr_Clear();
+        return 0;
+    }
+    return -1;
+}
+#endif
+
+
+#if PY_VERSION_HEX < 0x030E00A0
+static inline PyObject* PyLong_FromInt32(int32_t value)
+{
+    Py_BUILD_ASSERT(sizeof(long) >= 4);
+    return PyLong_FromLong(value);
+}
+
+static inline PyObject* PyLong_FromInt64(int64_t value)
+{
+    Py_BUILD_ASSERT(sizeof(long long) >= 8);
+    return PyLong_FromLongLong(value);
+}
+
+static inline PyObject* PyLong_FromUInt32(uint32_t value)
+{
+    Py_BUILD_ASSERT(sizeof(unsigned long) >= 4);
+    return PyLong_FromUnsignedLong(value);
+}
+
+static inline PyObject* PyLong_FromUInt64(uint64_t value)
+{
+    Py_BUILD_ASSERT(sizeof(unsigned long long) >= 8);
+    return PyLong_FromUnsignedLongLong(value);
+}
+
+static inline int PyLong_AsInt32(PyObject *obj, int32_t *pvalue)
+{
+    Py_BUILD_ASSERT(sizeof(int) == 4);
+    int value = PyLong_AsInt(obj);
+    if (value == -1 && PyErr_Occurred()) {
+        return -1;
+    }
+    *pvalue = (int32_t)value;
+    return 0;
+}
+
+static inline int PyLong_AsInt64(PyObject *obj, int64_t *pvalue)
+{
+    Py_BUILD_ASSERT(sizeof(long long) == 8);
+    long long value = PyLong_AsLongLong(obj);
+    if (value == -1 && PyErr_Occurred()) {
+        return -1;
+    }
+    *pvalue = (int64_t)value;
+    return 0;
+}
+
+static inline int PyLong_AsUInt32(PyObject *obj, uint32_t *pvalue)
+{
+    Py_BUILD_ASSERT(sizeof(long) >= 4);
+    unsigned long value = PyLong_AsUnsignedLong(obj);
+    if (value == (unsigned long)-1 && PyErr_Occurred()) {
+        return -1;
+    }
+#if SIZEOF_LONG > 4
+    if ((unsigned long)UINT32_MAX < value) {
+        PyErr_SetString(PyExc_OverflowError,
+                        "Python int too large to convert to C uint32_t");
+        return -1;
+    }
+#endif
+    *pvalue = (uint32_t)value;
+    return 0;
+}
+
+static inline int PyLong_AsUInt64(PyObject *obj, uint64_t *pvalue)
+{
+    Py_BUILD_ASSERT(sizeof(long long) == 8);
+    unsigned long long value = PyLong_AsUnsignedLongLong(obj);
+    if (value == (unsigned long long)-1 && PyErr_Occurred()) {
+        return -1;
+    }
+    *pvalue = (uint64_t)value;
+    return 0;
+}
+#endif
+

 #ifdef __cplusplus
 }
 #endif
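
Most of the new compat shims in pythoncapi_compat.h backport Python 3.14 C APIs so they can be used on older interpreters. PyIter_NextItem(), for instance, folds next-item retrieval and error handling into one call that returns 1 (item produced), 0 (iterator exhausted) or -1 (error). A rough Python rendering of that contract (the helper name and tuple return are illustrative only, not part of the vendored header):

    def iter_next_item(iterator):
        # Mirrors the 1 / 0 / -1 convention of the C shim above.
        try:
            return 1, next(iterator)
        except StopIteration:
            return 0, None
        except Exception:
            return -1, None

    it = iter([1, 2])
    print(iter_next_item(it))  # (1, 1)
    print(iter_next_item(it))  # (1, 2)
    print(iter_next_item(it))  # (0, None)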
@@ -1 +1 @@
-Subproject commit 452dd2d1705f6b2375369a6570c415beb3163f70
+Subproject commit 9a9d1275f025f737cdaa3c451ba07129dd95f361
@@ -134,8 +134,7 @@ V["ZLIB_DOTLESS"] = V["ZLIB"].replace(".", "")
 # dependencies, listed in order of compilation
 DEPS: dict[str, dict[str, Any]] = {
     "libjpeg": {
-        "url": f"{SF_PROJECTS}/libjpeg-turbo/files/{V['JPEGTURBO']}/"
-        f"libjpeg-turbo-{V['JPEGTURBO']}.tar.gz/download",
+        "url": f"{SF_PROJECTS}/libjpeg-turbo/files/{V['JPEGTURBO']}/FILENAME/download",
         "filename": f"libjpeg-turbo-{V['JPEGTURBO']}.tar.gz",
         "dir": f"libjpeg-turbo-{V['JPEGTURBO']}",
         "license": ["README.ijg", "LICENSE.md"],
@@ -165,7 +164,7 @@ DEPS: dict[str, dict[str, Any]] = {
         "bins": ["cjpeg.exe", "djpeg.exe"],
     },
     "zlib": {
-        "url": f"https://zlib.net/zlib{V['ZLIB_DOTLESS']}.zip",
+        "url": "https://zlib.net/FILENAME",
         "filename": f"zlib{V['ZLIB_DOTLESS']}.zip",
         "dir": f"zlib-{V['ZLIB']}",
         "license": "README",
@@ -179,7 +178,7 @@ DEPS: dict[str, dict[str, Any]] = {
         "libs": [r"*.lib"],
     },
     "xz": {
-        "url": f"https://github.com/tukaani-project/xz/releases/download/v{V['XZ']}/xz-{V['XZ']}.tar.gz",
+        "url": f"https://github.com/tukaani-project/xz/releases/download/v{V['XZ']}/FILENAME",
         "filename": f"xz-{V['XZ']}.tar.gz",
         "dir": f"xz-{V['XZ']}",
         "license": "COPYING",
@@ -192,7 +191,7 @@ DEPS: dict[str, dict[str, Any]] = {
         "libs": [r"lzma.lib"],
     },
     "libwebp": {
-        "url": f"http://downloads.webmproject.org/releases/webp/libwebp-{V['LIBWEBP']}.tar.gz",
+        "url": "http://downloads.webmproject.org/releases/webp/FILENAME",
         "filename": f"libwebp-{V['LIBWEBP']}.tar.gz",
         "dir": f"libwebp-{V['LIBWEBP']}",
         "license": "COPYING",
@@ -214,7 +213,7 @@ DEPS: dict[str, dict[str, Any]] = {
         "libs": [r"libsharpyuv.lib", r"libwebp*.lib"],
     },
     "libtiff": {
-        "url": f"https://download.osgeo.org/libtiff/tiff-{V['TIFF']}.tar.gz",
+        "url": "https://download.osgeo.org/libtiff/FILENAME",
         "filename": f"tiff-{V['TIFF']}.tar.gz",
         "dir": f"tiff-{V['TIFF']}",
         "license": "LICENSE.md",
@@ -272,7 +271,7 @@ DEPS: dict[str, dict[str, Any]] = {
         "libs": ["*.lib"],
     },
     "freetype": {
-        "url": f"https://download.savannah.gnu.org/releases/freetype/freetype-{V['FREETYPE']}.tar.gz",
+        "url": "https://download.savannah.gnu.org/releases/freetype/FILENAME",
        "filename": f"freetype-{V['FREETYPE']}.tar.gz",
         "dir": f"freetype-{V['FREETYPE']}",
         "license": ["LICENSE.TXT", r"docs\FTL.TXT", r"docs\GPLv2.TXT"],
@@ -307,7 +306,7 @@ DEPS: dict[str, dict[str, Any]] = {
         "libs": [r"objs\{msbuild_arch}\Release Static\freetype.lib"],
     },
     "lcms2": {
-        "url": f"{SF_PROJECTS}/lcms/files/lcms/{V['LCMS2']}/lcms2-{V['LCMS2']}.tar.gz/download",  # noqa: E501
+        "url": f"{SF_PROJECTS}/lcms/files/lcms/{V['LCMS2']}/FILENAME/download",
         "filename": f"lcms2-{V['LCMS2']}.tar.gz",
         "dir": f"lcms2-{V['LCMS2']}",
         "license": "LICENSE",
@@ -552,7 +551,7 @@ def extract_dep(url: str, filename: str, prefs: dict[str, str]) -> None:
         except RuntimeError as exc:
             # Otherwise try upstream
             print(exc)
-            download_dep(url, file)
+            download_dep(url.replace("FILENAME", filename), file)

     print("Extracting " + filename)
     sources_dir_abs = os.path.abspath(sources_dir)
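
These hunks, apparently from the Windows build-preparation script, switch every dependency URL to a template containing the literal token FILENAME, which the downloader substitutes with the entry's real file name just before fetching. A minimal sketch of that convention (the dictionary contents and helper name below are illustrative, not the script's actual data):

    deps = {
        "zlib": {
            "url": "https://zlib.net/FILENAME",
            "filename": "zlib13.zip",      # made-up version for the example
        },
    }

    def download_url(name: str) -> str:
        dep = deps[name]
        return dep["url"].replace("FILENAME", dep["filename"])

    print(download_url("zlib"))  # https://zlib.net/zlib13.zip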