Mirror of https://github.com/python-pillow/Pillow.git (synced 2025-01-12 18:26:17 +03:00)

Merge branch 'main' into context_manager

This commit is contained in: commit dc3c489690
@@ -1,4 +1,4 @@
mypy==1.11.2
mypy==1.13.0
IceSpringPySideStubs-PyQt6
IceSpringPySideStubs-PySide6
ipython
.github/renovate.json (12 lines changed, vendored)

@@ -1,7 +1,7 @@
{
  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
  "extends": [
    "config:base"
    "config:recommended"
  ],
  "labels": [
    "Dependency"
@@ -9,9 +9,13 @@
  "packageRules": [
    {
      "groupName": "github-actions",
      "matchManagers": ["github-actions"],
      "separateMajorMinor": "false"
      "matchManagers": [
        "github-actions"
      ],
      "separateMajorMinor": false
    }
  ],
  "schedule": ["on the 3rd day of the month"]
  "schedule": [
    "on the 3rd day of the month"
  ]
}
.github/workflows/docs.yml (2 lines changed, vendored)

@@ -33,6 +33,8 @@ jobs:
steps:
  - uses: actions/checkout@v4
    with:
      persist-credentials: false

  - name: Set up Python
    uses: actions/setup-python@v5
.github/workflows/lint.yml (2 lines changed, vendored)

@@ -21,6 +21,8 @@ jobs:
steps:
  - uses: actions/checkout@v4
    with:
      persist-credentials: false

  - name: pre-commit cache
    uses: actions/cache@v4
.github/workflows/stale.yml (4 lines changed, vendored)

@@ -6,7 +6,7 @@ on:
workflow_dispatch:

permissions:
  issues: write
  contents: read

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
@@ -15,6 +15,8 @@ concurrency:
jobs:
  stale:
    if: github.repository_owner == 'python-pillow'
    permissions:
      issues: write

    runs-on: ubuntu-latest
.github/workflows/test-cygwin.yml (2 lines changed, vendored)

@@ -48,6 +48,8 @@ jobs:
- name: Checkout Pillow
  uses: actions/checkout@v4
  with:
    persist-credentials: false

- name: Install Cygwin
  uses: cygwin/cygwin-install-action@v4
.github/workflows/test-docker.yml (5 lines changed, vendored)

@@ -46,8 +46,8 @@ jobs:
centos-stream-9-amd64,
debian-12-bookworm-x86,
debian-12-bookworm-amd64,
fedora-39-amd64,
fedora-40-amd64,
fedora-41-amd64,
gentoo,
ubuntu-22.04-jammy-amd64,
ubuntu-24.04-noble-amd64,
@@ -65,6 +65,8 @@ jobs:
steps:
  - uses: actions/checkout@v4
    with:
      persist-credentials: false

  - name: Build system information
    run: python3 .github/workflows/system-info.py
@@ -102,7 +104,6 @@ jobs:
with:
  flags: GHA_Docker
  name: ${{ matrix.docker }}
  gcov: true
  token: ${{ secrets.CODECOV_ORG_TOKEN }}

success:
.github/workflows/test-mingw.yml (2 lines changed, vendored)

@@ -46,6 +46,8 @@ jobs:
steps:
  - name: Checkout Pillow
    uses: actions/checkout@v4
    with:
      persist-credentials: false

  - name: Set up shell
    run: echo "C:\msys64\usr\bin\" >> $env:GITHUB_PATH
.github/workflows/test-valgrind.yml (2 lines changed, vendored)

@@ -40,6 +40,8 @@ jobs:
steps:
  - uses: actions/checkout@v4
    with:
      persist-credentials: false

  - name: Build system information
    run: python3 .github/workflows/system-info.py
.github/workflows/test-windows.yml (24 lines changed, vendored)

@@ -44,16 +44,20 @@ jobs:
steps:
  - name: Checkout Pillow
    uses: actions/checkout@v4
    with:
      persist-credentials: false

  - name: Checkout cached dependencies
    uses: actions/checkout@v4
    with:
      persist-credentials: false
      repository: python-pillow/pillow-depends
      path: winbuild\depends

  - name: Checkout extra test images
    uses: actions/checkout@v4
    with:
      persist-credentials: false
      repository: python-pillow/test-images
      path: Tests\test-images

@@ -69,16 +73,14 @@ jobs:
- name: Print build system information
  run: python3 .github/workflows/system-info.py

- name: Install Python dependencies
  run: >
    python3 -m pip install
    coverage>=7.4.2
    defusedxml
    olefile
    pyroma
    pytest
    pytest-cov
    pytest-timeout
- name: Upgrade pip
  run: |
    python3 -m pip install --upgrade pip

- name: Install CPython dependencies
  if: "!contains(matrix.python-version, 'pypy')"
  run: |
    python3 -m pip install PyQt6

- name: Install dependencies
  id: install
@@ -178,7 +180,7 @@ jobs:
- name: Build Pillow
  run: |
    $FLAGS="-C raqm=vendor -C fribidi=vendor"
    cmd /c "winbuild\build\build_env.cmd && $env:pythonLocation\python.exe -m pip install -v $FLAGS ."
    cmd /c "winbuild\build\build_env.cmd && $env:pythonLocation\python.exe -m pip install -v $FLAGS .[tests]"
    & $env:pythonLocation\python.exe selftest.py --installed
  shell: pwsh
.github/workflows/test.yml (3 lines changed, vendored)

@@ -63,6 +63,8 @@ jobs:
steps:
  - uses: actions/checkout@v4
    with:
      persist-credentials: false

  - name: Set up Python ${{ matrix.python-version }}
    uses: actions/setup-python@v5
@@ -158,7 +160,6 @@ jobs:
with:
  flags: ${{ matrix.os == 'ubuntu-latest' && 'GHA_Ubuntu' || 'GHA_macOS' }}
  name: ${{ matrix.os }} Python ${{ matrix.python-version }}
  gcov: true
  token: ${{ secrets.CODECOV_ORG_TOKEN }}

success:
.github/workflows/wheels-dependencies.sh (6 lines changed, vendored)

@@ -91,9 +91,6 @@ function build {
    build_libpng
    build_lcms2
    build_openjpeg
    if [ -f /usr/local/lib64/libopenjp2.so ]; then
        cp /usr/local/lib64/libopenjp2.so /usr/local/lib
    fi

    ORIGINAL_CFLAGS=$CFLAGS
    CFLAGS="$CFLAGS -O3 -DNDEBUG"
@@ -121,6 +118,7 @@ curl -fsSL -o pillow-depends-main.zip https://github.com/python-pillow/pillow-de
untar pillow-depends-main.zip

if [[ -n "$IS_MACOS" ]]; then
    # libdeflate may cause a minimum target error when repairing the wheel
    # libtiff and libxcb cause a conflict with building libtiff and libxcb
    # libxau and libxdmcp cause an issue on macOS < 11
    # remove cairo to fix building harfbuzz on arm64
@@ -132,7 +130,7 @@ if [[ -n "$IS_MACOS" ]]; then
    if [[ "$CIBW_ARCHS" == "arm64" ]]; then
        brew remove --ignore-dependencies jpeg-turbo
    else
        brew remove --ignore-dependencies webp
        brew remove --ignore-dependencies libdeflate webp
    fi

    brew install pkg-config
.github/workflows/wheels.yml (9 lines changed, vendored)

@@ -61,6 +61,7 @@ jobs:
steps:
  - uses: actions/checkout@v4
    with:
      persist-credentials: false
      submodules: true

  - uses: actions/setup-python@v5
@@ -132,6 +133,7 @@ jobs:
steps:
  - uses: actions/checkout@v4
    with:
      persist-credentials: false
      submodules: true

  - uses: actions/setup-python@v5
@@ -152,6 +154,7 @@ jobs:
CIBW_MANYLINUX_PYPY_X86_64_IMAGE: ${{ matrix.manylinux }}
CIBW_MANYLINUX_X86_64_IMAGE: ${{ matrix.manylinux }}
CIBW_PRERELEASE_PYTHONS: True
CIBW_SKIP: pp39-*
MACOSX_DEPLOYMENT_TARGET: ${{ matrix.macosx_deployment_target }}

- uses: actions/upload-artifact@v4
@@ -172,10 +175,13 @@ jobs:
- cibw_arch: ARM64
steps:
  - uses: actions/checkout@v4
    with:
      persist-credentials: false

  - name: Checkout extra test images
    uses: actions/checkout@v4
    with:
      persist-credentials: false
      repository: python-pillow/test-images
      path: Tests\test-images

@@ -224,6 +230,7 @@ jobs:
CIBW_CACHE_PATH: "C:\\cibw"
CIBW_FREE_THREADED_SUPPORT: True
CIBW_PRERELEASE_PYTHONS: True
CIBW_SKIP: pp39-*
CIBW_TEST_SKIP: "*-win_arm64"
CIBW_TEST_COMMAND: 'docker run --rm
  -v {project}:C:\pillow
@@ -251,6 +258,8 @@ jobs:
runs-on: ubuntu-latest
steps:
  - uses: actions/checkout@v4
    with:
      persist-credentials: false

  - name: Set up Python
    uses: actions/setup-python@v5
@@ -1,12 +1,12 @@
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.6.9
    rev: v0.7.2
    hooks:
      - id: ruff
        args: [--exit-non-zero-on-fix]

  - repo: https://github.com/psf/black-pre-commit-mirror
    rev: 24.8.0
    rev: 24.10.0
    hooks:
      - id: black

@@ -24,7 +24,7 @@ repos:
        exclude: (Makefile$|\.bat$|\.cmake$|\.eps$|\.fits$|\.gd$|\.opt$)

  - repo: https://github.com/pre-commit/mirrors-clang-format
    rev: v19.1.1
    rev: v19.1.3
    hooks:
      - id: clang-format
        types: [c]
@@ -50,7 +50,7 @@ repos:
        exclude: ^.github/.*TEMPLATE|^Tests/(fonts|images)/

  - repo: https://github.com/python-jsonschema/check-jsonschema
    rev: 0.29.3
    rev: 0.29.4
    hooks:
      - id: check-github-workflows
      - id: check-readthedocs
@@ -62,12 +62,12 @@ repos:
      - id: sphinx-lint

  - repo: https://github.com/tox-dev/pyproject-fmt
    rev: 2.2.4
    rev: v2.5.0
    hooks:
      - id: pyproject-fmt

  - repo: https://github.com/abravalheri/validate-pyproject
    rev: v0.20.2
    rev: v0.22
    hooks:
      - id: validate-pyproject
        additional_dependencies: [trove-classifiers>=2024.10.12]
CHANGES.rst (20 lines changed)

@@ -2,9 +2,27 @@
Changelog (Pillow)
==================

11.0.0 (unreleased)
11.1.0 (unreleased)
-------------------

- Detach PyQt6 QPixmap instance before returning #8509
  [radarhere]

- Corrected EMF DPI #8485
  [radarhere]

- Fix IFDRational with a zero denominator #8474
  [radarhere]

- Fixed disabling a feature during install #8469
  [radarhere]

11.0.0 (2024-10-15)
-------------------

- Update licence to MIT-CMU #8460
  [hugovk]

- Conditionally define ImageCms type hint to avoid requiring core #8197
  [radarhere]
LICENSE (2 lines changed)

@@ -7,7 +7,7 @@ Pillow is the friendly PIL fork. It is

Copyright © 2010-2024 by Jeffrey A. Clark and contributors

Like PIL, Pillow is licensed under the open source HPND License:
Like PIL, Pillow is licensed under the open source MIT-CMU License:

By obtaining, using, and/or copying this software and/or its associated
documentation, you agree that you have read, understood, and will comply
@@ -22,6 +22,8 @@ def test_bad() -> None:
    for f in get_files("b"):
        # Assert that there is no unclosed file warning
        with warnings.catch_warnings():
            warnings.simplefilter("error")

            try:
                with Image.open(f) as im:
                    im.load()
@@ -36,6 +36,8 @@ def test_unclosed_file() -> None:

def test_closed_file() -> None:
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        im = Image.open(TEST_FILE)
        im.load()
        im.close()
@@ -43,6 +45,8 @@ def test_closed_file() -> None:

def test_context_manager() -> None:
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        with Image.open(TEST_FILE) as im:
            im.load()
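The test hunks above (and the many similar ones below) all add the same pattern: the open/load/close calls are wrapped in warnings.catch_warnings() with simplefilter("error"), so that an unclosed-file ResourceWarning is escalated to an exception and fails the test. A minimal sketch of that pattern, using a placeholder file name rather than a file from the Pillow test suite:

import warnings

from PIL import Image


def test_no_unclosed_file_warning() -> None:
    with warnings.catch_warnings():
        warnings.simplefilter("error")  # any warning now raises an exception

        with Image.open("example.png") as im:
            im.load()  # would raise if Pillow emitted a ResourceWarning here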
@@ -72,6 +72,8 @@ def test_unclosed_file() -> None:

def test_closed_file() -> None:
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        im = Image.open(static_test_file)
        im.load()
        im.close()
@@ -88,6 +90,8 @@ def test_seek_after_close() -> None:

def test_context_manager() -> None:
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        with Image.open(static_test_file) as im:
            im.load()
@@ -46,6 +46,8 @@ def test_unclosed_file() -> None:

def test_closed_file() -> None:
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        im = Image.open(TEST_GIF)
        im.load()
        im.close()
@@ -67,6 +69,8 @@ def test_seek_after_close() -> None:

def test_context_manager() -> None:
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        with Image.open(TEST_GIF) as im:
            im.load()
@@ -21,6 +21,8 @@ def test_sanity() -> None:
    with Image.open(TEST_FILE) as im:
        # Assert that there is no unclosed file warning
        with warnings.catch_warnings():
            warnings.simplefilter("error")

            im.load()

        assert im.mode == "RGBA"
@@ -41,6 +41,8 @@ def test_unclosed_file() -> None:

def test_closed_file() -> None:
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        im = Image.open(TEST_IM)
        im.load()
        im.close()
@@ -48,6 +50,8 @@ def test_closed_file() -> None:

def test_context_manager() -> None:
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        with Image.open(TEST_IM) as im:
            im.load()
@@ -561,12 +561,12 @@ class TestFileJpeg:
    @mark_if_feature_version(
        pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
    )
    def test_qtables(self, tmp_path: Path) -> None:
    def test_qtables(self) -> None:
        def _n_qtables_helper(n: int, test_file: str) -> None:
            b = BytesIO()
            with Image.open(test_file) as im:
                f = str(tmp_path / "temp.jpg")
                im.save(f, qtables=[[n] * 64] * n)
            with Image.open(f) as im:
                im.save(b, "JPEG", qtables=[[n] * 64] * n)
            with Image.open(b) as im:
                assert isinstance(im, JpegImagePlugin.JpegImageFile)
                assert len(im.quantization) == n
                reloaded = self.roundtrip(im, qtables="keep")
@@ -876,6 +876,8 @@ class TestFileJpeg:

        out = str(tmp_path / "out.jpg")
        with warnings.catch_warnings():
            warnings.simplefilter("error")

            im.save(out, exif=exif)

        with Image.open(out) as reloaded:
@@ -2,6 +2,7 @@ from __future__ import annotations

import os
import re
from collections.abc import Generator
from io import BytesIO
from pathlib import Path
from typing import Any
@@ -29,8 +30,16 @@ EXTRA_DIR = "Tests/images/jpeg2000"

pytestmark = skip_unless_feature("jpg_2000")

test_card = Image.open("Tests/images/test-card.png")
test_card.load()

@pytest.fixture
def card() -> Generator[ImageFile.ImageFile, None, None]:
    with Image.open("Tests/images/test-card.png") as im:
        im.load()
        try:
            yield im
        finally:
            im.close()


# OpenJPEG 2.0.0 outputs this debugging message sometimes; we should
# ignore it---it doesn't represent a test failure.
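The hunk above replaces the module-level test_card image with a pytest fixture, so the test image is opened, loaded and closed around each test instead of living for the whole module. A minimal sketch of how a test consumes such a fixture — pytest injects the open image by matching the parameter name to the fixture; the test body here is purely illustrative:

from collections.abc import Generator

import pytest

from PIL import Image, ImageFile


@pytest.fixture
def card() -> Generator[ImageFile.ImageFile, None, None]:
    with Image.open("Tests/images/test-card.png") as im:
        im.load()
        try:
            yield im
        finally:
            im.close()


def test_card_is_loaded(card: ImageFile.ImageFile) -> None:
    # Each test receives an already-loaded image; the fixture closes it afterwards.
    assert card.size[0] > 0 and card.size[1] > 0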
@@ -75,76 +84,76 @@ def test_invalid_file() -> None:
        Jpeg2KImagePlugin.Jpeg2KImageFile(invalid_file)


def test_bytesio() -> None:
def test_bytesio(card: ImageFile.ImageFile) -> None:
    with open("Tests/images/test-card-lossless.jp2", "rb") as f:
        data = BytesIO(f.read())
    with Image.open(data) as im:
        im.load()
        assert_image_similar(im, test_card, 1.0e-3)
        assert_image_similar(im, card, 1.0e-3)


# These two test pre-written JPEG 2000 files that were not written with
# PIL (they were made using Adobe Photoshop)


def test_lossless(tmp_path: Path) -> None:
def test_lossless(card: ImageFile.ImageFile, tmp_path: Path) -> None:
    with Image.open("Tests/images/test-card-lossless.jp2") as im:
        im.load()
        outfile = str(tmp_path / "temp_test-card.png")
        im.save(outfile)
    assert_image_similar(im, test_card, 1.0e-3)
    assert_image_similar(im, card, 1.0e-3)


def test_lossy_tiled() -> None:
    assert_image_similar_tofile(
        test_card, "Tests/images/test-card-lossy-tiled.jp2", 2.0
    )
def test_lossy_tiled(card: ImageFile.ImageFile) -> None:
    assert_image_similar_tofile(card, "Tests/images/test-card-lossy-tiled.jp2", 2.0)


def test_lossless_rt() -> None:
    im = roundtrip(test_card)
    assert_image_equal(im, test_card)
def test_lossless_rt(card: ImageFile.ImageFile) -> None:
    im = roundtrip(card)
    assert_image_equal(im, card)


def test_lossy_rt() -> None:
    im = roundtrip(test_card, quality_layers=[20])
    assert_image_similar(im, test_card, 2.0)
def test_lossy_rt(card: ImageFile.ImageFile) -> None:
    im = roundtrip(card, quality_layers=[20])
    assert_image_similar(im, card, 2.0)


def test_tiled_rt() -> None:
    im = roundtrip(test_card, tile_size=(128, 128))
    assert_image_equal(im, test_card)
def test_tiled_rt(card: ImageFile.ImageFile) -> None:
    im = roundtrip(card, tile_size=(128, 128))
    assert_image_equal(im, card)


def test_tiled_offset_rt() -> None:
    im = roundtrip(test_card, tile_size=(128, 128), tile_offset=(0, 0), offset=(32, 32))
    assert_image_equal(im, test_card)
def test_tiled_offset_rt(card: ImageFile.ImageFile) -> None:
    im = roundtrip(card, tile_size=(128, 128), tile_offset=(0, 0), offset=(32, 32))
    assert_image_equal(im, card)


def test_tiled_offset_too_small() -> None:
def test_tiled_offset_too_small(card: ImageFile.ImageFile) -> None:
    with pytest.raises(ValueError):
        roundtrip(test_card, tile_size=(128, 128), tile_offset=(0, 0), offset=(128, 32))
        roundtrip(card, tile_size=(128, 128), tile_offset=(0, 0), offset=(128, 32))


def test_irreversible_rt() -> None:
    im = roundtrip(test_card, irreversible=True, quality_layers=[20])
    assert_image_similar(im, test_card, 2.0)
def test_irreversible_rt(card: ImageFile.ImageFile) -> None:
    im = roundtrip(card, irreversible=True, quality_layers=[20])
    assert_image_similar(im, card, 2.0)


def test_prog_qual_rt() -> None:
    im = roundtrip(test_card, quality_layers=[60, 40, 20], progression="LRCP")
    assert_image_similar(im, test_card, 2.0)
def test_prog_qual_rt(card: ImageFile.ImageFile) -> None:
    im = roundtrip(card, quality_layers=[60, 40, 20], progression="LRCP")
    assert_image_similar(im, card, 2.0)


def test_prog_res_rt() -> None:
    im = roundtrip(test_card, num_resolutions=8, progression="RLCP")
    assert_image_equal(im, test_card)
def test_prog_res_rt(card: ImageFile.ImageFile) -> None:
    im = roundtrip(card, num_resolutions=8, progression="RLCP")
    assert_image_equal(im, card)


@pytest.mark.parametrize("num_resolutions", range(2, 6))
def test_default_num_resolutions(num_resolutions: int) -> None:
def test_default_num_resolutions(
    card: ImageFile.ImageFile, num_resolutions: int
) -> None:
    d = 1 << (num_resolutions - 1)
    im = test_card.resize((d - 1, d - 1))
    im = card.resize((d - 1, d - 1))
    with pytest.raises(OSError):
        roundtrip(im, num_resolutions=num_resolutions)
    reloaded = roundtrip(im)
@@ -206,33 +215,33 @@ def test_header_errors() -> None:
        pass


def test_layers_type(tmp_path: Path) -> None:
def test_layers_type(card: ImageFile.ImageFile, tmp_path: Path) -> None:
    outfile = str(tmp_path / "temp_layers.jp2")
    for quality_layers in [[100, 50, 10], (100, 50, 10), None]:
        test_card.save(outfile, quality_layers=quality_layers)
        card.save(outfile, quality_layers=quality_layers)

    for quality_layers_str in ["quality_layers", ("100", "50", "10")]:
        with pytest.raises(ValueError):
            test_card.save(outfile, quality_layers=quality_layers_str)
            card.save(outfile, quality_layers=quality_layers_str)


def test_layers() -> None:
def test_layers(card: ImageFile.ImageFile) -> None:
    out = BytesIO()
    test_card.save(out, "JPEG2000", quality_layers=[100, 50, 10], progression="LRCP")
    card.save(out, "JPEG2000", quality_layers=[100, 50, 10], progression="LRCP")
    out.seek(0)

    with Image.open(out) as im:
        assert isinstance(im, Jpeg2KImagePlugin.Jpeg2KImageFile)
        im.layers = 1
        im.load()
        assert_image_similar(im, test_card, 13)
        assert_image_similar(im, card, 13)

    out.seek(0)
    with Image.open(out) as im:
        assert isinstance(im, Jpeg2KImagePlugin.Jpeg2KImageFile)
        im.layers = 3
        im.load()
        assert_image_similar(im, test_card, 0.4)
        assert_image_similar(im, card, 0.4)


@pytest.mark.parametrize(
@@ -248,24 +257,30 @@ def test_layers() -> None:
        (None, {"no_jp2": False}, 4, b"jP"),
    ),
)
def test_no_jp2(name: str, args: dict[str, bool], offset: int, data: bytes) -> None:
def test_no_jp2(
    card: ImageFile.ImageFile,
    name: str,
    args: dict[str, bool],
    offset: int,
    data: bytes,
) -> None:
    out = BytesIO()
    if name:
        out.name = name
    test_card.save(out, "JPEG2000", **args)
    card.save(out, "JPEG2000", **args)
    out.seek(offset)
    assert out.read(2) == data


def test_mct() -> None:
def test_mct(card: ImageFile.ImageFile) -> None:
    # Three component
    for val in (0, 1):
        out = BytesIO()
        test_card.save(out, "JPEG2000", mct=val, no_jp2=True)
        card.save(out, "JPEG2000", mct=val, no_jp2=True)

        assert out.getvalue()[59] == val
        with Image.open(out) as im:
            assert_image_similar(im, test_card, 1.0e-3)
            assert_image_similar(im, card, 1.0e-3)

    # Single component should have MCT disabled
    for val in (0, 1):
@@ -424,22 +439,22 @@ def test_comment() -> None:
            pass


def test_save_comment() -> None:
def test_save_comment(card: ImageFile.ImageFile) -> None:
    for comment in ("Created by Pillow", b"Created by Pillow"):
        out = BytesIO()
        test_card.save(out, "JPEG2000", comment=comment)
        card.save(out, "JPEG2000", comment=comment)

        with Image.open(out) as im:
            assert im.info["comment"] == b"Created by Pillow"

    out = BytesIO()
    long_comment = b" " * 65531
    test_card.save(out, "JPEG2000", comment=long_comment)
    card.save(out, "JPEG2000", comment=long_comment)
    with Image.open(out) as im:
        assert im.info["comment"] == long_comment

    with pytest.raises(ValueError):
        test_card.save(out, "JPEG2000", comment=long_comment + b" ")
        card.save(out, "JPEG2000", comment=long_comment + b" ")


@pytest.mark.parametrize(
@@ -462,10 +477,10 @@ def test_crashes(test_file: str) -> None:


@skip_unless_feature_version("jpg_2000", "2.4.0")
def test_plt_marker() -> None:
def test_plt_marker(card: ImageFile.ImageFile) -> None:
    # Search the start of the codesteam for PLT
    out = BytesIO()
    test_card.save(out, "JPEG2000", no_jp2=True, plt=True)
    card.save(out, "JPEG2000", no_jp2=True, plt=True)
    out.seek(0)
    while True:
        marker = out.read(2)
@@ -48,6 +48,8 @@ def test_unclosed_file() -> None:

def test_closed_file() -> None:
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        im = Image.open(test_files[0])
        im.load()
        im.close()
@@ -63,6 +65,8 @@ def test_seek_after_close() -> None:

def test_context_manager() -> None:
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        with Image.open(test_files[0]) as im:
            im.load()
@@ -353,6 +353,8 @@ class TestFilePng:
        with Image.open(TEST_PNG_FILE) as im:
            # Assert that there is no unclosed file warning
            with warnings.catch_warnings():
                warnings.simplefilter("error")

                im.verify()

        with Image.open(TEST_PNG_FILE) as im:
@@ -35,6 +35,8 @@ def test_unclosed_file() -> None:

def test_closed_file() -> None:
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        im = Image.open(test_file)
        im.load()
        im.close()
@@ -42,6 +44,8 @@ def test_closed_file() -> None:

def test_context_manager() -> None:
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        with Image.open(test_file) as im:
            im.load()
@@ -34,6 +34,8 @@ def test_unclosed_file() -> None:

def test_closed_file() -> None:
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        im = Image.open(TEST_FILE)
        im.load()
        im.close()
@@ -41,6 +43,8 @@ def test_closed_file() -> None:

def test_context_manager() -> None:
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        with Image.open(TEST_FILE) as im:
            im.load()
@@ -37,11 +37,15 @@ def test_unclosed_file() -> None:

def test_close() -> None:
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        tar = TarIO.TarIO(TEST_TAR_FILE, "hopper.jpg")
        tar.close()


def test_contextmanager() -> None:
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        with TarIO.TarIO(TEST_TAR_FILE, "hopper.jpg"):
            pass
@@ -78,6 +78,8 @@ class TestFileTiff:

    def test_closed_file(self) -> None:
        with warnings.catch_warnings():
            warnings.simplefilter("error")

            im = Image.open("Tests/images/multipage.tiff")
            im.load()
            im.close()
@@ -94,6 +96,8 @@ class TestFileTiff:

    def test_context_manager(self) -> None:
        with warnings.catch_warnings():
            warnings.simplefilter("error")

            with Image.open("Tests/images/multipage.tiff") as im:
                im.load()
@@ -191,6 +191,8 @@ class TestFileWebp:
        file_path = "Tests/images/hopper.webp"
        with Image.open(file_path) as image:
            with warnings.catch_warnings():
                warnings.simplefilter("error")

                image.save(tmp_path / "temp.webp")

    def test_file_pointer_could_be_reused(self) -> None:
@@ -1,5 +1,6 @@
from __future__ import annotations

from io import BytesIO
from pathlib import Path
from typing import IO
@@ -63,6 +64,12 @@ def test_load_float_dpi() -> None:
    with Image.open("Tests/images/drawing.emf") as im:
        assert im.info["dpi"] == 1423.7668161434979

    with open("Tests/images/drawing.emf", "rb") as fp:
        data = fp.read()
    b = BytesIO(data[:8] + b"\x06\xFA" + data[10:])
    with Image.open(b) as im:
        assert im.info["dpi"][0] == 2540


def test_load_set_dpi() -> None:
    with Image.open("Tests/images/drawing.wmf") as im:
@@ -739,6 +739,8 @@ class TestImage:
        # Act/Assert
        with Image.open(test_file) as im:
            with warnings.catch_warnings():
                warnings.simplefilter("error")

                im.save(temp_file)

    def test_no_new_file_on_error(self, tmp_path: Path) -> None:
@@ -10,7 +10,7 @@ from pathlib import Path

import pytest

from PIL import Image
from PIL import Image, ImageFile

from .helper import (
    assert_image_equal,
@@ -179,7 +179,7 @@ class TestImagingCoreResize:


@pytest.fixture
def gradients_image() -> Generator[Image.Image, None, None]:
def gradients_image() -> Generator[ImageFile.ImageFile, None, None]:
    with Image.open("Tests/images/radial_gradients.png") as im:
        im.load()
        try:
@@ -189,7 +189,7 @@ def gradients_image() -> Generator[Image.Image, None, None]:


class TestReducingGapResize:
    def test_reducing_gap_values(self, gradients_image: Image.Image) -> None:
    def test_reducing_gap_values(self, gradients_image: ImageFile.ImageFile) -> None:
        ref = gradients_image.resize(
            (52, 34), Image.Resampling.BICUBIC, reducing_gap=None
        )
@@ -210,7 +210,7 @@ class TestReducingGapResize:
    )
    def test_reducing_gap_1(
        self,
        gradients_image: Image.Image,
        gradients_image: ImageFile.ImageFile,
        box: tuple[float, float, float, float],
        epsilon: float,
    ) -> None:
@@ -230,7 +230,7 @@ class TestReducingGapResize:
    )
    def test_reducing_gap_2(
        self,
        gradients_image: Image.Image,
        gradients_image: ImageFile.ImageFile,
        box: tuple[float, float, float, float],
        epsilon: float,
    ) -> None:
@@ -250,7 +250,7 @@ class TestReducingGapResize:
    )
    def test_reducing_gap_3(
        self,
        gradients_image: Image.Image,
        gradients_image: ImageFile.ImageFile,
        box: tuple[float, float, float, float],
        epsilon: float,
    ) -> None:
@@ -266,7 +266,9 @@ class TestReducingGapResize:

    @pytest.mark.parametrize("box", (None, (1.1, 2.2, 510.8, 510.9), (3, 10, 410, 256)))
    def test_reducing_gap_8(
        self, gradients_image: Image.Image, box: tuple[float, float, float, float]
        self,
        gradients_image: ImageFile.ImageFile,
        box: tuple[float, float, float, float],
    ) -> None:
        ref = gradients_image.resize((52, 34), Image.Resampling.BICUBIC, box=box)
        im = gradients_image.resize(
@@ -281,7 +283,7 @@ class TestReducingGapResize:
    )
    def test_box_filter(
        self,
        gradients_image: Image.Image,
        gradients_image: ImageFile.ImageFile,
        box: tuple[float, float, float, float],
        epsilon: float,
    ) -> None:
@@ -52,4 +52,6 @@ def test_image(mode: str) -> None:

def test_closed_file() -> None:
    with warnings.catch_warnings():
        warnings.simplefilter("error")

        ImageQt.ImageQt("Tests/images/hopper.gif")
@@ -264,4 +264,6 @@ def test_no_resource_warning_for_numpy_array() -> None:
    with Image.open(test_file) as im:
        # Act/Assert
        with warnings.catch_warnings():
            warnings.simplefilter("error")

            array(im)
@@ -18,7 +18,7 @@ The fork author's goal is to foster and support active development of PIL throug
License
-------

Like PIL, Pillow is `licensed under the open source HPND License <https://raw.githubusercontent.com/python-pillow/Pillow/main/LICENSE>`_
Like PIL, Pillow is `licensed under the open source MIT-CMU License <https://raw.githubusercontent.com/python-pillow/Pillow/main/LICENSE>`_

Why a fork?
-----------
@@ -692,6 +692,30 @@ The :py:meth:`~PIL.Image.Image.save` method supports the following options:
you fail to do this, you will get errors about not being able to load the
``_imaging`` DLL).

MPO
^^^

Pillow reads and writes Multi Picture Object (MPO) files. When first opened, it loads
the primary image. The :py:meth:`~PIL.Image.Image.seek` and
:py:meth:`~PIL.Image.Image.tell` methods may be used to read other pictures from the
file. The pictures are zero-indexed and random access is supported.

.. _mpo-saving:

Saving
~~~~~~

When calling :py:meth:`~PIL.Image.Image.save` to write an MPO file, by default
only the first frame of a multiframe image will be saved. If the ``save_all``
argument is present and true, then all frames will be saved, and the following
option will also be available.

**append_images**
    A list of images to append as additional pictures. Each of the
    images in the list can be single or multiframe images.

    .. versionadded:: 9.3.0

MSP
^^^

@@ -1435,30 +1459,6 @@ Note that there may be an embedded gamma of 2.2 in MIC files.

To enable MIC support, you must install :pypi:`olefile`.

MPO
^^^

Pillow identifies and reads Multi Picture Object (MPO) files, loading the primary
image when first opened. The :py:meth:`~PIL.Image.Image.seek` and :py:meth:`~PIL.Image.Image.tell`
methods may be used to read other pictures from the file. The pictures are
zero-indexed and random access is supported.

.. _mpo-saving:

Saving
~~~~~~

When calling :py:meth:`~PIL.Image.Image.save` to write an MPO file, by default
only the first frame of a multiframe image will be saved. If the ``save_all``
argument is present and true, then all frames will be saved, and the following
option will also be available.

**append_images**
    A list of images to append as additional pictures. Each of the
    images in the list can be single or multiframe images.

    .. versionadded:: 9.3.0

PCD
^^^
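A short sketch of the MPO saving options the relocated documentation describes: save_all keeps every frame and append_images supplies the extra pictures. The input file names are placeholders; any two RGB images would do.

from PIL import Image

first = Image.open("first.jpg")
second = Image.open("second.jpg")

# save_all=True keeps every frame; append_images supplies additional pictures.
first.save("out.mpo", save_all=True, append_images=[second])

with Image.open("out.mpo") as im:
    im.seek(1)  # pictures are zero-indexed; this jumps to the second one
    print(im.size)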
@@ -29,10 +29,10 @@ These platforms are built and tested for every change.
+----------------------------------+----------------------------+---------------------+
| Debian 12 Bookworm               | 3.11                       | x86, x86-64         |
+----------------------------------+----------------------------+---------------------+
| Fedora 39                        | 3.12                       | x86-64              |
+----------------------------------+----------------------------+---------------------+
| Fedora 40                        | 3.12                       | x86-64              |
+----------------------------------+----------------------------+---------------------+
| Fedora 41                        | 3.13                       | x86-64              |
+----------------------------------+----------------------------+---------------------+
| Gentoo                           | 3.12                       | x86-64              |
+----------------------------------+----------------------------+---------------------+
| macOS 13 Ventura                 | 3.9                        | x86-64              |
@@ -75,7 +75,9 @@ These platforms have been reported to work at the versions mentioned.
| Operating system                 | | Tested Python            | | Latest tested  | | Tested     |
|                                  | | versions                 | | Pillow version | | processors |
+==================================+============================+==================+==============+
| macOS 15 Sequoia                 | 3.8, 3.9, 3.10, 3.11, 3.12 | 10.4.0           |arm           |
| macOS 15 Sequoia                 | 3.9, 3.10, 3.11, 3.12, 3.13| 11.0.0           |arm           |
|                                  +----------------------------+------------------+              |
|                                  | 3.8                        | 10.4.0           |              |
+----------------------------------+----------------------------+------------------+--------------+
| macOS 14 Sonoma                  | 3.8, 3.9, 3.10, 3.11, 3.12 | 10.4.0           |arm           |
+----------------------------------+----------------------------+------------------+--------------+
@@ -148,7 +150,7 @@ These platforms have been reported to work at the versions mentioned.
+----------------------------------+----------------------------+------------------+--------------+
| FreeBSD 10.2                     | 2.7, 3.4                   | 3.1.0            |x86-64        |
+----------------------------------+----------------------------+------------------+--------------+
| Windows 11                       | 3.9, 3.10, 3.11, 3.12      | 10.2.0           |arm64         |
| Windows 11 23H2                  | 3.9, 3.10, 3.11, 3.12, 3.13| 11.0.0           |arm64         |
+----------------------------------+----------------------------+------------------+--------------+
| Windows 11 Pro                   | 3.11, 3.12                 | 10.2.0           |x86-64        |
+----------------------------------+----------------------------+------------------+--------------+
@@ -19,7 +19,7 @@ Example: Parse an image

    from PIL import ImageFile

    fp = open("hopper.pgm", "rb")
    fp = open("hopper.ppm", "rb")

    p = ImageFile.Parser()
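A sketch of how the incremental parser example above typically continues: feed the file to the parser in chunks, then close() returns the parsed image. The chunk size and the output file name are illustrative choices, not requirements.

from PIL import ImageFile

fp = open("hopper.ppm", "rb")

p = ImageFile.Parser()

while True:
    s = fp.read(1024)
    if not s:
        break
    p.feed(s)  # hand the parser another chunk of raw bytes

im = p.close()  # returns the finished Image once enough data has been fed
im.save("copy.ppm")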
@@ -56,7 +56,7 @@ optional-dependencies.mic = [
]
optional-dependencies.tests = [
  "check-manifest",
  "coverage",
  "coverage>=7.4.2",
  "defusedxml",
  "markdown2",
  "olefile",
@@ -65,6 +65,7 @@ optional-dependencies.tests = [
  "pytest",
  "pytest-cov",
  "pytest-timeout",
  "trove-classifiers>=2024.10.12",
]
optional-dependencies.typing = [
  "typing-extensions; python_version<'3.10'",
setup.py (4 lines changed)

@@ -389,7 +389,7 @@ class pil_build_ext(build_ext):
            pass
        for x in self.feature:
            if getattr(self, f"disable_{x}"):
                setattr(self.feature, x, False)
                self.feature.set(x, False)
                self.feature.required.discard(x)
                _dbg("Disabling %s", x)
            if getattr(self, f"enable_{x}"):
@@ -1001,7 +1001,7 @@ def debug_build() -> bool:
    return hasattr(sys, "gettotalrefcount") or FUZZING_BUILD


files = ["src/_imaging.c"]
files: list[str | os.PathLike[str]] = ["src/_imaging.c"]
for src_file in _IMAGING:
    files.append("src/" + src_file + ".c")
for src_file in _LIB_IMAGING:
@@ -106,7 +106,6 @@ class GifImageFile(ImageFile.ImageFile):

        self.info["version"] = s[:6]
        self._size = i16(s, 6), i16(s, 8)
        self.tile = []
        flags = s[10]
        bits = (flags & 7) + 1
@@ -213,4 +213,7 @@ def toqimage(im: Image.Image | str | QByteArray) -> ImageQt:

def toqpixmap(im: Image.Image | str | QByteArray) -> QPixmap:
    qimage = toqimage(im)
    return getattr(QPixmap, "fromImage")(qimage)
    pixmap = getattr(QPixmap, "fromImage")(qimage)
    if qt_version == "6":
        pixmap.detach()
    return pixmap
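The change above detaches the QPixmap under Qt 6 before returning it, so it no longer shares the temporary QImage buffer. A hedged usage sketch, assuming PyQt6 is installed and a Qt application object exists (QPixmap needs one); the file name is a placeholder:

from PyQt6.QtGui import QGuiApplication

from PIL import Image, ImageQt

app = QGuiApplication([])  # QPixmap requires a running Qt (gui) application

with Image.open("hopper.png") as im:
    pixmap = ImageQt.toqpixmap(im)  # returns a detached copy on Qt 6
    print(pixmap.width(), pixmap.height())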
@@ -294,7 +294,7 @@ def _accept(prefix: bytes) -> bool:
def _limit_rational(
    val: float | Fraction | IFDRational, max_val: int
) -> tuple[IntegralLike, IntegralLike]:
    inv = abs(float(val)) > 1
    inv = abs(val) > 1
    n_d = IFDRational(1 / val if inv else val).limit_rational(max_val)
    return n_d[::-1] if inv else n_d

@@ -685,22 +685,33 @@ class ImageFileDirectory_v2(_IFDv2Base):
                else:
                    self.tagtype[tag] = TiffTags.UNDEFINED
                if all(isinstance(v, IFDRational) for v in values):
                    self.tagtype[tag] = (
                        TiffTags.RATIONAL
                        if all(v >= 0 for v in values)
                        else TiffTags.SIGNED_RATIONAL
                    )
                elif all(isinstance(v, int) for v in values):
                    if all(0 <= v < 2**16 for v in values):
                        self.tagtype[tag] = TiffTags.SHORT
                    elif all(-(2**15) < v < 2**15 for v in values):
                        self.tagtype[tag] = TiffTags.SIGNED_SHORT
                    for v in values:
                        assert isinstance(v, IFDRational)
                        if v < 0:
                            self.tagtype[tag] = TiffTags.SIGNED_RATIONAL
                            break
                    else:
                        self.tagtype[tag] = (
                            TiffTags.LONG
                            if all(v >= 0 for v in values)
                            else TiffTags.SIGNED_LONG
                        )
                        self.tagtype[tag] = TiffTags.RATIONAL
                elif all(isinstance(v, int) for v in values):
                    short = True
                    signed_short = True
                    long = True
                    for v in values:
                        assert isinstance(v, int)
                        if short and not (0 <= v < 2**16):
                            short = False
                        if signed_short and not (-(2**15) < v < 2**15):
                            signed_short = False
                        if long and v < 0:
                            long = False
                    if short:
                        self.tagtype[tag] = TiffTags.SHORT
                    elif signed_short:
                        self.tagtype[tag] = TiffTags.SIGNED_SHORT
                    elif long:
                        self.tagtype[tag] = TiffTags.LONG
                    else:
                        self.tagtype[tag] = TiffTags.SIGNED_LONG
                elif all(isinstance(v, float) for v in values):
                    self.tagtype[tag] = TiffTags.DOUBLE
                elif all(isinstance(v, str) for v in values):
@@ -718,7 +729,10 @@ class ImageFileDirectory_v2(_IFDv2Base):

        is_ifd = self.tagtype[tag] == TiffTags.LONG and isinstance(values, dict)
        if not is_ifd:
            values = tuple(info.cvt_enum(value) for value in values)
            values = tuple(
                info.cvt_enum(value) if isinstance(value, str) else value
                for value in values
            )

        dest = self._tags_v1 if legacy_api else self._tags_v2
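The rewritten block infers a TIFF tag type from the Python values when a tag has no declared type: a single pass over the values now decides between SHORT, SIGNED_SHORT, LONG and SIGNED_LONG for integers, and SIGNED_RATIONAL vs RATIONAL for IFDRational values. A hedged sketch of how that inference shows up when writing custom tags — the tag numbers 65000-65002 are arbitrary private tags chosen purely for illustration:

from PIL import TiffImagePlugin, TiffTags

ifd = TiffImagePlugin.ImageFileDirectory_v2()

ifd[65000] = 70000                               # unsigned, too big for SHORT -> LONG
ifd[65001] = -3                                  # small negative int -> SIGNED_SHORT
ifd[65002] = TiffImagePlugin.IFDRational(1, 3)   # non-negative rational -> RATIONAL

print(ifd.tagtype[65000] == TiffTags.LONG)
print(ifd.tagtype[65001] == TiffTags.SIGNED_SHORT)
print(ifd.tagtype[65002] == TiffTags.RATIONAL)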
@@ -61,7 +61,6 @@ class WebPImageFile(ImageFile.ImageFile):
        self.is_animated = self.n_frames > 1
        self._mode = "RGB" if mode == "RGBX" else mode
        self.rawmode = mode
        self.tile = []

        # Attempt to read ICC / EXIF / XMP chunks from file
        icc_profile = self._decoder.get_chunk("ICCP")
@@ -130,7 +130,7 @@ class WmfStubImageFile(ImageFile.StubImageFile):
            size = x1 - x0, y1 - y0

            # calculate dots per inch from bbox and frame
            xdpi = 2540.0 * (x1 - y0) / (frame[2] - frame[0])
            xdpi = 2540.0 * (x1 - x0) / (frame[2] - frame[0])
            ydpi = 2540.0 * (y1 - y0) / (frame[3] - frame[1])

            self.info["wmf_bbox"] = x0, y0, x1, y1
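This is the "Corrected EMF DPI" fix from the changelog: the horizontal DPI now uses the bounding-box width (x1 - x0) instead of mixing in a y coordinate. A quick worked example of the corrected formula, with made-up numbers rather than values read from a real EMF/WMF file:

# Made-up device-unit bounding box and frame rectangle (frame is in 0.01 mm units).
x0, y0, x1, y1 = 0, 0, 1000, 500
frame = (0, 0, 10000, 5000)

# 2540 hundredths of a millimetre per inch
xdpi = 2540.0 * (x1 - x0) / (frame[2] - frame[0])  # 254.0
ydpi = 2540.0 * (y1 - y0) / (frame[3] - frame[1])  # 254.0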
@@ -1,4 +1,4 @@
# Master version for Pillow
from __future__ import annotations

__version__ = "11.0.0.dev0"
__version__ = "11.1.0.dev0"
@@ -82,6 +82,9 @@ struct {
/* font objects */

static FT_Library library;
#ifdef Py_GIL_DISABLED
static PyMutex ft_library_mutex;
#endif

typedef struct {
    PyObject_HEAD FT_Face face;
@@ -187,7 +190,9 @@ getfont(PyObject *self_, PyObject *args, PyObject *kw) {

    if (filename && font_bytes_size <= 0) {
        self->font_bytes = NULL;
        MUTEX_LOCK(&ft_library_mutex);
        error = FT_New_Face(library, filename, index, &self->face);
        MUTEX_UNLOCK(&ft_library_mutex);
    } else {
        /* need to have allocated storage for font_bytes for the life of the object.*/
        /* Don't free this before FT_Done_Face */
@@ -197,6 +202,7 @@ getfont(PyObject *self_, PyObject *args, PyObject *kw) {
        }
        if (!error) {
            memcpy(self->font_bytes, font_bytes, (size_t)font_bytes_size);
            MUTEX_LOCK(&ft_library_mutex);
            error = FT_New_Memory_Face(
                library,
                (FT_Byte *)self->font_bytes,
@@ -204,6 +210,7 @@ getfont(PyObject *self_, PyObject *args, PyObject *kw) {
                index,
                &self->face
            );
            MUTEX_UNLOCK(&ft_library_mutex);
        }
    }
@@ -1433,7 +1440,9 @@ font_setvaraxes(FontObject *self, PyObject *args) {
static void
font_dealloc(FontObject *self) {
    if (self->face) {
        MUTEX_LOCK(&ft_library_mutex);
        FT_Done_Face(self->face);
        MUTEX_UNLOCK(&ft_library_mutex);
    }
    if (self->font_bytes) {
        PyMem_Free(self->font_bytes);
src/thirdparty/pythoncapi_compat.h (341 lines changed, vendored)

@@ -7,7 +7,10 @@
// https://github.com/python/pythoncapi_compat
//
// Latest version:
// https://raw.githubusercontent.com/python/pythoncapi_compat/master/pythoncapi_compat.h
// https://raw.githubusercontent.com/python/pythoncapi-compat/main/pythoncapi_compat.h
//
// This file was vendored from the following commit:
// https://github.com/python/pythoncapi-compat/commit/0041177c4f348c8952b4c8980b2c90856e61c7c7
//
// SPDX-License-Identifier: 0BSD
@@ -45,6 +48,13 @@ extern "C" {
#  define _PyObject_CAST(op) _Py_CAST(PyObject*, op)
#endif

#ifndef Py_BUILD_ASSERT
#  define Py_BUILD_ASSERT(cond) \
        do { \
            (void)sizeof(char [1 - 2 * !(cond)]); \
        } while(0)
#endif


// bpo-42262 added Py_NewRef() to Python 3.10.0a3
#if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_NewRef)
@@ -1338,6 +1348,166 @@ PyDict_SetDefaultRef(PyObject *d, PyObject *key, PyObject *default_value,
}
#endif

#if PY_VERSION_HEX < 0x030D00B3
#  define Py_BEGIN_CRITICAL_SECTION(op) {
#  define Py_END_CRITICAL_SECTION() }
#  define Py_BEGIN_CRITICAL_SECTION2(a, b) {
#  define Py_END_CRITICAL_SECTION2() }
#endif

#if PY_VERSION_HEX < 0x030E0000 && PY_VERSION_HEX >= 0x03060000 && !defined(PYPY_VERSION)
typedef struct PyUnicodeWriter PyUnicodeWriter;

static inline void PyUnicodeWriter_Discard(PyUnicodeWriter *writer)
{
    _PyUnicodeWriter_Dealloc((_PyUnicodeWriter*)writer);
    PyMem_Free(writer);
}

static inline PyUnicodeWriter* PyUnicodeWriter_Create(Py_ssize_t length)
{
    if (length < 0) {
        PyErr_SetString(PyExc_ValueError,
                        "length must be positive");
        return NULL;
    }

    const size_t size = sizeof(_PyUnicodeWriter);
    PyUnicodeWriter *pub_writer = (PyUnicodeWriter *)PyMem_Malloc(size);
    if (pub_writer == _Py_NULL) {
        PyErr_NoMemory();
        return _Py_NULL;
    }
    _PyUnicodeWriter *writer = (_PyUnicodeWriter *)pub_writer;

    _PyUnicodeWriter_Init(writer);
    if (_PyUnicodeWriter_Prepare(writer, length, 127) < 0) {
        PyUnicodeWriter_Discard(pub_writer);
        return NULL;
    }
    writer->overallocate = 1;
    return pub_writer;
}

static inline PyObject* PyUnicodeWriter_Finish(PyUnicodeWriter *writer)
{
    PyObject *str = _PyUnicodeWriter_Finish((_PyUnicodeWriter*)writer);
    assert(((_PyUnicodeWriter*)writer)->buffer == NULL);
    PyMem_Free(writer);
    return str;
}

static inline int
PyUnicodeWriter_WriteChar(PyUnicodeWriter *writer, Py_UCS4 ch)
{
    if (ch > 0x10ffff) {
        PyErr_SetString(PyExc_ValueError,
                        "character must be in range(0x110000)");
        return -1;
    }

    return _PyUnicodeWriter_WriteChar((_PyUnicodeWriter*)writer, ch);
}

static inline int
PyUnicodeWriter_WriteStr(PyUnicodeWriter *writer, PyObject *obj)
{
    PyObject *str = PyObject_Str(obj);
    if (str == NULL) {
        return -1;
    }

    int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str);
    Py_DECREF(str);
    return res;
}

static inline int
PyUnicodeWriter_WriteRepr(PyUnicodeWriter *writer, PyObject *obj)
{
    PyObject *str = PyObject_Repr(obj);
    if (str == NULL) {
        return -1;
    }

    int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str);
    Py_DECREF(str);
    return res;
}

static inline int
PyUnicodeWriter_WriteUTF8(PyUnicodeWriter *writer,
                          const char *str, Py_ssize_t size)
{
    if (size < 0) {
        size = (Py_ssize_t)strlen(str);
    }

    PyObject *str_obj = PyUnicode_FromStringAndSize(str, size);
    if (str_obj == _Py_NULL) {
        return -1;
    }

    int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str_obj);
    Py_DECREF(str_obj);
    return res;
}

static inline int
PyUnicodeWriter_WriteWideChar(PyUnicodeWriter *writer,
                              const wchar_t *str, Py_ssize_t size)
{
    if (size < 0) {
        size = (Py_ssize_t)wcslen(str);
    }

    PyObject *str_obj = PyUnicode_FromWideChar(str, size);
    if (str_obj == _Py_NULL) {
        return -1;
    }

    int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str_obj);
    Py_DECREF(str_obj);
    return res;
}

static inline int
PyUnicodeWriter_WriteSubstring(PyUnicodeWriter *writer, PyObject *str,
                               Py_ssize_t start, Py_ssize_t end)
{
    if (!PyUnicode_Check(str)) {
        PyErr_Format(PyExc_TypeError, "expect str, not %T", str);
        return -1;
    }
    if (start < 0 || start > end) {
        PyErr_Format(PyExc_ValueError, "invalid start argument");
        return -1;
    }
    if (end > PyUnicode_GET_LENGTH(str)) {
        PyErr_Format(PyExc_ValueError, "invalid end argument");
        return -1;
    }

    return _PyUnicodeWriter_WriteSubstring((_PyUnicodeWriter*)writer, str,
                                           start, end);
}

static inline int
PyUnicodeWriter_Format(PyUnicodeWriter *writer, const char *format, ...)
{
    va_list vargs;
    va_start(vargs, format);
    PyObject *str = PyUnicode_FromFormatV(format, vargs);
    va_end(vargs);
    if (str == _Py_NULL) {
        return -1;
    }

    int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str);
    Py_DECREF(str);
    return res;
}
#endif // PY_VERSION_HEX < 0x030E0000

// gh-116560 added PyLong_GetSign() to Python 3.14.0a0
#if PY_VERSION_HEX < 0x030E00A0
@@ -1354,6 +1524,175 @@ static inline int PyLong_GetSign(PyObject *obj, int *sign)
#endif


// gh-124502 added PyUnicode_Equal() to Python 3.14.0a0
#if PY_VERSION_HEX < 0x030E00A0
static inline int PyUnicode_Equal(PyObject *str1, PyObject *str2)
{
    if (!PyUnicode_Check(str1)) {
        PyErr_Format(PyExc_TypeError, "first argument must be str, not %s",
                     Py_TYPE(str1)->tp_name);
        return -1;
    }
    if (!PyUnicode_Check(str2)) {
        PyErr_Format(PyExc_TypeError, "second argument must be str, not %s",
                     Py_TYPE(str2)->tp_name);
        return -1;
    }

#if PY_VERSION_HEX >= 0x030d0000 && !defined(PYPY_VERSION)
    PyAPI_FUNC(int) _PyUnicode_Equal(PyObject *str1, PyObject *str2);

    return _PyUnicode_Equal(str1, str2);
#elif PY_VERSION_HEX >= 0x03060000 && !defined(PYPY_VERSION)
    return _PyUnicode_EQ(str1, str2);
#elif PY_VERSION_HEX >= 0x03090000 && defined(PYPY_VERSION)
    return _PyUnicode_EQ(str1, str2);
#else
    return (PyUnicode_Compare(str1, str2) == 0);
#endif
}
#endif


// gh-121645 added PyBytes_Join() to Python 3.14.0a0
#if PY_VERSION_HEX < 0x030E00A0
static inline PyObject* PyBytes_Join(PyObject *sep, PyObject *iterable)
{
    return _PyBytes_Join(sep, iterable);
}
#endif


#if PY_VERSION_HEX < 0x030E00A0
static inline Py_hash_t Py_HashBuffer(const void *ptr, Py_ssize_t len)
{
#if PY_VERSION_HEX >= 0x03000000 && !defined(PYPY_VERSION)
    PyAPI_FUNC(Py_hash_t) _Py_HashBytes(const void *src, Py_ssize_t len);

    return _Py_HashBytes(ptr, len);
#else
    Py_hash_t hash;
    PyObject *bytes = PyBytes_FromStringAndSize((const char*)ptr, len);
    if (bytes == NULL) {
        return -1;
    }
    hash = PyObject_Hash(bytes);
    Py_DECREF(bytes);
    return hash;
#endif
}
#endif


#if PY_VERSION_HEX < 0x030E00A0
static inline int PyIter_NextItem(PyObject *iter, PyObject **item)
{
    iternextfunc tp_iternext;

    assert(iter != NULL);
    assert(item != NULL);

    tp_iternext = Py_TYPE(iter)->tp_iternext;
    if (tp_iternext == NULL) {
        *item = NULL;
        PyErr_Format(PyExc_TypeError, "expected an iterator, got '%s'",
                     Py_TYPE(iter)->tp_name);
        return -1;
    }

    if ((*item = tp_iternext(iter))) {
        return 1;
    }
    if (!PyErr_Occurred()) {
        return 0;
    }
    if (PyErr_ExceptionMatches(PyExc_StopIteration)) {
        PyErr_Clear();
        return 0;
    }
    return -1;
}
#endif


#if PY_VERSION_HEX < 0x030E00A0
static inline PyObject* PyLong_FromInt32(int32_t value)
{
    Py_BUILD_ASSERT(sizeof(long) >= 4);
    return PyLong_FromLong(value);
}

static inline PyObject* PyLong_FromInt64(int64_t value)
{
    Py_BUILD_ASSERT(sizeof(long long) >= 8);
    return PyLong_FromLongLong(value);
}

static inline PyObject* PyLong_FromUInt32(uint32_t value)
{
    Py_BUILD_ASSERT(sizeof(unsigned long) >= 4);
    return PyLong_FromUnsignedLong(value);
}

static inline PyObject* PyLong_FromUInt64(uint64_t value)
{
    Py_BUILD_ASSERT(sizeof(unsigned long long) >= 8);
    return PyLong_FromUnsignedLongLong(value);
}

static inline int PyLong_AsInt32(PyObject *obj, int32_t *pvalue)
{
    Py_BUILD_ASSERT(sizeof(int) == 4);
    int value = PyLong_AsInt(obj);
    if (value == -1 && PyErr_Occurred()) {
        return -1;
    }
    *pvalue = (int32_t)value;
    return 0;
}

static inline int PyLong_AsInt64(PyObject *obj, int64_t *pvalue)
{
    Py_BUILD_ASSERT(sizeof(long long) == 8);
    long long value = PyLong_AsLongLong(obj);
    if (value == -1 && PyErr_Occurred()) {
        return -1;
    }
    *pvalue = (int64_t)value;
    return 0;
}

static inline int PyLong_AsUInt32(PyObject *obj, uint32_t *pvalue)
{
    Py_BUILD_ASSERT(sizeof(long) >= 4);
    unsigned long value = PyLong_AsUnsignedLong(obj);
    if (value == (unsigned long)-1 && PyErr_Occurred()) {
        return -1;
    }
#if SIZEOF_LONG > 4
    if ((unsigned long)UINT32_MAX < value) {
        PyErr_SetString(PyExc_OverflowError,
                        "Python int too large to convert to C uint32_t");
        return -1;
    }
#endif
    *pvalue = (uint32_t)value;
    return 0;
}

static inline int PyLong_AsUInt64(PyObject *obj, uint64_t *pvalue)
{
    Py_BUILD_ASSERT(sizeof(long long) == 8);
    unsigned long long value = PyLong_AsUnsignedLongLong(obj);
    if (value == (unsigned long long)-1 && PyErr_Occurred()) {
        return -1;
    }
    *pvalue = (uint64_t)value;
    return 0;
}
#endif


#ifdef __cplusplus
}
#endif
@@ -1 +1 @@
Subproject commit 452dd2d1705f6b2375369a6570c415beb3163f70
Subproject commit 9a9d1275f025f737cdaa3c451ba07129dd95f361
@@ -130,8 +130,7 @@ V["ZLIB_DOTLESS"] = V["ZLIB"].replace(".", "")

# dependencies, listed in order of compilation
DEPS: dict[str, dict[str, Any]] = {
    "libjpeg": {
        "url": f"{SF_PROJECTS}/libjpeg-turbo/files/{V['JPEGTURBO']}/"
        f"libjpeg-turbo-{V['JPEGTURBO']}.tar.gz/download",
        "url": f"{SF_PROJECTS}/libjpeg-turbo/files/{V['JPEGTURBO']}/FILENAME/download",
        "filename": f"libjpeg-turbo-{V['JPEGTURBO']}.tar.gz",
        "dir": f"libjpeg-turbo-{V['JPEGTURBO']}",
        "license": ["README.ijg", "LICENSE.md"],
@@ -161,7 +160,7 @@ DEPS: dict[str, dict[str, Any]] = {
        "bins": ["cjpeg.exe", "djpeg.exe"],
    },
    "zlib": {
        "url": f"https://zlib.net/zlib{V['ZLIB_DOTLESS']}.zip",
        "url": "https://zlib.net/FILENAME",
        "filename": f"zlib{V['ZLIB_DOTLESS']}.zip",
        "dir": f"zlib-{V['ZLIB']}",
        "license": "README",
@@ -175,7 +174,7 @@ DEPS: dict[str, dict[str, Any]] = {
        "libs": [r"*.lib"],
    },
    "xz": {
        "url": f"https://github.com/tukaani-project/xz/releases/download/v{V['XZ']}/xz-{V['XZ']}.tar.gz",
        "url": f"https://github.com/tukaani-project/xz/releases/download/v{V['XZ']}/FILENAME",
        "filename": f"xz-{V['XZ']}.tar.gz",
        "dir": f"xz-{V['XZ']}",
        "license": "COPYING",
@@ -188,7 +187,7 @@ DEPS: dict[str, dict[str, Any]] = {
        "libs": [r"lzma.lib"],
    },
    "libwebp": {
        "url": f"http://downloads.webmproject.org/releases/webp/libwebp-{V['LIBWEBP']}.tar.gz",
        "url": "http://downloads.webmproject.org/releases/webp/FILENAME",
        "filename": f"libwebp-{V['LIBWEBP']}.tar.gz",
        "dir": f"libwebp-{V['LIBWEBP']}",
        "license": "COPYING",
@@ -210,7 +209,7 @@ DEPS: dict[str, dict[str, Any]] = {
        "libs": [r"libsharpyuv.lib", r"libwebp*.lib"],
    },
    "libtiff": {
        "url": f"https://download.osgeo.org/libtiff/tiff-{V['TIFF']}.tar.gz",
        "url": "https://download.osgeo.org/libtiff/FILENAME",
        "filename": f"tiff-{V['TIFF']}.tar.gz",
        "dir": f"tiff-{V['TIFF']}",
        "license": "LICENSE.md",
@@ -268,7 +267,7 @@ DEPS: dict[str, dict[str, Any]] = {
        "libs": ["*.lib"],
    },
    "freetype": {
        "url": f"https://download.savannah.gnu.org/releases/freetype/freetype-{V['FREETYPE']}.tar.gz",
        "url": "https://download.savannah.gnu.org/releases/freetype/FILENAME",
        "filename": f"freetype-{V['FREETYPE']}.tar.gz",
        "dir": f"freetype-{V['FREETYPE']}",
        "license": ["LICENSE.TXT", r"docs\FTL.TXT", r"docs\GPLv2.TXT"],
@@ -303,7 +302,7 @@ DEPS: dict[str, dict[str, Any]] = {
        "libs": [r"objs\{msbuild_arch}\Release Static\freetype.lib"],
    },
    "lcms2": {
        "url": f"{SF_PROJECTS}/lcms/files/lcms/{V['LCMS2']}/lcms2-{V['LCMS2']}.tar.gz/download",  # noqa: E501
        "url": f"{SF_PROJECTS}/lcms/files/lcms/{V['LCMS2']}/FILENAME/download",
        "filename": f"lcms2-{V['LCMS2']}.tar.gz",
        "dir": f"lcms2-{V['LCMS2']}",
        "license": "LICENSE",
@@ -497,7 +496,7 @@ def extract_dep(url: str, filename: str, prefs: dict[str, str]) -> None:
        except RuntimeError as exc:
            # Otherwise try upstream
            print(exc)
            download_dep(url, file)
            download_dep(url.replace("FILENAME", filename), file)

    print("Extracting " + filename)
    sources_dir_abs = os.path.abspath(sources_dir)