Merge branch 'main' into signed
@@ -10,7 +10,7 @@ environment:
  TEST_OPTIONS:
  DEPLOY: YES
  matrix:
  - PYTHON: C:/Python310
  - PYTHON: C:/Python311
    ARCHITECTURE: x86
    APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2022
  - PYTHON: C:/Python37-x64
@@ -13,6 +13,10 @@ indent_style = space
trim_trailing_whitespace = true

[*.rst]
# Four-space indentation
indent_size = 4

[*.yml]
# Two-space indentation
indent_size = 2
.github/workflows/lint.yml (4 changes, vendored)
@@ -5,7 +5,7 @@ on: [push, pull_request, workflow_dispatch]
permissions:
  contents: read

concurrency:
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

@@ -30,7 +30,7 @@ jobs:
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          python-version: "3.x"
          cache: pip
          cache-dependency-path: "setup.py"
.github/workflows/macos-install.sh (3 changes, vendored)
@@ -2,7 +2,7 @@

set -e

brew install libtiff libjpeg openjpeg libimagequant webp little-cms2 freetype openblas libraqm
brew install libtiff libjpeg openjpeg libimagequant webp little-cms2 freetype libraqm

PYTHONOPTIMIZE=0 python3 -m pip install cffi
python3 -m pip install coverage

@@ -13,7 +13,6 @@ python3 -m pip install -U pytest-cov
python3 -m pip install -U pytest-timeout
python3 -m pip install pyroma

echo -e "[openblas]\nlibraries = openblas\nlibrary_dirs = /usr/local/opt/openblas/lib" >> ~/.numpy-site.cfg
python3 -m pip install numpy

# extra test images
.github/workflows/stale.yml (2 changes, vendored)
@@ -20,7 +20,7 @@ jobs:

    steps:
      - name: "Check issues"
        uses: actions/stale@v6
        uses: actions/stale@v7
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          only-labels: "Awaiting OP Action"
.github/workflows/test-cygwin.yml (10 changes, vendored)
@@ -5,7 +5,7 @@ on: [push, pull_request, workflow_dispatch]
permissions:
  contents: read

concurrency:
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

@@ -15,7 +15,7 @@ jobs:
    strategy:
      fail-fast: false
      matrix:
        python-minor-version: [7, 8, 9]
        python-minor-version: [8, 9]

    timeout-minutes: 40

@@ -30,7 +30,7 @@ jobs:
        uses: actions/checkout@v3

      - name: Install Cygwin
        uses: cygwin/cygwin-install-action@v2
        uses: cygwin/cygwin-install-action@v3
        with:
          platform: x86_64
          packages: >

@@ -48,7 +48,7 @@ jobs:
            qt5-devel-tools subversion xorg-server-extra zlib-devel

      - name: Add Lapack to PATH
        uses: egor-tensin/cleanup-path@v2
        uses: egor-tensin/cleanup-path@v3
        with:
          dirs: 'C:\cygwin\bin;C:\cygwin\lib\lapack'

@@ -76,7 +76,7 @@ jobs:
      - name: Build
        shell: bash.exe -eo pipefail -o igncr "{0}"
        run: |
          .ci/build.sh
          SETUPTOOLS_USE_DISTUTILS=stdlib .ci/build.sh

      - name: Test
        run: |
.github/workflows/test-docker.yml (4 changes, vendored)
@@ -5,7 +5,7 @@ on: [push, pull_request, workflow_dispatch]
permissions:
  contents: read

concurrency:
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

@@ -30,8 +30,8 @@ jobs:
          centos-stream-9-amd64,
          debian-10-buster-x86,
          debian-11-bullseye-x86,
          fedora-35-amd64,
          fedora-36-amd64,
          fedora-37-amd64,
          gentoo,
          ubuntu-18.04-bionic-amd64,
          ubuntu-20.04-focal-amd64,
.github/workflows/test-windows.yml (8 changes, vendored)
@@ -19,9 +19,9 @@ jobs:
        architecture: ["x86", "x64"]
        include:
          # PyPy 7.3.4+ only ships 64-bit binaries for Windows
          - python-version: "pypy-3.7"
          - python-version: "pypy3.8"
            architecture: "x64"
          - python-version: "pypy-3.8"
          - python-version: "pypy3.9"
            architecture: "x64"

    timeout-minutes: 30

@@ -141,7 +141,7 @@ jobs:
        if: steps.build-cache.outputs.cache-hit != 'true'
        run: "& winbuild\\build\\build_dep_fribidi.cmd"

      # trim ~150MB x 9
      # trim ~150MB for each job
      - name: Optimize build cache
        if: steps.build-cache.outputs.cache-hit != 'true'
        run: rmdir /S /Q winbuild\build\src

@@ -226,7 +226,7 @@ jobs:
          path: dist\*.whl

      - name: Upload fribidi.dll
        if: "github.event_name != 'pull_request' && matrix.python-version == 3.10"
        if: "github.event_name != 'pull_request' && matrix.python-version == 3.11"
        uses: actions/upload-artifact@v3
        with:
          name: fribidi
.github/workflows/test.yml (8 changes, vendored)
@@ -5,7 +5,7 @@ on: [push, pull_request, workflow_dispatch]
permissions:
  contents: read

concurrency:
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

@@ -20,8 +20,8 @@ jobs:
          "ubuntu-latest",
        ]
        python-version: [
          "pypy-3.8",
          "pypy-3.7",
          "pypy3.9",
          "pypy3.8",
          "3.11",
          "3.10",
          "3.9",

@@ -96,7 +96,7 @@ jobs:
          path: Tests/errors

      - name: Docs
        if: startsWith(matrix.os, 'ubuntu') && matrix.python-version == 3.10
        if: startsWith(matrix.os, 'ubuntu') && matrix.python-version == 3.11
        run: |
          make doccheck
.github/workflows/tidelift.yml (36 changes, vendored)
@@ -1,36 +0,0 @@
name: Tidelift Align

on:
  schedule:
    - cron: "30 2 * * *"  # daily at 02:30 UTC
  push:
    paths:
      - "Pipfile*"
      - ".github/workflows/tidelift.yml"
  pull_request:
    paths:
      - "Pipfile*"
      - ".github/workflows/tidelift.yml"
  workflow_dispatch:

permissions:
  contents: read

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  build:
    if: github.repository_owner == 'python-pillow'
    name: Run Tidelift to ensure approved open source packages are in use
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Scan
        uses: tidelift/alignment-action@main
        env:
          TIDELIFT_API_KEY: ${{ secrets.TIDELIFT_API_KEY }}
          TIDELIFT_ORGANIZATION: team/aclark4life
          TIDELIFT_PROJECT: pillow
@@ -1,18 +1,25 @@
repos:
  - repo: https://github.com/psf/black
    rev: 22.8.0
    rev: 22.12.0
    hooks:
      - id: black
        args: ["--target-version", "py37"]
        args: [--target-version=py37]
        # Only .py files, until https://github.com/psf/black/issues/402 resolved
        files: \.py$
        types: []

  - repo: https://github.com/PyCQA/isort
    rev: 5.10.1
    rev: 5.11.1
    hooks:
      - id: isort

  - repo: https://github.com/PyCQA/bandit
    rev: 1.7.4
    hooks:
      - id: bandit
        args: [--severity-level=high]
        files: ^src/

  - repo: https://github.com/asottile/yesqa
    rev: v1.4.0
    hooks:

@@ -25,7 +32,7 @@ repos:
        exclude: (Makefile$|\.bat$|\.cmake$|\.eps$|\.fits$|\.opt$)

  - repo: https://github.com/PyCQA/flake8
    rev: 5.0.4
    rev: 6.0.0
    hooks:
      - id: flake8
        additional_dependencies: [flake8-2020, flake8-implicit-str-concat]

@@ -37,16 +44,21 @@ repos:
      - id: rst-backticks

  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.3.0
    rev: v4.4.0
    hooks:
      - id: check-merge-conflict
      - id: check-json
      - id: check-yaml

  - repo: https://github.com/sphinx-contrib/sphinx-lint
    rev: v0.6.1
    rev: v0.6.7
    hooks:
      - id: sphinx-lint

  - repo: https://github.com/tox-dev/tox-ini-fmt
    rev: 0.5.2
    hooks:
      - id: tox-ini-fmt

ci:
  autoupdate_schedule: monthly
CHANGES.rst (72 changes)
@@ -2,6 +2,78 @@
Changelog (Pillow)
==================

9.4.0 (unreleased)
------------------

- Added DDS support for uncompressed L and LA images #6820
  [radarhere, REDxEYE]

- Added LightSource tag values to ExifTags #6749
  [radarhere]

- Fixed PyAccess after changing ICO size #6821
  [radarhere]

- Do not use EXIF from info when saving PNG images #6819
  [radarhere]

- Fixed saving EXIF data to MPO #6817
  [radarhere]

- Added Exif hide_offsets() #6762
  [radarhere]

- Only compare to previous frame when checking for duplicate GIF frames while saving #6787
  [radarhere]

- Always initialize all plugins in registered_extensions() #6811
  [radarhere]

- Ignore non-opaque WebP background when saving as GIF #6792
  [radarhere]

- Only set tile in ImageFile __setstate__ #6793
  [radarhere]

- When reading BLP, do not trust JPEG decoder to determine image is CMYK #6767
  [radarhere]

- Added IFD enum to ExifTags #6748
  [radarhere]

- Fixed bug combining GIF frame durations #6779
  [radarhere]

- Support saving JPEG comments #6774
  [smason, radarhere]

- Added getxmp() to WebPImagePlugin #6758
  [radarhere]

- Added "exact" option when saving WebP #6747
  [ashafaei, radarhere]

- Use fractional coordinates when drawing text #6722
  [radarhere]

- Fixed writing int as BYTE tag #6740
  [radarhere]

- Added MP Format Version when saving MPO #6735
  [radarhere]

- Added Interop to ExifTags #6724
  [radarhere]

- CVE-2007-4559 patch when building on Windows #6704
  [TrellixVulnTeam, nulano, radarhere]

- Fix compiler warning: accessing 64 bytes in a region of size 48 #6714
  [wiredfool]

- Use verbose flag for pip install #6713
  [wiredfool, radarhere]

9.3.0 (2022-10-29)
------------------
@@ -1,7 +1,6 @@
include *.c
include *.h
include *.in
include *.lock
include *.md
include *.py
include *.rst

@@ -10,7 +9,6 @@ include *.txt
include *.yaml
include LICENSE
include Makefile
include Pipfile
include tox.ini
graft Tests
graft src
Makefile (6 changes)
@@ -53,12 +53,12 @@ inplace: clean

.PHONY: install
install:
    python3 -m pip install .
    python3 -m pip -v install .
    python3 selftest.py

.PHONY: install-coverage
install-coverage:
    CFLAGS="-coverage -Werror=implicit-function-declaration" python3 -m pip install --global-option="build_ext" .
    CFLAGS="-coverage -Werror=implicit-function-declaration" python3 -m pip -v install --global-option="build_ext" .
    python3 selftest.py

.PHONY: debug

@@ -67,7 +67,7 @@ debug:
    # for our stuff, kills optimization, and redirects to dev null so we
    # see any build failures.
    make clean > /dev/null
    CFLAGS='-g -O0' python3 -m pip install --global-option="build_ext" . > /dev/null
    CFLAGS='-g -O0' python3 -m pip -v install --global-option="build_ext" . > /dev/null

.PHONY: release-test
release-test:
Pipfile (22 changes)
@@ -1,22 +0,0 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"

[packages]
black = "*"
check-manifest = "*"
coverage = "*"
defusedxml = "*"
packaging = "*"
markdown2 = "*"
olefile = "*"
pyroma = "*"
pytest = "*"
pytest-cov = "*"
pytest-timeout = "*"

[dev-packages]

[requires]
python_version = "3.9"
Pipfile.lock (generated, 324 changes)
@@ -1,324 +0,0 @@
{
    "_meta": {
        "hash": {
            "sha256": "e5cad23bf4187647d53b613a64dc4792b7064bf86b08dfb5737580e32943f54d"
        },
        "pipfile-spec": 6,
        "requires": {"python_version": "3.9"},
        "sources": [{"name": "pypi", "url": "https://pypi.org/simple", "verify_ssl": true}]
    },
    "default": {
        "attrs": "==21.2.0", "black": "==21.12b0", "build": "==0.7.0",
        "certifi": "==2021.10.8", "charset-normalizer": "==2.0.9",
        "check-manifest": "==0.47", "click": "==8.0.3", "coverage": "==6.2",
        "defusedxml": "==0.7.1", "docutils": "==0.18.1", "idna": "==3.3",
        "iniconfig": "==1.1.1", "markdown2": "==2.4.2", "mypy-extensions": "==0.4.3",
        "olefile": "==0.46", "packaging": "==21.3", "pathspec": "==0.9.0",
        "pep517": "==0.12.0", "platformdirs": "==2.4.0", "pluggy": "==1.0.0",
        "py": "==1.11.0", "pygments": "==2.10.0", "pyparsing": "==3.0.6",
        "pyroma": "==3.2", "pytest": "==6.2.5", "pytest-cov": "==3.0.0",
        "pytest-timeout": "==2.0.2", "requests": "==2.26.0", "setuptools": "==60.0.0",
        "toml": "==0.10.2", "tomli": "==1.2.3", "typing-extensions": "==4.0.1",
        "urllib3": "==1.26.7"
        (each entry in the deleted lock file also carried its sha256 hashes and
        python_version markers)
    },
    "develop": {}
}
@@ -54,9 +54,9 @@ As of 2019, Pillow development is
        <a href="https://app.codecov.io/gh/python-pillow/Pillow"><img
            alt="Code coverage"
            src="https://codecov.io/gh/python-pillow/Pillow/branch/main/graph/badge.svg"></a>
        <a href="https://github.com/python-pillow/Pillow/actions/workflows/tidelift.yml"><img
            alt="Tidelift Align"
            src="https://github.com/python-pillow/Pillow/actions/workflows/tidelift.yml/badge.svg"></a>
        <a href="https://bugs.chromium.org/p/oss-fuzz/issues/list?sort=-opened&can=1&q=proj:pillow"><img
            alt="Fuzzing Status"
            src="https://oss-fuzz-build-logs.storage.googleapis.com/badges/pillow.svg"></a>
    </td>
</tr>
<tr>
BIN  Tests/images/duplicate_frame.gif (new file, 138 B)
BIN  Tests/images/flower_thumbnail.png (new file, 35 KiB)
BIN  (modified image) 9.3 KiB -> 9.3 KiB
BIN  (modified image) 4.1 KiB -> 4.0 KiB
BIN  (modified image) 4.1 KiB -> 4.1 KiB
BIN  (modified image) 4.1 KiB -> 4.1 KiB
BIN  (modified image) 4.1 KiB -> 4.1 KiB
BIN  (modified image) 4.1 KiB -> 4.1 KiB
BIN  (modified image) 4.1 KiB -> 4.1 KiB
BIN  (modified image) 4.1 KiB -> 4.1 KiB
BIN  (modified image) 4.1 KiB -> 4.1 KiB
BIN  (modified image) 4.1 KiB -> 4.0 KiB
BIN  (modified image) 4.1 KiB -> 4.1 KiB
BIN  (modified image) 2.8 KiB -> 2.8 KiB
BIN  (modified image) 807 B -> 809 B
BIN  Tests/images/uncompressed_l.dds (new file)
BIN  Tests/images/uncompressed_l.png (new file, 861 B)
BIN  Tests/images/uncompressed_la.dds (new file)
BIN  Tests/images/uncompressed_la.png (new file, 1.0 KiB)
@@ -19,29 +19,17 @@ python3 setup.py build --build-base=/tmp/build install

# Build fuzzers in $OUT.
for fuzzer in $(find $SRC -name 'fuzz_*.py'); do
  fuzzer_basename=$(basename -s .py $fuzzer)
  fuzzer_package=${fuzzer_basename}.pkg
  pyinstaller \
  compile_python_fuzzer $fuzzer \
    --add-binary /usr/local/lib/libjpeg.so.62.3.0:. \
    --add-binary /usr/local/lib/libfreetype.so.6:. \
    --add-binary /usr/local/lib/liblcms2.so.2:. \
    --add-binary /usr/local/lib/libopenjp2.so.7:. \
    --add-binary /usr/local/lib/libpng16.so.16:. \
    --add-binary /usr/local/lib/libtiff.so.5:. \
    --add-binary /usr/local/lib/libtiff.so.6:. \
    --add-binary /usr/local/lib/libwebp.so.7:. \
    --add-binary /usr/local/lib/libwebpdemux.so.2:. \
    --add-binary /usr/local/lib/libwebpmux.so.3:. \
    --add-binary /usr/local/lib/libxcb.so.1:. \
    --distpath $OUT --onefile --name $fuzzer_package $fuzzer

  # Create execution wrapper.
  echo "#!/bin/sh
# LLVMFuzzerTestOneInput for fuzzer detection.
this_dir=\$(dirname \"\$0\")
LD_PRELOAD=\$this_dir/sanitizer_with_fuzzer.so \
ASAN_OPTIONS=\$ASAN_OPTIONS:symbolize=1:external_symbolizer_path=\$this_dir/llvm-symbolizer:detect_leaks=0 \
\$this_dir/$fuzzer_package \$@" > $OUT/$fuzzer_basename
  chmod u+x $OUT/$fuzzer_basename
    --add-binary /usr/local/lib/libxcb.so.1:.
done

find Tests/images Tests/icc -print | zip -q $OUT/fuzz_pillow_seed_corpus.zip -@
@@ -35,6 +35,7 @@ def test_questionable():
            "pal8os2v2.bmp",
            "rgb24prof.bmp",
            "pal1p1.bmp",
            "pal4rletrns.bmp",
            "pal8offs.bmp",
            "rgb24lprof.bmp",
            "rgb32fakealpha.bmp",
@@ -22,6 +22,8 @@ TEST_FILE_DX10_BC7 = "Tests/images/bc7-argb-8bpp_MipMaps-1.dds"
TEST_FILE_DX10_BC7_UNORM_SRGB = "Tests/images/DXGI_FORMAT_BC7_UNORM_SRGB.dds"
TEST_FILE_DX10_R8G8B8A8 = "Tests/images/argb-32bpp_MipMaps-1.dds"
TEST_FILE_DX10_R8G8B8A8_UNORM_SRGB = "Tests/images/DXGI_FORMAT_R8G8B8A8_UNORM_SRGB.dds"
TEST_FILE_UNCOMPRESSED_L = "Tests/images/uncompressed_l.dds"
TEST_FILE_UNCOMPRESSED_L_WITH_ALPHA = "Tests/images/uncompressed_la.dds"
TEST_FILE_UNCOMPRESSED_RGB = "Tests/images/hopper.dds"
TEST_FILE_UNCOMPRESSED_RGB_WITH_ALPHA = "Tests/images/uncompressed_rgb.dds"

@@ -194,26 +196,24 @@ def test_unimplemented_dxgi_format():
        pass


def test_uncompressed_rgb():
    """Check uncompressed RGB images can be opened"""
@pytest.mark.parametrize(
    ("mode", "size", "test_file"),
    [
        ("L", (128, 128), TEST_FILE_UNCOMPRESSED_L),
        ("LA", (128, 128), TEST_FILE_UNCOMPRESSED_L_WITH_ALPHA),
        ("RGB", (128, 128), TEST_FILE_UNCOMPRESSED_RGB),
        ("RGBA", (800, 600), TEST_FILE_UNCOMPRESSED_RGB_WITH_ALPHA),
    ],
)
def test_uncompressed(mode, size, test_file):
    """Check uncompressed images can be opened"""

    # convert -format dds -define dds:compression=none hopper.jpg hopper.dds
    with Image.open(TEST_FILE_UNCOMPRESSED_RGB) as im:
    with Image.open(test_file) as im:
        assert im.format == "DDS"
        assert im.mode == "RGB"
        assert im.size == (128, 128)
        assert im.mode == mode
        assert im.size == size

        assert_image_equal_tofile(im, "Tests/images/hopper.png")

    # Test image with alpha
    with Image.open(TEST_FILE_UNCOMPRESSED_RGB_WITH_ALPHA) as im:
        assert im.format == "DDS"
        assert im.mode == "RGBA"
        assert im.size == (800, 600)

        assert_image_equal_tofile(
            im, TEST_FILE_UNCOMPRESSED_RGB_WITH_ALPHA.replace(".dds", ".png")
        )
        assert_image_equal_tofile(im, test_file.replace(".dds", ".png"))


def test__accept_true():
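A minimal sketch of the new uncompressed ``L``/``LA`` DDS support exercised above (the ``Tests/images`` paths assume a Pillow checkout; the expected values mirror the parametrized test)::

    from PIL import Image

    with Image.open("Tests/images/uncompressed_l.dds") as im:
        print(im.format, im.mode, im.size)   # DDS L (128, 128)

    with Image.open("Tests/images/uncompressed_la.dds") as im:
        print(im.format, im.mode, im.size)   # DDS LA (128, 128)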
@ -305,6 +305,8 @@ def test_save_unsupported_mode(tmp_path):
|
|||
@pytest.mark.parametrize(
|
||||
("mode", "test_file"),
|
||||
[
|
||||
("L", "Tests/images/linear_gradient.png"),
|
||||
("LA", "Tests/images/uncompressed_la.png"),
|
||||
("RGB", "Tests/images/hopper.png"),
|
||||
("RGBA", "Tests/images/pil123rgba.png"),
|
||||
],
|
||||
|
|
|
@@ -677,6 +677,24 @@ def test_dispose2_background(tmp_path):
        assert im.getpixel((0, 0)) == (255, 0, 0)


def test_dispose2_background_frame(tmp_path):
    out = str(tmp_path / "temp.gif")

    im_list = [Image.new("RGBA", (1, 20))]

    different_frame = Image.new("RGBA", (1, 20))
    different_frame.putpixel((0, 10), (255, 0, 0, 255))
    im_list.append(different_frame)

    # Frame that matches the background
    im_list.append(Image.new("RGBA", (1, 20)))

    im_list[0].save(out, save_all=True, append_images=im_list[1:], disposal=2)

    with Image.open(out) as im:
        assert im.n_frames == 3


def test_transparency_in_second_frame(tmp_path):
    out = str(tmp_path / "temp.gif")
    with Image.open("Tests/images/different_transparency.gif") as im:
|
|||
] == duration_list
|
||||
|
||||
|
||||
def test_roundtrip_info_duration_combined(tmp_path):
|
||||
out = str(tmp_path / "temp.gif")
|
||||
with Image.open("Tests/images/duplicate_frame.gif") as im:
|
||||
assert [frame.info["duration"] for frame in ImageSequence.Iterator(im)] == [
|
||||
1000,
|
||||
1000,
|
||||
1000,
|
||||
]
|
||||
im.save(out, save_all=True)
|
||||
|
||||
with Image.open(out) as reloaded:
|
||||
assert [
|
||||
frame.info["duration"] for frame in ImageSequence.Iterator(reloaded)
|
||||
] == [1000, 2000]
|
||||
|
||||
|
||||
def test_identical_frames(tmp_path):
|
||||
duration_list = [1000, 1500, 2000, 4000]
|
||||
|
||||
|
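A hedged sketch of the duplicate-frame behaviour covered by ``test_roundtrip_info_duration_combined`` (the output filename and solid-colour frames are illustrative; per #6787/#6779, consecutive identical frames should be merged on save with their durations summed)::

    from PIL import Image, ImageSequence

    frames = [Image.new("L", (64, 64), color) for color in (0, 0, 255)]
    frames[0].save(
        "combined.gif", save_all=True, append_images=frames[1:], duration=[1000, 1000, 1000]
    )

    with Image.open("combined.gif") as im:
        # the two identical leading frames collapse into one, e.g. [2000, 1000]
        print([f.info["duration"] for f in ImageSequence.Iterator(im)])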
@@ -859,14 +893,23 @@ def test_background(tmp_path):
        im.info["background"] = 1
        im.save(out)
        with Image.open(out) as reread:

            assert reread.info["background"] == im.info["background"]


def test_webp_background(tmp_path):
    out = str(tmp_path / "temp.gif")

    # Test opaque WebP background
    if features.check("webp") and features.check("webp_anim"):
        with Image.open("Tests/images/hopper.webp") as im:
            assert isinstance(im.info["background"], tuple)
            assert im.info["background"] == (255, 255, 255, 255)
            im.save(out)

    # Test non-opaque WebP background
    im = Image.new("L", (100, 100), "#000")
    im.info["background"] = (0, 0, 0, 0)
    im.save(out)


def test_comment(tmp_path):
    with Image.open(TEST_GIF) as im:
@@ -71,6 +71,19 @@ def test_save_to_bytes():
    )


def test_getpixel(tmp_path):
    temp_file = str(tmp_path / "temp.ico")

    im = hopper()
    im.save(temp_file, "ico", sizes=[(32, 32), (64, 64)])

    with Image.open(temp_file) as reloaded:
        reloaded.load()
        reloaded.size = (32, 32)

        assert reloaded.getpixel((0, 0)) == (18, 20, 62)


def test_no_duplicates(tmp_path):
    temp_file = str(tmp_path / "temp.ico")
    temp_file2 = str(tmp_path / "temp2.ico")
@@ -86,6 +86,33 @@ class TestFileJpeg:
            assert len(im.applist) == 2

            assert im.info["comment"] == b"File written by Adobe Photoshop\xa8 4.0\x00"
            assert im.app["COM"] == im.info["comment"]

    def test_comment_write(self):
        with Image.open(TEST_FILE) as im:
            assert im.info["comment"] == b"File written by Adobe Photoshop\xa8 4.0\x00"

            # Test that existing comment is saved by default
            out = BytesIO()
            im.save(out, format="JPEG")
            with Image.open(out) as reloaded:
                assert im.info["comment"] == reloaded.info["comment"]

            # Ensure that a blank comment causes any existing comment to be removed
            for comment in ("", b"", None):
                out = BytesIO()
                im.save(out, format="JPEG", comment=comment)
                with Image.open(out) as reloaded:
                    assert "comment" not in reloaded.info

            # Test that a comment argument overrides the default comment
            for comment in ("Test comment text", b"Text comment text"):
                out = BytesIO()
                im.save(out, format="JPEG", comment=comment)
                with Image.open(out) as reloaded:
                    if not isinstance(comment, bytes):
                        comment = comment.encode()
                    assert reloaded.info["comment"] == comment

    def test_cmyk(self):
        # Test CMYK handling. Thanks to Tim and Charlie for test data,

@@ -415,6 +442,13 @@ class TestFileJpeg:
            info = im._getexif()
            assert info[305] == "Adobe Photoshop CS Macintosh"

    def test_get_child_images(self):
        with Image.open("Tests/images/flower.jpg") as im:
            ims = im.get_child_images()

        assert len(ims) == 1
        assert_image_equal_tofile(ims[0], "Tests/images/flower_thumbnail.png")

    def test_mp(self):
        with Image.open("Tests/images/pil_sample_rgb.jpg") as im:
            assert im._getmp() is None
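The new ``comment`` save keyword tested above (#6774) can be used like this (a small sketch; the image and comment text are arbitrary)::

    from io import BytesIO
    from PIL import Image

    im = Image.new("RGB", (32, 32), "red")
    buf = BytesIO()
    im.save(buf, format="JPEG", comment="Test comment text")

    with Image.open(buf) as reloaded:
        print(reloaded.info["comment"])   # b"Test comment text"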
@@ -80,7 +80,10 @@ def test_app(test_file):

@pytest.mark.parametrize("test_file", test_files)
def test_exif(test_file):
    with Image.open(test_file) as im:
    with Image.open(test_file) as im_original:
        im_reloaded = roundtrip(im_original, save_all=True, exif=im_original.getexif())

    for im in (im_original, im_reloaded):
        info = im._getexif()
        assert info[272] == "Nintendo 3DS"
        assert info[296] == 2

@@ -268,6 +271,7 @@ def test_save_all():
    im_reloaded = roundtrip(im, save_all=True, append_images=[im2])

    assert_image_equal(im, im_reloaded)
    assert im_reloaded.mpinfo[45056] == b"0100"

    im_reloaded.seek(1)
    assert_image_similar(im2, im_reloaded, 1)
@@ -706,10 +706,18 @@ class TestFilePng:
        assert exif[274] == 3

    def test_exif_save(self, tmp_path):
        # Test exif is not saved from info
        test_file = str(tmp_path / "temp.png")
        with Image.open("Tests/images/exif.png") as im:
            test_file = str(tmp_path / "temp.png")
            im.save(test_file)

        with Image.open(test_file) as reloaded:
            assert reloaded._getexif() is None

        # Test passing in exif
        with Image.open("Tests/images/exif.png") as im:
            im.save(test_file, exif=im.getexif())

        with Image.open(test_file) as reloaded:
            exif = reloaded._getexif()
            assert exif[274] == 1

@@ -720,7 +728,7 @@ class TestFilePng:
    def test_exif_from_jpg(self, tmp_path):
        with Image.open("Tests/images/pil_sample_rgb.jpg") as im:
            test_file = str(tmp_path / "temp.png")
            im.save(test_file)
            im.save(test_file, exif=im.getexif())

        with Image.open(test_file) as reloaded:
            exif = reloaded._getexif()
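In line with #6819, EXIF data now has to be passed explicitly when saving PNGs; a short sketch (``Tests/images/exif.png`` is the image used by the test, the output paths are illustrative)::

    from PIL import Image

    with Image.open("Tests/images/exif.png") as im:
        im.save("no_exif.png")                       # EXIF from im.info is no longer written
        im.save("with_exif.png", exif=im.getexif())  # pass it explicitly to keep it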
@@ -201,6 +201,22 @@ def test_writing_bytes_to_ascii(tmp_path):
        assert reloaded.tag_v2[271] == "test"


def test_writing_int_to_bytes(tmp_path):
    im = hopper()
    info = TiffImagePlugin.ImageFileDirectory_v2()

    tag = TiffTags.TAGS_V2[700]
    assert tag.type == TiffTags.BYTE

    info[700] = 1

    out = str(tmp_path / "temp.tiff")
    im.save(out, tiffinfo=info)

    with Image.open(out) as reloaded:
        assert reloaded.tag_v2[700] == b"\x01"


def test_undefined_zero(tmp_path):
    # Check that the tag has not been changed since this test was created
    tag = TiffTags.TAGS_V2[45059]
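A sketch of the int-as-BYTE fix (#6740) exercised above; tag 700 is declared as ``TiffTags.BYTE``, and the output path is illustrative::

    from PIL import Image, TiffImagePlugin

    im = Image.new("RGB", (16, 16))
    info = TiffImagePlugin.ImageFileDirectory_v2()
    info[700] = 1                        # an int written to a BYTE tag
    im.save("int_byte.tiff", tiffinfo=info)

    with Image.open("int_byte.tiff") as reloaded:
        print(reloaded.tag_v2[700])      # b"\x01"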
@@ -97,6 +97,35 @@ def test_write_rgba(tmp_path):
    assert_image_similar(image, pil_image, 1.0)


def test_keep_rgb_values_when_transparent(tmp_path):
    """
    Saving transparent pixels should retain their original RGB values
    when using the "exact" parameter.
    """

    image = hopper("RGB")

    # create a copy of the image
    # with the left half transparent
    half_transparent_image = image.copy()
    new_alpha = Image.new("L", (128, 128), 255)
    new_alpha.paste(0, (0, 0, 64, 128))
    half_transparent_image.putalpha(new_alpha)

    # save with transparent area preserved
    temp_file = str(tmp_path / "temp.webp")
    half_transparent_image.save(temp_file, exact=True, lossless=True)

    with Image.open(temp_file) as reloaded:
        assert reloaded.mode == "RGBA"
        assert reloaded.format == "WEBP"

        # even though it is lossless, if we don't use exact=True
        # in libwebp >= 0.5, the transparent area will be filled with black
        # (or something more conducive to compression)
        assert_image_equal(reloaded.convert("RGB"), image)


def test_write_unsupported_mode_PA(tmp_path):
    """
    Saving a palette-based file with transparency to WebP format
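A minimal sketch of the new ``exact`` save option (#6747); it assumes libwebp 0.5.0 or later and uses an illustrative output path::

    from PIL import Image

    im = Image.new("RGB", (128, 128), (255, 0, 0))
    im.putalpha(0)  # fully transparent, but the RGB values are still red
    im.save("exact.webp", lossless=True, exact=True)

    with Image.open("exact.webp") as reloaded:
        # with exact=True the hidden RGB values survive, e.g. (255, 0, 0)
        print(reloaded.convert("RGB").getpixel((0, 0)))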
@@ -11,6 +11,11 @@ pytestmark = [
    skip_unless_feature("webp_mux"),
]

try:
    from defusedxml import ElementTree
except ImportError:
    ElementTree = None


def test_read_exif_metadata():

@@ -110,6 +115,22 @@ def test_read_no_exif():
        assert not webp_image._getexif()


def test_getxmp():
    with Image.open("Tests/images/flower.webp") as im:
        assert "xmp" not in im.info
        assert im.getxmp() == {}

    with Image.open("Tests/images/flower2.webp") as im:
        if ElementTree is None:
            with pytest.warns(UserWarning):
                assert im.getxmp() == {}
        else:
            assert (
                im.getxmp()["xmpmeta"]["xmptk"]
                == "Adobe XMP Core 5.3-c011 66.145661, 2012/02/06-14:56:27 "
            )


@skip_unless_feature("webp_anim")
def test_write_animated_metadata(tmp_path):
    iccp_data = b"<iccp_data>"
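The new ``getxmp()`` on WebP images (#6758) can be called like this (a sketch; parsing the XMP packet requires ``defusedxml``, otherwise an empty dict is returned with a warning, and the test image path assumes a Pillow checkout)::

    from PIL import Image

    with Image.open("Tests/images/flower2.webp") as im:
        xmp = im.getxmp()
        print(xmp.get("xmpmeta", {}).get("xmptk"))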
@@ -7,7 +7,14 @@ import warnings

import pytest

from PIL import Image, ImageDraw, ImagePalette, UnidentifiedImageError, features
from PIL import (
    ExifTags,
    Image,
    ImageDraw,
    ImagePalette,
    UnidentifiedImageError,
    features,
)

from .helper import (
    assert_image_equal,

@@ -394,8 +401,6 @@ class TestImage:
    def test_registered_extensions_uninitialized(self):
        # Arrange
        Image._initialized = 0
        extension = Image.EXTENSION
        Image.EXTENSION = {}

        # Act
        Image.registered_extensions()

@@ -403,10 +408,6 @@ class TestImage:
        # Assert
        assert Image._initialized == 2

        # Restore the original state and assert
        Image.EXTENSION = extension
        assert Image.EXTENSION

    def test_registered_extensions(self):
        # Arrange
        # Open an image to trigger plugin registration

@@ -808,6 +809,18 @@ class TestImage:
        reloaded_exif.load(exif.tobytes())
        assert reloaded_exif.get_ifd(0xA005) == exif.get_ifd(0xA005)

    def test_exif_ifd1(self):
        with Image.open("Tests/images/flower.jpg") as im:
            exif = im.getexif()
            assert exif.get_ifd(ExifTags.IFD.IFD1) == {
                513: 2036,
                514: 5448,
                259: 6,
                296: 2,
                282: 180.0,
                283: 180.0,
            }

    def test_exif_ifd(self):
        with Image.open("Tests/images/flower.jpg") as im:
            exif = im.getexif()

@@ -838,6 +851,31 @@ class TestImage:
            34665: 196,
        }

    def test_exif_hide_offsets(self):
        with Image.open("Tests/images/flower.jpg") as im:
            exif = im.getexif()

            # Check offsets are present initially
            assert 0x8769 in exif
            for tag in (0xA005, 0x927C):
                assert tag in exif.get_ifd(0x8769)
            assert exif.get_ifd(0xA005)
            loaded_exif = exif

        with Image.open("Tests/images/flower.jpg") as im:
            new_exif = im.getexif()

        for exif in (loaded_exif, new_exif):
            exif.hide_offsets()

            # Assert they are hidden afterwards,
            # but that the IFDs are still available
            assert 0x8769 not in exif
            assert exif.get_ifd(0x8769)
            for tag in (0xA005, 0x927C):
                assert tag not in exif.get_ifd(0x8769)
            assert exif.get_ifd(0xA005)

    @pytest.mark.parametrize("size", ((1, 0), (0, 1), (0, 0)))
    def test_zero_tobytes(self, size):
        im = Image.new("RGB", size)
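A short sketch combining the new ``ExifTags.IFD`` enum (#6748) and ``Exif.hide_offsets()`` (#6762) shown in the tests above::

    from PIL import ExifTags, Image

    with Image.open("Tests/images/flower.jpg") as im:
        exif = im.getexif()
        print(exif.get_ifd(ExifTags.IFD.IFD1))   # thumbnail IFD, e.g. {513: 2036, ...}

        exif.hide_offsets()
        print(0x8769 in exif)                    # False: offset tags are hidden
        print(bool(exif.get_ifd(0x8769)))        # True: the IFD itself is still readable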
@@ -104,6 +104,13 @@ def test_rgba_p():
    assert_image_similar(im, comparable, 20)


def test_rgba():
    with Image.open("Tests/images/transparent.png") as im:
        assert im.mode == "RGBA"

        assert_image_similar(im.convert("RGBa").convert("RGB"), im.convert("RGB"), 1.5)


def test_trns_p(tmp_path):
    im = hopper("P")
    im.info["transparency"] = 0
@@ -55,10 +55,11 @@ def test_mode_with_L_with_float():
    assert im.getpixel((0, 0)) == 2


def test_mode_i():
@pytest.mark.parametrize("mode", ("I", "I;16", "I;16L", "I;16B"))
def test_mode_i(mode):
    src = hopper("L")
    data = list(src.getdata())
    im = Image.new("I", src.size, 0)
    im = Image.new(mode, src.size, 0)
    im.putdata(data, 2, 256)

    target = [2 * elt + 256 for elt in data]
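A sketch mirroring the parametrized ``putdata`` test above: the two trailing arguments scale and offset each value (``pixel = value * scale + offset``), and ``I;16`` is one of the newly covered modes::

    from PIL import Image

    src = Image.new("L", (2, 2))
    src.putdata([0, 1, 2, 3])

    im = Image.new("I;16", src.size, 0)
    im.putdata(list(src.getdata()), 2, 256)
    print(list(im.getdata()))   # [256, 258, 260, 262]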
@@ -1238,6 +1238,27 @@ def test_stroke_descender():
    assert_image_similar_tofile(im, "Tests/images/imagedraw_stroke_descender.png", 6.76)


@skip_unless_feature("freetype2")
def test_split_word():
    # Arrange
    im = Image.new("RGB", (230, 55))
    expected = im.copy()
    expected_draw = ImageDraw.Draw(expected)
    font = ImageFont.truetype("Tests/fonts/FreeMono.ttf", 48)
    expected_draw.text((0, 0), "paradise", font=font)

    draw = ImageDraw.Draw(im)

    # Act
    draw.text((0, 0), "par", font=font)

    length = draw.textlength("par", font=font)
    draw.text((length, 0), "adise", font=font)

    # Assert
    assert_image_equal(im, expected)


@skip_unless_feature("freetype2")
def test_stroke_multiline():
    # Arrange
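The fractional-coordinate change (#6722) is what lets a word be drawn in two pieces and still line up exactly; a sketch assuming FreeType support and the bundled test font::

    from PIL import Image, ImageDraw, ImageFont

    im = Image.new("RGB", (230, 55))
    draw = ImageDraw.Draw(im)
    font = ImageFont.truetype("Tests/fonts/FreeMono.ttf", 48)

    draw.text((0, 0), "par", font=font)
    length = draw.textlength("par", font=font)   # fractional advance of the first chunk
    draw.text((length, 0), "adise", font=font)   # continues exactly where "par" ended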
@ -1,4 +1,5 @@
|
|||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
@ -33,7 +34,9 @@ class TestImageGrab:
|
|||
|
||||
@pytest.mark.skipif(Image.core.HAVE_XCB, reason="tests missing XCB")
|
||||
def test_grab_no_xcb(self):
|
||||
if sys.platform not in ("win32", "darwin"):
|
||||
if sys.platform not in ("win32", "darwin") and not shutil.which(
|
||||
"gnome-screenshot"
|
||||
):
|
||||
with pytest.raises(OSError) as e:
|
||||
ImageGrab.grab()
|
||||
assert str(e.value).startswith("Pillow was built without XCB support")
|
||||
|
|
|
@ -34,7 +34,7 @@ def test_numpy_to_image():
|
|||
|
||||
# Check supported 1-bit integer formats
|
||||
assert_image(to_image(bool, 1, 1), "1", TEST_IMAGE_SIZE)
|
||||
assert_image(to_image(numpy.bool8, 1, 1), "1", TEST_IMAGE_SIZE)
|
||||
assert_image(to_image(numpy.bool_, 1, 1), "1", TEST_IMAGE_SIZE)
|
||||
|
||||
# Check supported 8-bit integer formats
|
||||
assert_image(to_image(numpy.uint8), "L", TEST_IMAGE_SIZE)
|
||||
|
@ -193,7 +193,7 @@ def test_putdata():
|
|||
"dtype",
|
||||
(
|
||||
bool,
|
||||
numpy.bool8,
|
||||
numpy.bool_,
|
||||
numpy.int8,
|
||||
numpy.int16,
|
||||
numpy.int32,
|
||||
|
|
|
@@ -15,11 +15,12 @@ ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .

.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext

.PHONY: help
help:
    @echo "Please use \`make <target>' where <target> is one of"
    @echo " html to make standalone HTML files"
    @echo " serve to start a local server for viewing docs"
    @echo " livehtml to start a local server for viewing docs and auto-reload on change"
    @echo " dirhtml to make HTML files named index.html in directories"
    @echo " singlehtml to make a single large HTML file"
    @echo " pickle to make pickle files"

@@ -39,42 +40,49 @@ help:
    @echo " linkcheck to check all external links for integrity"
    @echo " doctest to run all doctests embedded in the documentation (if enabled)"

.PHONY: clean
clean:
    -rm -rf $(BUILDDIR)/*

install-sphinx:
    $(PYTHON) -m pip install --quiet sphinx sphinx-copybutton sphinx-issues sphinx-removed-in sphinxext-opengraph furo olefile
    $(PYTHON) -m pip install --quiet furo olefile sphinx sphinx-copybutton sphinx-inline-tabs sphinx-issues sphinx-removed-in sphinxext-opengraph

.PHONY: html
html:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b html -W --keep-going $(ALLSPHINXOPTS) $(BUILDDIR)/html
    @echo
    @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

.PHONY: dirhtml
dirhtml:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
    @echo
    @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

.PHONY: singlehtml
singlehtml:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
    @echo
    @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

.PHONY: pickle
pickle:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
    @echo
    @echo "Build finished; now you can process the pickle files."

.PHONY: json
json:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
    @echo
    @echo "Build finished; now you can process the JSON files."

.PHONY: htmlhelp
htmlhelp:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp

@@ -82,6 +90,7 @@ htmlhelp:
    @echo "Build finished; now you can run HTML Help Workshop with the" \
        ".hhp project file in $(BUILDDIR)/htmlhelp."

.PHONY: qthelp
qthelp:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp

@@ -92,6 +101,7 @@ qthelp:
    @echo "To view the help file:"
    @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PillowPILfork.qhc"

.PHONY: devhelp
devhelp:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp

@@ -102,12 +112,14 @@ devhelp:
    @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/PillowPILfork"
    @echo "# devhelp"

.PHONY: epub
epub:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
    @echo
    @echo "Build finished. The epub file is in $(BUILDDIR)/epub."

.PHONY: latex
latex:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex

@@ -116,6 +128,7 @@ latex:
    @echo "Run \`make' in that directory to run these through (pdf)latex" \
        "(use \`make latexpdf' here to do that automatically)."

.PHONY: latexpdf
latexpdf:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex

@@ -123,18 +136,21 @@ latexpdf:
    $(MAKE) -C $(BUILDDIR)/latex all-pdf
    @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

.PHONY: text
text:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
    @echo
    @echo "Build finished. The text files are in $(BUILDDIR)/text."

.PHONY: man
man:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
    @echo
    @echo "Build finished. The manual pages are in $(BUILDDIR)/man."

.PHONY: texinfo
texinfo:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo

@@ -143,6 +159,7 @@ texinfo:
    @echo "Run \`make' in that directory to run these through makeinfo" \
        "(use \`make info' here to do that automatically)."

.PHONY: info
info:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo

@@ -150,18 +167,21 @@ info:
    make -C $(BUILDDIR)/texinfo info
    @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."

.PHONY: gettext
gettext:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
    @echo
    @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."

.PHONY: changes
changes:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
    @echo
    @echo "The overview file is in $(BUILDDIR)/changes."

.PHONY: linkcheck
linkcheck:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck -j auto

@@ -169,14 +189,17 @@ linkcheck:
    @echo "Link check complete; look for any errors in the above output " \
        "or in $(BUILDDIR)/linkcheck/output.txt."

.PHONY: doctest
doctest:
    $(MAKE) install-sphinx
    $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
    @echo "Testing of doctests in the sources finished, look at the " \
        "results in $(BUILDDIR)/doctest/output.txt."

.PHONY: livehtml
livehtml: html
    livereload $(BUILDDIR)/html -p 33233

.PHONY: serve
serve:
    cd $(BUILDDIR)/html; $(PYTHON) -m http.server
@@ -27,12 +27,13 @@ needs_sphinx = "2.4"
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx_copybutton",
    "sphinx_issues",
    "sphinx_removed_in",
    "sphinx.ext.autodoc",
    "sphinx.ext.intersphinx",
    "sphinx.ext.viewcode",
    "sphinx_copybutton",
    "sphinx_inline_tabs",
    "sphinx_issues",
    "sphinx_removed_in",
    "sphinxext.opengraph",
]
@@ -24,9 +24,10 @@ To get the number and names of bands in an image, use the
Modes
-----

The ``mode`` of an image is a string which defines the type and depth of a pixel in the image.
Each pixel uses the full range of the bit depth. So a 1-bit pixel has a range
of 0-1, an 8-bit pixel has a range of 0-255 and so on. The current release
The ``mode`` of an image is a string which defines the type and depth of a pixel in the
image. Each pixel uses the full range of the bit depth. So a 1-bit pixel has a range of
0-1, an 8-bit pixel has a range of 0-255, a 32-signed integer pixel has the range of
INT32 and a 32-bit floating point pixel has the range of FLOAT32. The current release
supports the following standard modes:

* ``1`` (1-bit pixels, black and white, stored with one pixel per byte)

@@ -41,6 +42,9 @@ supports the following standard modes:

* ``LAB`` (3x8-bit pixels, the L*a*b color space)
* ``HSV`` (3x8-bit pixels, Hue, Saturation, Value color space)

  * Hue's range of 0-255 is a scaled version of 0 degrees <= Hue < 360 degrees

* ``I`` (32-bit signed integer pixels)
* ``F`` (32-bit floating point pixels)
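A small illustration of the pixel ranges described above (the fill values are chosen only to show the depth of each mode)::

    from PIL import Image

    im_l = Image.new("L", (1, 1), 255)         # 8-bit pixels: 0-255
    im_i = Image.new("I", (1, 1), 2**31 - 1)   # 32-bit signed integer pixels
    im_f = Image.new("F", (1, 1), 0.5)         # 32-bit floating point pixels
    print(im_l.getpixel((0, 0)), im_i.getpixel((0, 0)), im_f.getpixel((0, 0)))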
|
@ -474,6 +474,11 @@ The :py:meth:`~PIL.Image.Image.save` method supports the following options:
|
|||
|
||||
.. versionadded:: 2.5.0
|
||||
|
||||
**comment**
|
||||
A comment about the image.
|
||||
|
||||
.. versionadded:: 9.4.0
|
||||
|
||||
|
||||
.. note::
|
||||
|
||||
|
@ -1129,6 +1134,11 @@ The :py:meth:`~PIL.Image.Image.save` method supports the following options:
|
|||
**method**
|
||||
Quality/speed trade-off (0=fast, 6=slower-better). Defaults to 4.
|
||||
|
||||
**exact**
|
||||
If true, preserve the transparent RGB values. Otherwise, discard
|
||||
invisible RGB values for better compression. Defaults to false.
|
||||
Requires libwebp 0.5.0 or later.
|
||||
|
||||
**icc_profile**
|
||||
The ICC Profile to include in the saved file. Only supported if
|
||||
the system WebP library was built with webpmux support.
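
As an illustrative sketch (the paths and option values are placeholders, not
recommendations), these options are passed straight to
:py:meth:`~PIL.Image.Image.save`::

    from PIL import Image

    im = Image.open("input.png").convert("RGBA")  # placeholder input
    # method=6 favours compression over speed; exact=True keeps the RGB
    # values of fully transparent pixels instead of discarding them.
    im.save("output.webp", lossless=True, method=6, exact=True)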
|
||||
|
|
|
@ -57,9 +57,9 @@ Pillow for enterprise is available via the Tidelift Subscription. `Learn more <h
|
|||
:target: https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=badge
|
||||
:alt: Tidelift
|
||||
|
||||
.. image:: https://github.com/python-pillow/Pillow/actions/workflows/tidelift.yml/badge.svg
|
||||
:target: https://github.com/python-pillow/Pillow/actions/workflows/tidelift.yml
|
||||
:alt: Tidelift Align
|
||||
.. image:: https://oss-fuzz-build-logs.storage.googleapis.com/badges/pillow.svg
|
||||
:target: https://bugs.chromium.org/p/oss-fuzz/issues/list?sort=-opened&can=1&q=proj:pillow
|
||||
:alt: Fuzzing Status
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/pillow.svg
|
||||
:target: https://pypi.org/project/Pillow/
|
||||
|
|
|
@ -23,6 +23,11 @@ Pillow supports these Python versions.
|
|||
:file: older-versions.csv
|
||||
:header-rows: 1
|
||||
|
||||
.. _Linux Installation:
|
||||
.. _macOS Installation:
|
||||
.. _Windows Installation:
|
||||
.. _FreeBSD Installation:
|
||||
|
||||
Basic Installation
|
||||
------------------
|
||||
|
||||
|
@ -38,75 +43,73 @@ Install Pillow with :command:`pip`::
|
|||
python3 -m pip install --upgrade Pillow
|
||||
|
||||
|
||||
Windows Installation
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
.. tab:: Linux
|
||||
|
||||
We provide Pillow binaries for Windows compiled for the matrix of
|
||||
supported Pythons in both 32 and 64-bit versions in the wheel format.
|
||||
These binaries include support for all optional libraries except
|
||||
libimagequant and libxcb. Raqm support requires
|
||||
FriBiDi to be installed separately::
|
||||
We provide binaries for Linux for each of the supported Python
|
||||
versions in the manylinux wheel format. These include support for all
|
||||
optional libraries except libimagequant. Raqm support requires
|
||||
FriBiDi to be installed separately::
|
||||
|
||||
python3 -m pip install --upgrade pip
|
||||
python3 -m pip install --upgrade Pillow
|
||||
python3 -m pip install --upgrade pip
|
||||
python3 -m pip install --upgrade Pillow
|
||||
|
||||
To install Pillow in MSYS2, see `Building on Windows using MSYS2/MinGW`_.
|
||||
Most major Linux distributions, including Fedora, Ubuntu and ArchLinux
|
||||
also include Pillow in packages that previously contained PIL e.g.
|
||||
``python-imaging``. Debian splits it into two packages, ``python3-pil``
|
||||
and ``python3-pil.imagetk``.
|
||||
|
||||
.. tab:: macOS
|
||||
|
||||
We provide binaries for macOS for each of the supported Python
|
||||
versions in the wheel format. These include support for all optional
|
||||
libraries except libimagequant. Raqm support requires
|
||||
FriBiDi to be installed separately::
|
||||
|
||||
python3 -m pip install --upgrade pip
|
||||
python3 -m pip install --upgrade Pillow
|
||||
|
||||
.. tab:: Windows
|
||||
|
||||
We provide Pillow binaries for Windows compiled for the matrix of
|
||||
supported Pythons in both 32 and 64-bit versions in the wheel format.
|
||||
These binaries include support for all optional libraries except
|
||||
libimagequant and libxcb. Raqm support requires
|
||||
FriBiDi to be installed separately::
|
||||
|
||||
python3 -m pip install --upgrade pip
|
||||
python3 -m pip install --upgrade Pillow
|
||||
|
||||
To install Pillow in MSYS2, see `Building on Windows using MSYS2/MinGW`_.
|
||||
|
||||
.. tab:: FreeBSD
|
||||
|
||||
Pillow can be installed on FreeBSD via the official Ports or Packages systems:
|
||||
|
||||
**Ports**::
|
||||
|
||||
cd /usr/ports/graphics/py-pillow && make install clean
|
||||
|
||||
**Packages**::
|
||||
|
||||
pkg install py38-pillow
|
||||
|
||||
.. note::
|
||||
|
||||
The `Pillow FreeBSD port
|
||||
<https://www.freshports.org/graphics/py-pillow/>`_ and packages
|
||||
are tested by the ports team with all supported FreeBSD versions.
|
||||
|
||||
|
||||
macOS Installation
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
We provide binaries for macOS for each of the supported Python
|
||||
versions in the wheel format. These include support for all optional
|
||||
libraries except libimagequant. Raqm support requires
|
||||
FriBiDi to be installed separately::
|
||||
|
||||
python3 -m pip install --upgrade pip
|
||||
python3 -m pip install --upgrade Pillow
|
||||
|
||||
Linux Installation
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
We provide binaries for Linux for each of the supported Python
|
||||
versions in the manylinux wheel format. These include support for all
|
||||
optional libraries except libimagequant. Raqm support requires
|
||||
FriBiDi to be installed separately::
|
||||
|
||||
python3 -m pip install --upgrade pip
|
||||
python3 -m pip install --upgrade Pillow
|
||||
|
||||
Most major Linux distributions, including Fedora, Ubuntu and ArchLinux
|
||||
also include Pillow in packages that previously contained PIL e.g.
|
||||
``python-imaging``. Debian splits it into two packages, ``python3-pil``
|
||||
and ``python3-pil.imagetk``.
|
||||
|
||||
FreeBSD Installation
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Pillow can be installed on FreeBSD via the official Ports or Packages systems:
|
||||
|
||||
**Ports**::
|
||||
|
||||
cd /usr/ports/graphics/py-pillow && make install clean
|
||||
|
||||
**Packages**::
|
||||
|
||||
pkg install py38-pillow
|
||||
|
||||
.. note::
|
||||
|
||||
The `Pillow FreeBSD port
|
||||
<https://www.freshports.org/graphics/py-pillow/>`_ and packages
|
||||
are tested by the ports team with all supported FreeBSD versions.
|
||||
|
||||
.. _Building on Linux:
|
||||
.. _Building on macOS:
|
||||
.. _Building on Windows:
|
||||
.. _Building on Windows using MSYS2/MinGW:
|
||||
.. _Building on FreeBSD:
|
||||
.. _Building on Android:
|
||||
|
||||
Building From Source
|
||||
--------------------
|
||||
|
||||
Download and extract the `compressed archive from PyPI`_.
|
||||
|
||||
.. _compressed archive from PyPI: https://pypi.org/project/Pillow/
|
||||
|
||||
.. _external-libraries:
|
||||
|
||||
External Libraries
|
||||
|
@ -140,14 +143,14 @@ Many of Pillow's features require external libraries:
|
|||
|
||||
* **libtiff** provides compressed TIFF functionality
|
||||
|
||||
* Pillow has been tested with libtiff versions **3.x** and **4.0-4.4**
|
||||
* Pillow has been tested with libtiff versions **3.x** and **4.0-4.5**
|
||||
|
||||
* **libfreetype** provides type related services
|
||||
|
||||
* **littlecms** provides color management
|
||||
|
||||
* Pillow version 2.2.1 and below uses liblcms1, Pillow 2.3.0 and
|
||||
above uses liblcms2. Tested with **1.19** and **2.7-2.13.1**.
|
||||
above uses liblcms2. Tested with **1.19** and **2.7-2.14**.
|
||||
|
||||
* **libwebp** provides the WebP format.
|
||||
|
||||
|
@ -191,7 +194,141 @@ Many of Pillow's features require external libraries:
|
|||
|
||||
* **libxcb** provides X11 screengrab support.
|
||||
|
||||
Once you have installed the prerequisites, run::
|
||||
.. tab:: Linux
|
||||
|
||||
If you didn't build Python from source, make sure you have Python's
|
||||
development libraries installed.
|
||||
|
||||
In Debian or Ubuntu::
|
||||
|
||||
sudo apt-get install python3-dev python3-setuptools
|
||||
|
||||
In Fedora, the command is::
|
||||
|
||||
sudo dnf install python3-devel redhat-rpm-config
|
||||
|
||||
In Alpine, the command is::
|
||||
|
||||
sudo apk add python3-dev py3-setuptools
|
||||
|
||||
.. Note:: ``redhat-rpm-config`` is required on Fedora 23, but not earlier versions.
|
||||
|
||||
Prerequisites for **Ubuntu 16.04 LTS - 22.04 LTS** are installed with::
|
||||
|
||||
sudo apt-get install libtiff5-dev libjpeg8-dev libopenjp2-7-dev zlib1g-dev \
|
||||
libfreetype6-dev liblcms2-dev libwebp-dev tcl8.6-dev tk8.6-dev python3-tk \
|
||||
libharfbuzz-dev libfribidi-dev libxcb1-dev
|
||||
|
||||
To install libraqm, ``sudo apt-get install meson`` and then see
|
||||
``depends/install_raqm.sh``.
|
||||
|
||||
Prerequisites are installed on recent **Red Hat**, **CentOS** or **Fedora** with::
|
||||
|
||||
sudo dnf install libtiff-devel libjpeg-devel openjpeg2-devel zlib-devel \
|
||||
freetype-devel lcms2-devel libwebp-devel tcl-devel tk-devel \
|
||||
harfbuzz-devel fribidi-devel libraqm-devel libimagequant-devel libxcb-devel
|
||||
|
||||
Note that the package manager may be yum or DNF, depending on the
|
||||
exact distribution.
|
||||
|
||||
Prerequisites are installed for **Alpine** with::
|
||||
|
||||
sudo apk add tiff-dev jpeg-dev openjpeg-dev zlib-dev freetype-dev lcms2-dev \
|
||||
libwebp-dev tcl-dev tk-dev harfbuzz-dev fribidi-dev libimagequant-dev \
|
||||
libxcb-dev libpng-dev
|
||||
|
||||
See also the ``Dockerfile``\s in the Test Infrastructure repo
|
||||
(https://github.com/python-pillow/docker-images) for a known working
|
||||
install process for other tested distros.
|
||||
|
||||
.. tab:: macOS
|
||||
|
||||
The Xcode command line tools are required to compile portions of
|
||||
Pillow. The tools are installed by running ``xcode-select --install``
|
||||
from the command line. The command line tools are required even if you
|
||||
have the full Xcode package installed. It may be necessary to run
|
||||
``sudo xcodebuild -license`` to accept the license prior to using the
|
||||
tools.
|
||||
|
||||
The easiest way to install external libraries is via `Homebrew
|
||||
<https://brew.sh/>`_. After you install Homebrew, run::
|
||||
|
||||
brew install libjpeg libtiff little-cms2 openjpeg webp
|
||||
|
||||
To install libraqm on macOS use Homebrew to install its dependencies::
|
||||
|
||||
brew install freetype harfbuzz fribidi
|
||||
|
||||
Then see ``depends/install_raqm_cmake.sh`` to install libraqm.
|
||||
|
||||
.. tab:: Windows
|
||||
|
||||
We recommend you use prebuilt wheels from PyPI.
|
||||
If you wish to compile Pillow manually, you can use the build scripts
|
||||
in the ``winbuild`` directory used for CI testing and development.
|
||||
These scripts require Visual Studio 2017 or newer and NASM.
|
||||
|
||||
The scripts also install Pillow from the local copy of the source code, so the
|
||||
`Installing`_ instructions will not be necessary afterwards.
|
||||
|
||||
.. tab:: Windows using MSYS2/MinGW
|
||||
|
||||
To build Pillow using MSYS2, make sure you run the **MSYS2 MinGW 32-bit** or
|
||||
**MSYS2 MinGW 64-bit** console, *not* **MSYS2** directly.
|
||||
|
||||
The following instructions target the 64-bit build, for 32-bit
|
||||
replace all occurrences of ``mingw-w64-x86_64-`` with ``mingw-w64-i686-``.
|
||||
|
||||
Make sure you have Python and GCC installed::
|
||||
|
||||
pacman -S \
|
||||
mingw-w64-x86_64-gcc \
|
||||
mingw-w64-x86_64-python3 \
|
||||
mingw-w64-x86_64-python3-pip \
|
||||
mingw-w64-x86_64-python3-setuptools
|
||||
|
||||
Prerequisites are installed on **MSYS2 MinGW 64-bit** with::
|
||||
|
||||
pacman -S \
|
||||
mingw-w64-x86_64-libjpeg-turbo \
|
||||
mingw-w64-x86_64-zlib \
|
||||
mingw-w64-x86_64-libtiff \
|
||||
mingw-w64-x86_64-freetype \
|
||||
mingw-w64-x86_64-lcms2 \
|
||||
mingw-w64-x86_64-libwebp \
|
||||
mingw-w64-x86_64-openjpeg2 \
|
||||
mingw-w64-x86_64-libimagequant \
|
||||
mingw-w64-x86_64-libraqm
|
||||
|
||||
.. tab:: FreeBSD
|
||||
|
||||
.. Note:: Only FreeBSD 10 and 11 tested
|
||||
|
||||
Make sure you have Python's development libraries installed::
|
||||
|
||||
sudo pkg install python3
|
||||
|
||||
Prerequisites are installed on **FreeBSD 10 or 11** with::
|
||||
|
||||
sudo pkg install jpeg-turbo tiff webp lcms2 freetype2 openjpeg harfbuzz fribidi libxcb
|
||||
|
||||
Then see ``depends/install_raqm_cmake.sh`` to install libraqm.
|
||||
|
||||
.. tab:: Android
|
||||
|
||||
Basic Android support has been added for compilation within the Termux
|
||||
environment. The dependencies can be installed by::
|
||||
|
||||
pkg install -y python ndk-sysroot clang make \
|
||||
libjpeg-turbo
|
||||
|
||||
This has been tested within the Termux app on ChromeOS, on x86.
|
||||
|
||||
Installing
|
||||
^^^^^^^^^^
|
||||
|
||||
Once you have installed the prerequisites, to install Pillow from the source
|
||||
code on PyPI, run::
|
||||
|
||||
python3 -m pip install --upgrade pip
|
||||
python3 -m pip install --upgrade Pillow --no-binary :all:
|
||||
|
@ -211,9 +348,19 @@ prerequisites, it may be necessary to manually clear the pip cache or
|
|||
build without cache using the ``--no-cache-dir`` option to force a
|
||||
build with newly installed external libraries.
|
||||
|
||||
If you would like to install from a local copy of the source code instead, you
|
||||
can clone from GitHub with ``git clone https://github.com/python-pillow/Pillow``
|
||||
or download and extract the `compressed archive from PyPI`_.
|
||||
|
||||
After navigating to the Pillow directory, run::
|
||||
|
||||
python3 -m pip install --upgrade pip
|
||||
python3 -m pip install .
|
||||
|
||||
.. _compressed archive from PyPI: https://pypi.org/project/Pillow/#files
|
||||
|
||||
Build Options
|
||||
^^^^^^^^^^^^^
|
||||
"""""""""""""
|
||||
|
||||
* Environment variable: ``MAX_CONCURRENCY=n``. Pillow can use
|
||||
multiprocessing to build the extension. Setting ``MAX_CONCURRENCY``
|
||||
|
@ -256,157 +403,6 @@ Sample usage::
|
|||
|
||||
python3 -m pip install --upgrade Pillow --global-option="build_ext" --global-option="--enable-[feature]"
|
||||
|
||||
|
||||
Building on macOS
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
The Xcode command line tools are required to compile portions of
|
||||
Pillow. The tools are installed by running ``xcode-select --install``
|
||||
from the command line. The command line tools are required even if you
|
||||
have the full Xcode package installed. It may be necessary to run
|
||||
``sudo xcodebuild -license`` to accept the license prior to using the
|
||||
tools.
|
||||
|
||||
The easiest way to install external libraries is via `Homebrew
|
||||
<https://brew.sh/>`_. After you install Homebrew, run::
|
||||
|
||||
brew install libjpeg libtiff little-cms2 openjpeg webp
|
||||
|
||||
To install libraqm on macOS use Homebrew to install its dependencies::
|
||||
|
||||
brew install freetype harfbuzz fribidi
|
||||
|
||||
Then see ``depends/install_raqm_cmake.sh`` to install libraqm.
|
||||
|
||||
Now install Pillow with::
|
||||
|
||||
python3 -m pip install --upgrade pip
|
||||
python3 -m pip install --upgrade Pillow --no-binary :all:
|
||||
|
||||
or from within the uncompressed source directory::
|
||||
|
||||
python3 -m pip install .
|
||||
|
||||
Building on Windows
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
We recommend you use prebuilt wheels from PyPI.
|
||||
If you wish to compile Pillow manually, you can use the build scripts
|
||||
in the ``winbuild`` directory used for CI testing and development.
|
||||
These scripts require Visual Studio 2017 or newer and NASM.
|
||||
|
||||
Building on Windows using MSYS2/MinGW
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
To build Pillow using MSYS2, make sure you run the **MSYS2 MinGW 32-bit** or
|
||||
**MSYS2 MinGW 64-bit** console, *not* **MSYS2** directly.
|
||||
|
||||
The following instructions target the 64-bit build, for 32-bit
|
||||
replace all occurrences of ``mingw-w64-x86_64-`` with ``mingw-w64-i686-``.
|
||||
|
||||
Make sure you have Python and GCC installed::
|
||||
|
||||
pacman -S \
|
||||
mingw-w64-x86_64-gcc \
|
||||
mingw-w64-x86_64-python3 \
|
||||
mingw-w64-x86_64-python3-pip \
|
||||
mingw-w64-x86_64-python3-setuptools
|
||||
|
||||
Prerequisites are installed on **MSYS2 MinGW 64-bit** with::
|
||||
|
||||
pacman -S \
|
||||
mingw-w64-x86_64-libjpeg-turbo \
|
||||
mingw-w64-x86_64-zlib \
|
||||
mingw-w64-x86_64-libtiff \
|
||||
mingw-w64-x86_64-freetype \
|
||||
mingw-w64-x86_64-lcms2 \
|
||||
mingw-w64-x86_64-libwebp \
|
||||
mingw-w64-x86_64-openjpeg2 \
|
||||
mingw-w64-x86_64-libimagequant \
|
||||
mingw-w64-x86_64-libraqm
|
||||
|
||||
Now install Pillow with::
|
||||
|
||||
python3 -m pip install --upgrade pip
|
||||
python3 -m pip install --upgrade Pillow --no-binary :all:
|
||||
|
||||
|
||||
Building on FreeBSD
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. Note:: Only FreeBSD 10 and 11 tested
|
||||
|
||||
Make sure you have Python's development libraries installed::
|
||||
|
||||
sudo pkg install python3
|
||||
|
||||
Prerequisites are installed on **FreeBSD 10 or 11** with::
|
||||
|
||||
sudo pkg install jpeg-turbo tiff webp lcms2 freetype2 openjpeg harfbuzz fribidi libxcb
|
||||
|
||||
Then see ``depends/install_raqm_cmake.sh`` to install libraqm.
|
||||
|
||||
|
||||
Building on Linux
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
If you didn't build Python from source, make sure you have Python's
|
||||
development libraries installed.
|
||||
|
||||
In Debian or Ubuntu::
|
||||
|
||||
sudo apt-get install python3-dev python3-setuptools
|
||||
|
||||
In Fedora, the command is::
|
||||
|
||||
sudo dnf install python3-devel redhat-rpm-config
|
||||
|
||||
In Alpine, the command is::
|
||||
|
||||
sudo apk add python3-dev py3-setuptools
|
||||
|
||||
.. Note:: ``redhat-rpm-config`` is required on Fedora 23, but not earlier versions.
|
||||
|
||||
Prerequisites for **Ubuntu 16.04 LTS - 22.04 LTS** are installed with::
|
||||
|
||||
sudo apt-get install libtiff5-dev libjpeg8-dev libopenjp2-7-dev zlib1g-dev \
|
||||
libfreetype6-dev liblcms2-dev libwebp-dev tcl8.6-dev tk8.6-dev python3-tk \
|
||||
libharfbuzz-dev libfribidi-dev libxcb1-dev
|
||||
|
||||
To install libraqm, ``sudo apt-get install meson`` and then see
|
||||
``depends/install_raqm.sh``.
|
||||
|
||||
Prerequisites are installed on recent **Red Hat**, **CentOS** or **Fedora** with::
|
||||
|
||||
sudo dnf install libtiff-devel libjpeg-devel openjpeg2-devel zlib-devel \
|
||||
freetype-devel lcms2-devel libwebp-devel tcl-devel tk-devel \
|
||||
harfbuzz-devel fribidi-devel libraqm-devel libimagequant-devel libxcb-devel
|
||||
|
||||
Note that the package manager may be yum or DNF, depending on the
|
||||
exact distribution.
|
||||
|
||||
Prerequisites are installed for **Alpine** with::
|
||||
|
||||
sudo apk add tiff-dev jpeg-dev openjpeg-dev zlib-dev freetype-dev lcms2-dev \
|
||||
libwebp-dev tcl-dev tk-dev harfbuzz-dev fribidi-dev libimagequant-dev \
|
||||
libxcb-dev libpng-dev
|
||||
|
||||
See also the ``Dockerfile``\s in the Test Infrastructure repo
|
||||
(https://github.com/python-pillow/docker-images) for a known working
|
||||
install process for other tested distros.
|
||||
|
||||
Building on Android
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Basic Android support has been added for compilation within the Termux
|
||||
environment. The dependencies can be installed by::
|
||||
|
||||
pkg install -y python ndk-sysroot clang make \
|
||||
libjpeg-turbo
|
||||
|
||||
This has been tested within the Termux app on ChromeOS, on x86.
|
||||
|
||||
|
||||
Platform Support
|
||||
----------------
|
||||
|
||||
|
@ -440,10 +436,10 @@ These platforms are built and tested for every change.
|
|||
+----------------------------------+----------------------------+---------------------+
|
||||
| Debian 11 Bullseye | 3.9 | x86 |
|
||||
+----------------------------------+----------------------------+---------------------+
|
||||
| Fedora 35 | 3.10 | x86-64 |
|
||||
+----------------------------------+----------------------------+---------------------+
|
||||
| Fedora 36 | 3.10 | x86-64 |
|
||||
+----------------------------------+----------------------------+---------------------+
|
||||
| Fedora 37 | 3.11 | x86-64 |
|
||||
+----------------------------------+----------------------------+---------------------+
|
||||
| Gentoo | 3.9 | x86-64 |
|
||||
+----------------------------------+----------------------------+---------------------+
|
||||
| macOS 11 Big Sur | 3.7, 3.8, 3.9, 3.10, 3.11, | x86-64 |
|
||||
|
@ -464,7 +460,7 @@ These platforms are built and tested for every change.
|
|||
| +----------------------------+---------------------+
|
||||
| | 3.9 (MinGW) | x86, x86-64 |
|
||||
| +----------------------------+---------------------+
|
||||
| | 3.7, 3.8, 3.9 (Cygwin) | x86-64 |
|
||||
| | 3.8, 3.9 (Cygwin) | x86-64 |
|
||||
+----------------------------------+----------------------------+---------------------+
|
||||
|
||||
|
||||
|
@ -482,11 +478,13 @@ These platforms have been reported to work at the versions mentioned.
|
|||
| Operating system | | Tested Python | | Latest tested | | Tested |
|
||||
| | | versions | | Pillow version | | processors |
|
||||
+==================================+===========================+==================+==============+
|
||||
| macOS 12 Monterey | 3.7, 3.8, 3.9, 3.10 | 9.2.0 |arm |
|
||||
| macOS 13 Ventura | 3.7, 3.8, 3.9, 3.10, 3.11 | 9.3.0 |arm |
|
||||
+----------------------------------+---------------------------+------------------+--------------+
|
||||
| macOS 12 Monterey | 3.7, 3.8, 3.9, 3.10, 3.11 | 9.3.0 |arm |
|
||||
+----------------------------------+---------------------------+------------------+--------------+
|
||||
| macOS 11 Big Sur | 3.7, 3.8, 3.9, 3.10 | 8.4.0 |arm |
|
||||
| +---------------------------+------------------+--------------+
|
||||
| | 3.7, 3.8, 3.9, 3.10 | 9.2.0 |x86-64 |
|
||||
| | 3.7, 3.8, 3.9, 3.10, 3.11 | 9.3.0 |x86-64 |
|
||||
| +---------------------------+------------------+ |
|
||||
| | 3.6 | 8.4.0 | |
|
||||
+----------------------------------+---------------------------+------------------+--------------+
|
||||
|
|
|
@ -4,8 +4,50 @@
|
|||
:py:mod:`~PIL.ExifTags` Module
|
||||
==============================
|
||||
|
||||
The :py:mod:`~PIL.ExifTags` module exposes two dictionaries which
|
||||
provide constants and clear-text names for various well-known EXIF tags.
|
||||
The :py:mod:`~PIL.ExifTags` module exposes several ``enum.IntEnum`` classes
|
||||
which provide constants and clear-text names for various well-known EXIF tags.
|
||||
|
||||
.. py:data:: Base
|
||||
|
||||
>>> from PIL.ExifTags import Base
|
||||
>>> Base.ImageDescription.value
|
||||
270
|
||||
>>> Base(270).name
|
||||
'ImageDescription'
|
||||
|
||||
.. py:data:: GPS
|
||||
|
||||
>>> from PIL.ExifTags import GPS
|
||||
>>> GPS.GPSDestLatitude.value
|
||||
20
|
||||
>>> GPS(20).name
|
||||
'GPSDestLatitude'
|
||||
|
||||
.. py:data:: Interop
|
||||
|
||||
>>> from PIL.ExifTags import Interop
|
||||
>>> Interop.RelatedImageFileFormat.value
|
||||
4096
|
||||
>>> Interop(4096).name
|
||||
'RelatedImageFileFormat'
|
||||
|
||||
.. py:data:: IFD
|
||||
|
||||
>>> from PIL.ExifTags import IFD
|
||||
>>> IFD.Exif.value
|
||||
34665
|
||||
>>> IFD(34665).name
|
||||
'Exif'
|
||||
|
||||
.. py:data:: LightSource
|
||||
|
||||
>>> from PIL.ExifTags import LightSource
|
||||
>>> LightSource.Unknown.value
|
||||
0
|
||||
>>> LightSource(0).name
|
||||
'Unknown'
|
||||
|
||||
Two of these values are also exposed as dictionaries.
|
||||
|
||||
.. py:data:: TAGS
|
||||
:type: dict
|
||||
|
@ -26,22 +68,3 @@ provide constants and clear-text names for various well-known EXIF tags.
|
|||
>>> from PIL.ExifTags import GPSTAGS
|
||||
>>> GPSTAGS[20]
|
||||
'GPSDestLatitude'
|
||||
|
||||
|
||||
These values are also exposed as ``enum.IntEnum`` classes.
|
||||
|
||||
.. py:data:: Base
|
||||
|
||||
>>> from PIL.ExifTags import Base
|
||||
>>> Base.ImageDescription.value
|
||||
270
|
||||
>>> Base(270).name
|
||||
'ImageDescription'
|
||||
|
||||
.. py:data:: GPS
|
||||
|
||||
>>> from PIL.ExifTags import GPS
|
||||
>>> GPS.GPSDestLatitude.value
|
||||
20
|
||||
>>> GPS(20).name
|
||||
'GPSDestLatitude'
|
||||
|
|
105
docs/releasenotes/9.4.0.rst
Normal file
|
@ -0,0 +1,105 @@
|
|||
9.4.0
|
||||
-----
|
||||
|
||||
Backwards Incompatible Changes
|
||||
==============================
|
||||
|
||||
TODO
|
||||
^^^^
|
||||
|
||||
TODO
|
||||
|
||||
Deprecations
|
||||
============
|
||||
|
||||
TODO
|
||||
^^^^
|
||||
|
||||
TODO
|
||||
|
||||
API Changes
|
||||
===========
|
||||
|
||||
TODO
|
||||
^^^^
|
||||
|
||||
TODO
|
||||
|
||||
API Additions
|
||||
=============
|
||||
|
||||
Added start position for getmask and getmask2
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Text may render differently when starting at fractional coordinates, so
|
||||
:py:meth:`.FreeTypeFont.getmask` and :py:meth:`.FreeTypeFont.getmask2` now
|
||||
support a ``start`` argument. This tuple of horizontal and vertical offset
|
||||
will be used internally by :py:meth:`.ImageDraw.text` to more accurately place
|
||||
text at the ``xy`` coordinates.
|
||||
|
||||
Added the ``exact`` encoding option for WebP
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The ``exact`` encoding option for WebP is now supported. The WebP encoder
|
||||
removes the hidden RGB values for better compression by default in libwebp 0.5
|
||||
or later. By setting this option to ``True``, the encoder will keep the hidden
|
||||
RGB values.
|
||||
|
||||
Added IFD, Interop and LightSource ExifTags enums
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
:py:data:`~PIL.ExifTags.IFD` has been added, allowing enums to be used with
|
||||
:py:meth:`~PIL.Image.Exif.get_ifd`::
|
||||
|
||||
from PIL import Image, ExifTags
|
||||
im = Image.open("Tests/images/flower.jpg")
|
||||
print(im.getexif().get_ifd(ExifTags.IFD.Exif))
|
||||
|
||||
``IFD1`` can also be used with :py:meth:`~PIL.Image.Exif.get_ifd`, but it should
|
||||
not be used in other contexts, as the enum value is only internally meaningful.
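
For instance, reusing the test image from above (the result may simply be an
empty dictionary if no thumbnail IFD is present)::

    from PIL import Image, ExifTags

    im = Image.open("Tests/images/flower.jpg")
    print(im.getexif().get_ifd(ExifTags.IFD.IFD1))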
|
||||
|
||||
:py:data:`~PIL.ExifTags.Interop` has been added for tags within the Interop IFD::
|
||||
|
||||
from PIL import Image, ExifTags
|
||||
im = Image.open("Tests/images/flower.jpg")
|
||||
interop_ifd = im.getexif().get_ifd(ExifTags.IFD.Interop)
|
||||
print(interop_ifd.get(ExifTags.Interop.InteropIndex)) # R98
|
||||
|
||||
:py:data:`~PIL.ExifTags.LightSource` has been added for values within the LightSource
|
||||
tag::
|
||||
|
||||
from PIL import Image, ExifTags
|
||||
im = Image.open("Tests/images/iptc.jpg")
|
||||
exif_ifd = im.getexif().get_ifd(ExifTags.IFD.Exif)
|
||||
print(ExifTags.LightSource(exif_ifd[0x9208])) # LightSource.Unknown
|
||||
|
||||
getxmp()
|
||||
^^^^^^^^
|
||||
|
||||
`XMP data <https://en.wikipedia.org/wiki/Extensible_Metadata_Platform>`_ can now be
|
||||
decoded for WEBP images through ``getxmp()``.
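
For example (the path is a placeholder; ``defusedxml`` must be installed, and
an empty dictionary is returned when the file has no XMP chunk)::

    from PIL import Image

    with Image.open("photo.webp") as im:  # placeholder path
        print(im.getxmp())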
|
||||
|
||||
Writing JPEG comments
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
When saving a JPEG image, a comment can now be written from
|
||||
:py:attr:`~PIL.Image.Image.info`, or by using an argument when saving::
|
||||
|
||||
im.save(out, comment="Test comment")
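
A self-contained sketch of the ``info`` route (paths are placeholders)::

    from PIL import Image

    with Image.open("input.jpg") as im:
        im.info["comment"] = "Test comment"  # picked up automatically on save
        im.save("output.jpg")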
|
||||
|
||||
Security
|
||||
========
|
||||
|
||||
TODO
|
||||
^^^^
|
||||
|
||||
TODO
|
||||
|
||||
Other Changes
|
||||
=============
|
||||
|
||||
Added support for DDS L and LA images
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Support has been added to read and write L and LA DDS images in the uncompressed
|
||||
format, known as "luminance" textures.
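
A minimal sketch of the round trip (the path is a placeholder)::

    from PIL import Image

    gray = Image.new("L", (64, 64), 128)
    gray.save("luminance.dds")  # written as an uncompressed luminance texture

    with Image.open("luminance.dds") as im:
        print(im.mode)  # "L"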
|
|
@ -14,6 +14,7 @@ expected to be backported to earlier versions.
|
|||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
9.4.0
|
||||
9.3.0
|
||||
9.2.0
|
||||
9.1.1
|
||||
|
|
|
@ -46,6 +46,7 @@ docs =
|
|||
olefile
|
||||
sphinx>=2.4
|
||||
sphinx-copybutton
|
||||
sphinx-inline-tabs
|
||||
sphinx-issues>=3.0.1
|
||||
sphinx-removed-in
|
||||
sphinxext-opengraph
|
||||
|
|
|
@ -373,6 +373,9 @@ class BLP1Decoder(_BLPBaseDecoder):
|
|||
data = BytesIO(data)
|
||||
image = JpegImageFile(data)
|
||||
Image._decompression_bomb_check(image.size)
|
||||
if image.mode == "CMYK":
|
||||
decoder_name, extents, offset, args = image.tile[0]
|
||||
image.tile = [(decoder_name, extents, offset, (args[0], "CMYK"))]
|
||||
r, g, b = image.convert("RGB").split()
|
||||
image = Image.merge("RGB", (b, g, r))
|
||||
self.set_as_raw(image.tobytes())
|
||||
|
|
|
@ -135,11 +135,19 @@ class DdsImageFile(ImageFile.ImageFile):
|
|||
fourcc = header.read(4)
|
||||
(bitcount,) = struct.unpack("<I", header.read(4))
|
||||
masks = struct.unpack("<4I", header.read(16))
|
||||
if pfflags & DDPF_RGB:
|
||||
if pfflags & DDPF_LUMINANCE:
|
||||
# Texture contains uncompressed L or LA data
|
||||
if pfflags & DDPF_ALPHAPIXELS:
|
||||
self.mode = "LA"
|
||||
else:
|
||||
self.mode = "L"
|
||||
|
||||
self.tile = [("raw", (0, 0) + self.size, 0, (self.mode, 0, 1))]
|
||||
elif pfflags & DDPF_RGB:
|
||||
# Texture contains uncompressed RGB data
|
||||
masks = {mask: ["R", "G", "B", "A"][i] for i, mask in enumerate(masks)}
|
||||
rawmode = ""
|
||||
if bitcount == 32:
|
||||
if pfflags & DDPF_ALPHAPIXELS:
|
||||
rawmode += masks[0xFF000000]
|
||||
else:
|
||||
self.mode = "RGB"
|
||||
|
@ -223,9 +231,24 @@ class DdsImageFile(ImageFile.ImageFile):
|
|||
|
||||
|
||||
def _save(im, fp, filename):
|
||||
if im.mode not in ("RGB", "RGBA"):
|
||||
if im.mode not in ("RGB", "RGBA", "L", "LA"):
|
||||
raise OSError(f"cannot write mode {im.mode} as DDS")
|
||||
|
||||
rawmode = im.mode
|
||||
masks = [0xFF0000, 0xFF00, 0xFF]
|
||||
if im.mode in ("L", "LA"):
|
||||
pixel_flags = DDPF_LUMINANCE
|
||||
else:
|
||||
pixel_flags = DDPF_RGB
|
||||
rawmode = rawmode[::-1]
|
||||
if im.mode in ("LA", "RGBA"):
|
||||
pixel_flags |= DDPF_ALPHAPIXELS
|
||||
masks.append(0xFF000000)
|
||||
|
||||
bitcount = len(masks) * 8
|
||||
while len(masks) < 4:
|
||||
masks.append(0)
|
||||
|
||||
fp.write(
|
||||
o32(DDS_MAGIC)
|
||||
+ o32(124) # header size
|
||||
|
@ -234,18 +257,15 @@ def _save(im, fp, filename):
|
|||
) # flags
|
||||
+ o32(im.height)
|
||||
+ o32(im.width)
|
||||
+ o32((im.width * (32 if im.mode == "RGBA" else 24) + 7) // 8) # pitch
|
||||
+ o32((im.width * bitcount + 7) // 8) # pitch
|
||||
+ o32(0) # depth
|
||||
+ o32(0) # mipmaps
|
||||
+ o32(0) * 11 # reserved
|
||||
+ o32(32) # pfsize
|
||||
+ o32(DDS_RGBA if im.mode == "RGBA" else DDPF_RGB) # pfflags
|
||||
+ o32(pixel_flags) # pfflags
|
||||
+ o32(0) # fourcc
|
||||
+ o32(32 if im.mode == "RGBA" else 24) # bitcount
|
||||
+ o32(0xFF0000) # rbitmask
|
||||
+ o32(0xFF00) # gbitmask
|
||||
+ o32(0xFF) # bbitmask
|
||||
+ o32(0xFF000000 if im.mode == "RGBA" else 0) # abitmask
|
||||
+ o32(bitcount) # bitcount
|
||||
+ b"".join(o32(mask) for mask in masks) # rgbabitmask
|
||||
+ o32(DDSCAPS_TEXTURE) # dwCaps
|
||||
+ o32(0) # dwCaps2
|
||||
+ o32(0) # dwCaps3
|
||||
|
@ -255,7 +275,7 @@ def _save(im, fp, filename):
|
|||
if im.mode == "RGBA":
|
||||
r, g, b, a = im.split()
|
||||
im = Image.merge("RGBA", (a, r, g, b))
|
||||
ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (im.mode[::-1], 0, 1))])
|
||||
ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))])
|
||||
|
||||
|
||||
def _accept(prefix):
|
||||
|
|
|
@ -338,3 +338,43 @@ class GPS(IntEnum):
|
|||
|
||||
"""Maps EXIF GPS tags to tag names."""
|
||||
GPSTAGS = {i.value: i.name for i in GPS}
|
||||
|
||||
|
||||
class Interop(IntEnum):
|
||||
InteropIndex = 1
|
||||
InteropVersion = 2
|
||||
RelatedImageFileFormat = 4096
|
||||
RelatedImageWidth = 4097
|
||||
RelatedImageHeight = 4098
|
||||
|
||||
|
||||
class IFD(IntEnum):
|
||||
Exif = 34665
|
||||
GPSInfo = 34853
|
||||
Makernote = 37500
|
||||
Interop = 40965
|
||||
IFD1 = -1
|
||||
|
||||
|
||||
class LightSource(IntEnum):
|
||||
Unknown = 0
|
||||
Daylight = 1
|
||||
Fluorescent = 2
|
||||
Tungsten = 3
|
||||
Flash = 4
|
||||
Fine = 9
|
||||
Cloudy = 10
|
||||
Shade = 11
|
||||
DaylightFluorescent = 12
|
||||
DayWhiteFluorescent = 13
|
||||
CoolWhiteFluorescent = 14
|
||||
WhiteFluorescent = 15
|
||||
StandardLightA = 17
|
||||
StandardLightB = 18
|
||||
StandardLightC = 19
|
||||
D55 = 20
|
||||
D65 = 21
|
||||
D75 = 22
|
||||
D50 = 23
|
||||
ISO = 24
|
||||
Other = 255
|
||||
|
|
|
@ -565,6 +565,16 @@ def _write_single_frame(im, fp, palette):
|
|||
fp.write(b"\0") # end of image data
|
||||
|
||||
|
||||
def _getbbox(base_im, im_frame):
|
||||
if _get_palette_bytes(im_frame) == _get_palette_bytes(base_im):
|
||||
delta = ImageChops.subtract_modulo(im_frame, base_im)
|
||||
else:
|
||||
delta = ImageChops.subtract_modulo(
|
||||
im_frame.convert("RGB"), base_im.convert("RGB")
|
||||
)
|
||||
return delta.getbbox()
|
||||
|
||||
|
||||
def _write_multiple_frames(im, fp, palette):
|
||||
|
||||
duration = im.encoderinfo.get("duration")
|
||||
|
@ -598,6 +608,12 @@ def _write_multiple_frames(im, fp, palette):
|
|||
if im_frames:
|
||||
# delta frame
|
||||
previous = im_frames[-1]
|
||||
bbox = _getbbox(previous["im"], im_frame)
|
||||
if not bbox:
|
||||
# This frame is identical to the previous frame
|
||||
if encoderinfo.get("duration"):
|
||||
previous["encoderinfo"]["duration"] += encoderinfo["duration"]
|
||||
continue
|
||||
if encoderinfo.get("disposal") == 2:
|
||||
if background_im is None:
|
||||
color = im.encoderinfo.get(
|
||||
|
@ -606,21 +622,7 @@ def _write_multiple_frames(im, fp, palette):
|
|||
background = _get_background(im_frame, color)
|
||||
background_im = Image.new("P", im_frame.size, background)
|
||||
background_im.putpalette(im_frames[0]["im"].palette)
|
||||
base_im = background_im
|
||||
else:
|
||||
base_im = previous["im"]
|
||||
if _get_palette_bytes(im_frame) == _get_palette_bytes(base_im):
|
||||
delta = ImageChops.subtract_modulo(im_frame, base_im)
|
||||
else:
|
||||
delta = ImageChops.subtract_modulo(
|
||||
im_frame.convert("RGB"), base_im.convert("RGB")
|
||||
)
|
||||
bbox = delta.getbbox()
|
||||
if not bbox:
|
||||
# This frame is identical to the previous frame
|
||||
if duration:
|
||||
previous["encoderinfo"]["duration"] += encoderinfo["duration"]
|
||||
continue
|
||||
bbox = _getbbox(background_im, im_frame)
|
||||
else:
|
||||
bbox = None
|
||||
im_frames.append({"im": im_frame, "bbox": bbox, "encoderinfo": encoderinfo})
|
||||
|
@ -886,20 +888,23 @@ def _get_palette_bytes(im):
|
|||
def _get_background(im, info_background):
|
||||
background = 0
|
||||
if info_background:
|
||||
background = info_background
|
||||
if isinstance(background, tuple):
|
||||
if isinstance(info_background, tuple):
|
||||
# WebPImagePlugin stores an RGBA value in info["background"]
|
||||
# So it must be converted to the same format as GifImagePlugin's
|
||||
# info["background"] - a global color table index
|
||||
try:
|
||||
background = im.palette.getcolor(background, im)
|
||||
background = im.palette.getcolor(info_background, im)
|
||||
except ValueError as e:
|
||||
if str(e) == "cannot allocate more than 256 colors":
|
||||
if str(e) not in (
|
||||
# If all 256 colors are in use,
|
||||
# then there is no need for the background color
|
||||
return 0
|
||||
else:
|
||||
"cannot allocate more than 256 colors",
|
||||
# Ignore non-opaque WebP background
|
||||
"cannot add non-opaque RGBA color to RGB palette",
|
||||
):
|
||||
raise
|
||||
else:
|
||||
background = info_background
|
||||
return background
|
||||
|
||||
|
||||
|
|
|
@ -327,6 +327,7 @@ class IcoImageFile(ImageFile.ImageFile):
|
|||
# if tile is PNG, it won't really be loaded yet
|
||||
im.load()
|
||||
self.im = im.im
|
||||
self.pyaccess = None
|
||||
self.mode = im.mode
|
||||
if im.size != self.size:
|
||||
warnings.warn("Image was not the expected size")
|
||||
|
|
125
src/PIL/Image.py
|
@ -47,7 +47,14 @@ except ImportError:
|
|||
# VERSION was removed in Pillow 6.0.0.
|
||||
# PILLOW_VERSION was removed in Pillow 9.0.0.
|
||||
# Use __version__ instead.
|
||||
from . import ImageMode, TiffTags, UnidentifiedImageError, __version__, _plugins
|
||||
from . import (
|
||||
ExifTags,
|
||||
ImageMode,
|
||||
TiffTags,
|
||||
UnidentifiedImageError,
|
||||
__version__,
|
||||
_plugins,
|
||||
)
|
||||
from ._binary import i32le, o32be, o32le
|
||||
from ._deprecate import deprecate
|
||||
from ._util import DeferredError, is_path
|
||||
|
@ -704,7 +711,6 @@ class Image:
|
|||
|
||||
def __setstate__(self, state):
|
||||
Image.__init__(self)
|
||||
self.tile = []
|
||||
info, mode, size, palette, data = state
|
||||
self.info = info
|
||||
self.mode = mode
|
||||
|
@ -1447,6 +1453,49 @@ class Image:
|
|||
self._exif._loaded = False
|
||||
self.getexif()
|
||||
|
||||
def get_child_images(self):
|
||||
child_images = []
|
||||
exif = self.getexif()
|
||||
ifds = []
|
||||
if ExifTags.Base.SubIFDs in exif:
|
||||
subifd_offsets = exif[ExifTags.Base.SubIFDs]
|
||||
if subifd_offsets:
|
||||
if not isinstance(subifd_offsets, tuple):
|
||||
subifd_offsets = (subifd_offsets,)
|
||||
for subifd_offset in subifd_offsets:
|
||||
ifds.append((exif._get_ifd_dict(subifd_offset), subifd_offset))
|
||||
ifd1 = exif.get_ifd(ExifTags.IFD.IFD1)
|
||||
if ifd1 and ifd1.get(513):
|
||||
ifds.append((ifd1, exif._info.next))
|
||||
|
||||
offset = None
|
||||
for ifd, ifd_offset in ifds:
|
||||
current_offset = self.fp.tell()
|
||||
if offset is None:
|
||||
offset = current_offset
|
||||
|
||||
fp = self.fp
|
||||
thumbnail_offset = ifd.get(513)
|
||||
if thumbnail_offset is not None:
|
||||
try:
|
||||
thumbnail_offset += self._exif_offset
|
||||
except AttributeError:
|
||||
pass
|
||||
self.fp.seek(thumbnail_offset)
|
||||
data = self.fp.read(ifd.get(514))
|
||||
fp = io.BytesIO(data)
|
||||
|
||||
with open(fp) as im:
|
||||
if thumbnail_offset is None:
|
||||
im._frame_pos = [ifd_offset]
|
||||
im._seek(0)
|
||||
im.load()
|
||||
child_images.append(im)
|
||||
|
||||
if offset is not None:
|
||||
self.fp.seek(offset)
|
||||
return child_images
|
||||
|
||||
def getim(self):
|
||||
"""
|
||||
Returns a capsule that points to the internal image memory.
|
||||
|
@ -1482,7 +1531,8 @@ class Image:
|
|||
def apply_transparency(self):
|
||||
"""
|
||||
If a P mode image has a "transparency" key in the info dictionary,
|
||||
remove the key and apply the transparency to the palette instead.
|
||||
remove the key and instead apply the transparency to the palette.
|
||||
Otherwise, the image is unchanged.
|
||||
"""
|
||||
if self.mode != "P" or "transparency" not in self.info:
|
||||
return
|
||||
|
@ -3368,8 +3418,7 @@ def registered_extensions():
|
|||
Returns a dictionary containing all file extensions belonging
|
||||
to registered plugins
|
||||
"""
|
||||
if not EXTENSION:
|
||||
init()
|
||||
init()
|
||||
return EXTENSION
|
||||
|
||||
|
||||
|
@ -3503,6 +3552,7 @@ class Exif(MutableMapping):
|
|||
|
||||
def __init__(self):
|
||||
self._data = {}
|
||||
self._hidden_data = {}
|
||||
self._ifds = {}
|
||||
self._info = None
|
||||
self._loaded_exif = None
|
||||
|
@ -3556,6 +3606,7 @@ class Exif(MutableMapping):
|
|||
return
|
||||
self._loaded_exif = data
|
||||
self._data.clear()
|
||||
self._hidden_data.clear()
|
||||
self._ifds.clear()
|
||||
if data and data.startswith(b"Exif\x00\x00"):
|
||||
data = data[6:]
|
||||
|
@ -3576,6 +3627,7 @@ class Exif(MutableMapping):
|
|||
def load_from_fp(self, fp, offset=None):
|
||||
self._loaded_exif = None
|
||||
self._data.clear()
|
||||
self._hidden_data.clear()
|
||||
self._ifds.clear()
|
||||
|
||||
# process dictionary
|
||||
|
@ -3598,14 +3650,16 @@ class Exif(MutableMapping):
|
|||
merged_dict = dict(self)
|
||||
|
||||
# get EXIF extension
|
||||
if 0x8769 in self:
|
||||
ifd = self._get_ifd_dict(self[0x8769])
|
||||
if ExifTags.IFD.Exif in self:
|
||||
ifd = self._get_ifd_dict(self[ExifTags.IFD.Exif])
|
||||
if ifd:
|
||||
merged_dict.update(ifd)
|
||||
|
||||
# GPS
|
||||
if 0x8825 in self:
|
||||
merged_dict[0x8825] = self._get_ifd_dict(self[0x8825])
|
||||
if ExifTags.IFD.GPSInfo in self:
|
||||
merged_dict[ExifTags.IFD.GPSInfo] = self._get_ifd_dict(
|
||||
self[ExifTags.IFD.GPSInfo]
|
||||
)
|
||||
|
||||
return merged_dict
|
||||
|
||||
|
@ -3615,31 +3669,35 @@ class Exif(MutableMapping):
|
|||
head = self._get_head()
|
||||
ifd = TiffImagePlugin.ImageFileDirectory_v2(ifh=head)
|
||||
for tag, value in self.items():
|
||||
if tag in [0x8769, 0x8225, 0x8825] and not isinstance(value, dict):
|
||||
if tag in [
|
||||
ExifTags.IFD.Exif,
|
||||
ExifTags.IFD.GPSInfo,
|
||||
] and not isinstance(value, dict):
|
||||
value = self.get_ifd(tag)
|
||||
if (
|
||||
tag == 0x8769
|
||||
and 0xA005 in value
|
||||
and not isinstance(value[0xA005], dict)
|
||||
tag == ExifTags.IFD.Exif
|
||||
and ExifTags.IFD.Interop in value
|
||||
and not isinstance(value[ExifTags.IFD.Interop], dict)
|
||||
):
|
||||
value = value.copy()
|
||||
value[0xA005] = self.get_ifd(0xA005)
|
||||
value[ExifTags.IFD.Interop] = self.get_ifd(ExifTags.IFD.Interop)
|
||||
ifd[tag] = value
|
||||
return b"Exif\x00\x00" + head + ifd.tobytes(offset)
|
||||
|
||||
def get_ifd(self, tag):
|
||||
if tag not in self._ifds:
|
||||
if tag in [0x8769, 0x8825]:
|
||||
# exif, gpsinfo
|
||||
if tag in self:
|
||||
self._ifds[tag] = self._get_ifd_dict(self[tag])
|
||||
elif tag in [0xA005, 0x927C]:
|
||||
# interop, makernote
|
||||
if 0x8769 not in self._ifds:
|
||||
self.get_ifd(0x8769)
|
||||
tag_data = self._ifds[0x8769][tag]
|
||||
if tag == 0x927C:
|
||||
# makernote
|
||||
if tag == ExifTags.IFD.IFD1:
|
||||
if self._info is not None:
|
||||
self._ifds[tag] = self._get_ifd_dict(self._info.next)
|
||||
elif tag in [ExifTags.IFD.Exif, ExifTags.IFD.GPSInfo]:
|
||||
offset = self._hidden_data.get(tag, self.get(tag))
|
||||
if offset is not None:
|
||||
self._ifds[tag] = self._get_ifd_dict(offset)
|
||||
elif tag in [ExifTags.IFD.Interop, ExifTags.IFD.Makernote]:
|
||||
if ExifTags.IFD.Exif not in self._ifds:
|
||||
self.get_ifd(ExifTags.IFD.Exif)
|
||||
tag_data = self._ifds[ExifTags.IFD.Exif][tag]
|
||||
if tag == ExifTags.IFD.Makernote:
|
||||
from .TiffImagePlugin import ImageFileDirectory_v2
|
||||
|
||||
if tag_data[:8] == b"FUJIFILM":
|
||||
|
@ -3715,9 +3773,22 @@ class Exif(MutableMapping):
|
|||
makernote = {0x1101: dict(self._fixup_dict(camerainfo))}
|
||||
self._ifds[tag] = makernote
|
||||
else:
|
||||
# interop
|
||||
# Interop
|
||||
self._ifds[tag] = self._get_ifd_dict(tag_data)
|
||||
return self._ifds.get(tag, {})
|
||||
ifd = self._ifds.get(tag, {})
|
||||
if tag == ExifTags.IFD.Exif and self._hidden_data:
|
||||
ifd = {
|
||||
k: v
|
||||
for (k, v) in ifd.items()
|
||||
if k not in (ExifTags.IFD.Interop, ExifTags.IFD.Makernote)
|
||||
}
|
||||
return ifd
|
||||
|
||||
def hide_offsets(self):
|
||||
for tag in (ExifTags.IFD.Exif, ExifTags.IFD.GPSInfo):
|
||||
if tag in self:
|
||||
self._hidden_data[tag] = self[tag]
|
||||
del self[tag]
|
||||
|
||||
def __str__(self):
|
||||
if self._info is not None:
|
||||
|
|
|
@ -452,7 +452,11 @@ class ImageDraw:
|
|||
mode = self.fontmode
|
||||
if stroke_width == 0 and embedded_color:
|
||||
mode = "RGBA"
|
||||
coord = xy
|
||||
coord = []
|
||||
start = []
|
||||
for i in range(2):
|
||||
coord.append(int(xy[i]))
|
||||
start.append(math.modf(xy[i])[0])
|
||||
try:
|
||||
mask, offset = font.getmask2(
|
||||
text,
|
||||
|
@ -463,6 +467,7 @@ class ImageDraw:
|
|||
stroke_width=stroke_width,
|
||||
anchor=anchor,
|
||||
ink=ink,
|
||||
start=start,
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
|
@ -478,6 +483,7 @@ class ImageDraw:
|
|||
stroke_width,
|
||||
anchor,
|
||||
ink,
|
||||
start=start,
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
|
@ -490,7 +496,7 @@ class ImageDraw:
|
|||
# extract mask and set text alpha
|
||||
color, mask = mask, mask.getband(3)
|
||||
color.fillband(3, (ink >> 24) & 0xFF)
|
||||
x, y = (int(c) for c in coord)
|
||||
x, y = coord
|
||||
self.im.paste(color, (x, y, x + mask.size[0], y + mask.size[1]), mask)
|
||||
else:
|
||||
self.draw.draw_bitmap(coord, mask, ink)
|
||||
|
|
|
@ -137,6 +137,10 @@ class ImageFile(Image.Image):
|
|||
if self.format is not None:
|
||||
return Image.MIME.get(self.format.upper())
|
||||
|
||||
def __setstate__(self, state):
|
||||
self.tile = []
|
||||
super().__setstate__(state)
|
||||
|
||||
def verify(self):
|
||||
"""Check file integrity"""
|
||||
|
||||
|
|
|
@ -26,6 +26,7 @@
|
|||
#
|
||||
|
||||
import base64
|
||||
import math
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
|
@ -588,6 +589,7 @@ class FreeTypeFont:
|
|||
stroke_width=0,
|
||||
anchor=None,
|
||||
ink=0,
|
||||
start=None,
|
||||
):
|
||||
"""
|
||||
Create a bitmap for the text.
|
||||
|
@ -647,6 +649,11 @@ class FreeTypeFont:
|
|||
|
||||
.. versionadded:: 8.0.0
|
||||
|
||||
:param start: Tuple of horizontal and vertical offset, as text may render
|
||||
differently when starting at fractional coordinates.
|
||||
|
||||
.. versionadded:: 9.4.0
|
||||
|
||||
:return: An internal PIL storage memory instance as defined by the
|
||||
:py:mod:`PIL.Image.core` interface module.
|
||||
"""
|
||||
|
@ -659,6 +666,7 @@ class FreeTypeFont:
|
|||
stroke_width=stroke_width,
|
||||
anchor=anchor,
|
||||
ink=ink,
|
||||
start=start,
|
||||
)[0]
|
||||
|
||||
def getmask2(
|
||||
|
@ -672,6 +680,7 @@ class FreeTypeFont:
|
|||
stroke_width=0,
|
||||
anchor=None,
|
||||
ink=0,
|
||||
start=None,
|
||||
*args,
|
||||
**kwargs,
|
||||
):
|
||||
|
@ -739,6 +748,11 @@ class FreeTypeFont:
|
|||
|
||||
.. versionadded:: 8.0.0
|
||||
|
||||
:param start: Tuple of horizontal and vertical offset, as text may render
|
||||
differently when starting at fractional coordinates.
|
||||
|
||||
.. versionadded:: 9.4.0
|
||||
|
||||
:return: A tuple of an internal PIL storage memory instance as defined by the
|
||||
:py:mod:`PIL.Image.core` interface module, and the text offset, the
|
||||
gap between the starting coordinate and the first marking
|
||||
|
@ -750,12 +764,23 @@ class FreeTypeFont:
|
|||
size, offset = self.font.getsize(
|
||||
text, mode, direction, features, language, anchor
|
||||
)
|
||||
size = size[0] + stroke_width * 2, size[1] + stroke_width * 2
|
||||
if start is None:
|
||||
start = (0, 0)
|
||||
size = tuple(math.ceil(size[i] + stroke_width * 2 + start[i]) for i in range(2))
|
||||
offset = offset[0] - stroke_width, offset[1] - stroke_width
|
||||
Image._decompression_bomb_check(size)
|
||||
im = fill("RGBA" if mode == "RGBA" else "L", size, 0)
|
||||
self.font.render(
|
||||
text, im.id, mode, direction, features, language, stroke_width, ink
|
||||
text,
|
||||
im.id,
|
||||
mode,
|
||||
direction,
|
||||
features,
|
||||
language,
|
||||
stroke_width,
|
||||
ink,
|
||||
start[0],
|
||||
start[1],
|
||||
)
|
||||
return im, offset
|
||||
|
||||
|
|
|
@ -125,7 +125,7 @@ class Viewer:
|
|||
path = options.pop("file")
|
||||
else:
|
||||
raise TypeError("Missing required argument: 'path'")
|
||||
os.system(self.get_command(path, **options))
|
||||
os.system(self.get_command(path, **options)) # nosec
|
||||
return 1
|
||||
|
||||
|
||||
|
|
|
@ -45,6 +45,7 @@ from . import Image, ImageFile, TiffImagePlugin
|
|||
from ._binary import i16be as i16
|
||||
from ._binary import i32be as i32
|
||||
from ._binary import o8
|
||||
from ._binary import o16be as o16
|
||||
from ._deprecate import deprecate
|
||||
from .JpegPresets import presets
|
||||
|
||||
|
@ -89,6 +90,7 @@ def APP(self, marker):
|
|||
if "exif" not in self.info:
|
||||
# extract EXIF information (incomplete)
|
||||
self.info["exif"] = s # FIXME: value will change
|
||||
self._exif_offset = self.fp.tell() - n + 6
|
||||
elif marker == 0xFFE2 and s[:5] == b"FPXR\0":
|
||||
# extract FlashPix information (incomplete)
|
||||
self.info["flashpix"] = s # FIXME: value will change
|
||||
|
@ -724,7 +726,7 @@ def _save(im, fp, filename):
|
|||
icc_profile = icc_profile[MAX_DATA_BYTES_IN_MARKER:]
|
||||
i = 1
|
||||
for marker in markers:
|
||||
size = struct.pack(">H", 2 + ICC_OVERHEAD_LEN + len(marker))
|
||||
size = o16(2 + ICC_OVERHEAD_LEN + len(marker))
|
||||
extra += (
|
||||
b"\xFF\xE2"
|
||||
+ size
|
||||
|
@ -735,6 +737,8 @@ def _save(im, fp, filename):
|
|||
)
|
||||
i += 1
|
||||
|
||||
comment = info.get("comment", im.info.get("comment"))
|
||||
|
||||
# "progressive" is the official name, but older documentation
|
||||
# says "progression"
|
||||
# FIXME: issue a warning if the wrong form is used (post-1.1.7)
|
||||
|
@ -757,6 +761,7 @@ def _save(im, fp, filename):
|
|||
dpi[1],
|
||||
subsampling,
|
||||
qtables,
|
||||
comment,
|
||||
extra,
|
||||
exif,
|
||||
)
|
||||
|
|
|
@ -22,7 +22,14 @@ import itertools
|
|||
import os
|
||||
import struct
|
||||
|
||||
from . import Image, ImageFile, ImageSequence, JpegImagePlugin, TiffImagePlugin
|
||||
from . import (
|
||||
ExifTags,
|
||||
Image,
|
||||
ImageFile,
|
||||
ImageSequence,
|
||||
JpegImagePlugin,
|
||||
TiffImagePlugin,
|
||||
)
|
||||
from ._binary import i16be as i16
|
||||
from ._binary import o32le
|
||||
|
||||
|
@ -45,14 +52,22 @@ def _save_all(im, fp, filename):
|
|||
_save(im, fp, filename)
|
||||
return
|
||||
|
||||
mpf_offset = 28
|
||||
offsets = []
|
||||
for imSequence in itertools.chain([im], append_images):
|
||||
for im_frame in ImageSequence.Iterator(imSequence):
|
||||
if not offsets:
|
||||
# APP2 marker
|
||||
im.encoderinfo["extra"] = (
|
||||
b"\xFF\xE2" + struct.pack(">H", 6 + 70) + b"MPF\0" + b" " * 70
|
||||
im_frame.encoderinfo["extra"] = (
|
||||
b"\xFF\xE2" + struct.pack(">H", 6 + 82) + b"MPF\0" + b" " * 82
|
||||
)
|
||||
exif = im_frame.encoderinfo.get("exif")
|
||||
if isinstance(exif, Image.Exif):
|
||||
exif = exif.tobytes()
|
||||
im_frame.encoderinfo["exif"] = exif
|
||||
if exif:
|
||||
mpf_offset += 4 + len(exif)
|
||||
|
||||
JpegImagePlugin._save(im_frame, fp, filename)
|
||||
offsets.append(fp.tell())
|
||||
else:
|
||||
|
@ -60,6 +75,7 @@ def _save_all(im, fp, filename):
|
|||
offsets.append(fp.tell() - offsets[-1])
|
||||
|
||||
ifd = TiffImagePlugin.ImageFileDirectory_v2()
|
||||
ifd[0xB000] = b"0100"
|
||||
ifd[0xB001] = len(offsets)
|
||||
|
||||
mpentries = b""
|
||||
|
@ -71,11 +87,11 @@ def _save_all(im, fp, filename):
|
|||
mptype = 0x000000 # Undefined
|
||||
mpentries += struct.pack("<LLLHH", mptype, size, data_offset, 0, 0)
|
||||
if i == 0:
|
||||
data_offset -= 28
|
||||
data_offset -= mpf_offset
|
||||
data_offset += size
|
||||
ifd[0xB002] = mpentries
|
||||
|
||||
fp.seek(28)
|
||||
fp.seek(mpf_offset)
|
||||
fp.write(b"II\x2A\x00" + o32le(8) + ifd.tobytes(8))
|
||||
fp.seek(0, os.SEEK_END)
|
||||
|
||||
|
@ -136,7 +152,7 @@ class MpoImageFile(JpegImagePlugin.JpegImageFile):
|
|||
|
||||
mptype = self.mpinfo[0xB002][frame]["Attribute"]["MPType"]
|
||||
if mptype.startswith("Large Thumbnail"):
|
||||
exif = self.getexif().get_ifd(0x8769)
|
||||
exif = self.getexif().get_ifd(ExifTags.IFD.Exif)
|
||||
if 40962 in exif and 40963 in exif:
|
||||
self._size = (exif[40962], exif[40963])
|
||||
elif "exif" in self.info:
|
||||
|
|
|
@ -1383,7 +1383,7 @@ def _save(im, fp, filename, chunk=putchunk, save_all=False):
|
|||
chunks.remove(cid)
|
||||
chunk(fp, cid, data)
|
||||
|
||||
exif = im.encoderinfo.get("exif", im.info.get("exif"))
|
||||
exif = im.encoderinfo.get("exif")
|
||||
if exif:
|
||||
if isinstance(exif, Image.Exif):
|
||||
exif = exif.tobytes(8)
|
||||
|
|
|
@ -208,7 +208,9 @@ class PpmPlainDecoder(ImageFile.PyDecoder):
|
|||
tokens = b"".join(block.split())
|
||||
for token in tokens:
|
||||
if token not in (48, 49):
|
||||
raise ValueError(f"Invalid token for this mode: {bytes([token])}")
|
||||
raise ValueError(
|
||||
b"Invalid token for this mode: %s" % bytes([token])
|
||||
)
|
||||
data = (data + tokens)[:total_bytes]
|
||||
invert = bytes.maketrans(b"01", b"\xFF\x00")
|
||||
return data.translate(invert)
|
||||
|
@ -242,13 +244,13 @@ class PpmPlainDecoder(ImageFile.PyDecoder):
|
|||
half_token = tokens.pop() # save half token for later
|
||||
if len(half_token) > max_len: # prevent buildup of half_token
|
||||
raise ValueError(
|
||||
f"Token too long found in data: {half_token[:max_len + 1]}"
|
||||
b"Token too long found in data: %s" % half_token[: max_len + 1]
|
||||
)
|
||||
|
||||
for token in tokens:
|
||||
if len(token) > max_len:
|
||||
raise ValueError(
|
||||
f"Token too long found in data: {token[:max_len + 1]}"
|
||||
b"Token too long found in data: %s" % token[: max_len + 1]
|
||||
)
|
||||
value = int(token)
|
||||
if value > maxval:
|
||||
|
|
|
@ -13,8 +13,7 @@
|
|||
|
||||
# Notes:
|
||||
#
|
||||
# * Implements the pixel access object following Access.
|
||||
# * Does not implement the line functions, as they don't appear to be used
|
||||
# * Implements the pixel access object following Access.c
|
||||
# * Taking only the tuple form, which is used from python.
|
||||
# * Fill.c uses the integer form, but it's still going to use the old
|
||||
# Access.c implementation.
|
||||
|
|
|
@ -719,6 +719,8 @@ class ImageFileDirectory_v2(MutableMapping):
|
|||
|
||||
@_register_writer(1) # Basic type, except for the legacy API.
|
||||
def write_byte(self, data):
|
||||
if isinstance(data, int):
|
||||
data = bytes((data,))
|
||||
return data
|
||||
|
||||
@_register_loader(2, 1)
|
||||
|
@ -1151,39 +1153,6 @@ class TiffImageFile(ImageFile.ImageFile):
|
|||
"""Return the current frame number"""
|
||||
return self.__frame
|
||||
|
||||
def get_child_images(self):
|
||||
if SUBIFD not in self.tag_v2:
|
||||
return []
|
||||
child_images = []
|
||||
exif = self.getexif()
|
||||
offset = None
|
||||
for im_offset in self.tag_v2[SUBIFD]:
|
||||
# reset buffered io handle in case fp
|
||||
# was passed to libtiff, invalidating the buffer
|
||||
current_offset = self._fp.tell()
|
||||
if offset is None:
|
||||
offset = current_offset
|
||||
|
||||
fp = self._fp
|
||||
ifd = exif._get_ifd_dict(im_offset)
|
||||
jpegInterchangeFormat = ifd.get(513)
|
||||
if jpegInterchangeFormat is not None:
|
||||
fp.seek(jpegInterchangeFormat)
|
||||
jpeg_data = fp.read(ifd.get(514))
|
||||
|
||||
fp = io.BytesIO(jpeg_data)
|
||||
|
||||
with Image.open(fp) as im:
|
||||
if jpegInterchangeFormat is None:
|
||||
im._frame_pos = [im_offset]
|
||||
im._seek(0)
|
||||
im.load()
|
||||
child_images.append(im)
|
||||
|
||||
if offset is not None:
|
||||
self._fp.seek(offset)
|
||||
return child_images
|
||||
|
||||
def getxmp(self):
|
||||
"""
|
||||
Returns a dictionary containing the XMP tags.
|
||||
|
|
|
@ -98,6 +98,15 @@ class WebPImageFile(ImageFile.ImageFile):
|
|||
return None
|
||||
return self.getexif()._get_merged_dict()
|
||||
|
||||
def getxmp(self):
|
||||
"""
|
||||
Returns a dictionary containing the XMP tags.
|
||||
Requires defusedxml to be installed.
|
||||
|
||||
:returns: XMP tags in a dictionary.
|
||||
"""
|
||||
return self._getxmp(self.info["xmp"]) if "xmp" in self.info else {}
|
||||
|
||||
def seek(self, frame):
|
||||
if not self._seek_check(frame):
|
||||
return
|
||||
|
@ -318,6 +327,7 @@ def _save(im, fp, filename):
|
|||
exif = exif[6:]
|
||||
xmp = im.encoderinfo.get("xmp", "")
|
||||
method = im.encoderinfo.get("method", 4)
|
||||
exact = 1 if im.encoderinfo.get("exact") else 0
|
||||
|
||||
if im.mode not in _VALID_WEBP_LEGACY_MODES:
|
||||
alpha = (
|
||||
|
@ -336,6 +346,7 @@ def _save(im, fp, filename):
|
|||
im.mode,
|
||||
icc_profile,
|
||||
method,
|
||||
exact,
|
||||
exif,
|
||||
xmp,
|
||||
)
|
||||
|
|
|
@ -1531,25 +1531,21 @@ if (PySequence_Check(op)) { \
|
|||
PyErr_SetString(PyExc_TypeError, must_be_sequence);
|
||||
return NULL;
|
||||
}
|
||||
int endian = strncmp(image->mode, "I;16", 4) == 0 ? (strcmp(image->mode, "I;16B") == 0 ? 2 : 1) : 0;
|
||||
double value;
|
||||
if (scale == 1.0 && offset == 0.0) {
|
||||
/* Clipped data */
|
||||
for (i = x = y = 0; i < n; i++) {
|
||||
set_value_to_item(seq, i);
|
||||
image->image8[y][x] = (UINT8)CLIP8(value);
|
||||
if (++x >= (int)image->xsize) {
|
||||
x = 0, y++;
|
||||
}
|
||||
for (i = x = y = 0; i < n; i++) {
|
||||
set_value_to_item(seq, i);
|
||||
if (scale != 1.0 || offset != 0.0) {
|
||||
value = value * scale + offset;
|
||||
}
|
||||
|
||||
} else {
|
||||
/* Scaled and clipped data */
|
||||
for (i = x = y = 0; i < n; i++) {
|
||||
set_value_to_item(seq, i);
|
||||
image->image8[y][x] = CLIP8(value * scale + offset);
|
||||
if (++x >= (int)image->xsize) {
|
||||
x = 0, y++;
|
||||
}
|
||||
if (endian == 0) {
|
||||
image->image8[y][x] = (UINT8)CLIP8(value);
|
||||
} else {
|
||||
image->image8[y][x * 2 + (endian == 2 ? 1 : 0)] = CLIP8((int)value % 256);
|
||||
image->image8[y][x * 2 + (endian == 2 ? 0 : 1)] = CLIP8((int)value >> 8);
|
||||
}
|
||||
if (++x >= (int)image->xsize) {
|
||||
x = 0, y++;
|
||||
}
|
||||
}
|
||||
PyErr_Clear(); /* Avoid weird exceptions */
|
||||
|
@ -1829,7 +1825,7 @@ _resize(ImagingObject *self, PyObject *args) {
|
|||
box[1] - (int)box[1] == 0 && box[3] - box[1] == ysize) {
|
||||
imOut = ImagingCrop(imIn, box[0], box[1], box[2], box[3]);
|
||||
} else if (filter == IMAGING_TRANSFORM_NEAREST) {
|
||||
double a[6];
|
||||
double a[8];
|
||||
|
||||
memset(a, 0, sizeof a);
|
||||
a[0] = (double)(box[2] - box[0]) / xsize;
|
||||
|
|
|
@ -777,13 +777,15 @@ font_render(FontObject *self, PyObject *args) {
    const char *lang = NULL;
    PyObject *features = Py_None;
    PyObject *string;
    float x_start = 0;
    float y_start = 0;

    /* render string into given buffer (the buffer *must* have
       the right size, or this will crash) */

    if (!PyArg_ParseTuple(
            args,
            "On|zzOziL:render",
            "On|zzOziLff:render",
            &string,
            &id,
            &mode,

@ -791,7 +793,9 @@ font_render(FontObject *self, PyObject *args) {
            &features,
            &lang,
            &stroke_width,
            &foreground_ink_long)) {
            &foreground_ink_long,
            &x_start,
            &y_start)) {
        return NULL;
    }

@ -876,8 +880,8 @@ font_render(FontObject *self, PyObject *args) {
    }

    /* set pen position to text origin */
    x = (-x_min + stroke_width) << 6;
    y = (-y_max + (-stroke_width)) << 6;
    x = (-x_min + stroke_width + x_start) * 64;
    y = (-y_max + (-stroke_width) - y_start) * 64;

    if (stroker == NULL) {
        load_flags |= FT_LOAD_RENDER;

@ -956,7 +960,7 @@ font_render(FontObject *self, PyObject *args) {
                /* we didn't ask for color, fall through to default */
#endif
            default:
                PyErr_SetString(PyExc_IOError, "unsupported bitmap pixel mode");
                PyErr_SetString(PyExc_OSError, "unsupported bitmap pixel mode");
                goto glyph_error;
        }

@ -1023,7 +1027,7 @@ font_render(FontObject *self, PyObject *args) {
            }
        }
    } else {
        PyErr_SetString(PyExc_IOError, "unsupported bitmap pixel mode");
        PyErr_SetString(PyExc_OSError, "unsupported bitmap pixel mode");
        goto glyph_error;
    }
}

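The extra "ff" format units and the x_start/y_start pen offsets let the text origin be set with sub-pixel precision rather than whole 26.6 fixed-point steps. One place this can show up at the Python level is text placed at float coordinates; a sketch, assuming a TrueType font file named DejaVuSans.ttf is available and that the build includes this change:

    from PIL import Image, ImageDraw, ImageFont

    im = Image.new("RGB", (200, 50), "white")
    draw = ImageDraw.Draw(im)
    font = ImageFont.truetype("DejaVuSans.ttf", 24)  # assumed font path
    draw.text((10.5, 5.25), "Hello", font=font, fill="black")
    im.save("text.png")
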
14
src/_webp.c

@ -178,12 +178,11 @@ _anim_encoder_new(PyObject *self, PyObject *args) {
    return NULL;
}

PyObject *
void
_anim_encoder_dealloc(PyObject *self) {
    WebPAnimEncoderObject *encp = (WebPAnimEncoderObject *)self;
    WebPPictureFree(&(encp->frame));
    WebPAnimEncoderDelete(encp->enc);
    Py_RETURN_NONE;
}

PyObject *

@ -400,12 +399,11 @@ _anim_decoder_new(PyObject *self, PyObject *args) {
    return NULL;
}

PyObject *
void
_anim_decoder_dealloc(PyObject *self) {
    WebPAnimDecoderObject *decp = (WebPAnimDecoderObject *)self;
    WebPDataClear(&(decp->data));
    WebPAnimDecoderDelete(decp->dec);
    Py_RETURN_NONE;
}

PyObject *

@ -576,6 +574,7 @@ WebPEncode_wrapper(PyObject *self, PyObject *args) {
    int lossless;
    float quality_factor;
    int method;
    int exact;
    uint8_t *rgb;
    uint8_t *icc_bytes;
    uint8_t *exif_bytes;

@ -597,7 +596,7 @@ WebPEncode_wrapper(PyObject *self, PyObject *args) {

    if (!PyArg_ParseTuple(
            args,
            "y#iiifss#is#s#",
            "y#iiifss#iis#s#",
            (char **)&rgb,
            &size,
            &width,

@ -608,6 +607,7 @@ WebPEncode_wrapper(PyObject *self, PyObject *args) {
            &icc_bytes,
            &icc_size,
            &method,
            &exact,
            &exif_bytes,
            &exif_size,
            &xmp_bytes,

@ -633,6 +633,10 @@ WebPEncode_wrapper(PyObject *self, PyObject *args) {
    config.lossless = lossless;
    config.quality = quality_factor;
    config.method = method;
#if WEBP_ENCODER_ABI_VERSION >= 0x0209
    // the "exact" flag is only available in libwebp 0.5.0 and later
    config.exact = exact;
#endif

    // Validate the config
    if (!WebPValidateConfig(&config)) {

32
src/encode.c

@ -1048,6 +1048,8 @@ PyImaging_JpegEncoderNew(PyObject *self, PyObject *args) {
    PyObject *qtables = NULL;
    unsigned int *qarrays = NULL;
    int qtablesLen = 0;
    char *comment = NULL;
    Py_ssize_t comment_size;
    char *extra = NULL;
    Py_ssize_t extra_size;
    char *rawExif = NULL;

@ -1055,7 +1057,7 @@ PyImaging_JpegEncoderNew(PyObject *self, PyObject *args) {

    if (!PyArg_ParseTuple(
            args,
            "ss|nnnnnnnnOy#y#",
            "ss|nnnnnnnnOz#y#y#",
            &mode,
            &rawmode,
            &quality,

@ -1067,6 +1069,8 @@ PyImaging_JpegEncoderNew(PyObject *self, PyObject *args) {
            &ydpi,
            &subsampling,
            &qtables,
            &comment,
            &comment_size,
            &extra,
            &extra_size,
            &rawExif,

@ -1090,13 +1094,28 @@ PyImaging_JpegEncoderNew(PyObject *self, PyObject *args) {
        return NULL;
    }

    // Freed in JpegEncode, Case 5
    // Freed in JpegEncode, Case 6
    qarrays = get_qtables_arrays(qtables, &qtablesLen);

    if (comment && comment_size > 0) {
        /* malloc check ok, length is from python parsearg */
        char *p = malloc(comment_size); // Freed in JpegEncode, Case 6
        if (!p) {
            return ImagingError_MemoryError();
        }
        memcpy(p, comment, comment_size);
        comment = p;
    } else {
        comment = NULL;
    }

    if (extra && extra_size > 0) {
        /* malloc check ok, length is from python parsearg */
        char *p = malloc(extra_size); // Freed in JpegEncode, Case 5
        char *p = malloc(extra_size); // Freed in JpegEncode, Case 6
        if (!p) {
            if (comment) {
                free(comment);
            }
            return ImagingError_MemoryError();
        }
        memcpy(p, extra, extra_size);

@ -1107,8 +1126,11 @@ PyImaging_JpegEncoderNew(PyObject *self, PyObject *args) {

    if (rawExif && rawExifLen > 0) {
        /* malloc check ok, length is from python parsearg */
        char *pp = malloc(rawExifLen); // Freed in JpegEncode, Case 5
        char *pp = malloc(rawExifLen); // Freed in JpegEncode, Case 6
        if (!pp) {
            if (comment) {
                free(comment);
            }
            if (extra) {
                free(extra);
            }

@ -1134,6 +1156,8 @@ PyImaging_JpegEncoderNew(PyObject *self, PyObject *args) {
    ((JPEGENCODERSTATE *)encoder->state.context)->streamtype = streamtype;
    ((JPEGENCODERSTATE *)encoder->state.context)->xdpi = xdpi;
    ((JPEGENCODERSTATE *)encoder->state.context)->ydpi = ydpi;
    ((JPEGENCODERSTATE *)encoder->state.context)->comment = comment;
    ((JPEGENCODERSTATE *)encoder->state.context)->comment_size = comment_size;
    ((JPEGENCODERSTATE *)encoder->state.context)->extra = extra;
    ((JPEGENCODERSTATE *)encoder->state.context)->extra_size = extra_size;
    ((JPEGENCODERSTATE *)encoder->state.context)->rawExif = rawExif;

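Together with the Jpeg.h and JpegEncode.c hunks further down, this gives the JPEG encoder a comment buffer that is written out as a COM marker. A minimal round-trip sketch, assuming a Pillow build that accepts a comment keyword on JPEG save; the file name is made up:

    from PIL import Image

    im = Image.new("RGB", (16, 16))
    im.save("commented.jpg", comment="captured during the winter test run")

    with Image.open("commented.jpg") as reread:
        print(reread.info.get("comment"))  # the COM payload, typically as bytes
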
@ -43,23 +43,6 @@ add_item(const char *mode) {
    return &access_table[i];
}

/* fetch pointer to pixel line */

static void *
line_8(Imaging im, int x, int y) {
    return &im->image8[y][x];
}

static void *
line_16(Imaging im, int x, int y) {
    return &im->image8[y][x + x];
}

static void *
line_32(Imaging im, int x, int y) {
    return &im->image32[y][x];
}

/* fetch individual pixel */

static void

@ -187,36 +170,35 @@ put_pixel_32(Imaging im, int x, int y, const void *color) {

void
ImagingAccessInit() {
#define ADD(mode_, line_, get_pixel_, put_pixel_) \
#define ADD(mode_, get_pixel_, put_pixel_) \
    { \
        ImagingAccess access = add_item(mode_); \
        access->line = line_; \
        access->get_pixel = get_pixel_; \
        access->put_pixel = put_pixel_; \
    }

    /* populate access table */
    ADD("1", line_8, get_pixel_8, put_pixel_8);
    ADD("L", line_8, get_pixel_8, put_pixel_8);
    ADD("LA", line_32, get_pixel, put_pixel);
    ADD("La", line_32, get_pixel, put_pixel);
    ADD("I", line_32, get_pixel_32, put_pixel_32);
    ADD("I;16", line_16, get_pixel_16L, put_pixel_16L);
    ADD("I;16L", line_16, get_pixel_16L, put_pixel_16L);
    ADD("I;16B", line_16, get_pixel_16B, put_pixel_16B);
    ADD("I;32L", line_32, get_pixel_32L, put_pixel_32L);
    ADD("I;32B", line_32, get_pixel_32B, put_pixel_32B);
    ADD("F", line_32, get_pixel_32, put_pixel_32);
    ADD("P", line_8, get_pixel_8, put_pixel_8);
    ADD("PA", line_32, get_pixel, put_pixel);
    ADD("RGB", line_32, get_pixel_32, put_pixel_32);
    ADD("RGBA", line_32, get_pixel_32, put_pixel_32);
    ADD("RGBa", line_32, get_pixel_32, put_pixel_32);
    ADD("RGBX", line_32, get_pixel_32, put_pixel_32);
    ADD("CMYK", line_32, get_pixel_32, put_pixel_32);
    ADD("YCbCr", line_32, get_pixel_32, put_pixel_32);
    ADD("LAB", line_32, get_pixel_32, put_pixel_32);
    ADD("HSV", line_32, get_pixel_32, put_pixel_32);
    ADD("1", get_pixel_8, put_pixel_8);
    ADD("L", get_pixel_8, put_pixel_8);
    ADD("LA", get_pixel, put_pixel);
    ADD("La", get_pixel, put_pixel);
    ADD("I", get_pixel_32, put_pixel_32);
    ADD("I;16", get_pixel_16L, put_pixel_16L);
    ADD("I;16L", get_pixel_16L, put_pixel_16L);
    ADD("I;16B", get_pixel_16B, put_pixel_16B);
    ADD("I;32L", get_pixel_32L, put_pixel_32L);
    ADD("I;32B", get_pixel_32B, put_pixel_32B);
    ADD("F", get_pixel_32, put_pixel_32);
    ADD("P", get_pixel_8, put_pixel_8);
    ADD("PA", get_pixel, put_pixel);
    ADD("RGB", get_pixel_32, put_pixel_32);
    ADD("RGBA", get_pixel_32, put_pixel_32);
    ADD("RGBa", get_pixel_32, put_pixel_32);
    ADD("RGBX", get_pixel_32, put_pixel_32);
    ADD("CMYK", get_pixel_32, put_pixel_32);
    ADD("YCbCr", get_pixel_32, put_pixel_32);
    ADD("LAB", get_pixel_32, put_pixel_32);
    ADD("HSV", get_pixel_32, put_pixel_32);
}

ImagingAccess

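The access table above backs getpixel() and putpixel(); dropping the unused line accessors does not change that behaviour. A quick sketch of the 16-bit big-endian entry in use, assuming standard Pillow semantics for these modes:

    from PIL import Image

    im = Image.new("I;16B", (2, 2))
    im.putpixel((0, 0), 0x1234)   # stored big-endian, high byte first
    print(im.getpixel((0, 0)))    # 4660
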
@ -479,6 +479,25 @@ rgba2rgbA(UINT8 *out, const UINT8 *in, int xsize) {
    }
}

static void
rgba2rgb_(UINT8 *out, const UINT8 *in, int xsize) {
    int x;
    unsigned int alpha;
    for (x = 0; x < xsize; x++, in += 4) {
        alpha = in[3];
        if (alpha == 255 || alpha == 0) {
            *out++ = in[0];
            *out++ = in[1];
            *out++ = in[2];
        } else {
            *out++ = CLIP8((255 * in[0]) / alpha);
            *out++ = CLIP8((255 * in[1]) / alpha);
            *out++ = CLIP8((255 * in[2]) / alpha);
        }
        *out++ = 255;
    }
}

/*
 * Conversion of RGB + single transparent color to RGBA,
 * where any pixel that matches the color will have the

@ -934,6 +953,7 @@ static struct {
    {"RGBA", "HSV", rgb2hsv},

    {"RGBa", "RGBA", rgba2rgbA},
    {"RGBa", "RGB", rgba2rgb_},

    {"RGBX", "1", rgb2bit},
    {"RGBX", "L", rgb2l},

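Registering rgba2rgb_ adds a direct "RGBa" to "RGB" path, i.e. un-premultiplying the colour channels and dropping alpha in a single convert() call. A sketch, assuming a Pillow build with this pairing registered:

    from PIL import Image

    premultiplied = Image.new("RGBa", (8, 8), (128, 64, 32, 128))
    plain = premultiplied.convert("RGB")
    print(plain.getpixel((0, 0)))  # roughly (255, 127, 63) after un-premultiplying
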
@ -124,7 +124,6 @@ struct ImagingMemoryInstance {

struct ImagingAccessInstance {
    const char *mode;
    void *(*line)(Imaging im, int x, int y);
    void (*get_pixel)(Imaging im, int x, int y, void *pixel);
    void (*put_pixel)(Imaging im, int x, int y, const void *pixel);
};

@ -92,6 +92,10 @@ typedef struct {
    /* in factors of DCTSIZE2 */
    int qtablesLen;

    /* Comment */
    char *comment;
    size_t comment_size;

    /* Extra data (to be injected after header) */
    char *extra;
    int extra_size;

@ -277,6 +277,13 @@ ImagingJpegEncode(Imaging im, ImagingCodecState state, UINT8 *buf, int bytes) {
            }

        case 4:

            if (context->comment) {
                jpeg_write_marker(&context->cinfo, JPEG_COM, (unsigned char *)context->comment, context->comment_size);
            }
            state->state++;

        case 5:
            if (1024 > context->destination.pub.free_in_buffer) {
                break;
            }

@ -301,7 +308,7 @@ ImagingJpegEncode(Imaging im, ImagingCodecState state, UINT8 *buf, int bytes) {
            state->state++;
            /* fall through */

        case 5:
        case 6:

            /* Finish compression */
            if (context->destination.pub.free_in_buffer < 100) {

@ -310,6 +317,10 @@ ImagingJpegEncode(Imaging im, ImagingCodecState state, UINT8 *buf, int bytes) {
            jpeg_finish_compress(&context->cinfo);

            /* Clean up */
            if (context->comment) {
                free(context->comment);
                context->comment = NULL;
            }
            if (context->extra) {
                free(context->extra);
                context->extra = NULL;

@ -1717,7 +1717,7 @@ ImagingQuantize(Imaging im, int colors, int mode, int kmeans) {

    withAlpha = !strcmp(im->mode, "RGBA");
    int transparency = 0;
    unsigned char r, g, b;
    unsigned char r = 0, g = 0, b = 0;
    for (i = y = 0; y < im->ysize; y++) {
        for (x = 0; x < im->xsize; x++, i++) {
            p[i].v = im->image32[y][x];

25
tox.ini

@ -1,15 +1,13 @@
# Tox (https://tox.readthedocs.io/en/latest/) is a tool for running tests
# in multiple virtualenvs. This configuration file will run the
# test suite on all supported python versions. To use it,
# "python3 -m pip install tox" and then run "tox" from this directory.

[tox]
envlist =
    lint
    py{37,38,39,310,311,py3}
    py{py3, 311, 310, 39, 38, 37}
minversion = 1.9

[testenv]
deps =
    cffi
    numpy
extras =
    tests
commands =

@ -17,16 +15,15 @@ commands =
    {envpython} -m pip install --global-option="build_ext" --global-option="--inplace" .
    {envpython} selftest.py
    {envpython} -m pytest -W always {posargs}
deps =
    cffi
    numpy
allowlist_externals = make

[testenv:lint]
passenv =
    PRE_COMMIT_COLOR
skip_install = true
deps =
    check-manifest
    pre-commit
commands =
    pre-commit run --all-files --show-diff-on-failure
    check-manifest
deps =
    pre-commit
    check-manifest
skip_install = true
passenv = PRE_COMMIT_COLOR

@ -152,9 +152,9 @@ deps = {
        "libs": [r"*.lib"],
    },
    "xz": {
        "url": SF_PROJECTS + "/lzmautils/files/xz-5.2.7.tar.gz/download",
        "filename": "xz-5.2.7.tar.gz",
        "dir": "xz-5.2.7",
        "url": SF_PROJECTS + "/lzmautils/files/xz-5.4.0.tar.gz/download",
        "filename": "xz-5.4.0.tar.gz",
        "dir": "xz-5.4.0",
        "license": "COPYING",
        "patch": {
            r"src\liblzma\api\lzma.h": {

@ -200,15 +200,11 @@ deps = {
        "libs": [r"output\release-static\{architecture}\lib\*.lib"],
    },
    "libtiff": {
        "url": "https://download.osgeo.org/libtiff/tiff-4.4.0.tar.gz",
        "filename": "tiff-4.4.0.tar.gz",
        "dir": "tiff-4.4.0",
        "license": "COPYRIGHT",
        "url": "https://download.osgeo.org/libtiff/tiff-4.5.0.tar.gz",
        "filename": "tiff-4.5.0.tar.gz",
        "dir": "tiff-4.5.0",
        "license": "LICENSE.md",
        "patch": {
            r"cmake\LZMACodec.cmake": {
                # fix typo
                "${{LZMA_FOUND}}": "${{LIBLZMA_FOUND}}",
            },
            r"libtiff\tif_lzma.c": {
                # link against liblzma.lib
                "#ifdef LZMA_SUPPORT": '#ifdef LZMA_SUPPORT\n#pragma comment(lib, "liblzma.lib")', # noqa: E501

@ -228,9 +224,9 @@ deps = {
        # "bins": [r"libtiff\*.dll"],
    },
    "libpng": {
        "url": SF_PROJECTS + "/libpng/files/libpng16/1.6.38/lpng1638.zip/download",
        "filename": "lpng1638.zip",
        "dir": "lpng1638",
        "url": SF_PROJECTS + "/libpng/files/libpng16/1.6.39/lpng1639.zip/download",
        "filename": "lpng1639.zip",
        "dir": "lpng1639",
        "license": "LICENSE",
        "build": [
            # lint: do not inline

@ -293,9 +289,9 @@ deps = {
        # "bins": [r"objs\{msbuild_arch}\Release\freetype.dll"],
    },
    "lcms2": {
        "url": SF_PROJECTS + "/lcms/files/lcms/2.13/lcms2-2.13.1.tar.gz/download",
        "filename": "lcms2-2.13.1.tar.gz",
        "dir": "lcms2-2.13.1",
        "url": SF_PROJECTS + "/lcms/files/lcms/2.13/lcms2-2.14.tar.gz/download",
        "filename": "lcms2-2.14.tar.gz",
        "dir": "lcms2-2.14",
        "license": "COPYING",
        "patch": {
            r"Projects\VC2022\lcms2_static\lcms2_static.vcxproj": {

@ -323,6 +319,11 @@ deps = {
        "filename": "openjpeg-2.5.0.tar.gz",
        "dir": "openjpeg-2.5.0",
        "license": "LICENSE",
        "patch": {
            r"src\lib\openjp2\ht_dec.c": {
                "#ifdef OPJ_COMPILER_MSVC\n return (OPJ_UINT32)__popcnt(val);": "#if defined(OPJ_COMPILER_MSVC) && (defined(_M_IX86) || defined(_M_AMD64))\n return (OPJ_UINT32)__popcnt(val);", # noqa: E501
            }
        },
        "build": [
            cmd_cmake(("-DBUILD_CODEC:BOOL=OFF", "-DBUILD_SHARED_LIBS:BOOL=OFF")),
            cmd_nmake(target="clean"),

@ -355,11 +356,12 @@ deps = {
        "libs": [r"imagequant.lib"],
    },
    "harfbuzz": {
        "url": "https://github.com/harfbuzz/harfbuzz/archive/5.3.1.zip",
        "filename": "harfbuzz-5.3.1.zip",
        "dir": "harfbuzz-5.3.1",
        "url": "https://github.com/harfbuzz/harfbuzz/archive/6.0.0.zip",
        "filename": "harfbuzz-6.0.0.zip",
        "dir": "harfbuzz-6.0.0",
        "license": "COPYING",
        "build": [
            cmd_set("CXXFLAGS", "-d2FH4-"),
            cmd_cmake("-DHB_HAVE_FREETYPE:BOOL=TRUE"),
            cmd_nmake(target="clean"),
            cmd_nmake(target="harfbuzz"),

@ -469,11 +471,22 @@ def extract_dep(url, filename):
        raise RuntimeError(ex)

    print("Extracting " + filename)
    sources_dir_abs = os.path.abspath(sources_dir)
    if filename.endswith(".zip"):
        with zipfile.ZipFile(file) as zf:
            for member in zf.namelist():
                member_abspath = os.path.abspath(os.path.join(sources_dir, member))
                member_prefix = os.path.commonpath([sources_dir_abs, member_abspath])
                if sources_dir_abs != member_prefix:
                    raise RuntimeError("Attempted Path Traversal in Zip File")
            zf.extractall(sources_dir)
    elif filename.endswith(".tar.gz") or filename.endswith(".tgz"):
        with tarfile.open(file, "r:gz") as tgz:
            for member in tgz.getnames():
                member_abspath = os.path.abspath(os.path.join(sources_dir, member))
                member_prefix = os.path.commonpath([sources_dir_abs, member_abspath])
                if sources_dir_abs != member_prefix:
                    raise RuntimeError("Attempted Path Traversal in Tar File")
            tgz.extractall(sources_dir)
    else:
        raise RuntimeError("Unknown archive type: " + filename)

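The same guard can be reused outside the build script. A standalone sketch for the tarball case only, using nothing but the standard library; the archive and destination paths are assumptions:

    import os
    import tarfile

    def safe_extract_tgz(archive, dest):
        # Refuse to extract members that would land outside dest.
        dest_abs = os.path.abspath(dest)
        with tarfile.open(archive, "r:gz") as tgz:
            for member in tgz.getnames():
                member_abs = os.path.abspath(os.path.join(dest, member))
                if os.path.commonpath([dest_abs, member_abs]) != dest_abs:
                    raise RuntimeError("Attempted Path Traversal in Tar File")
            tgz.extractall(dest)

    safe_extract_tgz("downloads/dep.tar.gz", "sources")  # assumed paths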