Mirror of https://github.com/python-pillow/Pillow.git (synced 2025-01-27 17:54:32 +03:00)

Merge branch 'main' into cover

commit 955b2d553c
@@ -21,13 +21,11 @@ environment:
 install:
 - '%PYTHON%\%EXECUTABLE% --version'
 - '%PYTHON%\%EXECUTABLE% -m pip install --upgrade pip'
-- curl -fsSL -o pillow-depends.zip https://github.com/python-pillow/pillow-depends/archive/main.zip
 - curl -fsSL -o pillow-test-images.zip https://github.com/python-pillow/test-images/archive/main.zip
-- 7z x pillow-depends.zip -oc:\
 - 7z x pillow-test-images.zip -oc:\
-- mv c:\pillow-depends-main c:\pillow-depends
 - xcopy /S /Y c:\test-images-main\* c:\pillow\tests\images
-- 7z x ..\pillow-depends\nasm-2.16.01-win64.zip -oc:\
+- curl -fsSL -o nasm-win64.zip https://raw.githubusercontent.com/python-pillow/pillow-depends/main/nasm-2.16.01-win64.zip
+- 7z x nasm-win64.zip -oc:\
 - choco install ghostscript --version=10.0.0.20230317
 - path c:\nasm-2.16.01;C:\Program Files\gs\gs10.00.0\bin;%PATH%
 - cd c:\pillow\winbuild\
.github/workflows/release-drafter.yml (vendored): 2 lines changed

@@ -10,7 +10,7 @@ on:
 permissions:
   contents: read

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

.github/workflows/stale.yml (vendored): 2 lines changed

@@ -8,7 +8,7 @@ on:
 permissions:
   issues: write

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

@@ -1,6 +1,12 @@
 repos:
-  - repo: https://github.com/psf/black
-    rev: 23.7.0
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v3.13.0
+    hooks:
+      - id: pyupgrade
+        args: [--py38-plus]
+
+  - repo: https://github.com/psf/black-pre-commit-mirror
+    rev: 23.9.1
     hooks:
       - id: black
         args: [--target-version=py38]

@@ -33,7 +39,7 @@ repos:
     hooks:
       - id: flake8
         additional_dependencies:
-          [flake8-2020, flake8-errmsg, flake8-implicit-str-concat]
+          [flake8-2020, flake8-errmsg, flake8-implicit-str-concat, flake8-logging]

   - repo: https://github.com/pre-commit/pygrep-hooks
     rev: v1.10.0

@@ -44,10 +50,15 @@ repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v4.4.0
     hooks:
+      - id: check-executables-have-shebangs
       - id: check-merge-conflict
       - id: check-json
       - id: check-toml
       - id: check-yaml
+      - id: end-of-file-fixer
+        exclude: ^Tests/images/
+      - id: trailing-whitespace
+        exclude: ^.github/.*TEMPLATE|^Tests/(fonts|images)/

   - repo: https://github.com/sphinx-contrib/sphinx-lint
     rev: v0.6.8
@@ -29,9 +29,6 @@ Changelog (Pillow)
 - Added session type check for Linux in ImageGrab.grabclipboard() #7332
   [TheNooB2706, radarhere, hugovk]

-- Read WebP duration after opening #7311
-  [k128, radarhere]
-
 - Allow "loop=None" when saving GIF images #7329
   [radarhere]

Tests/check_j2k_leaks.py: 0 lines changed, Executable file → Normal file
@@ -37,4 +37,4 @@ The Font Software may be sold as part of a larger software package but no copy o

 THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL TAVMJONG BAH BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT SOFTWARE.

 Except as contained in this notice, the name of Tavmjong Bah shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Font Software without prior written authorization from Tavmjong Bah. For further information, contact: tavmjong @ free . fr.
@@ -91,7 +91,7 @@ def assert_image_equal(a, b, msg=None):
        if HAS_UPLOADER:
            try:
                url = test_image_results.upload(a, b)
-               logger.error(f"Url for test images: {url}")
+               logger.error("URL for test images: %s", url)
            except Exception:
                pass

@@ -126,7 +126,7 @@ def assert_image_similar(a, b, epsilon, msg=None):
        if HAS_UPLOADER:
            try:
                url = test_image_results.upload(a, b)
-               logger.error(f"Url for test images: {url}")
+               logger.exception("URL for test images: %s", url)
            except Exception:
                pass
        raise e
@@ -22,4 +22,3 @@ and that the name of ICC shall not be used in advertising or publicity
 pertaining to distribution of the software without specific, written
 prior permission. ICC makes no representations about the suitability
 of this software for any purpose.
-
Tests/images/negative_size.ppm: 0 lines changed, Executable file → Normal file
@@ -233,4 +233,15 @@ class TestFileWebp:
         im.save(out_webp, save_all=True)

         with Image.open(out_webp) as reloaded:
+            reloaded.load()
             assert reloaded.info["duration"] == 1000
+
+    def test_roundtrip_rgba_palette(self, tmp_path):
+        temp_file = str(tmp_path / "temp.webp")
+        im = Image.new("RGBA", (1, 1)).convert("P")
+        assert im.mode == "P"
+        assert im.palette.mode == "RGBA"
+        im.save(temp_file)
+
+        with Image.open(temp_file) as im:
+            assert im.getpixel((0, 0)) == (0, 0, 0, 0)
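Note on the reloaded.load() call above: with this commit, a WebP frame's "duration" is no longer filled in when the file is merely opened (the WebPImagePlugin hunk further down removes that), so the test loads the frame before reading im.info. A minimal sketch of the reading pattern, using a hypothetical animated.webp path:

    from PIL import Image

    with Image.open("animated.webp") as im:   # hypothetical file saved with save_all=True and duration=1000
        im.load()                             # frame data, including info["duration"], is populated on load
        assert im.info["duration"] == 1000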
@@ -906,6 +906,31 @@ class TestImage:
         im = Image.new("RGB", size)
         assert im.tobytes() == b""

+    def test_has_transparency_data(self):
+        for mode in ("1", "L", "P", "RGB"):
+            im = Image.new(mode, (1, 1))
+            assert not im.has_transparency_data
+
+        for mode in ("LA", "La", "PA", "RGBA", "RGBa"):
+            im = Image.new(mode, (1, 1))
+            assert im.has_transparency_data
+
+        # P mode with "transparency" info
+        with Image.open("Tests/images/first_frame_transparency.gif") as im:
+            assert "transparency" in im.info
+            assert im.has_transparency_data
+
+        # RGB mode with "transparency" info
+        with Image.open("Tests/images/rgb_trns.png") as im:
+            assert "transparency" in im.info
+            assert im.has_transparency_data
+
+        # P mode with RGBA palette
+        im = Image.new("RGBA", (1, 1)).convert("P")
+        assert im.mode == "P"
+        assert im.palette.mode == "RGBA"
+        assert im.has_transparency_data
+
     def test_apply_transparency(self):
         im = Image.new("P", (1, 1))
         im.putpalette((0, 0, 0, 1, 1, 1))
_custom_build/backend.py: 0 lines changed, Executable file → Normal file
@@ -11,4 +11,3 @@ pushd $archive
 meson build --prefix=/usr && sudo ninja -C build install

 popd
-
@@ -15,4 +15,3 @@ make && sudo make install
 cd ..

 popd
-
@@ -2,4 +2,3 @@

 pkg install -y python ndk-sysroot clang make \
     libjpeg-turbo
-
@@ -498,11 +498,13 @@ These platforms have been reported to work at the versions mentioned.
 | Operating system                 | Tested Python             | Latest tested    | Tested       |
 |                                  | versions                  | Pillow version   | processors   |
 +==================================+===========================+==================+==============+
+| macOS 14 Sonoma                  | 3.8, 3.9, 3.10, 3.11      | 10.0.1           |arm           |
++----------------------------------+---------------------------+------------------+--------------+
 | macOS 13 Ventura                 | 3.8, 3.9, 3.10, 3.11      | 10.0.1           |arm           |
 |                                  +---------------------------+------------------+              |
 |                                  | 3.7                       | 9.5.0            |              |
 +----------------------------------+---------------------------+------------------+--------------+
-| macOS 12 Big Sur                 | 3.7, 3.8, 3.9, 3.10, 3.11 | 9.3.0            |arm           |
+| macOS 12 Monterey                | 3.7, 3.8, 3.9, 3.10, 3.11 | 9.3.0            |arm           |
 +----------------------------------+---------------------------+------------------+--------------+
 | macOS 11 Big Sur                 | 3.7, 3.8, 3.9, 3.10       | 8.4.0            |arm           |
 |                                  +---------------------------+------------------+--------------+
@@ -5,4 +5,4 @@ Pillow 9.3 - 9.5,,Yes,Yes,Yes,Yes,Yes,,
 Pillow 9.0 - 9.2,,,Yes,Yes,Yes,Yes,,
 Pillow 8.3.2 - 8.4,,,Yes,Yes,Yes,Yes,Yes,
 Pillow 8.0 - 8.3.1,,,,Yes,Yes,Yes,Yes,
 Pillow 7.0 - 7.2,,,,,Yes,Yes,Yes,Yes
@@ -5,4 +5,4 @@ Pillow 5.2 - 5.4,,Yes,Yes,Yes,Yes,,,Yes,,,
 Pillow 5.0 - 5.1,,,Yes,Yes,Yes,,,Yes,,,
 Pillow 4,,,Yes,Yes,Yes,Yes,,Yes,,,
 Pillow 2 - 3,,,,Yes,Yes,Yes,Yes,Yes,Yes,,
 Pillow < 2,,,,,,,,Yes,Yes,Yes,Yes
@@ -351,6 +351,8 @@ Instances of the :py:class:`Image` class have the following attributes:

 .. seealso:: :attr:`~Image.is_animated`, :func:`~Image.seek` and :func:`~Image.tell`

+.. autoattribute:: PIL.Image.Image.has_transparency_data
+
 Classes
 -------

@@ -41,6 +41,17 @@ while maintaining the original aspect ratio.
 See :ref:`relative-resize` for a comparison between this and similar ImageOps
 methods.

+has_transparency_data
+^^^^^^^^^^^^^^^^^^^^^
+
+Images now have :py:attr:`~PIL.Image.Image.has_transparency_data` to indicate
+whether the image has transparency data, whether in the form of an alpha
+channel, a palette with an alpha channel, or a "transparency" key in the
+:py:attr:`~PIL.Image.Image.info` dictionary.
+
+Even if this attribute is true, the image might still appear solid, if all of
+the values shown within are opaque.
+
 Security
 ========

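A minimal usage sketch of the new attribute (nothing here beyond what the property itself checks):

    from PIL import Image

    im = Image.new("RGBA", (1, 1))
    print(im.has_transparency_data)   # True: the mode itself carries an alpha channel

    im = Image.new("RGB", (1, 1))
    print(im.has_transparency_data)   # False: no alpha channel, no palette alpha, no "transparency" key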
@@ -49,4 +49,3 @@ The external dependencies on libjpeg and zlib are now required by default.
 If the headers or libraries are not found, then installation will abort
 with an error. This behaviour can be disabled with the ``--disable-libjpeg``
 and ``--disable-zlib`` flags.
-
@@ -34,7 +34,3 @@ image size can lead to a smaller allocation than expected, leading to
 arbitrary writes.

 This issue was found by Cris Neckar at Divergent Security.
-
-
-
-
@@ -20,5 +20,3 @@ CPython 3.6.1 to not work on installations of C-Python 3.6.0. This fix
 undefines PySlice_GetIndicesEx if it exists to restore compatibility
 with both 3.6.0 and 3.6.1. See https://bugs.python.org/issue29943 for
 more details.
-
-
@@ -8,4 +8,3 @@ Fixed Windows PyPy Build

 A change in the 4.2.0 cycle broke the Windows PyPy build. This has
 been fixed, and PyPy is now part of the Windows CI matrix.
-
@@ -175,6 +175,3 @@ Dark theme for docs
 ^^^^^^^^^^^^^^^^^^^

 The https://pillow.readthedocs.io documentation will use a dark theme if the user has requested the system use one. Uses the ``prefers-color-scheme`` CSS media query.
-
-
-
@@ -68,11 +68,11 @@ def bdf_char(f):
     # followed by the width in x (BBw), height in y (BBh),
     # and x and y displacement (BBxoff0, BByoff0)
     # of the lower left corner from the origin of the character.
-    width, height, x_disp, y_disp = [int(p) for p in props["BBX"].split()]
+    width, height, x_disp, y_disp = (int(p) for p in props["BBX"].split())

     # The word DWIDTH
     # followed by the width in x and y of the character in device pixels.
-    dwx, dwy = [int(p) for p in props["DWIDTH"].split()]
+    dwx, dwy = (int(p) for p in props["DWIDTH"].split())

     bbox = (
         (dwx, dwy),
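Tuple unpacking accepts any iterable, so switching the right-hand side from a list comprehension to a generator expression keeps the behaviour while skipping the intermediate list. A small sketch with an illustrative BBX value:

    props = {"BBX": "6 12 0 -2"}   # illustrative value only
    width, height, x_disp, y_disp = (int(p) for p in props["BBX"].split())
    print(width, height, x_disp, y_disp)   # 6 12 0 -2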
@@ -339,9 +339,9 @@ class EpsImageFile(ImageFile.ImageFile):
             # data start identifier (the image data follows after a single line
             # consisting only of this quoted value)
             image_data_values = byte_arr[11:bytes_read].split(None, 7)
-            columns, rows, bit_depth, mode_id = [
+            columns, rows, bit_depth, mode_id = (
                 int(value) for value in image_data_values[:4]
-            ]
+            )

             if bit_depth == 1:
                 self._mode = "1"
@@ -915,7 +915,7 @@ class Image:

         self.load()

-        has_transparency = self.info.get("transparency") is not None
+        has_transparency = "transparency" in self.info
         if not mode and self.mode == "P":
             # determine default mode
             if self.palette:
@@ -1531,6 +1531,24 @@
             rawmode = mode
         return list(self.im.getpalette(mode, rawmode))

+    @property
+    def has_transparency_data(self) -> bool:
+        """
+        Determine if an image has transparency data, whether in the form of an
+        alpha channel, a palette with an alpha channel, or a "transparency" key
+        in the info dictionary.
+
+        Note the image might still appear solid, if all of the values shown
+        within are opaque.
+
+        :returns: A boolean.
+        """
+        return (
+            self.mode in ("LA", "La", "PA", "RGBA", "RGBa")
+            or (self.mode == "P" and self.palette.mode.endswith("A"))
+            or "transparency" in self.info
+        )
+
     def apply_transparency(self):
         """
         If a P mode image has a "transparency" key in the info dictionary,
@@ -166,7 +166,7 @@ def grabclipboard():
            msg = "wl-paste or xclip is required for ImageGrab.grabclipboard() on Linux"
            raise NotImplementedError(msg)

        p = subprocess.run(args, capture_output=True)
-        p = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        p = subprocess.run(args, capture_output=True)
        err = p.stderr
        if err:
            msg = f"{args[0]} error: {err.strip().decode()}"
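capture_output=True is the documented shorthand (since Python 3.7) for passing stdout=subprocess.PIPE and stderr=subprocess.PIPE, so p.stdout and p.stderr are captured exactly as before. For example:

    import subprocess

    p = subprocess.run(["echo", "hi"], capture_output=True)
    print(p.stdout, p.stderr)   # b'hi\n' b''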
@@ -823,7 +823,7 @@ class ImageFileDirectory_v2(MutableMapping):
             try:
                 unit_size, handler = self._load_dispatch[typ]
             except KeyError:
-                logger.debug(msg + f" - unsupported type {typ}")
+                logger.debug("%s - unsupported type %s", msg, typ)
                 continue  # ignore unsupported type
             size = count * unit_size
             if size > (8 if self._bigtiff else 4):

@@ -880,7 +880,7 @@ class ImageFileDirectory_v2(MutableMapping):
             if tag == STRIPOFFSETS:
                 stripoffsets = len(entries)
             typ = self.tagtype.get(tag)
-            logger.debug(f"Tag {tag}, Type: {typ}, Value: {repr(value)}")
+            logger.debug("Tag %s, Type: %s, Value: %s", tag, typ, repr(value))
             is_ifd = typ == TiffTags.LONG and isinstance(value, dict)
             if is_ifd:
                 if self._endian == "<":

@@ -929,7 +929,7 @@ class ImageFileDirectory_v2(MutableMapping):

         # pass 2: write entries to file
         for tag, typ, count, value, data in entries:
-            logger.debug(f"{tag} {typ} {count} {repr(value)} {repr(data)}")
+            logger.debug("%s %s %s %s %s", tag, typ, count, repr(value), repr(data))
             result += self._pack("HHL4s", tag, typ, count, value)

         # -- overwrite here for multi-page --

@@ -1098,8 +1098,8 @@ class TiffImageFile(ImageFile.ImageFile):
         self._n_frames = None

         logger.debug("*** TiffImageFile._open ***")
-        logger.debug(f"- __first: {self.__first}")
-        logger.debug(f"- ifh: {repr(ifh)}")  # Use repr to avoid str(bytes)
+        logger.debug("- __first: %s", self.__first)
+        logger.debug("- ifh: %s", repr(ifh))  # Use repr to avoid str(bytes)

         # and load the first frame
         self._seek(0)

@@ -1137,12 +1137,15 @@ class TiffImageFile(ImageFile.ImageFile):
             msg = "no more images in TIFF file"
             raise EOFError(msg)
         logger.debug(
-            f"Seeking to frame {frame}, on frame {self.__frame}, "
-            f"__next {self.__next}, location: {self.fp.tell()}"
+            "Seeking to frame %s, on frame %s, __next %s, location: %s",
+            frame,
+            self.__frame,
+            self.__next,
+            self.fp.tell(),
         )
         self.fp.seek(self.__next)
         self._frame_pos.append(self.__next)
-        logger.debug("Loading tags, location: %s" % self.fp.tell())
+        logger.debug("Loading tags, location: %s", self.fp.tell())
         self.tag_v2.load(self.fp)
         if self.tag_v2.next in self._frame_pos:
             # This IFD has already been processed

@@ -1330,18 +1333,18 @@ class TiffImageFile(ImageFile.ImageFile):
         fillorder = self.tag_v2.get(FILLORDER, 1)

         logger.debug("*** Summary ***")
-        logger.debug(f"- compression: {self._compression}")
-        logger.debug(f"- photometric_interpretation: {photo}")
-        logger.debug(f"- planar_configuration: {self._planar_configuration}")
-        logger.debug(f"- fill_order: {fillorder}")
-        logger.debug(f"- YCbCr subsampling: {self.tag.get(YCBCRSUBSAMPLING)}")
+        logger.debug("- compression: %s", self._compression)
+        logger.debug("- photometric_interpretation: %s", photo)
+        logger.debug("- planar_configuration: %s", self._planar_configuration)
+        logger.debug("- fill_order: %s", fillorder)
+        logger.debug("- YCbCr subsampling: %s", self.tag.get(YCBCRSUBSAMPLING))

         # size
         xsize = int(self.tag_v2.get(IMAGEWIDTH))
         ysize = int(self.tag_v2.get(IMAGELENGTH))
         self._size = xsize, ysize

-        logger.debug(f"- size: {self.size}")
+        logger.debug("- size: %s", self.size)

         sample_format = self.tag_v2.get(SAMPLEFORMAT, (1,))
         if len(sample_format) > 1 and max(sample_format) == min(sample_format) == 1:

@@ -1397,7 +1400,7 @@ class TiffImageFile(ImageFile.ImageFile):
             bps_tuple,
             extra_tuple,
         )
-        logger.debug(f"format key: {key}")
+        logger.debug("format key: %s", key)
         try:
             self._mode, rawmode = OPEN_INFO[key]
         except KeyError as e:

@@ -1405,8 +1408,8 @@ class TiffImageFile(ImageFile.ImageFile):
             msg = "unknown pixel mode"
             raise SyntaxError(msg) from e

-        logger.debug(f"- raw mode: {rawmode}")
-        logger.debug(f"- pil mode: {self.mode}")
+        logger.debug("- raw mode: %s", rawmode)
+        logger.debug("- pil mode: %s", self.mode)

         self.info["compression"] = self._compression

@@ -1447,7 +1450,7 @@ class TiffImageFile(ImageFile.ImageFile):
         if fillorder == 2:
             # Replace fillorder with fillorder=1
             key = key[:3] + (1,) + key[4:]
-            logger.debug(f"format key: {key}")
+            logger.debug("format key: %s", key)
             # this should always work, since all the
             # fillorder==2 modes have a corresponding
             # fillorder=1 mode

@@ -1610,7 +1613,7 @@ def _save(im, fp, filename):
         info = exif
     else:
         info = {}
-    logger.debug("Tiffinfo Keys: %s" % list(info))
+    logger.debug("Tiffinfo Keys: %s", list(info))
     if isinstance(info, ImageFileDirectory_v1):
         info = info.to_v2()
     for key in info:

@@ -1743,7 +1746,7 @@ def _save(im, fp, filename):
             ifd[JPEGQUALITY] = quality

         logger.debug("Saving using libtiff encoder")
-        logger.debug("Items: %s" % sorted(ifd.items()))
+        logger.debug("Items: %s", sorted(ifd.items()))
         _fp = 0
         if hasattr(fp, "fileno"):
             try:

@@ -1811,7 +1814,7 @@ def _save(im, fp, filename):
         if SAMPLEFORMAT in atts and len(atts[SAMPLEFORMAT]) == 1:
             atts[SAMPLEFORMAT] = atts[SAMPLEFORMAT][0]

-        logger.debug("Converted items: %s" % sorted(atts.items()))
+        logger.debug("Converted items: %s", sorted(atts.items()))

         # libtiff always expects the bytes in native order.
         # we're storing image byte order. So, if the rawmode
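All of the TIFF hunks above follow the same pattern: instead of building the message eagerly with an f-string or %-interpolation at the call site, the values are passed as arguments to logger.debug, so formatting is deferred until logging knows the record will actually be emitted (presumably what the flake8-logging plugin added earlier in this commit is meant to encourage). A minimal sketch of the pattern:

    import logging

    logger = logging.getLogger(__name__)

    key = ("II", 2, (1,), 1, (8,), ())
    logger.debug("format key: %s", key)   # the string is only formatted if DEBUG is enabled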
@@ -74,9 +74,6 @@ class WebPImageFile(ImageFile.ImageFile):
             self.info["background"] = (bg_r, bg_g, bg_b, bg_a)
             self.n_frames = frame_count
             self.is_animated = self.n_frames > 1
-            ret = self._decoder.get_next()
-            if ret is not None:
-                self.info["duration"] = ret[1]
             self._mode = "RGB" if mode == "RGBX" else mode
             self.rawmode = mode
             self.tile = []

@@ -93,7 +90,7 @@ class WebPImageFile(ImageFile.ImageFile):
             self.info["xmp"] = xmp

         # Initialize seek state
-        self._reset()
+        self._reset(reset=False)

     def _getexif(self):
         if "exif" not in self.info:

@@ -116,8 +113,9 @@ class WebPImageFile(ImageFile.ImageFile):
         # Set logical frame to requested position
         self.__logical_frame = frame

-    def _reset(self):
-        self._decoder.reset()
+    def _reset(self, reset=True):
+        if reset:
+            self._decoder.reset()
         self.__physical_frame = 0
         self.__loaded = -1
         self.__timestamp = 0

@@ -332,12 +330,7 @@ def _save(im, fp, filename):
     exact = 1 if im.encoderinfo.get("exact") else 0

     if im.mode not in _VALID_WEBP_LEGACY_MODES:
-        alpha = (
-            "A" in im.mode
-            or "a" in im.mode
-            or (im.mode == "P" and "transparency" in im.info)
-        )
-        im = im.convert("RGBA" if alpha else "RGB")
+        im = im.convert("RGBA" if im.has_transparency_data else "RGB")

     data = _webp.WebPEncode(
         im.tobytes(),
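The removed inline alpha check is replaced by the Image.has_transparency_data property introduced earlier in this commit; unlike the old expression, the property also treats a P image whose palette mode ends in "A" as transparent, which is what the new test_roundtrip_rgba_palette test exercises. The conversion decision reduces to:

    # Sketch of the new decision for modes outside _VALID_WEBP_LEGACY_MODES:
    im = im.convert("RGBA" if im.has_transparency_data else "RGB")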
@@ -36,4 +36,4 @@ typedef struct {
     /* image data size from file descriptor */
     long bufsize;

 } SGISTATE;
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import argparse
 import os
 import platform
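With from __future__ import annotations, annotations in this module are not evaluated at runtime (PEP 563), so the PEP 604 unions (str | None) and built-in generics (list[str], dict[str, str]) used in the hunks below remain importable on Python 3.8. A minimal sketch:

    from __future__ import annotations


    def lookup(root: str | None = None) -> dict[str, str] | None:   # hypothetical function; fine on 3.8 with the future import
        return {"root": root} if root else None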
@@ -7,42 +9,41 @@ import struct
 import subprocess


-def cmd_cd(path):
+def cmd_cd(path: str) -> str:
     return f"cd /D {path}"


-def cmd_set(name, value):
+def cmd_set(name: str, value: str) -> str:
     return f"set {name}={value}"


-def cmd_append(name, value):
+def cmd_append(name: str, value: str) -> str:
     op = "path " if name == "PATH" else f"set {name}="
     return op + f"%{name}%;{value}"


-def cmd_copy(src, tgt):
+def cmd_copy(src: str, tgt: str) -> str:
     return f'copy /Y /B "{src}" "{tgt}"'


-def cmd_xcopy(src, tgt):
+def cmd_xcopy(src: str, tgt: str) -> str:
     return f'xcopy /Y /E "{src}" "{tgt}"'


-def cmd_mkdir(path):
+def cmd_mkdir(path: str) -> str:
     return f'mkdir "{path}"'


-def cmd_rmdir(path):
+def cmd_rmdir(path: str) -> str:
     return f'rmdir /S /Q "{path}"'


-def cmd_nmake(makefile=None, target="", params=None):
-    if params is None:
-        params = ""
-    elif isinstance(params, (list, tuple)):
-        params = " ".join(params)
-    else:
-        params = str(params)
+def cmd_nmake(
+    makefile: str | None = None,
+    target: str = "",
+    params: list[str] | None = None,
+) -> str:
+    params = "" if params is None else " ".join(params)

     return " ".join(
         [
@@ -55,7 +56,7 @@ def cmd_nmake(makefile=None, target="", params=None):
     )


-def cmds_cmake(target, *params):
+def cmds_cmake(target: str | tuple[str, ...] | list[str], *params) -> list[str]:
     if not isinstance(target, str):
         target = " ".join(target)

@@ -80,8 +81,11 @@


 def cmd_msbuild(
-    file, configuration="Release", target="Build", platform="{msbuild_arch}"
-):
+    file: str,
+    configuration: str = "Release",
+    target: str = "Build",
+    platform: str = "{msbuild_arch}",
+) -> str:
     return " ".join(
         [
             "{msbuild}",
@@ -96,14 +100,14 @@

 SF_PROJECTS = "https://sourceforge.net/projects"

-architectures = {
+ARCHITECTURES = {
     "x86": {"vcvars_arch": "x86", "msbuild_arch": "Win32"},
     "x64": {"vcvars_arch": "x86_amd64", "msbuild_arch": "x64"},
     "ARM64": {"vcvars_arch": "x86_arm64", "msbuild_arch": "ARM64"},
 }

 # dependencies, listed in order of compilation
-deps = {
+DEPS = {
     "libjpeg": {
         "url": SF_PROJECTS
         + "/libjpeg-turbo/files/3.0.0/libjpeg-turbo-3.0.0.tar.gz/download",
@@ -365,7 +369,7 @@


 # based on distutils._msvccompiler from CPython 3.7.4
-def find_msvs():
+def find_msvs() -> dict[str, str] | None:
     root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
     if not root:
         print("Program Files not found")
@@ -421,25 +425,40 @@
 }


-def extract_dep(url, filename):
-    import tarfile
-    import urllib.request
-    import zipfile
-
-    file = os.path.join(args.depends_dir, filename)
-    if not os.path.exists(file):
-        ex = None
-        for i in range(3):
-            try:
-                print("Fetching %s (attempt %d)..." % (url, i + 1))
-                content = urllib.request.urlopen(url).read()
-                with open(file, "wb") as f:
-                    f.write(content)
-                break
-            except urllib.error.URLError as e:
-                ex = e
-        else:
-            raise RuntimeError(ex)
+def download_dep(url: str, file: str) -> None:
+    import urllib.request
+
+    ex = None
+    for i in range(3):
+        try:
+            print(f"Fetching {url} (attempt {i + 1})...")
+            content = urllib.request.urlopen(url).read()
+            with open(file, "wb") as f:
+                f.write(content)
+            break
+        except urllib.error.URLError as e:
+            ex = e
+    else:
+        raise RuntimeError(ex)
+
+
+def extract_dep(url: str, filename: str) -> None:
+    import tarfile
+    import zipfile
+
+    file = os.path.join(args.depends_dir, filename)
+    if not os.path.exists(file):
+        # First try our mirror
+        mirror_url = (
+            f"https://raw.githubusercontent.com/"
+            f"python-pillow/pillow-depends/main/{filename}"
+        )
+        try:
+            download_dep(mirror_url, file)
+        except RuntimeError as exc:
+            # Otherwise try upstream
+            print(exc)
+            download_dep(url, file)

     print("Extracting " + filename)
     sources_dir_abs = os.path.abspath(sources_dir)
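The retry loop in download_dep relies on Python's for ... else: the else branch runs only when the loop finishes without a break, i.e. when all three attempts raised. A stripped-down sketch of that control flow, with a hypothetical fetch() standing in for urllib.request.urlopen(url).read():

    last_error = None
    for attempt in range(3):
        try:
            data = fetch()   # hypothetical callable
            break            # success: the else branch below is skipped
        except OSError as exc:
            last_error = exc
    else:
        raise RuntimeError(last_error)   # reached only if every attempt failed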
@@ -466,7 +485,7 @@ def extract_dep(url, filename):
             raise RuntimeError(msg)


-def write_script(name, lines):
+def write_script(name: str, lines: list[str]) -> None:
     name = os.path.join(args.build_dir, name)
     lines = [line.format(**prefs) for line in lines]
     print("Writing " + name)

@@ -477,7 +496,7 @@ def write_script(name, lines):
         print("    " + line)


-def get_footer(dep):
+def get_footer(dep: dict) -> list[str]:
     lines = []
     for out in dep.get("headers", []):
         lines.append(cmd_copy(out, "{inc_dir}"))

@@ -488,7 +507,7 @@ def get_footer(dep):
     return lines


-def build_env():
+def build_env() -> None:
     lines = [
         "if defined DISTUTILS_USE_SDK goto end",
         cmd_set("INCLUDE", "{inc_dir}"),

@@ -504,8 +523,8 @@ def build_env():
     write_script("build_env.cmd", lines)


-def build_dep(name):
-    dep = deps[name]
+def build_dep(name: str) -> str:
+    dep = DEPS[name]
     dir = dep["dir"]
     file = f"build_dep_{name}.cmd"

@@ -554,9 +573,9 @@ def build_dep(name):
     return file


-def build_dep_all():
+def build_dep_all() -> None:
     lines = [r'call "{build_dir}\build_env.cmd"']
-    for dep_name in deps:
+    for dep_name in DEPS:
         print()
         if dep_name in disabled:
             print(f"Skipping disabled dependency {dep_name}")

@@ -602,7 +621,7 @@ if __name__ == "__main__":
     )
     parser.add_argument(
         "--architecture",
-        choices=architectures,
+        choices=ARCHITECTURES,
         default=os.environ.get(
             "ARCHITECTURE",
             (

@@ -634,7 +653,7 @@ if __name__ == "__main__":
     )
     args = parser.parse_args()

-    arch_prefs = architectures[args.architecture]
+    arch_prefs = ARCHITECTURES[args.architecture]
     print("Target architecture:", args.architecture)

     msvs = find_msvs()

@@ -693,7 +712,7 @@ if __name__ == "__main__":
         # TODO find NASM automatically
     }

-    for k, v in deps.items():
+    for k, v in DEPS.items():
         prefs[f"dir_{k}"] = os.path.join(sources_dir, v["dir"])

     print()