mirror of https://github.com/python-pillow/Pillow.git
synced 2024-12-26 09:56:17 +03:00

Merge pull request #7407 from hugovk/ci-appveyor-depends

AppVeyor: don't download huge pillow-depends.zip

commit b723e9e62e
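This merge drops the single large pillow-depends.zip download from the AppVeyor install step and fetches only the individual files CI needs (the test-images archive and the NASM zip). In the build script below, the retry loop moves into a new download_dep() helper, and extract_dep() now tries the per-file pillow-depends mirror on raw.githubusercontent.com before falling back to the upstream URL. A minimal standalone sketch of that fetch-with-fallback pattern, assuming only the behaviour shown in the diff (the fetch_dep name and depends_dir parameter are illustrative, not the script's actual interface):

import os
import urllib.error
import urllib.request


def download_dep(url: str, file: str) -> None:
    # Try the URL up to three times; raise RuntimeError if every attempt fails.
    ex = None
    for i in range(3):
        try:
            print(f"Fetching {url} (attempt {i + 1})...")
            content = urllib.request.urlopen(url).read()
            with open(file, "wb") as f:
                f.write(content)
            return
        except urllib.error.URLError as e:
            ex = e
    raise RuntimeError(ex)


def fetch_dep(url: str, filename: str, depends_dir: str = ".") -> str:
    # Illustrative wrapper: the real script folds this logic into extract_dep().
    # Prefer the per-file pillow-depends mirror; fall back to upstream on failure.
    file = os.path.join(depends_dir, filename)
    if not os.path.exists(file):
        mirror_url = (
            f"https://raw.githubusercontent.com/"
            f"python-pillow/pillow-depends/main/{filename}"
        )
        try:
            download_dep(mirror_url, file)
        except RuntimeError as exc:
            print(exc)
            download_dep(url, file)
    return file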
@@ -21,13 +21,11 @@ environment:
 install:
 - '%PYTHON%\%EXECUTABLE% --version'
 - '%PYTHON%\%EXECUTABLE% -m pip install --upgrade pip'
-- curl -fsSL -o pillow-depends.zip https://github.com/python-pillow/pillow-depends/archive/main.zip
 - curl -fsSL -o pillow-test-images.zip https://github.com/python-pillow/test-images/archive/main.zip
-- 7z x pillow-depends.zip -oc:\
 - 7z x pillow-test-images.zip -oc:\
-- mv c:\pillow-depends-main c:\pillow-depends
 - xcopy /S /Y c:\test-images-main\* c:\pillow\tests\images
-- 7z x ..\pillow-depends\nasm-2.16.01-win64.zip -oc:\
+- curl -fsSL -o nasm-win64.zip https://raw.githubusercontent.com/python-pillow/pillow-depends/main/nasm-2.16.01-win64.zip
+- 7z x nasm-win64.zip -oc:\
 - choco install ghostscript --version=10.0.0.20230317
 - path c:\nasm-2.16.01;C:\Program Files\gs\gs10.00.0\bin;%PATH%
 - cd c:\pillow\winbuild\

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import argparse
 import os
 import platform
@@ -7,42 +9,41 @@ import struct
 import subprocess
 
 
-def cmd_cd(path):
+def cmd_cd(path: str) -> str:
     return f"cd /D {path}"
 
 
-def cmd_set(name, value):
+def cmd_set(name: str, value: str) -> str:
     return f"set {name}={value}"
 
 
-def cmd_append(name, value):
+def cmd_append(name: str, value: str) -> str:
     op = "path " if name == "PATH" else f"set {name}="
     return op + f"%{name}%;{value}"
 
 
-def cmd_copy(src, tgt):
+def cmd_copy(src: str, tgt: str) -> str:
     return f'copy /Y /B "{src}" "{tgt}"'
 
 
-def cmd_xcopy(src, tgt):
+def cmd_xcopy(src: str, tgt: str) -> str:
     return f'xcopy /Y /E "{src}" "{tgt}"'
 
 
-def cmd_mkdir(path):
+def cmd_mkdir(path: str) -> str:
     return f'mkdir "{path}"'
 
 
-def cmd_rmdir(path):
+def cmd_rmdir(path: str) -> str:
     return f'rmdir /S /Q "{path}"'
 
 
-def cmd_nmake(makefile=None, target="", params=None):
-    if params is None:
-        params = ""
-    elif isinstance(params, (list, tuple)):
-        params = " ".join(params)
-    else:
-        params = str(params)
+def cmd_nmake(
+    makefile: str | None = None,
+    target: str = "",
+    params: list[str] | None = None,
+) -> str:
+    params = "" if params is None else " ".join(params)
 
     return " ".join(
         [
@@ -55,7 +56,7 @@ def cmd_nmake(makefile=None, target="", params=None):
     )
 
 
-def cmds_cmake(target, *params):
+def cmds_cmake(target: str | tuple[str, ...] | list[str], *params) -> list[str]:
     if not isinstance(target, str):
         target = " ".join(target)
 
@@ -80,8 +81,11 @@ def cmds_cmake(target, *params):
 
 
 def cmd_msbuild(
-    file, configuration="Release", target="Build", platform="{msbuild_arch}"
-):
+    file: str,
+    configuration: str = "Release",
+    target: str = "Build",
+    platform: str = "{msbuild_arch}",
+) -> str:
     return " ".join(
         [
             "{msbuild}",
@@ -96,14 +100,14 @@ def cmd_msbuild(
 
 SF_PROJECTS = "https://sourceforge.net/projects"
 
-architectures = {
+ARCHITECTURES = {
     "x86": {"vcvars_arch": "x86", "msbuild_arch": "Win32"},
     "x64": {"vcvars_arch": "x86_amd64", "msbuild_arch": "x64"},
     "ARM64": {"vcvars_arch": "x86_arm64", "msbuild_arch": "ARM64"},
 }
 
 # dependencies, listed in order of compilation
-deps = {
+DEPS = {
     "libjpeg": {
         "url": SF_PROJECTS
         + "/libjpeg-turbo/files/3.0.0/libjpeg-turbo-3.0.0.tar.gz/download",
@@ -365,7 +369,7 @@ deps = {
 
 
 # based on distutils._msvccompiler from CPython 3.7.4
-def find_msvs():
+def find_msvs() -> dict[str, str] | None:
     root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
     if not root:
         print("Program Files not found")
@@ -421,17 +425,13 @@ def find_msvs():
     }
 
 
-def extract_dep(url, filename):
-    import tarfile
+def download_dep(url: str, file: str) -> None:
     import urllib.request
-    import zipfile
-
-    file = os.path.join(args.depends_dir, filename)
     if not os.path.exists(file):
         ex = None
         for i in range(3):
             try:
-                print("Fetching %s (attempt %d)..." % (url, i + 1))
+                print(f"Fetching {url} (attempt {i + 1})...")
                 content = urllib.request.urlopen(url).read()
                 with open(file, "wb") as f:
                     f.write(content)
@@ -441,6 +441,25 @@ def extract_dep(url, filename):
         else:
             raise RuntimeError(ex)
 
+
+def extract_dep(url: str, filename: str) -> None:
+    import tarfile
+    import zipfile
+
+    file = os.path.join(args.depends_dir, filename)
+    if not os.path.exists(file):
+        # First try our mirror
+        mirror_url = (
+            f"https://raw.githubusercontent.com/"
+            f"python-pillow/pillow-depends/main/{filename}"
+        )
+        try:
+            download_dep(mirror_url, file)
+        except RuntimeError as exc:
+            # Otherwise try upstream
+            print(exc)
+            download_dep(url, file)
+
     print("Extracting " + filename)
     sources_dir_abs = os.path.abspath(sources_dir)
     if filename.endswith(".zip"):
@@ -466,7 +485,7 @@ def extract_dep(url, filename):
         raise RuntimeError(msg)
 
 
-def write_script(name, lines):
+def write_script(name: str, lines: list[str]) -> None:
     name = os.path.join(args.build_dir, name)
     lines = [line.format(**prefs) for line in lines]
     print("Writing " + name)
@@ -477,7 +496,7 @@ def write_script(name, lines):
         print("    " + line)
 
 
-def get_footer(dep):
+def get_footer(dep: dict) -> list[str]:
     lines = []
     for out in dep.get("headers", []):
         lines.append(cmd_copy(out, "{inc_dir}"))
@@ -488,7 +507,7 @@ def get_footer(dep):
     return lines
 
 
-def build_env():
+def build_env() -> None:
     lines = [
         "if defined DISTUTILS_USE_SDK goto end",
         cmd_set("INCLUDE", "{inc_dir}"),
@@ -504,8 +523,8 @@ def build_env():
     write_script("build_env.cmd", lines)
 
 
-def build_dep(name):
-    dep = deps[name]
+def build_dep(name: str) -> str:
+    dep = DEPS[name]
     dir = dep["dir"]
     file = f"build_dep_{name}.cmd"
 
@@ -554,9 +573,9 @@ def build_dep(name):
     return file
 
 
-def build_dep_all():
+def build_dep_all() -> None:
     lines = [r'call "{build_dir}\build_env.cmd"']
-    for dep_name in deps:
+    for dep_name in DEPS:
         print()
         if dep_name in disabled:
             print(f"Skipping disabled dependency {dep_name}")
@@ -602,7 +621,7 @@ if __name__ == "__main__":
     )
     parser.add_argument(
         "--architecture",
-        choices=architectures,
+        choices=ARCHITECTURES,
         default=os.environ.get(
             "ARCHITECTURE",
             (
@@ -634,7 +653,7 @@ if __name__ == "__main__":
     )
     args = parser.parse_args()
 
-    arch_prefs = architectures[args.architecture]
+    arch_prefs = ARCHITECTURES[args.architecture]
     print("Target architecture:", args.architecture)
 
     msvs = find_msvs()
@@ -693,7 +712,7 @@ if __name__ == "__main__":
         # TODO find NASM automatically
     }
 
-    for k, v in deps.items():
+    for k, v in DEPS.items():
         prefs[f"dir_{k}"] = os.path.join(sources_dir, v["dir"])
 
     print()