ci: drop github download script

Easier to do interactively, now that all the artifacts are packaged in
the same archive.
Daniele Varrazzo 2023-04-03 05:10:36 +02:00
parent 46238ba351
commit 0b01ded426
2 changed files with 5 additions and 103 deletions


@@ -49,10 +49,11 @@ How to make a psycopg2 release
 .. __: https://github.com/psycopg/psycopg2/actions/workflows/packages.yml
 .. __: https://ci.appveyor.com/project/psycopg/psycopg2/settings
-- When the workflows have finished download the packages using the
-  ``download_packages_{github|appveyor}.py`` scripts from the
-  ``scripts/build`` directory. They will be saved in a
-  ``wheelhouse/psycopg2-${VERSION}`` directory.
+- When the workflows have finished download the packages from the job
+  artifacts. For Appveyor you can use the ``download_packages_appveyor.py``
+  script from the ``scripts/build`` directory. They will be saved in a
+  ``wheelhouse/psycopg2-${VERSION}`` directory. For Github just download it
+  from the web interface (it's a single file).
 - Only for stable packages: upload the signed packages on PyPI::
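
For reference, the Github step can still be scripted against the Actions API if the interactive download is ever impractical; a minimal sketch, condensed from the script removed below (it assumes ``GITHUB_TOKEN`` is exported and the run id is copied from the workflow run page URL)::

    # Hypothetical helper: download every artifact archive of one Actions run
    # into a local wheelhouse/ directory.
    import io
    import os
    import sys
    from pathlib import Path
    from zipfile import ZipFile

    import requests

    run_id = sys.argv[1]  # taken from the workflow run page URL
    s = requests.Session()
    s.headers["Accept"] = "application/vnd.github.v3+json"
    s.headers["Authorization"] = f"token {os.environ['GITHUB_TOKEN']}"

    # List the artifacts attached to the run, then fetch and unpack each archive.
    resp = s.get(f"https://api.github.com/repos/psycopg/psycopg2/actions/runs/{run_id}/artifacts")
    resp.raise_for_status()

    dest = Path("wheelhouse")
    dest.mkdir(parents=True, exist_ok=True)
    for artifact in resp.json()["artifacts"]:
        archive = s.get(artifact["archive_download_url"])
        archive.raise_for_status()
        ZipFile(io.BytesIO(archive.content)).extractall(dest)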


@@ -1,99 +0,0 @@
#!/usr/bin/env python
"""Download packages from github actions artifacts
"""
import io
import os
import sys
import logging
import datetime as dt
from pathlib import Path
from zipfile import ZipFile

import requests

logger = logging.getLogger()
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")

REPOS = "psycopg/psycopg2"
WORKFLOW_NAME = "Build packages"


class ScriptError(Exception):
    """Controlled exception raised by the script."""


def main():
    try:
        token = os.environ["GITHUB_TOKEN"]
    except KeyError:
        raise ScriptError("please set a GITHUB_TOKEN to download artifacts")

    s = requests.Session()
    s.headers["Accept"] = "application/vnd.github.v3+json"
    s.headers["Authorization"] = f"token {token}"

    logger.info("looking for recent runs")
    resp = s.get(f"https://api.github.com/repos/{REPOS}/actions/runs?per_page=10")
    resp.raise_for_status()
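
    # Runs are returned most recent first: pick the first "Build packages" run found.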
    for run in resp.json()["workflow_runs"]:
        if run["name"] == WORKFLOW_NAME:
            break
    else:
        raise ScriptError(f"couldn't find {WORKFLOW_NAME!r} in recent runs")

    if run["status"] != "completed":
        raise ScriptError(f"run #{run['run_number']} is in status {run['status']}")

    updated_at = dt.datetime.fromisoformat(run["updated_at"].replace("Z", "+00:00"))
    now = dt.datetime.now(dt.timezone.utc)
    age = now - updated_at
    logger.info(f"found run #{run['run_number']} updated {pretty_interval(age)} ago")
    if age > dt.timedelta(hours=6):
        logger.warning("maybe it's a bit old?")
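
    # Fetch the artifacts attached to the run and unpack each zip archive into
    # the wheelhouse directory.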
logger.info(f"looking for run #{run['run_number']} artifacts")
resp = s.get(f"{run['url']}/artifacts")
resp.raise_for_status()
artifacts = resp.json()["artifacts"]
dest = Path("wheelhouse")
if not dest.exists():
logger.info(f"creating dir {dest}")
dest.mkdir(parents=True)
for artifact in artifacts:
logger.info(f"downloading {artifact['name']} archive")
zip_url = artifact["archive_download_url"]
resp = s.get(zip_url)
with ZipFile(io.BytesIO(resp.content)) as zf:
logger.info("extracting archive content")
zf.extractall(dest)
logger.info(f"now you can run: 'twine upload -s {dest}/*'")


def pretty_interval(td):
    """Return a rough human-readable description of the timedelta *td*."""
    secs = td.total_seconds()
    mins, secs = divmod(secs, 60)
    hours, mins = divmod(mins, 60)
    days, hours = divmod(hours, 24)
    if days:
        return f"{int(days)} days, {int(hours)} hours, {int(mins)} minutes"
    elif hours:
        return f"{int(hours)} hours, {int(mins)} minutes"
    else:
        return f"{int(mins)} minutes"


if __name__ == "__main__":
    try:
        sys.exit(main())
    except ScriptError as e:
        logger.error("%s", e)
        sys.exit(1)
    except KeyboardInterrupt:
        logger.info("user interrupt")
        sys.exit(1)