mirror of
https://github.com/psycopg/psycopg2.git
synced 2025-01-31 09:24:07 +03:00
Merge pull request #1772 from psycopg/ci-vcpkg
Package psycopg2-binary for windows using vcpkg libpq
This commit is contained in:
commit
ed4ba11d17
|
@ -1,19 +0,0 @@
|
||||||
This file is a simple placeholder for forcing the appveyor build cache
|
|
||||||
to invalidate itself since appveyor.yml changes more frequently then
|
|
||||||
the cache needs updating. Note, the versions list here can be
|
|
||||||
different than what is indicated in appveyor.yml.
|
|
||||||
|
|
||||||
To invalidate the cache, update this file and check it into git.
|
|
||||||
|
|
||||||
|
|
||||||
Currently used modules built in the cache:
|
|
||||||
|
|
||||||
- OPENSSL_VERSION: 1.1.1w
|
|
||||||
- POSTGRES_VERSION: 16.0
|
|
||||||
|
|
||||||
|
|
||||||
NOTE: to zap the cache manually you can also use:
|
|
||||||
|
|
||||||
curl -X DELETE -H "Authorization: Bearer $APPVEYOR_TOKEN" -H "Content-Type: application/json" https://ci.appveyor.com/api/projects/psycopg/psycopg2/buildcache
|
|
||||||
|
|
||||||
with the token from https://ci.appveyor.com/api-token
|
|
|
@ -1,83 +0,0 @@
|
||||||
version: 2.x.{build}
|
|
||||||
|
|
||||||
clone_folder: C:\Project
|
|
||||||
|
|
||||||
# We use the configuration to specify the package name
|
|
||||||
configuration:
|
|
||||||
- psycopg2
|
|
||||||
- psycopg2-binary
|
|
||||||
|
|
||||||
environment:
|
|
||||||
matrix:
|
|
||||||
# For Python versions available on Appveyor, see
|
|
||||||
# https://www.appveyor.com/docs/windows-images-software/#python
|
|
||||||
- {PY_VER: "312", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
|
|
||||||
- {PY_VER: "312", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
|
|
||||||
- {PY_VER: "311", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
|
|
||||||
- {PY_VER: "311", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
|
|
||||||
- {PY_VER: "310", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
|
|
||||||
- {PY_VER: "310", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
|
|
||||||
- {PY_VER: "39", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
|
|
||||||
- {PY_VER: "39", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
|
|
||||||
|
|
||||||
WORKFLOW: packages
|
|
||||||
|
|
||||||
OPENSSL_VERSION: "1_1_1w"
|
|
||||||
POSTGRES_VERSION: "16_0"
|
|
||||||
|
|
||||||
PSYCOPG2_TESTDB: psycopg2_test
|
|
||||||
PSYCOPG2_TESTDB_USER: postgres
|
|
||||||
PSYCOPG2_TESTDB_HOST: localhost
|
|
||||||
|
|
||||||
PGUSER: postgres
|
|
||||||
PGPASSWORD: Password12!
|
|
||||||
PGSSLMODE: require
|
|
||||||
|
|
||||||
# Add CWD to perl library path for PostgreSQL build on VS2019
|
|
||||||
PERL5LIB: .
|
|
||||||
|
|
||||||
# Select according to the service enabled
|
|
||||||
POSTGRES_DIR: C:\Program Files\PostgreSQL\13\
|
|
||||||
|
|
||||||
matrix:
|
|
||||||
fast_finish: false
|
|
||||||
|
|
||||||
services:
|
|
||||||
# Note: if you change this service also change POSTGRES_DIR
|
|
||||||
- postgresql13
|
|
||||||
|
|
||||||
cache:
|
|
||||||
# Rebuild cache if following file changes
|
|
||||||
# (See the file to zap the cache manually)
|
|
||||||
- C:\Others -> .appveyor\cache_rebuild
|
|
||||||
|
|
||||||
# Script called before repo cloning
|
|
||||||
# init:
|
|
||||||
|
|
||||||
# Repository gets cloned, Cache is restored
|
|
||||||
|
|
||||||
install:
|
|
||||||
- "py scripts\\build\\appveyor.py install"
|
|
||||||
|
|
||||||
# PostgreSQL server starts now
|
|
||||||
|
|
||||||
build: "off"
|
|
||||||
|
|
||||||
build_script:
|
|
||||||
- "py scripts\\build\\appveyor.py build_script"
|
|
||||||
|
|
||||||
after_build:
|
|
||||||
- "py scripts\\build\\appveyor.py after_build"
|
|
||||||
|
|
||||||
before_test:
|
|
||||||
- "py scripts\\build\\appveyor.py before_test"
|
|
||||||
|
|
||||||
test_script:
|
|
||||||
- "py scripts\\build\\appveyor.py test_script"
|
|
||||||
|
|
||||||
artifacts:
|
|
||||||
- path: dist\psycopg2-*\*.whl
|
|
||||||
name: wheel
|
|
||||||
|
|
||||||
|
|
||||||
# vim: set ts=4 sts=4 sw=4:
|
|
|
@ -1,74 +0,0 @@
|
||||||
version: 2.x.{build}
|
|
||||||
|
|
||||||
clone_folder: C:\Project
|
|
||||||
|
|
||||||
environment:
|
|
||||||
matrix:
|
|
||||||
# For Python versions available on Appveyor, see
|
|
||||||
# https://www.appveyor.com/docs/windows-images-software/#python
|
|
||||||
- {PY_VER: "312", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
|
|
||||||
- {PY_VER: "312", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
|
|
||||||
- {PY_VER: "311", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
|
|
||||||
- {PY_VER: "311", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
|
|
||||||
- {PY_VER: "310", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
|
|
||||||
- {PY_VER: "310", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
|
|
||||||
- {PY_VER: "39", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
|
|
||||||
- {PY_VER: "39", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
|
|
||||||
|
|
||||||
WORKFLOW: tests
|
|
||||||
|
|
||||||
OPENSSL_VERSION: "1_1_1w"
|
|
||||||
POSTGRES_VERSION: "16_0"
|
|
||||||
|
|
||||||
PSYCOPG2_TESTDB: psycopg2_test
|
|
||||||
PSYCOPG2_TESTDB_USER: postgres
|
|
||||||
PSYCOPG2_TESTDB_HOST: localhost
|
|
||||||
|
|
||||||
PGUSER: postgres
|
|
||||||
PGPASSWORD: Password12!
|
|
||||||
PGSSLMODE: require
|
|
||||||
|
|
||||||
# Add CWD to perl library path for PostgreSQL build on VS2019
|
|
||||||
PERL5LIB: .
|
|
||||||
|
|
||||||
# Select according to the service enabled
|
|
||||||
POSTGRES_DIR: C:\Program Files\PostgreSQL\13\
|
|
||||||
|
|
||||||
matrix:
|
|
||||||
fast_finish: false
|
|
||||||
|
|
||||||
services:
|
|
||||||
# Note: if you change this service also change POSTGRES_DIR
|
|
||||||
- postgresql13
|
|
||||||
|
|
||||||
cache:
|
|
||||||
# Rebuild cache if following file changes
|
|
||||||
# (See the file to zap the cache manually)
|
|
||||||
- C:\Others -> .appveyor\cache_rebuild
|
|
||||||
|
|
||||||
# Script called before repo cloning
|
|
||||||
# init:
|
|
||||||
|
|
||||||
# Repository gets cloned, Cache is restored
|
|
||||||
|
|
||||||
install:
|
|
||||||
- py scripts\\build\\appveyor.py install"
|
|
||||||
|
|
||||||
# PostgreSQL server starts now
|
|
||||||
|
|
||||||
build: "off"
|
|
||||||
|
|
||||||
build_script:
|
|
||||||
- py scripts\\build\\appveyor.py build_script"
|
|
||||||
|
|
||||||
after_build:
|
|
||||||
- py scripts\\build\\appveyor.py after_build"
|
|
||||||
|
|
||||||
before_test:
|
|
||||||
- py scripts\\build\\appveyor.py before_test"
|
|
||||||
|
|
||||||
test_script:
|
|
||||||
- py scripts\\build\\appveyor.py test_script"
|
|
||||||
|
|
||||||
|
|
||||||
# vim: set ts=4 sts=4 sw=4:
|
|
72
.github/workflows/packages.yml
vendored
72
.github/workflows/packages.yml
vendored
|
@ -182,3 +182,75 @@ jobs:
|
||||||
with:
|
with:
|
||||||
name: macos-${{matrix.pyver}}-macos-${{matrix.macver}}_${{matrix.arch}}
|
name: macos-${{matrix.pyver}}-macos-${{matrix.macver}}_${{matrix.arch}}
|
||||||
path: ./wheelhouse/*.whl
|
path: ./wheelhouse/*.whl
|
||||||
|
|
||||||
|
|
||||||
|
build-windows:
|
||||||
|
runs-on: windows-latest
|
||||||
|
if: true
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
arch: [win_amd64]
|
||||||
|
pyver: [cp38, cp39, cp310, cp311, cp312, cp313]
|
||||||
|
package_name: [psycopg2, psycopg2-binary]
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
steps:
|
||||||
|
# there are some other libpq in PATH
|
||||||
|
- name: Drop spurious libpq in the path
|
||||||
|
run: rm -rf c:/tools/php C:/Strawberry/c/bin
|
||||||
|
|
||||||
|
- name: Checkout repo
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Start PostgreSQL service for test
|
||||||
|
run: |
|
||||||
|
$PgSvc = Get-Service "postgresql*"
|
||||||
|
Set-Service $PgSvc.Name -StartupType manual
|
||||||
|
$PgSvc.Start()
|
||||||
|
shell: powershell
|
||||||
|
|
||||||
|
- name: Export GitHub Actions cache environment variables
|
||||||
|
uses: actions/github-script@v7
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const path = require('path')
|
||||||
|
core.exportVariable('ACTIONS_CACHE_URL', process.env.ACTIONS_CACHE_URL || '');
|
||||||
|
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
|
||||||
|
core.addPath(path.join(process.env.VCPKG_INSTALLATION_ROOT, 'installed/x64-windows-release/lib'));
|
||||||
|
core.addPath(path.join(process.env.VCPKG_INSTALLATION_ROOT, 'installed/x64-windows-release/bin'));
|
||||||
|
|
||||||
|
- name: Create the binary package source tree
|
||||||
|
run: >-
|
||||||
|
sed -i 's/^setup(name="psycopg2"/setup(name="${{matrix.package_name}}"/'
|
||||||
|
setup.py
|
||||||
|
if: ${{ matrix.package_name != 'psycopg2' }}
|
||||||
|
|
||||||
|
- name: Build wheels
|
||||||
|
uses: pypa/cibuildwheel@v2.22.0
|
||||||
|
env:
|
||||||
|
VCPKG_BINARY_SOURCES: "clear;x-gha,readwrite" # cache vcpkg
|
||||||
|
CIBW_BUILD: ${{matrix.pyver}}-${{matrix.arch}}
|
||||||
|
CIBW_ARCHS_WINDOWS: AMD64 x86
|
||||||
|
CIBW_BEFORE_BUILD_WINDOWS: '.\scripts\build\wheel_win32_before_build.bat'
|
||||||
|
CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: >-
|
||||||
|
delvewheel repair -w {dest_dir}
|
||||||
|
--no-mangle "libiconv-2.dll;libwinpthread-1.dll" {wheel}
|
||||||
|
CIBW_TEST_COMMAND: >-
|
||||||
|
set PYTHONPATH={project} &&
|
||||||
|
python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')"
|
||||||
|
# Note: no fast test because we don't run Windows tests
|
||||||
|
CIBW_ENVIRONMENT_WINDOWS: >-
|
||||||
|
PSYCOPG2_TESTDB=postgres
|
||||||
|
PSYCOPG2_TESTDB_USER=postgres
|
||||||
|
PSYCOPG2_TESTDB_HOST=localhost
|
||||||
|
|
||||||
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: windows-${{ matrix.package_name }}-${{matrix.pyver}}-${{matrix.arch}}
|
||||||
|
path: ./wheelhouse/*.whl
|
||||||
|
|
1
.github/workflows/tests.yml
vendored
1
.github/workflows/tests.yml
vendored
|
@ -10,6 +10,7 @@ on:
|
||||||
jobs:
|
jobs:
|
||||||
linux:
|
linux:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
if: true
|
||||||
|
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
|
|
|
@ -73,13 +73,8 @@ production it is advised to use the package built from sources.
|
||||||
.. _install: https://www.psycopg.org/docs/install.html#install-from-source
|
.. _install: https://www.psycopg.org/docs/install.html#install-from-source
|
||||||
.. _faq: https://www.psycopg.org/docs/faq.html#faq-compile
|
.. _faq: https://www.psycopg.org/docs/faq.html#faq-compile
|
||||||
|
|
||||||
:Linux/OSX: |gh-actions|
|
:Build status: |gh-actions|
|
||||||
:Windows: |appveyor|
|
|
||||||
|
|
||||||
.. |gh-actions| image:: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml/badge.svg
|
.. |gh-actions| image:: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml/badge.svg
|
||||||
:target: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml
|
:target: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml
|
||||||
:alt: Linux and OSX build status
|
:alt: Build status
|
||||||
|
|
||||||
.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/github/psycopg/psycopg2?branch=master&svg=true
|
|
||||||
:target: https://ci.appveyor.com/project/psycopg/psycopg2/branch/master
|
|
||||||
:alt: Windows build status
|
|
||||||
|
|
|
@ -16,10 +16,9 @@ How to make a psycopg2 release
|
||||||
$ export VERSION=2.8.4
|
$ export VERSION=2.8.4
|
||||||
|
|
||||||
- Push psycopg2 to master or to the maint branch. Make sure tests on `GitHub
|
- Push psycopg2 to master or to the maint branch. Make sure tests on `GitHub
|
||||||
Actions`__ and AppVeyor__ pass.
|
Actions`__.
|
||||||
|
|
||||||
.. __: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml
|
.. __: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml
|
||||||
.. __: https://ci.appveyor.com/project/psycopg/psycopg2
|
|
||||||
|
|
||||||
- Create a signed tag with the content of the relevant NEWS bit and push it.
|
- Create a signed tag with the content of the relevant NEWS bit and push it.
|
||||||
E.g.::
|
E.g.::
|
||||||
|
@ -41,19 +40,10 @@ How to make a psycopg2 release
|
||||||
|
|
||||||
- On GitHub Actions run manually a `package build workflow`__.
|
- On GitHub Actions run manually a `package build workflow`__.
|
||||||
|
|
||||||
- On Appveyor change the `build settings`__ and replace the custom
|
|
||||||
configuration file name from ``.appveyor/tests.yml`` to
|
|
||||||
``.appveyor/packages.yml`` (yeah, that sucks a bit. Remember to put it
|
|
||||||
back to testing).
|
|
||||||
|
|
||||||
.. __: https://github.com/psycopg/psycopg2/actions/workflows/packages.yml
|
.. __: https://github.com/psycopg/psycopg2/actions/workflows/packages.yml
|
||||||
.. __: https://ci.appveyor.com/project/psycopg/psycopg2/settings
|
|
||||||
|
|
||||||
- When the workflows have finished download the packages from the job
|
- When the workflows have finished download the packages from the job
|
||||||
artifacts. For Appveyor you can use the ``download_packages_appveyor.py``
|
artifacts.
|
||||||
scripts from the ``scripts/build`` directory. They will be saved in a
|
|
||||||
``wheelhouse/psycopg2-${VERSION}`` directory. For Github just download it
|
|
||||||
from the web interface (it's a single file).
|
|
||||||
|
|
||||||
- Only for stable packages: upload the signed packages on PyPI::
|
- Only for stable packages: upload the signed packages on PyPI::
|
||||||
|
|
||||||
|
|
|
@ -27,6 +27,7 @@
|
||||||
#ifndef PSYCOPG_H
|
#ifndef PSYCOPG_H
|
||||||
#define PSYCOPG_H 1
|
#define PSYCOPG_H 1
|
||||||
|
|
||||||
|
#include <pg_config.h>
|
||||||
#if PG_VERSION_NUM < 90100
|
#if PG_VERSION_NUM < 90100
|
||||||
#error "Psycopg requires PostgreSQL client library (libpq) >= 9.1"
|
#error "Psycopg requires PostgreSQL client library (libpq) >= 9.1"
|
||||||
#endif
|
#endif
|
||||||
|
|
|
@ -1,847 +0,0 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
Build steps for the windows binary packages.
|
|
||||||
|
|
||||||
The script is designed to be called by appveyor. Subcommands map the steps in
|
|
||||||
'appveyor.yml'.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
import re
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import json
|
|
||||||
import shutil
|
|
||||||
import logging
|
|
||||||
import subprocess as sp
|
|
||||||
from glob import glob
|
|
||||||
from pathlib import Path
|
|
||||||
from zipfile import ZipFile
|
|
||||||
from argparse import ArgumentParser
|
|
||||||
from tempfile import NamedTemporaryFile
|
|
||||||
from urllib.request import urlopen
|
|
||||||
|
|
||||||
opt = None
|
|
||||||
STEP_PREFIX = 'step_'
|
|
||||||
|
|
||||||
logger = logging.getLogger()
|
|
||||||
logging.basicConfig(
|
|
||||||
level=logging.INFO, format='%(asctime)s %(levelname)s %(message)s'
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
global opt
|
|
||||||
opt = parse_cmdline()
|
|
||||||
logger.setLevel(opt.loglevel)
|
|
||||||
|
|
||||||
cmd = globals()[STEP_PREFIX + opt.step]
|
|
||||||
cmd()
|
|
||||||
|
|
||||||
|
|
||||||
def setup_build_env():
|
|
||||||
"""
|
|
||||||
Set the environment variables according to the build environment
|
|
||||||
"""
|
|
||||||
setenv('VS_VER', opt.vs_ver)
|
|
||||||
|
|
||||||
path = [
|
|
||||||
str(opt.py_dir),
|
|
||||||
str(opt.py_dir / 'Scripts'),
|
|
||||||
r'C:\Strawberry\Perl\bin',
|
|
||||||
r'C:\Program Files\Git\mingw64\bin',
|
|
||||||
str(opt.ssl_build_dir / 'bin'),
|
|
||||||
os.environ['PATH'],
|
|
||||||
]
|
|
||||||
setenv('PATH', os.pathsep.join(path))
|
|
||||||
|
|
||||||
logger.info("Configuring compiler")
|
|
||||||
bat_call([opt.vc_dir / "vcvarsall.bat", 'x86' if opt.arch_32 else 'amd64'])
|
|
||||||
|
|
||||||
|
|
||||||
def python_info():
|
|
||||||
logger.info("Python Information")
|
|
||||||
run_python(['--version'], stderr=sp.STDOUT)
|
|
||||||
run_python(
|
|
||||||
['-c', "import sys; print('64bit: %s' % (sys.maxsize > 2**32))"]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def step_install():
|
|
||||||
python_info()
|
|
||||||
configure_sdk()
|
|
||||||
configure_postgres()
|
|
||||||
install_python_build_tools()
|
|
||||||
|
|
||||||
|
|
||||||
def install_python_build_tools():
|
|
||||||
"""
|
|
||||||
Install or upgrade pip and build tools.
|
|
||||||
"""
|
|
||||||
run_python("-m pip install --upgrade pip setuptools wheel".split())
|
|
||||||
|
|
||||||
|
|
||||||
def configure_sdk():
|
|
||||||
# The program rc.exe on 64bit with some versions look in the wrong path
|
|
||||||
# location when building postgresql. This cheats by copying the x64 bit
|
|
||||||
# files to that location.
|
|
||||||
if opt.arch_64:
|
|
||||||
for fn in glob(
|
|
||||||
r'C:\Program Files\Microsoft SDKs\Windows\v7.0\Bin\x64\rc*'
|
|
||||||
):
|
|
||||||
copy_file(
|
|
||||||
fn, r"C:\Program Files (x86)\Microsoft SDKs\Windows\v7.0A\Bin"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def configure_postgres():
|
|
||||||
"""
|
|
||||||
Set up PostgreSQL config before the service starts.
|
|
||||||
"""
|
|
||||||
logger.info("Configuring Postgres")
|
|
||||||
opt.pg_data_dir.mkdir(parents=True, exist_ok=True)
|
|
||||||
with (opt.pg_data_dir / 'postgresql.conf').open('a') as f:
|
|
||||||
# allow > 1 prepared transactions for test cases
|
|
||||||
print("max_prepared_transactions = 10", file=f)
|
|
||||||
print("ssl = on", file=f)
|
|
||||||
|
|
||||||
# Create openssl certificate to allow ssl connection
|
|
||||||
cwd = os.getcwd()
|
|
||||||
os.chdir(opt.pg_data_dir)
|
|
||||||
run_openssl(
|
|
||||||
'req -new -x509 -days 365 -nodes -text '
|
|
||||||
'-out server.crt -keyout server.key -subj /CN=initd.org'.split()
|
|
||||||
)
|
|
||||||
run_openssl(
|
|
||||||
'req -new -nodes -text -out root.csr -keyout root.key '
|
|
||||||
'-subj /CN=initd.org'.split()
|
|
||||||
)
|
|
||||||
|
|
||||||
run_openssl(
|
|
||||||
'x509 -req -in root.csr -text -days 3650 -extensions v3_ca '
|
|
||||||
'-signkey root.key -out root.crt'.split()
|
|
||||||
)
|
|
||||||
|
|
||||||
run_openssl(
|
|
||||||
'req -new -nodes -text -out server.csr -keyout server.key '
|
|
||||||
'-subj /CN=initd.org'.split()
|
|
||||||
)
|
|
||||||
|
|
||||||
run_openssl(
|
|
||||||
'x509 -req -in server.csr -text -days 365 -CA root.crt '
|
|
||||||
'-CAkey root.key -CAcreateserial -out server.crt'.split()
|
|
||||||
)
|
|
||||||
|
|
||||||
os.chdir(cwd)
|
|
||||||
|
|
||||||
|
|
||||||
def run_openssl(args):
|
|
||||||
"""Run the appveyor-installed openssl with some args."""
|
|
||||||
# https://www.appveyor.com/docs/windows-images-software/
|
|
||||||
openssl = Path(r"C:\OpenSSL-v111-Win64") / 'bin' / 'openssl'
|
|
||||||
return run_command([openssl] + args)
|
|
||||||
|
|
||||||
|
|
||||||
def step_build_script():
|
|
||||||
setup_build_env()
|
|
||||||
build_openssl()
|
|
||||||
build_libpq()
|
|
||||||
build_psycopg()
|
|
||||||
|
|
||||||
if opt.is_wheel:
|
|
||||||
build_binary_packages()
|
|
||||||
|
|
||||||
|
|
||||||
def build_openssl():
|
|
||||||
top = opt.ssl_build_dir
|
|
||||||
if (top / 'lib' / 'libssl.lib').exists():
|
|
||||||
return
|
|
||||||
|
|
||||||
logger.info("Building OpenSSL")
|
|
||||||
|
|
||||||
# Setup directories for building OpenSSL libraries
|
|
||||||
ensure_dir(top / 'include' / 'openssl')
|
|
||||||
ensure_dir(top / 'lib')
|
|
||||||
|
|
||||||
# Setup OpenSSL Environment Variables based on processor architecture
|
|
||||||
if opt.arch_32:
|
|
||||||
target = 'VC-WIN32'
|
|
||||||
setenv('VCVARS_PLATFORM', 'x86')
|
|
||||||
else:
|
|
||||||
target = 'VC-WIN64A'
|
|
||||||
setenv('VCVARS_PLATFORM', 'amd64')
|
|
||||||
setenv('CPU', 'AMD64')
|
|
||||||
|
|
||||||
ver = os.environ['OPENSSL_VERSION']
|
|
||||||
|
|
||||||
# Download OpenSSL source
|
|
||||||
zipname = f'OpenSSL_{ver}.zip'
|
|
||||||
zipfile = opt.cache_dir / zipname
|
|
||||||
if not zipfile.exists():
|
|
||||||
download(
|
|
||||||
f"https://github.com/openssl/openssl/archive/{zipname}", zipfile
|
|
||||||
)
|
|
||||||
|
|
||||||
with ZipFile(zipfile) as z:
|
|
||||||
z.extractall(path=opt.build_dir)
|
|
||||||
|
|
||||||
sslbuild = opt.build_dir / f"openssl-OpenSSL_{ver}"
|
|
||||||
os.chdir(sslbuild)
|
|
||||||
run_command(
|
|
||||||
['perl', 'Configure', target, 'no-asm']
|
|
||||||
+ ['no-shared', 'no-zlib', f'--prefix={top}', f'--openssldir={top}']
|
|
||||||
)
|
|
||||||
|
|
||||||
run_command("nmake build_libs install_sw".split())
|
|
||||||
|
|
||||||
assert (top / 'lib' / 'libssl.lib').exists()
|
|
||||||
|
|
||||||
os.chdir(opt.clone_dir)
|
|
||||||
shutil.rmtree(sslbuild)
|
|
||||||
|
|
||||||
|
|
||||||
def build_libpq():
|
|
||||||
top = opt.pg_build_dir
|
|
||||||
if (top / 'lib' / 'libpq.lib').exists():
|
|
||||||
return
|
|
||||||
|
|
||||||
logger.info("Building libpq")
|
|
||||||
|
|
||||||
# Setup directories for building PostgreSQL librarires
|
|
||||||
ensure_dir(top / 'include')
|
|
||||||
ensure_dir(top / 'lib')
|
|
||||||
ensure_dir(top / 'bin')
|
|
||||||
|
|
||||||
ver = os.environ['POSTGRES_VERSION']
|
|
||||||
|
|
||||||
# Download PostgreSQL source
|
|
||||||
zipname = f'postgres-REL_{ver}.zip'
|
|
||||||
zipfile = opt.cache_dir / zipname
|
|
||||||
if not zipfile.exists():
|
|
||||||
download(
|
|
||||||
f"https://github.com/postgres/postgres/archive/REL_{ver}.zip",
|
|
||||||
zipfile,
|
|
||||||
)
|
|
||||||
|
|
||||||
with ZipFile(zipfile) as z:
|
|
||||||
z.extractall(path=opt.build_dir)
|
|
||||||
|
|
||||||
pgbuild = opt.build_dir / f"postgres-REL_{ver}"
|
|
||||||
os.chdir(pgbuild)
|
|
||||||
|
|
||||||
# Setup build config file (config.pl)
|
|
||||||
os.chdir("src/tools/msvc")
|
|
||||||
with open("config.pl", 'w') as f:
|
|
||||||
print(
|
|
||||||
"""\
|
|
||||||
$config->{ldap} = 0;
|
|
||||||
$config->{openssl} = "%s";
|
|
||||||
|
|
||||||
1;
|
|
||||||
"""
|
|
||||||
% str(opt.ssl_build_dir).replace('\\', '\\\\'),
|
|
||||||
file=f,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Hack the Mkvcbuild.pm file so we build the lib version of libpq
|
|
||||||
file_replace('Mkvcbuild.pm', "'libpq', 'dll'", "'libpq', 'lib'")
|
|
||||||
|
|
||||||
# Build libpgport, libpgcommon, libpq
|
|
||||||
run_command([which("build"), "libpgport"])
|
|
||||||
run_command([which("build"), "libpgcommon"])
|
|
||||||
run_command([which("build"), "libpq"])
|
|
||||||
|
|
||||||
# Install includes
|
|
||||||
with (pgbuild / "src/backend/parser/gram.h").open("w") as f:
|
|
||||||
print("", file=f)
|
|
||||||
|
|
||||||
# Copy over built libraries
|
|
||||||
file_replace("Install.pm", "qw(Install)", "qw(Install CopyIncludeFiles)")
|
|
||||||
run_command(
|
|
||||||
["perl", "-MInstall=CopyIncludeFiles", "-e"]
|
|
||||||
+ [f"chdir('../../..'); CopyIncludeFiles('{top}')"]
|
|
||||||
)
|
|
||||||
|
|
||||||
for lib in ('libpgport', 'libpgcommon', 'libpq'):
|
|
||||||
copy_file(pgbuild / f'Release/{lib}/{lib}.lib', top / 'lib')
|
|
||||||
|
|
||||||
# Prepare local include directory for building from
|
|
||||||
for dir in ('win32', 'win32_msvc'):
|
|
||||||
merge_dir(pgbuild / f"src/include/port/{dir}", pgbuild / "src/include")
|
|
||||||
|
|
||||||
# Build pg_config in place
|
|
||||||
os.chdir(pgbuild / 'src/bin/pg_config')
|
|
||||||
run_command(
|
|
||||||
['cl', 'pg_config.c', '/MT', '/nologo', fr'/I{pgbuild}\src\include']
|
|
||||||
+ ['/link', fr'/LIBPATH:{top}\lib']
|
|
||||||
+ ['libpgcommon.lib', 'libpgport.lib', 'advapi32.lib']
|
|
||||||
+ ['/NODEFAULTLIB:libcmt.lib']
|
|
||||||
+ [fr'/OUT:{top}\bin\pg_config.exe']
|
|
||||||
)
|
|
||||||
|
|
||||||
assert (top / 'lib' / 'libpq.lib').exists()
|
|
||||||
assert (top / 'bin' / 'pg_config.exe').exists()
|
|
||||||
|
|
||||||
os.chdir(opt.clone_dir)
|
|
||||||
shutil.rmtree(pgbuild)
|
|
||||||
|
|
||||||
|
|
||||||
def build_psycopg():
|
|
||||||
os.chdir(opt.package_dir)
|
|
||||||
patch_package_name()
|
|
||||||
add_pg_config_path()
|
|
||||||
run_python(
|
|
||||||
["setup.py", "build_ext", "--have-ssl"]
|
|
||||||
+ ["-l", "libpgcommon libpgport"]
|
|
||||||
+ ["-L", opt.ssl_build_dir / 'lib']
|
|
||||||
+ ['-I', opt.ssl_build_dir / 'include']
|
|
||||||
)
|
|
||||||
run_python(["setup.py", "build_py"])
|
|
||||||
|
|
||||||
|
|
||||||
def patch_package_name():
|
|
||||||
"""Change the psycopg2 package name in the setup.py if required."""
|
|
||||||
if opt.package_name == 'psycopg2':
|
|
||||||
return
|
|
||||||
|
|
||||||
logger.info("changing package name to %s", opt.package_name)
|
|
||||||
|
|
||||||
with (opt.package_dir / 'setup.py').open() as f:
|
|
||||||
data = f.read()
|
|
||||||
|
|
||||||
# Replace the name of the package with what desired
|
|
||||||
rex = re.compile(r"""name=["']psycopg2["']""")
|
|
||||||
assert len(rex.findall(data)) == 1, rex.findall(data)
|
|
||||||
data = rex.sub(f'name="{opt.package_name}"', data)
|
|
||||||
|
|
||||||
with (opt.package_dir / 'setup.py').open('w') as f:
|
|
||||||
f.write(data)
|
|
||||||
|
|
||||||
|
|
||||||
def build_binary_packages():
|
|
||||||
"""Create wheel binary packages."""
|
|
||||||
os.chdir(opt.package_dir)
|
|
||||||
|
|
||||||
add_pg_config_path()
|
|
||||||
|
|
||||||
# Build .whl packages
|
|
||||||
run_python(['setup.py', 'bdist_wheel', "-d", opt.dist_dir])
|
|
||||||
|
|
||||||
|
|
||||||
def step_after_build():
|
|
||||||
if not opt.is_wheel:
|
|
||||||
install_built_package()
|
|
||||||
else:
|
|
||||||
install_binary_package()
|
|
||||||
|
|
||||||
|
|
||||||
def install_built_package():
|
|
||||||
"""Install the package just built by setup build."""
|
|
||||||
os.chdir(opt.package_dir)
|
|
||||||
|
|
||||||
# Install the psycopg just built
|
|
||||||
add_pg_config_path()
|
|
||||||
run_python(["setup.py", "install"])
|
|
||||||
shutil.rmtree("psycopg2.egg-info")
|
|
||||||
|
|
||||||
|
|
||||||
def install_binary_package():
|
|
||||||
"""Install the package from a packaged wheel."""
|
|
||||||
run_python(
|
|
||||||
['-m', 'pip', 'install', '--no-index', '-f', opt.dist_dir]
|
|
||||||
+ [opt.package_name]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def add_pg_config_path():
|
|
||||||
"""Allow finding in the path the pg_config just built."""
|
|
||||||
pg_path = str(opt.pg_build_dir / 'bin')
|
|
||||||
if pg_path not in os.environ['PATH'].split(os.pathsep):
|
|
||||||
setenv('PATH', os.pathsep.join([pg_path, os.environ['PATH']]))
|
|
||||||
|
|
||||||
|
|
||||||
def step_before_test():
|
|
||||||
print_psycopg2_version()
|
|
||||||
|
|
||||||
# Create and setup PostgreSQL database for the tests
|
|
||||||
run_command([opt.pg_bin_dir / 'createdb', os.environ['PSYCOPG2_TESTDB']])
|
|
||||||
run_command(
|
|
||||||
[opt.pg_bin_dir / 'psql', '-d', os.environ['PSYCOPG2_TESTDB']]
|
|
||||||
+ ['-c', "CREATE EXTENSION hstore"]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def print_psycopg2_version():
|
|
||||||
"""Print psycopg2 and libpq versions installed."""
|
|
||||||
for expr in (
|
|
||||||
'psycopg2.__version__',
|
|
||||||
'psycopg2.__libpq_version__',
|
|
||||||
'psycopg2.extensions.libpq_version()',
|
|
||||||
):
|
|
||||||
out = out_python(['-c', f"import psycopg2; print({expr})"])
|
|
||||||
logger.info("built %s: %s", expr, out.decode('ascii'))
|
|
||||||
|
|
||||||
|
|
||||||
def step_test_script():
|
|
||||||
check_libpq_version()
|
|
||||||
run_test_suite()
|
|
||||||
|
|
||||||
|
|
||||||
def check_libpq_version():
|
|
||||||
"""
|
|
||||||
Fail if the package installed is not using the expected libpq version.
|
|
||||||
"""
|
|
||||||
want_ver = tuple(map(int, os.environ['POSTGRES_VERSION'].split('_')))
|
|
||||||
want_ver = "%d%04d" % want_ver
|
|
||||||
got_ver = (
|
|
||||||
out_python(
|
|
||||||
['-c']
|
|
||||||
+ ["import psycopg2; print(psycopg2.extensions.libpq_version())"]
|
|
||||||
)
|
|
||||||
.decode('ascii')
|
|
||||||
.rstrip()
|
|
||||||
)
|
|
||||||
assert want_ver == got_ver, f"libpq version mismatch: {want_ver!r} != {got_ver!r}"
|
|
||||||
|
|
||||||
|
|
||||||
def run_test_suite():
|
|
||||||
# Remove this var, which would make badly a configured OpenSSL 1.1 work
|
|
||||||
os.environ.pop('OPENSSL_CONF', None)
|
|
||||||
|
|
||||||
# Run the unit test
|
|
||||||
args = [
|
|
||||||
'-c',
|
|
||||||
"import tests; tests.unittest.main(defaultTest='tests.test_suite')",
|
|
||||||
]
|
|
||||||
|
|
||||||
if opt.is_wheel:
|
|
||||||
os.environ['PSYCOPG2_TEST_FAST'] = '1'
|
|
||||||
else:
|
|
||||||
args.append('--verbose')
|
|
||||||
|
|
||||||
os.chdir(opt.package_dir)
|
|
||||||
run_python(args)
|
|
||||||
|
|
||||||
|
|
||||||
def step_on_success():
|
|
||||||
print_sha1_hashes()
|
|
||||||
if setup_ssh():
|
|
||||||
upload_packages()
|
|
||||||
|
|
||||||
|
|
||||||
def print_sha1_hashes():
|
|
||||||
"""
|
|
||||||
Print the packages sha1 so their integrity can be checked upon signing.
|
|
||||||
"""
|
|
||||||
logger.info("artifacts SHA1 hashes:")
|
|
||||||
|
|
||||||
os.chdir(opt.package_dir / 'dist')
|
|
||||||
run_command([which('sha1sum'), '-b', 'psycopg2-*/*'])
|
|
||||||
|
|
||||||
|
|
||||||
def setup_ssh():
|
|
||||||
"""
|
|
||||||
Configure ssh to upload built packages where they can be retrieved.
|
|
||||||
|
|
||||||
Return False if can't configure and upload shoould be skipped.
|
|
||||||
"""
|
|
||||||
# If we are not on the psycopg AppVeyor account, the environment variable
|
|
||||||
# REMOTE_KEY will not be decrypted. In that case skip uploading.
|
|
||||||
if os.environ['APPVEYOR_ACCOUNT_NAME'] != 'psycopg':
|
|
||||||
logger.warn("skipping artifact upload: you are not psycopg")
|
|
||||||
return False
|
|
||||||
|
|
||||||
pkey = os.environ.get('REMOTE_KEY', None)
|
|
||||||
if not pkey:
|
|
||||||
logger.warn("skipping artifact upload: no remote key")
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Write SSH Private Key file from environment variable
|
|
||||||
pkey = pkey.replace(' ', '\n')
|
|
||||||
with (opt.clone_dir / 'data/id_rsa-psycopg-upload').open('w') as f:
|
|
||||||
f.write(
|
|
||||||
f"""\
|
|
||||||
-----BEGIN RSA PRIVATE KEY-----
|
|
||||||
{pkey}
|
|
||||||
-----END RSA PRIVATE KEY-----
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
|
|
||||||
# Make a directory to please MinGW's version of ssh
|
|
||||||
ensure_dir(r"C:\MinGW\msys\1.0\home\appveyor\.ssh")
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def upload_packages():
|
|
||||||
# Upload built artifacts
|
|
||||||
logger.info("uploading artifacts")
|
|
||||||
|
|
||||||
os.chdir(opt.clone_dir)
|
|
||||||
run_command(
|
|
||||||
[r"C:\MinGW\msys\1.0\bin\rsync", "-avr"]
|
|
||||||
+ ["-e", r"C:\MinGW\msys\1.0\bin\ssh -F data/ssh_config"]
|
|
||||||
+ ["psycopg2/dist/", "upload:"]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def download(url, fn):
|
|
||||||
"""Download a file locally"""
|
|
||||||
logger.info("downloading %s", url)
|
|
||||||
with open(fn, 'wb') as fo, urlopen(url) as fi:
|
|
||||||
while 1:
|
|
||||||
data = fi.read(8192)
|
|
||||||
if not data:
|
|
||||||
break
|
|
||||||
fo.write(data)
|
|
||||||
|
|
||||||
logger.info("file downloaded: %s", fn)
|
|
||||||
|
|
||||||
|
|
||||||
def file_replace(fn, s1, s2):
|
|
||||||
"""
|
|
||||||
Replace all the occurrences of the string s1 into s2 in the file fn.
|
|
||||||
"""
|
|
||||||
assert os.path.exists(fn)
|
|
||||||
with open(fn, 'r+') as f:
|
|
||||||
data = f.read()
|
|
||||||
f.seek(0)
|
|
||||||
f.write(data.replace(s1, s2))
|
|
||||||
f.truncate()
|
|
||||||
|
|
||||||
|
|
||||||
def merge_dir(src, tgt):
|
|
||||||
"""
|
|
||||||
Merge the content of the directory src into the directory tgt
|
|
||||||
|
|
||||||
Reproduce the semantic of "XCOPY /Y /S src/* tgt"
|
|
||||||
"""
|
|
||||||
src = str(src)
|
|
||||||
for dp, _dns, fns in os.walk(src):
|
|
||||||
logger.debug("dirpath %s", dp)
|
|
||||||
if not fns:
|
|
||||||
continue
|
|
||||||
assert dp.startswith(src)
|
|
||||||
subdir = dp[len(src) :].lstrip(os.sep)
|
|
||||||
tgtdir = ensure_dir(os.path.join(tgt, subdir))
|
|
||||||
for fn in fns:
|
|
||||||
copy_file(os.path.join(dp, fn), tgtdir)
|
|
||||||
|
|
||||||
|
|
||||||
def bat_call(cmdline):
    """
    Simulate 'CALL' from a batch file

    Execute CALL *cmdline* and export the changed environment to the current
    environment.

    nana-nana-nana-nana...

    """
    if not isinstance(cmdline, str):
        cmdline = map(str, cmdline)
        # Quote arguments containing spaces and join them into one line.
        cmdline = ' '.join(c if ' ' not in c else '"%s"' % c for c in cmdline)

    # Batch script: CALL the command, then dump the resulting environment
    # as JSON on stdout using the target Python interpreter.
    data = f"""\
CALL {cmdline}
{opt.py_exe} -c "import os, sys, json; \
json.dump(dict(os.environ), sys.stdout, indent=2)"
"""

    logger.debug("preparing file to batcall:\n\n%s", data)

    # NamedTemporaryFile is only used to obtain a free file name: the file is
    # deleted when the context exits and recreated below, because on Windows
    # an open NamedTemporaryFile cannot be reopened/executed by the shell.
    with NamedTemporaryFile(suffix='.bat') as tmp:
        fn = tmp.name

    with open(fn, "w") as f:
        f.write(data)

    try:
        out = out_command(fn)
        # be vewwy vewwy caweful to print the env var as it might contain
        # secwet things like your pwecious pwivate key.
        # logger.debug("output of command:\n\n%s", out.decode('utf8', 'replace'))

        # The output has some useless crap on stdout, because sure, and json
        # indented so the last { on column 1 is where we have to start parsing

        m = list(re.finditer(b'^{', out, re.MULTILINE))[-1]
        out = out[m.start() :]
        env = json.loads(out)
        # Export into this process every variable the batch call changed.
        for k, v in env.items():
            if os.environ.get(k) != v:
                setenv(k, v)
    finally:
        os.remove(fn)
|
|
||||||
|
|
||||||
def ensure_dir(dir):
    """Create the directory *dir* (with parents) if missing; return it as a Path."""
    path = Path(dir) if not isinstance(dir, Path) else dir

    if not path.is_dir():
        logger.info("creating directory %s", path)
        path.mkdir(parents=True)

    return path
|
|
||||||
|
|
||||||
def run_command(cmdline, **kwargs):
    """Run a command, raise on error."""
    if not isinstance(cmdline, str):
        cmdline = [str(arg) for arg in cmdline]
    logger.info("running command: %s", cmdline)
    sp.check_call(cmdline, **kwargs)
|
|
||||||
|
|
||||||
def out_command(cmdline, **kwargs):
    """Run a command, return its output (bytes), raise on error."""
    if not isinstance(cmdline, str):
        cmdline = [str(arg) for arg in cmdline]
    logger.info("running command: %s", cmdline)
    return sp.check_output(cmdline, **kwargs)
|
|
||||||
|
|
||||||
def run_python(args, **kwargs):
    """
    Run a script in the target Python.
    """
    cmdline = [opt.py_exe]
    cmdline.extend(args)
    return run_command(cmdline, **kwargs)
|
|
||||||
|
|
||||||
def out_python(args, **kwargs):
    """
    Return the output of a script run in the target Python.
    """
    cmdline = [opt.py_exe]
    cmdline.extend(args)
    return out_command(cmdline, **kwargs)
|
|
||||||
|
|
||||||
def copy_file(src, dst):
    """Copy the file *src* to *dst*, logging the operation."""
    logger.info("copying file %s -> %s", src, dst)
    shutil.copy(src, dst)
|
|
||||||
|
|
||||||
def setenv(k, v):
    """Set the environment variable *k* to *v* in this process, logging it."""
    logger.debug("setting %s=%s", k, v)
    os.environ[k] = v
|
|
||||||
|
|
||||||
def which(name):
    """Return the full path of the command *name* found on the PATH.

    The current directory is searched first; if *name* has no extension the
    usual Windows executable extensions are tried in turn.

    Raise Exception if the program cannot be found.
    """
    base, ext = os.path.splitext(name)
    exts = (ext,) if ext else ('.com', '.exe', '.bat', '.cmd')

    dirs = ['.'] + os.environ['PATH'].split(os.pathsep)
    for d in dirs:
        for e in exts:
            cand = os.path.join(d, base + e)
            if os.path.isfile(cand):
                return cand

    raise Exception(f"couldn't find program on path: {name}")
|
|
||||||
|
|
||||||
class Options:
    """
    An object exposing the script configuration from env vars and command line.
    """

    @property
    def py_ver(self):
        """The Python version to build as 2 digits string.

        For large values of 2, occasionally.
        """
        rv = os.environ['PY_VER']
        assert rv in ('37', '38', '39', '310', '311', '312'), rv
        return rv

    @property
    def py_arch(self):
        """The Python architecture to build, 32 or 64."""
        rv = os.environ['PY_ARCH']
        assert rv in ('32', '64'), rv
        return int(rv)

    @property
    def arch_32(self):
        """True if the Python architecture to build is 32 bits."""
        return self.py_arch == 32

    @property
    def arch_64(self):
        """True if the Python architecture to build is 64 bits."""
        return self.py_arch == 64

    @property
    def package_name(self):
        # 'psycopg2' or 'psycopg2-binary', from appveyor's CONFIGURATION matrix.
        return os.environ.get('CONFIGURATION', 'psycopg2')

    @property
    def package_version(self):
        """The psycopg2 version number to build (parsed out of setup.py)."""
        with (self.package_dir / 'setup.py').open() as f:
            data = f.read()

        m = re.search(
            r"""^PSYCOPG_VERSION\s*=\s*['"](.*)['"]""", data, re.MULTILINE
        )
        return m.group(1)

    @property
    def is_wheel(self):
        """Are we building the wheel packages or just the extension?"""
        workflow = os.environ["WORKFLOW"]
        return workflow == "packages"

    @property
    def py_dir(self):
        """
        The path to the target python binary to execute.
        """
        dirname = ''.join(
            [r"C:\Python", self.py_ver, '-x64' if self.arch_64 else '']
        )
        return Path(dirname)

    @property
    def py_exe(self):
        """
        The full path of the target python executable.
        """
        return self.py_dir / 'python.exe'

    @property
    def vc_dir(self):
        """
        The path of the Visual C compiler.
        """
        # VS 2019 moved the batch scripts under Auxiliary\Build.
        if self.vs_ver == '16.0':
            path = Path(
                r"C:\Program Files (x86)\Microsoft Visual Studio\2019"
                r"\Community\VC\Auxiliary\Build"
            )
        else:
            path = Path(
                r"C:\Program Files (x86)\Microsoft Visual Studio %s\VC"
                % self.vs_ver
            )
        return path

    @property
    def vs_ver(self):
        # The Visual Studio version matching the target Python version.
        # https://wiki.python.org/moin/WindowsCompilers
        # https://www.appveyor.com/docs/windows-images-software/#python
        # Py 3.6--3.8 = VS Ver. 14.0 (VS 2015)
        # Py 3.9 = VS Ver. 16.0 (VS 2019)
        vsvers = {
            '37': '14.0',
            '38': '14.0',
            '39': '16.0',
            '310': '16.0',
            '311': '16.0',
            '312': '16.0',
        }
        return vsvers[self.py_ver]

    @property
    def clone_dir(self):
        """The directory where the repository is cloned."""
        return Path(r"C:\Project")

    @property
    def appveyor_pg_dir(self):
        """The directory of the postgres service made available by Appveyor."""
        return Path(os.environ['POSTGRES_DIR'])

    @property
    def pg_data_dir(self):
        """The data dir of the appveyor postgres service."""
        return self.appveyor_pg_dir / 'data'

    @property
    def pg_bin_dir(self):
        """The bin dir of the appveyor postgres service."""
        return self.appveyor_pg_dir / 'bin'

    @property
    def pg_build_dir(self):
        """The directory where to build the postgres libraries for psycopg."""
        return self.cache_arch_dir / 'postgresql'

    @property
    def ssl_build_dir(self):
        """The directory where to build the openssl libraries for psycopg."""
        return self.cache_arch_dir / 'openssl'

    @property
    def cache_arch_dir(self):
        # Per-arch / per-compiler subdirectory of the build cache,
        # created on first access.
        rv = self.cache_dir / str(self.py_arch) / self.vs_ver
        return ensure_dir(rv)

    @property
    def cache_dir(self):
        # Root of the appveyor build cache.
        return Path(r"C:\Others")

    @property
    def build_dir(self):
        # Scratch directory for builds, inside the arch-specific cache dir;
        # created on first access.
        rv = self.cache_arch_dir / 'Builds'
        return ensure_dir(rv)

    @property
    def package_dir(self):
        # The source tree of the package being built (the repository clone).
        return self.clone_dir

    @property
    def dist_dir(self):
        """The directory where to build packages to distribute."""
        return (
            self.package_dir / 'dist' / (f'psycopg2-{self.package_version}')
        )
|
|
||||||
|
|
||||||
def parse_cmdline():
    """Parse the command line and return the populated Options namespace."""
    parser = ArgumentParser(description=__doc__)

    g = parser.add_mutually_exclusive_group()
    g.add_argument(
        '-q',
        '--quiet',
        help="Talk less",
        dest='loglevel',
        action='store_const',
        const=logging.WARN,
        default=logging.INFO,
    )
    g.add_argument(
        '-v',
        '--verbose',
        help="Talk more",
        dest='loglevel',
        action='store_const',
        const=logging.DEBUG,
        default=logging.INFO,
    )

    # Every callable global named STEP_PREFIX + <name> is an available step.
    steps = [
        name[len(STEP_PREFIX):]
        for name in globals()
        if name.startswith(STEP_PREFIX) and callable(globals()[name])
    ]

    parser.add_argument(
        'step', choices=steps, help="the appveyor step to execute"
    )

    return parser.parse_args(namespace=Options())
|
|
||||||
|
|
||||||
# Script entry point: propagate main()'s return value as the exit status.
if __name__ == '__main__':
    sys.exit(main())
|
|
|
@ -1,117 +0,0 @@
|
||||||
#!/usr/bin/env python
|
|
||||||
"""Download packages from appveyor artifacts
|
|
||||||
"""
|
|
||||||
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
import logging
|
|
||||||
import datetime as dt
|
|
||||||
from pathlib import Path
|
|
||||||
from argparse import ArgumentParser
|
|
||||||
|
|
||||||
import requests
|
|
||||||
|
|
||||||
logger = logging.getLogger()
|
|
||||||
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")
|
|
||||||
|
|
||||||
API_URL = "https://ci.appveyor.com/api"
|
|
||||||
REPOS = "psycopg/psycopg2"
|
|
||||||
WORKFLOW_NAME = "Build packages"
|
|
||||||
|
|
||||||
|
|
||||||
class ScriptError(Exception):
    """Controlled exception raised by the script.

    Caught at top level to report the message and exit with nonzero status.
    """
|
|
||||||
|
|
||||||
def main():
    """Download the artifacts of an appveyor build into ./wheelhouse.

    Requires the APPVEYOR_TOKEN env var for API authentication; raises
    ScriptError if the token is missing or any job of the build failed.
    """
    opt = parse_cmdline()
    try:
        token = os.environ["APPVEYOR_TOKEN"]
    except KeyError:
        raise ScriptError("please set a APPVEYOR_TOKEN to download artifacts")

    # Authenticated session reused for every API call.
    s = requests.Session()
    s.headers["Content-Type"] = "application/json"
    s.headers["Authorization"] = f"Bearer {token}"

    if opt.build:
        logger.info("fetching build %s", opt.build)
        resp = s.get(f"{API_URL}/projects/{REPOS}/build/{opt.build}")
    else:
        logger.info("fetching last run")
        resp = s.get(f"{API_URL}/projects/{REPOS}")

    resp.raise_for_status()
    data = resp.json()

    # Strip the fractional seconds from the timestamp — presumably because
    # fromisoformat() can't parse appveyor's precision (verify on upgrade).
    updated_at = dt.datetime.fromisoformat(
        re.sub(r"\.\d+", "", data["build"]["finished"])
    )
    now = dt.datetime.now(dt.timezone.utc)
    age = now - updated_at
    logger.info(
        f"found build {data['build']['version']} updated {pretty_interval(age)} ago"
    )
    if age > dt.timedelta(hours=6):
        logger.warning("maybe it's a bit old?")

    jobs = data["build"]["jobs"]
    for job in jobs:
        if job["status"] != "success":
            raise ScriptError(f"status for job {job['jobId']} is {job['status']}")

        logger.info(f"fetching artifacts info for {job['name']}")
        resp = s.get(f"{API_URL}/buildjobs/{job['jobId']}/artifacts/")
        resp.raise_for_status()
        afs = resp.json()
        for af in afs:
            fn = af["fileName"]
            # Drop the leading "dist/" so files land flat in the wheelhouse.
            if fn.startswith("dist/"):
                fn = fn.split("/", 1)[1]
            dest = Path("wheelhouse") / fn
            logger.info(f"downloading {dest}")
            resp = s.get(
                f"{API_URL}/buildjobs/{job['jobId']}/artifacts/{af['fileName']}"
            )
            resp.raise_for_status()
            if not dest.parent.exists():
                dest.parent.mkdir(parents=True)

            with dest.open("wb") as f:
                f.write(resp.content)

    logger.info("now you can run: 'twine upload -s wheelhouse/*'")
|
|
||||||
|
|
||||||
def parse_cmdline():
    """Parse the command line; return the options namespace."""
    parser = ArgumentParser(description=__doc__)
    parser.add_argument("--build", help="build version to download [default: latest]")
    return parser.parse_args()
|
|
||||||
|
|
||||||
|
|
||||||
def pretty_interval(td):
    """Render the timedelta *td* as a human-readable age string.

    Seconds are dropped; only days, hours and minutes are mentioned.
    Fix over the previous version: use singular forms for 1 ("1 day",
    not "1 days").
    """
    def _unit(n, name):
        # "1 day" but "2 days"
        n = int(n)
        return f"{n} {name}" if n == 1 else f"{n} {name}s"

    secs = td.total_seconds()
    mins, secs = divmod(secs, 60)
    hours, mins = divmod(mins, 60)
    days, hours = divmod(hours, 24)
    if days:
        return f"{_unit(days, 'day')}, {_unit(hours, 'hour')}, {_unit(mins, 'minute')}"
    elif hours:
        return f"{_unit(hours, 'hour')}, {_unit(mins, 'minute')}"
    else:
        return _unit(mins, 'minute')
|
||||||
|
|
||||||
|
|
||||||
# Script entry point: controlled errors and Ctrl-C exit with status 1
# instead of a traceback.
if __name__ == "__main__":
    try:
        sys.exit(main())

    except ScriptError as e:
        logger.error("%s", e)
        sys.exit(1)

    except KeyboardInterrupt:
        logger.info("user interrupt")
        sys.exit(1)
|
|
|
@ -0,0 +1,101 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
"""
|
||||||
|
We use vcpkg in github actions to build psycopg-binary.
|
||||||
|
|
||||||
|
This is a stub to work as `pg_config --libdir` or `pg_config --includedir` to
|
||||||
|
make it work with vcpkg.
|
||||||
|
|
||||||
|
You will need install `vcpkg`, set `VCPKG_ROOT` env, and run `vcpkg install
|
||||||
|
libpq:x64-windows-release` before using this script.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import platform
|
||||||
|
from pathlib import Path
|
||||||
|
from argparse import ArgumentParser, Namespace, RawDescriptionHelpFormatter
|
||||||
|
|
||||||
|
|
||||||
|
class ScriptError(Exception):
    """Controlled exception raised by the script.

    Converted by main() into an error message on stderr and exit status 1.
    """
|
|
||||||
|
|
||||||
|
def _main() -> None:
    """Answer the pg_config query given on the command line using vcpkg paths.

    Raise ScriptError on unsupported platforms, missing configuration, or
    missing libpq files.
    """
    # This stub only supports the x64-windows vcpkg triplet.
    if sys.platform != "win32" or platform.machine() != "AMD64":
        raise ScriptError("this script should only be used in x64-windows")

    vcpkg_root = os.environ.get(
        "VCPKG_ROOT", os.environ.get("VCPKG_INSTALLATION_ROOT", "")
    )
    if not vcpkg_root:
        raise ScriptError("VCPKG_ROOT/VCPKG_INSTALLATION_ROOT env var not specified")
    platform_root = (Path(vcpkg_root) / "installed/x64-windows-release").resolve()

    args = parse_cmdline()

    if args.libdir:
        libpq = platform_root / "lib/libpq.lib"
        if not libpq.exists():
            raise ScriptError(f"libpq library not found: {libpq}")
        print(platform_root / "lib")

    elif args.includedir or args.includedir_server:
        # NOTE: on linux, the includedir-server dir contains pg_config.h
        # which we need because it includes the PG_VERSION_NUM macro.
        # In the vcpkg directory this file is in the includedir directory,
        # therefore we return the same value.
        incdir = platform_root / "include/libpq"
        if not incdir.is_dir():
            raise ScriptError(f"libpq include directory not found: {incdir}")
        print(platform_root / "include")

    elif args.cppflags or args.ldflags:
        # pg_config compatibility only: nothing to add for vcpkg builds.
        print("")

    else:
        raise ScriptError("command not handled")
|
|
||||||
|
|
||||||
|
def parse_cmdline() -> Namespace:
    """Parse the pg_config-style command line; exactly one query is required."""
    parser = ArgumentParser(
        description=__doc__, formatter_class=RawDescriptionHelpFormatter
    )
    g = parser.add_mutually_exclusive_group(required=True)
    # (flag, help) pairs, registered in the same order pg_config documents them.
    queries = [
        ("--libdir", "show location of object code libraries"),
        ("--includedir", "show location of C header files of the client interfaces"),
        ("--includedir-server", "show location of C header files for the server"),
        ("--cppflags", "(dummy) show CPPFLAGS value used when PostgreSQL was built"),
        ("--ldflags", "(dummy) show LDFLAGS value used when PostgreSQL was built"),
    ]
    for flag, help_text in queries:
        g.add_argument(flag, action="store_true", help=help_text)

    return parser.parse_args()
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
    """Entry point: run _main(), turning ScriptError into exit status 1."""
    try:
        _main()
    except ScriptError as e:
        print(f"ERROR: {e}.", file=sys.stderr)
        sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
# Script entry point.
if __name__ == "__main__":
    main()
|
11
scripts/build/pg_config_vcpkg_stub/pyproject.toml
Normal file
11
scripts/build/pg_config_vcpkg_stub/pyproject.toml
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
# Packaging metadata for the pg_config stub used by the Windows wheel build.
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"

[project]
name = 'pg_config_vcpkg_stub'
version = "0"
description = "see docs string in pg_config_vcpkg_stub for more details"

# Installing this project puts a `pg_config` executable on the PATH that
# answers --libdir/--includedir queries from the vcpkg-installed libpq.
[project.scripts]
pg_config = 'pg_config_vcpkg_stub:main'
|
7
scripts/build/wheel_win32_before_build.bat
Normal file
7
scripts/build/wheel_win32_before_build.bat
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
@echo on

rem Tools needed to build and repair the Windows wheel.
pip install delvewheel wheel

rem Build libpq with vcpkg; the pg_config stub below will point at it.
vcpkg install libpq:x64-windows-release

rem Install the pg_config stub so setup.py finds libpq via `pg_config`.
pipx install .\scripts\build\pg_config_vcpkg_stub\
|
31
setup.py
31
setup.py
|
@ -29,7 +29,6 @@ for coroutine libraries.
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import re
|
|
||||||
import subprocess
|
import subprocess
|
||||||
from setuptools import setup, Extension
|
from setuptools import setup, Extension
|
||||||
from distutils.command.build_ext import build_ext
|
from distutils.command.build_ext import build_ext
|
||||||
|
@ -382,34 +381,8 @@ For further information please check the 'doc/src/install.rst' file (also at
|
||||||
if token.startswith("-I"):
|
if token.startswith("-I"):
|
||||||
self.include_dirs.append(token[2:])
|
self.include_dirs.append(token[2:])
|
||||||
|
|
||||||
pgversion = pg_config_helper.query("version").split()[1]
|
# enable lo64 if Python 64 bits
|
||||||
|
if is_py_64():
|
||||||
verre = re.compile(
|
|
||||||
r"(\d+)(?:\.(\d+))?(?:(?:\.(\d+))|(devel|(?:alpha|beta|rc)\d+))?")
|
|
||||||
m = verre.match(pgversion)
|
|
||||||
if m:
|
|
||||||
pgmajor, pgminor, pgpatch = m.group(1, 2, 3)
|
|
||||||
# Postgres >= 10 doesn't have pgminor anymore.
|
|
||||||
pgmajor = int(pgmajor)
|
|
||||||
if pgmajor >= 10:
|
|
||||||
pgminor, pgpatch = None, pgminor
|
|
||||||
if pgminor is None or not pgminor.isdigit():
|
|
||||||
pgminor = 0
|
|
||||||
if pgpatch is None or not pgpatch.isdigit():
|
|
||||||
pgpatch = 0
|
|
||||||
pgminor = int(pgminor)
|
|
||||||
pgpatch = int(pgpatch)
|
|
||||||
else:
|
|
||||||
sys.stderr.write(
|
|
||||||
f"Error: could not determine PostgreSQL version from "
|
|
||||||
f"'{pgversion}'")
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
define_macros.append(("PG_VERSION_NUM", "%d%02d%02d" %
|
|
||||||
(pgmajor, pgminor, pgpatch)))
|
|
||||||
|
|
||||||
# enable lo64 if libpq >= 9.3 and Python 64 bits
|
|
||||||
if (pgmajor, pgminor) >= (9, 3) and is_py_64():
|
|
||||||
define_macros.append(("HAVE_LO64", "1"))
|
define_macros.append(("HAVE_LO64", "1"))
|
||||||
|
|
||||||
# Inject the flag in the version string already packed up
|
# Inject the flag in the version string already packed up
|
||||||
|
|
|
@ -31,7 +31,7 @@ from subprocess import Popen
|
||||||
from weakref import ref
|
from weakref import ref
|
||||||
|
|
||||||
import unittest
|
import unittest
|
||||||
from .testutils import (skip_before_postgres,
|
from .testutils import (skip_before_postgres, skip_if_windows,
|
||||||
ConnectingTestCase, skip_copy_if_green, skip_if_crdb, slow, StringIO)
|
ConnectingTestCase, skip_copy_if_green, skip_if_crdb, slow, StringIO)
|
||||||
|
|
||||||
import psycopg2
|
import psycopg2
|
||||||
|
@ -330,6 +330,7 @@ class ExceptionsTestCase(ConnectingTestCase):
|
||||||
|
|
||||||
class TestExtensionModule(unittest.TestCase):
|
class TestExtensionModule(unittest.TestCase):
|
||||||
@slow
|
@slow
|
||||||
|
@skip_if_windows
|
||||||
def test_import_internal(self):
|
def test_import_internal(self):
|
||||||
# check that the internal package can be imported "naked"
|
# check that the internal package can be imported "naked"
|
||||||
# we may break this property if there is a compelling reason to do so,
|
# we may break this property if there is a compelling reason to do so,
|
||||||
|
|
Loading…
Reference in New Issue
Block a user