Compare commits

..

No commits in common. "master" and "2.9.10" have entirely different histories.

37 changed files with 1367 additions and 696 deletions

19
.appveyor/cache_rebuild Normal file
View File

@ -0,0 +1,19 @@
This file is a simple placeholder for forcing the appveyor build cache
to invalidate itself since appveyor.yml changes more frequently than
the cache needs updating. Note, the versions list here can be
different than what is indicated in appveyor.yml.
To invalidate the cache, update this file and check it into git.
Currently used modules built in the cache:
- OPENSSL_VERSION: 1.1.1w
- POSTGRES_VERSION: 16.0
NOTE: to zap the cache manually you can also use:
curl -X DELETE -H "Authorization: Bearer $APPVEYOR_TOKEN" -H "Content-Type: application/json" https://ci.appveyor.com/api/projects/psycopg/psycopg2/buildcache
with the token from https://ci.appveyor.com/api-token

83
.appveyor/packages.yml Normal file
View File

@ -0,0 +1,83 @@
version: 2.x.{build}

clone_folder: C:\Project

# We use the configuration to specify the package name
configuration:
    - psycopg2
    - psycopg2-binary

environment:
    matrix:
        # For Python versions available on Appveyor, see
        # https://www.appveyor.com/docs/windows-images-software/#python
        - {PY_VER: "312", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
        - {PY_VER: "312", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
        - {PY_VER: "311", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
        - {PY_VER: "311", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
        - {PY_VER: "310", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
        - {PY_VER: "310", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
        - {PY_VER: "39", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
        - {PY_VER: "39", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}

    # Consumed by scripts/build/appveyor.py to select the "packages" steps
    WORKFLOW: packages

    OPENSSL_VERSION: "1_1_1w"
    POSTGRES_VERSION: "16_0"

    PSYCOPG2_TESTDB: psycopg2_test
    PSYCOPG2_TESTDB_USER: postgres
    PSYCOPG2_TESTDB_HOST: localhost

    PGUSER: postgres
    PGPASSWORD: Password12!
    PGSSLMODE: require

    # Add CWD to perl library path for PostgreSQL build on VS2019
    PERL5LIB: .

    # Select according to the service enabled
    POSTGRES_DIR: C:\Program Files\PostgreSQL\13\

matrix:
    fast_finish: false

services:
    # Note: if you change this service also change POSTGRES_DIR
    - postgresql13

cache:
    # Rebuild cache if following file changes
    # (See the file to zap the cache manually)
    - C:\Others -> .appveyor\cache_rebuild

# Script called before repo cloning
# init:

# Repository gets cloned, Cache is restored
install:
    - "py scripts\\build\\appveyor.py install"

# PostgreSQL server starts now

build: "off"

build_script:
    - "py scripts\\build\\appveyor.py build_script"

after_build:
    - "py scripts\\build\\appveyor.py after_build"

before_test:
    - "py scripts\\build\\appveyor.py before_test"

test_script:
    - "py scripts\\build\\appveyor.py test_script"

artifacts:
    - path: dist\psycopg2-*\*.whl
      name: wheel

# vim: set ts=4 sts=4 sw=4:

74
.appveyor/tests.yml Normal file
View File

@ -0,0 +1,74 @@
version: 2.x.{build}

clone_folder: C:\Project

environment:
    matrix:
        # For Python versions available on Appveyor, see
        # https://www.appveyor.com/docs/windows-images-software/#python
        - {PY_VER: "312", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
        - {PY_VER: "312", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
        - {PY_VER: "311", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
        - {PY_VER: "311", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
        - {PY_VER: "310", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
        - {PY_VER: "310", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
        - {PY_VER: "39", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
        - {PY_VER: "39", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}

    # Consumed by scripts/build/appveyor.py to select the "tests" steps
    WORKFLOW: tests

    OPENSSL_VERSION: "1_1_1w"
    POSTGRES_VERSION: "16_0"

    PSYCOPG2_TESTDB: psycopg2_test
    PSYCOPG2_TESTDB_USER: postgres
    PSYCOPG2_TESTDB_HOST: localhost

    PGUSER: postgres
    PGPASSWORD: Password12!
    PGSSLMODE: require

    # Add CWD to perl library path for PostgreSQL build on VS2019
    PERL5LIB: .

    # Select according to the service enabled
    POSTGRES_DIR: C:\Program Files\PostgreSQL\13\

matrix:
    fast_finish: false

services:
    # Note: if you change this service also change POSTGRES_DIR
    - postgresql13

cache:
    # Rebuild cache if following file changes
    # (See the file to zap the cache manually)
    - C:\Others -> .appveyor\cache_rebuild

# Script called before repo cloning
# init:

# Repository gets cloned, Cache is restored
# NOTE: the commands were missing the opening double quote, which left the
# scalars malformed/inconsistent with packages.yml; the quotes are required
# so that "\\" is an escaped backslash, matching the other workflow file.
install:
    - "py scripts\\build\\appveyor.py install"

# PostgreSQL server starts now

build: "off"

build_script:
    - "py scripts\\build\\appveyor.py build_script"

after_build:
    - "py scripts\\build\\appveyor.py after_build"

before_test:
    - "py scripts\\build\\appveyor.py before_test"

test_script:
    - "py scripts\\build\\appveyor.py test_script"

# vim: set ts=4 sts=4 sw=4:

View File

@ -1,138 +0,0 @@
name: Build and cache libpq
# Build the libpq package and cache the artifacts.
#
# Every Python version on the same architecture will use the same libpq.
# Therefore, building and caching the libpq together with the binary packages
# results in multiple concurrent builds and leaves the GitHub artifacts
# manager very confused.
#
# This job builds the libpq and then caches the artifacts so that the
# packages.yml workflow will find the library in the cache.
#
# You can see the caches at https://github.com/psycopg/psycopg2/actions/caches
#
# Or from the API:
#
# curl -fsSL -X GET \
# -H "Accept: application/vnd.github+json" \
# -H "Authorization: Bearer $GITHUB_TOKEN" \
# -H "X-GitHub-Api-Version: 2022-11-28" \
# "https://api.github.com/repos/psycopg/psycopg/actions/caches" \
# | jq -r '.actions_caches[].key'
#
# You can delete a cache using:
#
# curl -fsSL -X DELETE \
# -H "Accept: application/vnd.github+json" \
# -H "Authorization: Bearer $GITHUB_TOKEN" \
# -H "X-GitHub-Api-Version: 2022-11-28" \
# "https://api.github.com/repos/psycopg/psycopg/actions/caches?key=libpq-manylinux-ppc64le-17.2-3.4.0"
#
# ref: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#delete-github-actions-caches-for-a-repository-using-a-cache-key
on:
workflow_dispatch:
push:
paths:
- .github/workflows/build-and-cache-libpq.yml
- scripts/build/build_libpq.sh
# TODO: move these env vars in an external env file in order to share them
# across workflows.
env:
LIBPQ_VERSION: "17.6"
OPENSSL_VERSION: "3.5.4"
PQ_FLAGS: ""
concurrency:
# Cancel older requests of the same workflow in the same branch.
group: ${{ github.workflow }}-${{ github.ref_name }}
cancel-in-progress: true
jobs:
linux: # {{{
runs-on: ubuntu-latest
if: true
strategy:
fail-fast: false
matrix:
arch: [x86_64, ppc64le, aarch64, riscv64]
platform: [manylinux, musllinux]
steps:
- uses: actions/checkout@v6
- name: Set up QEMU for multi-arch build
# Check https://github.com/docker/setup-qemu-action for newer versions.
uses: docker/setup-qemu-action@v3
with:
# https://github.com/pypa/cibuildwheel/discussions/2256
image: tonistiigi/binfmt:qemu-v8.1.5
- name: Cache libpq build
uses: actions/cache@v5
with:
path: /tmp/libpq.build
key: libpq-${{ matrix.platform }}-${{ matrix.arch }}-${{ env.LIBPQ_VERSION }}-${{ env.OPENSSL_VERSION }}${{ env.PQ_FLAGS }}
- name: Build wheels
uses: pypa/cibuildwheel@v3.3.1
env:
CIBW_SKIP: "cp31?t-*"
CIBW_MANYLINUX_X86_64_IMAGE: manylinux2014
CIBW_MANYLINUX_I686_IMAGE: manylinux2014
CIBW_MANYLINUX_AARCH64_IMAGE: manylinux2014
CIBW_MANYLINUX_PPC64LE_IMAGE: manylinux2014
CIBW_MANYLINUX_RISCV64_IMAGE: quay.io/pypa/manylinux_2_39_riscv64
CIBW_BUILD: cp313-${{matrix.platform}}_${{matrix.arch}}
CIBW_ARCHS_LINUX: auto aarch64 ppc64le riscv64
CIBW_BEFORE_ALL_LINUX: ./scripts/build/build_libpq.sh
CIBW_REPAIR_WHEEL_COMMAND: >-
./scripts/build/strip_wheel.sh {wheel}
&& auditwheel repair -w {dest_dir} {wheel}
CIBW_ENVIRONMENT_PASS_LINUX: LIBPQ_VERSION OPENSSL_VERSION
CIBW_ENVIRONMENT: >-
LIBPQ_BUILD_PREFIX=/host/tmp/libpq.build
PATH="$LIBPQ_BUILD_PREFIX/bin:$PATH"
LD_LIBRARY_PATH="$LIBPQ_BUILD_PREFIX/lib:$LIBPQ_BUILD_PREFIX/lib64"
# }}}
macos: # {{{
runs-on: macos-latest
if: true
strategy:
fail-fast: false
matrix:
arch: [x86_64, arm64]
steps:
- name: Checkout repos
uses: actions/checkout@v6
- name: Cache libpq build
uses: actions/cache@v5
with:
path: /tmp/libpq.build
key: libpq-macos-${{ env.LIBPQ_VERSION }}-${{ matrix.arch }}-${{ env.OPENSSL_VERSION }}${{ env.PQ_FLAGS }}
- name: Build wheels
uses: pypa/cibuildwheel@v3.3.1
env:
CIBW_SKIP: "cp31?t-*"
CIBW_BUILD: cp313-macosx_${{matrix.arch}}
CIBW_ARCHS_MACOS: ${{matrix.arch}}
MACOSX_ARCHITECTURE: ${{matrix.arch}}
CIBW_BEFORE_ALL_MACOS: ./scripts/build/build_libpq.sh
CIBW_ENVIRONMENT: >-
PSYCOPG_IMPL=binary
LIBPQ_BUILD_PREFIX=/tmp/libpq.build
PATH="$LIBPQ_BUILD_PREFIX/bin:$PATH"
# }}}

View File

@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Trigger docs build - name: Trigger docs build
uses: peter-evans/repository-dispatch@v4 uses: peter-evans/repository-dispatch@v2
with: with:
repository: psycopg/psycopg-website repository: psycopg/psycopg-website
event-type: psycopg2-commit event-type: psycopg2-commit

View File

@ -1,24 +1,11 @@
--- ---
name: Build packages name: Build packages
# Note: these jobs also build and cache the libpq, but, because every Python
# version will try to build and cache the same libpq instance, there is a race
# condition and most likely the artifacts manager will refuse to cache.
#
# Please run the `build-and-cache-libpq.yml` workflow when the libpq/openssl
# version change in order to update the cache.
on: on:
- workflow_dispatch - workflow_dispatch
env:
PIP_BREAK_SYSTEM_PACKAGES: "1"
LIBPQ_VERSION: "17.6"
OPENSSL_VERSION: "3.5.4"
PQ_FLAGS: ""
jobs: jobs:
sdist: # {{{ build-sdist:
if: true if: true
strategy: strategy:
fail-fast: false fail-fast: false
@ -30,7 +17,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout repos - name: Checkout repos
uses: actions/checkout@v6 uses: actions/checkout@v4
- name: Build sdist - name: Build sdist
run: ./scripts/build/build_sdist.sh run: ./scripts/build/build_sdist.sh
@ -38,7 +25,7 @@ jobs:
PACKAGE_NAME: ${{ matrix.package_name }} PACKAGE_NAME: ${{ matrix.package_name }}
- name: Upload artifacts - name: Upload artifacts
uses: actions/upload-artifact@v6 uses: actions/upload-artifact@v4
with: with:
name: sdist-${{ matrix.package_name }} name: sdist-${{ matrix.package_name }}
path: | path: |
@ -65,43 +52,44 @@ jobs:
--health-timeout 5s --health-timeout 5s
--health-retries 5 --health-retries 5
# }}}
linux: # {{{ build-linux:
if: true if: true
env:
LIBPQ_VERSION: "16.0"
OPENSSL_VERSION: "1.1.1w"
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
platform: [manylinux, musllinux] platform: [manylinux, musllinux]
arch: [x86_64, aarch64, ppc64le, riscv64] arch: [x86_64, i686, aarch64, ppc64le]
pyver: [cp39, cp310, cp311, cp312, cp313, cp314] pyver: [cp38, cp39, cp310, cp311, cp312, cp313]
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout repos - name: Checkout repos
uses: actions/checkout@v6 uses: actions/checkout@v4
- name: Set up QEMU for multi-arch build - name: Set up QEMU for multi-arch build
uses: docker/setup-qemu-action@v3 uses: docker/setup-qemu-action@v3
- name: Cache libpq build - name: Cache libpq build
uses: actions/cache@v5 uses: actions/cache@v3
with: with:
path: /tmp/libpq.build path: /tmp/libpq.build
key: libpq-${{ matrix.platform }}-${{ matrix.arch }}-${{ env.LIBPQ_VERSION }}-${{ env.OPENSSL_VERSION }}${{ env.PQ_FLAGS }} key: libpq-${{ env.LIBPQ_VERSION }}-${{ matrix.platform }}-${{ matrix.arch }}
- name: Build wheels - name: Build wheels
uses: pypa/cibuildwheel@v3.3.1 uses: pypa/cibuildwheel@v2.21.2
env: env:
CIBW_SKIP: "cp31?t-*"
CIBW_MANYLINUX_X86_64_IMAGE: manylinux2014 CIBW_MANYLINUX_X86_64_IMAGE: manylinux2014
CIBW_MANYLINUX_I686_IMAGE: manylinux2014 CIBW_MANYLINUX_I686_IMAGE: manylinux2014
CIBW_MANYLINUX_AARCH64_IMAGE: manylinux2014 CIBW_MANYLINUX_AARCH64_IMAGE: manylinux2014
CIBW_MANYLINUX_PPC64LE_IMAGE: manylinux2014 CIBW_MANYLINUX_PPC64LE_IMAGE: manylinux2014
CIBW_MANYLINUX_RISCV64_IMAGE: quay.io/pypa/manylinux_2_39_riscv64
CIBW_BUILD: ${{matrix.pyver}}-${{matrix.platform}}_${{matrix.arch}} CIBW_BUILD: ${{matrix.pyver}}-${{matrix.platform}}_${{matrix.arch}}
CIBW_ARCHS_LINUX: auto aarch64 ppc64le riscv64 CIBW_ARCHS_LINUX: auto aarch64 ppc64le
CIBW_BEFORE_ALL_LINUX: ./scripts/build/wheel_linux_before_all.sh CIBW_BEFORE_ALL_LINUX: ./scripts/build/wheel_linux_before_all.sh
CIBW_REPAIR_WHEEL_COMMAND: >- CIBW_REPAIR_WHEEL_COMMAND: >-
./scripts/build/strip_wheel.sh {wheel} ./scripts/build/strip_wheel.sh {wheel}
@ -121,7 +109,7 @@ jobs:
PSYCOPG2_TESTDB_PASSWORD=password PSYCOPG2_TESTDB_PASSWORD=password
PSYCOPG2_TEST_FAST=1 PSYCOPG2_TEST_FAST=1
- uses: actions/upload-artifact@v6 - uses: actions/upload-artifact@v4
with: with:
name: linux-${{matrix.pyver}}-${{matrix.platform}}_${{matrix.arch}} name: linux-${{matrix.pyver}}-${{matrix.platform}}_${{matrix.arch}}
path: ./wheelhouse/*.whl path: ./wheelhouse/*.whl
@ -140,140 +128,55 @@ jobs:
--health-timeout 5s --health-timeout 5s
--health-retries 5 --health-retries 5
# }}}
macos: # {{{ build-macos:
runs-on: macos-latest runs-on: macos-${{ matrix.macver }}
if: true if: true
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
# These archs require an Apple M1 runner: [arm64, universal2] # These archs require an Apple M1 runner: [arm64, universal2]
arch: [x86_64, arm64] arch: [x86_64]
pyver: [cp39, cp310, cp311, cp312, cp313, cp314] pyver: [cp38, cp39, cp310, cp311, cp312, cp313]
macver: ["12"]
include:
- arch: arm64
pyver: cp310
macver: "14"
- arch: arm64
pyver: cp311
macver: "14"
- arch: arm64
pyver: cp312
macver: "14"
- arch: arm64
pyver: cp313
macver: "14"
steps: steps:
- name: Checkout repos - name: Checkout repos
uses: actions/checkout@v6 uses: actions/checkout@v4
- name: Cache libpq build
uses: actions/cache@v5
with:
path: /tmp/libpq.build
key: libpq-macos-${{ env.LIBPQ_VERSION }}-${{ matrix.arch }}-${{ env.OPENSSL_VERSION }}${{ env.PQ_FLAGS }}
- name: Build wheels - name: Build wheels
uses: pypa/cibuildwheel@v3.3.1 uses: pypa/cibuildwheel@v2.21.2
env: env:
CIBW_SKIP: "cp31?t-*"
CIBW_BUILD: ${{matrix.pyver}}-macosx_${{matrix.arch}} CIBW_BUILD: ${{matrix.pyver}}-macosx_${{matrix.arch}}
CIBW_ARCHS_MACOS: ${{matrix.arch}} CIBW_ARCHS_MACOS: ${{matrix.arch}}
MACOSX_ARCHITECTURE: ${{matrix.arch}}
CIBW_BEFORE_ALL_MACOS: ./scripts/build/wheel_macos_before_all.sh CIBW_BEFORE_ALL_MACOS: ./scripts/build/wheel_macos_before_all.sh
CIBW_TEST_COMMAND: >- CIBW_TEST_COMMAND: >-
export PYTHONPATH={project} && export PYTHONPATH={project} &&
python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')" python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')"
CIBW_ENVIRONMENT: >- CIBW_ENVIRONMENT: >-
MACOSX_DEPLOYMENT_TARGET=${{ matrix.macver }}.0
PG_VERSION=16 PG_VERSION=16
PACKAGE_NAME=psycopg2-binary PACKAGE_NAME=psycopg2-binary
PSYCOPG2_TESTDB=postgres PSYCOPG2_TESTDB=postgres
PATH="/tmp/libpq.build/bin:$PATH" PSYCOPG2_TEST_FAST=1
PATH="/usr/local/opt/postgresql@${PG_VERSION}/bin:$PATH"
- name: Upload artifacts - name: Upload artifacts
uses: actions/upload-artifact@v6 uses: actions/upload-artifact@v4
with: with:
name: macos-${{matrix.pyver}}-macos-${{matrix.arch}} name: macos-${{matrix.pyver}}-macos-${{matrix.macver}}_${{matrix.arch}}
path: ./wheelhouse/*.whl path: ./wheelhouse/*.whl
# }}}
windows: # {{{
runs-on: windows-latest
if: true
strategy:
fail-fast: false
matrix:
arch: [win_amd64]
pyver: [cp39, cp310, cp311, cp312, cp313, cp314]
package_name: [psycopg2, psycopg2-binary]
defaults:
run:
shell: bash
steps:
# there are some other libpq in PATH
- name: Drop spurious libpq in the path
run: rm -rf c:/tools/php C:/Strawberry/c/bin
- name: Checkout repo
uses: actions/checkout@v6
- name: Start PostgreSQL service for test
run: |
$PgSvc = Get-Service "postgresql*"
Set-Service $PgSvc.Name -StartupType manual
$PgSvc.Start()
shell: powershell
- name: Export GitHub Actions cache environment variables
uses: actions/github-script@v8
with:
script: |
const path = require('path')
core.exportVariable('ACTIONS_CACHE_URL', process.env.ACTIONS_CACHE_URL || '');
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
core.addPath(path.join(process.env.VCPKG_INSTALLATION_ROOT, 'installed/x64-windows-release/lib'));
core.addPath(path.join(process.env.VCPKG_INSTALLATION_ROOT, 'installed/x64-windows-release/bin'));
- name: Create the binary package source tree
run: >-
sed -i 's/^setup(name="psycopg2"/setup(name="${{matrix.package_name}}"/'
setup.py
if: ${{ matrix.package_name != 'psycopg2' }}
- name: Build wheels
uses: pypa/cibuildwheel@v3.3.1
env:
CIBW_SKIP: "cp31?t-*"
VCPKG_BINARY_SOURCES: "clear;x-gha,readwrite" # cache vcpkg
CIBW_BUILD: ${{matrix.pyver}}-${{matrix.arch}}
CIBW_ARCHS_WINDOWS: AMD64 x86
CIBW_BEFORE_BUILD_WINDOWS: '.\scripts\build\wheel_win32_before_build.bat'
CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: >-
delvewheel repair -w {dest_dir}
--no-mangle "libiconv-2.dll;libwinpthread-1.dll" {wheel}
CIBW_TEST_COMMAND: >-
set PYTHONPATH={project} &&
python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')"
# Note: no fast test because we don't run Windows tests
CIBW_ENVIRONMENT_WINDOWS: >-
PSYCOPG2_TESTDB=postgres
PSYCOPG2_TESTDB_USER=postgres
PSYCOPG2_TESTDB_HOST=localhost
- name: Upload artifacts
uses: actions/upload-artifact@v6
with:
name: windows-${{ matrix.package_name }}-${{matrix.pyver}}-${{matrix.arch}}
path: ./wheelhouse/*.whl
# }}}
merge: # {{{
runs-on: ubuntu-latest
needs:
- sdist
- linux
- macos
- windows
steps:
- name: Merge Artifacts
uses: actions/upload-artifact/merge@v6
with:
name: psycopg2-artifacts
delete-merged: true
# }}}

View File

@ -1,35 +1,32 @@
name: Tests name: Tests
env:
PIP_BREAK_SYSTEM_PACKAGES: "1"
on: on:
push: push:
pull_request: pull_request:
jobs: jobs:
linux: tests:
name: Unit tests run
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: true
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
include: include:
- {python: "3.8", postgres: "12"}
- {python: "3.9", postgres: "13"} - {python: "3.9", postgres: "13"}
- {python: "3.10", postgres: "14"} - {python: "3.10", postgres: "14"}
- {python: "3.11", postgres: "15"} - {python: "3.11", postgres: "15"}
- {python: "3.12", postgres: "16"} - {python: "3.12", postgres: "16"}
- {python: "3.13", postgres: "17"} - {python: "3.13-dev", postgres: "17"}
- {python: "3.14", postgres: "18"}
# Opposite extremes of the supported Py/PG range, other architecture # Opposite extremes of the supported Py/PG range, other architecture
- {python: "3.9", postgres: "18", architecture: "x86"} - {python: "3.8", postgres: "17", architecture: "x86"}
- {python: "3.10", postgres: "17", architecture: "x86"} - {python: "3.9", postgres: "16", architecture: "x86"}
- {python: "3.11", postgres: "16", architecture: "x86"} - {python: "3.10", postgres: "15", architecture: "x86"}
- {python: "3.12", postgres: "15", architecture: "x86"} - {python: "3.11", postgres: "14", architecture: "x86"}
- {python: "3.13", postgres: "14", architecture: "x86"} - {python: "3.12", postgres: "13", architecture: "x86"}
- {python: "3.14", postgres: "13", architecture: "x86"} - {python: "3.13-dev", postgres: "12", architecture: "x86"}
env: env:
PSYCOPG2_TESTDB: postgres PSYCOPG2_TESTDB: postgres
@ -52,7 +49,7 @@ jobs:
--health-retries 5 --health-retries 5
steps: steps:
- uses: actions/checkout@v6 - uses: actions/checkout@v4
# Can enable to test an unreleased libpq version. # Can enable to test an unreleased libpq version.
- name: install libpq 16 - name: install libpq 16
@ -69,10 +66,9 @@ jobs:
- name: Install tox - name: Install tox
run: pip install "tox < 4" run: pip install "tox < 4"
- uses: actions/setup-python@v6 - uses: actions/setup-python@v5
with: with:
python-version: ${{ matrix.python }} python-version: ${{ matrix.python }}
allow-prereleases: true
- name: Run tests - name: Run tests
env: env:
MATRIX_PYTHON: ${{ matrix.python }} MATRIX_PYTHON: ${{ matrix.python }}

12
NEWS
View File

@ -1,18 +1,6 @@
Current release Current release
--------------- ---------------
What's new in psycopg 2.9.11
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Add support for Python 3.14.
- Avoid a segfault passing more arguments than placeholders if Python is built
with assertions enabled (:ticket:`#1791`).
- Add riscv64 platform binary packages (:ticket:`#1813`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
PostgreSQL 18.
- Drop support for Python 3.8.
What's new in psycopg 2.9.10 What's new in psycopg 2.9.10
^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^

View File

@ -73,8 +73,13 @@ production it is advised to use the package built from sources.
.. _install: https://www.psycopg.org/docs/install.html#install-from-source .. _install: https://www.psycopg.org/docs/install.html#install-from-source
.. _faq: https://www.psycopg.org/docs/faq.html#faq-compile .. _faq: https://www.psycopg.org/docs/faq.html#faq-compile
:Build status: |gh-actions| :Linux/OSX: |gh-actions|
:Windows: |appveyor|
.. |gh-actions| image:: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml/badge.svg .. |gh-actions| image:: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml/badge.svg
:target: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml :target: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml
:alt: Build status :alt: Linux and OSX build status
.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/github/psycopg/psycopg2?branch=master&svg=true
:target: https://ci.appveyor.com/project/psycopg/psycopg2/branch/master
:alt: Windows build status

View File

@ -16,9 +16,10 @@ How to make a psycopg2 release
$ export VERSION=2.8.4 $ export VERSION=2.8.4
- Push psycopg2 to master or to the maint branch. Make sure tests on `GitHub - Push psycopg2 to master or to the maint branch. Make sure tests on `GitHub
Actions`__. Actions`__ and AppVeyor__ pass.
.. __: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml .. __: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml
.. __: https://ci.appveyor.com/project/psycopg/psycopg2
- Create a signed tag with the content of the relevant NEWS bit and push it. - Create a signed tag with the content of the relevant NEWS bit and push it.
E.g.:: E.g.::
@ -40,10 +41,19 @@ How to make a psycopg2 release
- On GitHub Actions run manually a `package build workflow`__. - On GitHub Actions run manually a `package build workflow`__.
- On Appveyor change the `build settings`__ and replace the custom
configuration file name from ``.appveyor/tests.yml`` to
``.appveyor/packages.yml`` (yeah, that sucks a bit. Remember to put it
back to testing).
.. __: https://github.com/psycopg/psycopg2/actions/workflows/packages.yml .. __: https://github.com/psycopg/psycopg2/actions/workflows/packages.yml
.. __: https://ci.appveyor.com/project/psycopg/psycopg2/settings
- When the workflows have finished download the packages from the job - When the workflows have finished download the packages from the job
artifacts. artifacts. For Appveyor you can use the ``download_packages_appveyor.py``
scripts from the ``scripts/build`` directory. They will be saved in a
``wheelhouse/psycopg2-${VERSION}`` directory. For Github just download it
from the web interface (it's a single file).
- Only for stable packages: upload the signed packages on PyPI:: - Only for stable packages: upload the signed packages on PyPI::

View File

@ -1,48 +1,50 @@
# This file was autogenerated by uv via the following command: #
# uv pip compile requirements.in -o requirements.txt # This file is autogenerated by pip-compile with Python 3.10
alabaster==1.0.0 # by the following command:
#
# pip-compile requirements.in
#
alabaster==0.7.13
# via sphinx # via sphinx
babel==2.17.0 babel==2.12.1
# via sphinx # via sphinx
certifi==2025.10.5 certifi>=2023.7.22
# via requests # via requests
charset-normalizer==3.4.3 charset-normalizer==3.1.0
# via requests # via requests
docutils==0.21.2 docutils==0.19
# via sphinx # via sphinx
idna==3.10 idna==3.4
# via requests # via requests
imagesize==1.4.1 imagesize==1.4.1
# via sphinx # via sphinx
jinja2==3.1.6 jinja2==3.1.2
# via sphinx # via sphinx
markupsafe==3.0.3 markupsafe==2.1.2
# via jinja2 # via jinja2
packaging==25.0 packaging==23.1
# via sphinx # via sphinx
pygments==2.19.2 pygments==2.15.0
# via sphinx # via sphinx
requests==2.32.5 requests==2.31.0
# via sphinx # via sphinx
snowballstemmer==3.0.1 snowballstemmer==2.2.0
# via sphinx # via sphinx
sphinx==8.1.3 sphinx==6.1.3
# via -r requirements.in # via -r requirements.in
sphinx-better-theme==0.1.5 sphinx-better-theme==0.1.5
# via -r requirements.in # via -r requirements.in
sphinxcontrib-applehelp==2.0.0 sphinxcontrib-applehelp==1.0.4
# via sphinx # via sphinx
sphinxcontrib-devhelp==2.0.0 sphinxcontrib-devhelp==1.0.2
# via sphinx # via sphinx
sphinxcontrib-htmlhelp==2.1.0 sphinxcontrib-htmlhelp==2.0.1
# via sphinx # via sphinx
sphinxcontrib-jsmath==1.0.1 sphinxcontrib-jsmath==1.0.1
# via sphinx # via sphinx
sphinxcontrib-qthelp==2.0.0 sphinxcontrib-qthelp==1.0.3
# via sphinx # via sphinx
sphinxcontrib-serializinghtml==2.0.0 sphinxcontrib-serializinghtml==1.1.5
# via sphinx # via sphinx
tomli==2.3.0 urllib3==1.26.17
# via sphinx
urllib3==2.5.0
# via requests # via requests

View File

@ -277,7 +277,7 @@ def drop_test_table(name):
cur.execute("SAVEPOINT drop_test_table;") cur.execute("SAVEPOINT drop_test_table;")
try: try:
cur.execute("DROP TABLE %s;" % name) cur.execute("DROP TABLE %s;" % name)
except Exception: except:
cur.execute("ROLLBACK TO SAVEPOINT drop_test_table;") cur.execute("ROLLBACK TO SAVEPOINT drop_test_table;")
conn.commit() conn.commit()

View File

@ -131,8 +131,8 @@ The current `!psycopg2` implementation supports:
.. ..
NOTE: keep consistent with setup.py and the /features/ page. NOTE: keep consistent with setup.py and the /features/ page.
- Python versions from 3.9 to 3.14 - Python versions from 3.8 to 3.13
- PostgreSQL server versions from 7.4 to 18 - PostgreSQL server versions from 7.4 to 17
- PostgreSQL client library version from 9.1 - PostgreSQL client library version from 9.1
.. note:: .. note::

View File

@ -69,7 +69,6 @@ CLASS_LOCATOR_EXCEPTION = '0F'
CLASS_INVALID_GRANTOR = '0L' CLASS_INVALID_GRANTOR = '0L'
CLASS_INVALID_ROLE_SPECIFICATION = '0P' CLASS_INVALID_ROLE_SPECIFICATION = '0P'
CLASS_DIAGNOSTICS_EXCEPTION = '0Z' CLASS_DIAGNOSTICS_EXCEPTION = '0Z'
CLASS_XQUERY_ERROR = '10'
CLASS_CASE_NOT_FOUND = '20' CLASS_CASE_NOT_FOUND = '20'
CLASS_CARDINALITY_VIOLATION = '21' CLASS_CARDINALITY_VIOLATION = '21'
CLASS_DATA_EXCEPTION = '22' CLASS_DATA_EXCEPTION = '22'
@ -155,9 +154,6 @@ INVALID_ROLE_SPECIFICATION = '0P000'
DIAGNOSTICS_EXCEPTION = '0Z000' DIAGNOSTICS_EXCEPTION = '0Z000'
STACKED_DIAGNOSTICS_ACCESSED_WITHOUT_ACTIVE_HANDLER = '0Z002' STACKED_DIAGNOSTICS_ACCESSED_WITHOUT_ACTIVE_HANDLER = '0Z002'
# Class 10 - XQuery Error
INVALID_ARGUMENT_FOR_XQUERY = '10608'
# Class 20 - Case Not Found # Class 20 - Case Not Found
CASE_NOT_FOUND = '20000' CASE_NOT_FOUND = '20000'
@ -404,7 +400,6 @@ SYSTEM_ERROR = '58000'
IO_ERROR = '58030' IO_ERROR = '58030'
UNDEFINED_FILE = '58P01' UNDEFINED_FILE = '58P01'
DUPLICATE_FILE = '58P02' DUPLICATE_FILE = '58P02'
FILE_NAME_TOO_LONG = '58P03'
# Class 72 - Snapshot Failure # Class 72 - Snapshot Failure
SNAPSHOT_TOO_OLD = '72000' SNAPSHOT_TOO_OLD = '72000'

View File

@ -1047,7 +1047,7 @@ static cursorObject *
_conn_get_async_cursor(connectionObject *self) { _conn_get_async_cursor(connectionObject *self) {
PyObject *py_curs; PyObject *py_curs;
if (!(py_curs = psyco_weakref_get_object(self->async_cursor))) { if (!(py_curs = PyWeakref_GetObject(self->async_cursor))) {
PyErr_SetString(PyExc_SystemError, PyErr_SetString(PyExc_SystemError,
"got null dereferencing cursor weakref"); "got null dereferencing cursor weakref");
goto error; goto error;

View File

@ -342,7 +342,7 @@ _psyco_curs_merge_query_args(cursorObject *self,
if (PyObject_HasAttrString(arg, "args")) { if (PyObject_HasAttrString(arg, "args")) {
PyObject *args = PyObject_GetAttrString(arg, "args"); PyObject *args = PyObject_GetAttrString(arg, "args");
PyObject *str = PySequence_GetItem(args, 0); PyObject *str = PySequence_GetItem(args, 0);
const char *s = PyUnicode_AsUTF8(str); const char *s = Bytes_AS_STRING(str);
Dprintf("curs_execute: -> %s", s); Dprintf("curs_execute: -> %s", s);
@ -779,7 +779,7 @@ curs_fetchone(cursorObject *self, PyObject *dummy)
successive requests to reallocate it */ successive requests to reallocate it */
if (self->row >= self->rowcount if (self->row >= self->rowcount
&& self->conn->async_cursor && self->conn->async_cursor
&& psyco_weakref_get_object(self->conn->async_cursor) == (PyObject*)self) && PyWeakref_GetObject(self->conn->async_cursor) == (PyObject*)self)
CLEARPGRES(self->pgres); CLEARPGRES(self->pgres);
return res; return res;
@ -826,7 +826,7 @@ curs_next_named(cursorObject *self)
successive requests to reallocate it */ successive requests to reallocate it */
if (self->row >= self->rowcount if (self->row >= self->rowcount
&& self->conn->async_cursor && self->conn->async_cursor
&& psyco_weakref_get_object(self->conn->async_cursor) == (PyObject*)self) && PyWeakref_GetObject(self->conn->async_cursor) == (PyObject*)self)
CLEARPGRES(self->pgres); CLEARPGRES(self->pgres);
return res; return res;
@ -911,7 +911,7 @@ curs_fetchmany(cursorObject *self, PyObject *args, PyObject *kwords)
successive requests to reallocate it */ successive requests to reallocate it */
if (self->row >= self->rowcount if (self->row >= self->rowcount
&& self->conn->async_cursor && self->conn->async_cursor
&& psyco_weakref_get_object(self->conn->async_cursor) == (PyObject*)self) && PyWeakref_GetObject(self->conn->async_cursor) == (PyObject*)self)
CLEARPGRES(self->pgres); CLEARPGRES(self->pgres);
/* success */ /* success */
@ -980,7 +980,7 @@ curs_fetchall(cursorObject *self, PyObject *dummy)
successive requests to reallocate it */ successive requests to reallocate it */
if (self->row >= self->rowcount if (self->row >= self->rowcount
&& self->conn->async_cursor && self->conn->async_cursor
&& psyco_weakref_get_object(self->conn->async_cursor) == (PyObject*)self) && PyWeakref_GetObject(self->conn->async_cursor) == (PyObject*)self)
CLEARPGRES(self->pgres); CLEARPGRES(self->pgres);
/* success */ /* success */

View File

@ -27,7 +27,6 @@
#ifndef PSYCOPG_H #ifndef PSYCOPG_H
#define PSYCOPG_H 1 #define PSYCOPG_H 1
#include <pg_config.h>
#if PG_VERSION_NUM < 90100 #if PG_VERSION_NUM < 90100
#error "Psycopg requires PostgreSQL client library (libpq) >= 9.1" #error "Psycopg requires PostgreSQL client library (libpq) >= 9.1"
#endif #endif

View File

@ -27,8 +27,8 @@
#ifndef PSYCOPG_PYTHON_H #ifndef PSYCOPG_PYTHON_H
#define PSYCOPG_PYTHON_H 1 #define PSYCOPG_PYTHON_H 1
#if PY_VERSION_HEX < 0x03090000 #if PY_VERSION_HEX < 0x03080000
#error "psycopg requires Python 3.9" #error "psycopg requires Python 3.8"
#endif #endif
#include <structmember.h> #include <structmember.h>

View File

@ -395,9 +395,25 @@ psyco_is_main_interp(void)
#if PY_VERSION_HEX >= 0x030d0000 #if PY_VERSION_HEX >= 0x030d0000
/* tested with Python 3.13.0a6 */ /* tested with Python 3.13.0a6 */
return PyInterpreterState_Get() == PyInterpreterState_Main(); return PyInterpreterState_Get() == PyInterpreterState_Main();
#else #elif PY_VERSION_HEX >= 0x03080000
/* tested with Python 3.8.0a2 */ /* tested with Python 3.8.0a2 */
return _PyInterpreterState_Get() == PyInterpreterState_Main(); return _PyInterpreterState_Get() == PyInterpreterState_Main();
#else
static PyInterpreterState *main_interp = NULL; /* Cached reference */
PyInterpreterState *interp;
if (main_interp) {
return (main_interp == PyThreadState_Get()->interp);
}
/* No cached value: cache the proper value and try again. */
interp = PyInterpreterState_Head();
while (interp->next)
interp = interp->next;
main_interp = interp;
assert (main_interp);
return psyco_is_main_interp();
#endif #endif
} }
@ -441,36 +457,3 @@ psyco_get_decimal_type(void)
return decimalType; return decimalType;
} }
/* Return the object referred by the weak ref as a borrowed pointer.
*
* Reproduce the semantics of PyWeakref_GetObject(), which was deprecated in
* 3.13.
*
* I know that it would have been better to reproduce the semantics of
* PyWeakref_GetRef(), thank you for the suggestion. However this opens a can
* of worms in cursor_type.c. Why so? Glad you ask: because there are many
* places in that function where we don't check the return value. That stuff is
* convoluted and async: I think in case of failure it would fail of internal
* error, but it's not been reported doing so.
*/
BORROWED PyObject *
psyco_weakref_get_object(PyObject *ref)
{
#if PY_VERSION_HEX >= 0x030d0000
PyObject *obj = NULL;
int rv;
if ((rv = PyWeakref_GetRef(ref, &obj)) > 0) {
Py_DECREF(obj); /* make it weak */
}
else if (rv == 0) { /* dead ref */
obj = Py_None;
}
/* else it's an error */
return obj;
#else
return PyWeakref_GetObject(ref);
#endif
}

View File

@ -59,8 +59,6 @@ HIDDEN RAISES BORROWED PyObject *psyco_set_error(
HIDDEN PyObject *psyco_get_decimal_type(void); HIDDEN PyObject *psyco_get_decimal_type(void);
HIDDEN BORROWED PyObject *psyco_weakref_get_object(PyObject *);
HIDDEN PyObject *Bytes_Format(PyObject *format, PyObject *args); HIDDEN PyObject *Bytes_Format(PyObject *format, PyObject *args);

847
scripts/build/appveyor.py Executable file
View File

@ -0,0 +1,847 @@
#!/usr/bin/env python3
"""
Build steps for the windows binary packages.
The script is designed to be called by appveyor. Subcommands map the steps in
'appveyor.yml'.
"""
import re
import os
import sys
import json
import shutil
import logging
import subprocess as sp
from glob import glob
from pathlib import Path
from zipfile import ZipFile
from argparse import ArgumentParser
from tempfile import NamedTemporaryFile
from urllib.request import urlopen
opt = None
STEP_PREFIX = 'step_'
logger = logging.getLogger()
logging.basicConfig(
level=logging.INFO, format='%(asctime)s %(levelname)s %(message)s'
)
def main():
    """Entry point: run the appveyor step named on the command line."""
    global opt
    opt = parse_cmdline()
    logger.setLevel(opt.loglevel)

    # Steps are module-level functions named step_<name>.
    step_func = globals()[STEP_PREFIX + opt.step]
    step_func()
def setup_build_env():
    """
    Set the environment variables according to the build environment
    """
    setenv('VS_VER', opt.vs_ver)

    # Prepend the target Python, Perl, Git tools, and the OpenSSL build dir
    # to the inherited PATH.
    prefix_dirs = [
        str(opt.py_dir),
        str(opt.py_dir / 'Scripts'),
        r'C:\Strawberry\Perl\bin',
        r'C:\Program Files\Git\mingw64\bin',
        str(opt.ssl_build_dir / 'bin'),
    ]
    setenv('PATH', os.pathsep.join(prefix_dirs + [os.environ['PATH']]))

    logger.info("Configuring compiler")
    arch = 'x86' if opt.arch_32 else 'amd64'
    bat_call([opt.vc_dir / "vcvarsall.bat", arch])
def python_info():
    """Log the version and word size of the target Python interpreter."""
    logger.info("Python Information")
    run_python(['--version'], stderr=sp.STDOUT)
    wordsize_check = "import sys; print('64bit: %s' % (sys.maxsize > 2**32))"
    run_python(['-c', wordsize_check])
def step_install():
    """Appveyor 'install' step: report env, prepare SDK, db, and tooling."""
    # Order matters: the database must be configured before the service
    # starts, and pip tooling is needed by the later build steps.
    for task in (
        python_info,
        configure_sdk,
        configure_postgres,
        install_python_build_tools,
    ):
        task()
def install_python_build_tools():
    """
    Install or upgrade pip and build tools.
    """
    run_python(
        ['-m', 'pip', 'install', '--upgrade', 'pip', 'setuptools', 'wheel']
    )
def configure_sdk():
    """Work around rc.exe lookup problems on 64 bit builds.

    The program rc.exe on 64bit with some versions look in the wrong path
    location when building postgresql. This cheats by copying the x64 bit
    files to that location.
    """
    if not opt.arch_64:
        return

    src_pattern = r'C:\Program Files\Microsoft SDKs\Windows\v7.0\Bin\x64\rc*'
    dst = r"C:\Program Files (x86)\Microsoft SDKs\Windows\v7.0A\Bin"
    for fn in glob(src_pattern):
        copy_file(fn, dst)
def configure_postgres():
    """
    Set up PostgreSQL config before the service starts.

    Append test-friendly settings to postgresql.conf and generate the
    self-signed certificates required to accept ssl connections.
    """
    logger.info("Configuring Postgres")
    opt.pg_data_dir.mkdir(parents=True, exist_ok=True)
    with (opt.pg_data_dir / 'postgresql.conf').open('a') as f:
        # allow > 1 prepared transactions for test cases
        print("max_prepared_transactions = 10", file=f)
        print("ssl = on", file=f)

    # Create openssl certificate to allow ssl connection
    cwd = os.getcwd()
    os.chdir(opt.pg_data_dir)
    try:
        run_openssl(
            'req -new -x509 -days 365 -nodes -text '
            '-out server.crt -keyout server.key -subj /CN=initd.org'.split()
        )
        run_openssl(
            'req -new -nodes -text -out root.csr -keyout root.key '
            '-subj /CN=initd.org'.split()
        )
        run_openssl(
            'x509 -req -in root.csr -text -days 3650 -extensions v3_ca '
            '-signkey root.key -out root.crt'.split()
        )
        run_openssl(
            'req -new -nodes -text -out server.csr -keyout server.key '
            '-subj /CN=initd.org'.split()
        )
        run_openssl(
            'x509 -req -in server.csr -text -days 365 -CA root.crt '
            '-CAkey root.key -CAcreateserial -out server.crt'.split()
        )
    finally:
        # Restore the working directory even if a command fails, so later
        # steps are not left running inside the data dir.
        os.chdir(cwd)
def run_openssl(args):
    """Run the appveyor-installed openssl with some args."""
    # https://www.appveyor.com/docs/windows-images-software/
    exe = Path(r"C:\OpenSSL-v111-Win64") / 'bin' / 'openssl'
    return run_command([exe] + args)
def step_build_script():
    """Appveyor 'build_script' step: build the libs and the extension.

    When building wheel packages, also produce the binary distributions.
    """
    setup_build_env()
    build_openssl()
    build_libpq()
    build_psycopg()

    if opt.is_wheel:
        build_binary_packages()
def build_openssl():
    """Build and install OpenSSL into opt.ssl_build_dir.

    The result is cached across builds: if the library is already present
    the whole build is skipped.
    """
    top = opt.ssl_build_dir
    if (top / 'lib' / 'libssl.lib').exists():
        return

    logger.info("Building OpenSSL")

    # Setup directories for building OpenSSL libraries
    ensure_dir(top / 'include' / 'openssl')
    ensure_dir(top / 'lib')

    # Setup OpenSSL Environment Variables based on processor architecture
    if opt.arch_32:
        target = 'VC-WIN32'
        setenv('VCVARS_PLATFORM', 'x86')
    else:
        target = 'VC-WIN64A'
        setenv('VCVARS_PLATFORM', 'amd64')
        setenv('CPU', 'AMD64')

    ver = os.environ['OPENSSL_VERSION']

    # Download OpenSSL source (cached zip, re-extracted every run)
    zipname = f'OpenSSL_{ver}.zip'
    zipfile = opt.cache_dir / zipname
    if not zipfile.exists():
        download(
            f"https://github.com/openssl/openssl/archive/{zipname}", zipfile
        )

    with ZipFile(zipfile) as z:
        z.extractall(path=opt.build_dir)

    sslbuild = opt.build_dir / f"openssl-OpenSSL_{ver}"
    os.chdir(sslbuild)
    # Static, zlib-free build installed under `top`
    run_command(
        ['perl', 'Configure', target, 'no-asm']
        + ['no-shared', 'no-zlib', f'--prefix={top}', f'--openssldir={top}']
    )

    run_command("nmake build_libs install_sw".split())

    # Sanity check: the library the skip-check above looks for must exist now
    assert (top / 'lib' / 'libssl.lib').exists()

    os.chdir(opt.clone_dir)
    # The source tree is not needed anymore; only the installed libs are kept
    shutil.rmtree(sslbuild)
def build_libpq():
    """Build libpq and pg_config from PostgreSQL sources into opt.pg_build_dir.

    The result is cached across builds: if libpq.lib is already present the
    whole build is skipped.
    """
    top = opt.pg_build_dir
    if (top / 'lib' / 'libpq.lib').exists():
        return

    logger.info("Building libpq")

    # Setup directories for building PostgreSQL libraries
    ensure_dir(top / 'include')
    ensure_dir(top / 'lib')
    ensure_dir(top / 'bin')

    ver = os.environ['POSTGRES_VERSION']

    # Download PostgreSQL source
    zipname = f'postgres-REL_{ver}.zip'
    zipfile = opt.cache_dir / zipname
    if not zipfile.exists():
        download(
            f"https://github.com/postgres/postgres/archive/REL_{ver}.zip",
            zipfile,
        )

    with ZipFile(zipfile) as z:
        z.extractall(path=opt.build_dir)

    pgbuild = opt.build_dir / f"postgres-REL_{ver}"
    os.chdir(pgbuild)

    # Setup build config file (config.pl) for the msvc build scripts
    os.chdir("src/tools/msvc")
    with open("config.pl", 'w') as f:
        print(
            """\
$config->{ldap} = 0;
$config->{openssl} = "%s";

1;
"""
            % str(opt.ssl_build_dir).replace('\\', '\\\\'),
            file=f,
        )

    # Hack the Mkvcbuild.pm file so we build the lib version of libpq
    file_replace('Mkvcbuild.pm', "'libpq', 'dll'", "'libpq', 'lib'")

    # Build libpgport, libpgcommon, libpq
    run_command([which("build"), "libpgport"])
    run_command([which("build"), "libpgcommon"])
    run_command([which("build"), "libpq"])

    # Install includes
    # NOTE(review): gram.h is overwritten with an empty file, presumably
    # because the header install step expects it to exist — confirm.
    with (pgbuild / "src/backend/parser/gram.h").open("w") as f:
        print("", file=f)

    # Copy over built libraries: expose CopyIncludeFiles and invoke it via perl
    file_replace("Install.pm", "qw(Install)", "qw(Install CopyIncludeFiles)")
    run_command(
        ["perl", "-MInstall=CopyIncludeFiles", "-e"]
        + [f"chdir('../../..'); CopyIncludeFiles('{top}')"]
    )

    for lib in ('libpgport', 'libpgcommon', 'libpq'):
        copy_file(pgbuild / f'Release/{lib}/{lib}.lib', top / 'lib')

    # Prepare local include directory for building from
    for dir in ('win32', 'win32_msvc'):
        merge_dir(pgbuild / f"src/include/port/{dir}", pgbuild / "src/include")

    # Build pg_config in place with the static runtime (/MT), linking the
    # libraries just built
    os.chdir(pgbuild / 'src/bin/pg_config')
    run_command(
        ['cl', 'pg_config.c', '/MT', '/nologo', fr'/I{pgbuild}\src\include']
        + ['/link', fr'/LIBPATH:{top}\lib']
        + ['libpgcommon.lib', 'libpgport.lib', 'advapi32.lib']
        + ['/NODEFAULTLIB:libcmt.lib']
        + [fr'/OUT:{top}\bin\pg_config.exe']
    )

    # Sanity checks: the artifacts the rest of the build depends on must exist
    assert (top / 'lib' / 'libpq.lib').exists()
    assert (top / 'bin' / 'pg_config.exe').exists()

    os.chdir(opt.clone_dir)
    # Only the installed artifacts are cached; drop the source tree
    shutil.rmtree(pgbuild)
def build_psycopg():
    """Build the psycopg2 extension against the libs built above."""
    os.chdir(opt.package_dir)
    patch_package_name()
    add_pg_config_path()

    # Build the C extension, pointing it at the freshly built OpenSSL.
    build_ext_args = [
        "setup.py", "build_ext", "--have-ssl",
        "-l", "libpgcommon libpgport",
        "-L", opt.ssl_build_dir / 'lib',
        '-I', opt.ssl_build_dir / 'include',
    ]
    run_python(build_ext_args)
    run_python(["setup.py", "build_py"])
def patch_package_name():
    """Change the psycopg2 package name in the setup.py if required."""
    if opt.package_name == 'psycopg2':
        return

    logger.info("changing package name to %s", opt.package_name)

    setup_py = opt.package_dir / 'setup.py'
    data = setup_py.read_text()

    # Replace the name of the package with what desired; there must be
    # exactly one occurrence to patch.
    rex = re.compile(r"""name=["']psycopg2["']""")
    assert len(rex.findall(data)) == 1, rex.findall(data)
    setup_py.write_text(rex.sub(f'name="{opt.package_name}"', data))
def build_binary_packages():
    """Create wheel binary packages."""
    os.chdir(opt.package_dir)
    add_pg_config_path()

    # Build the .whl into the versioned dist directory
    run_python(['setup.py', 'bdist_wheel', '-d', opt.dist_dir])
def step_after_build():
    """Appveyor 'after_build' step: install what was just built."""
    if opt.is_wheel:
        install_binary_package()
    else:
        install_built_package()
def install_built_package():
    """Install the package just built by setup build."""
    os.chdir(opt.package_dir)

    # Install the psycopg just built
    add_pg_config_path()
    run_python(["setup.py", "install"])
    # Drop the metadata dir so a later build starts clean
    shutil.rmtree("psycopg2.egg-info")
def install_binary_package():
    """Install the package from a packaged wheel."""
    # --no-index: only look in the local dist dir, never on PyPI
    run_python(
        ['-m', 'pip', 'install', '--no-index', '-f', opt.dist_dir,
         opt.package_name]
    )
def add_pg_config_path():
    """Allow finding in the path the pg_config just built."""
    pg_path = str(opt.pg_build_dir / 'bin')
    current = os.environ['PATH'].split(os.pathsep)
    if pg_path not in current:
        setenv('PATH', os.pathsep.join([pg_path] + current))
def step_before_test():
    """Appveyor 'before_test' step: create and prepare the test database."""
    print_psycopg2_version()

    # Create and setup PostgreSQL database for the tests
    testdb = os.environ['PSYCOPG2_TESTDB']
    run_command([opt.pg_bin_dir / 'createdb', testdb])
    run_command(
        [opt.pg_bin_dir / 'psql', '-d', testdb,
         '-c', "CREATE EXTENSION hstore"]
    )
def print_psycopg2_version():
    """Print psycopg2 and libpq versions installed."""
    exprs = (
        'psycopg2.__version__',
        'psycopg2.__libpq_version__',
        'psycopg2.extensions.libpq_version()',
    )
    for expr in exprs:
        out = out_python(['-c', f"import psycopg2; print({expr})"])
        logger.info("built %s: %s", expr, out.decode('ascii'))
def step_test_script():
    """Appveyor 'test_script' step: verify the libpq in use, run the tests."""
    check_libpq_version()
    run_test_suite()
def check_libpq_version():
    """
    Fail if the package installed is not using the expected libpq version.
    """
    # POSTGRES_VERSION is e.g. "16_0"; libpq_version() reports e.g. 160000.
    major, minor = map(int, os.environ['POSTGRES_VERSION'].split('_'))
    want_ver = f"{major}{minor:04d}"

    probe = "import psycopg2; print(psycopg2.extensions.libpq_version())"
    got_ver = out_python(['-c', probe]).decode('ascii').rstrip()
    assert want_ver == got_ver, f"libpq version mismatch: {want_ver!r} != {got_ver!r}"
def run_test_suite():
    """Run the psycopg2 test suite with the target Python."""
    # Remove this var, which would make a badly configured OpenSSL 1.1 work
    os.environ.pop('OPENSSL_CONF', None)

    # Run the unit test
    args = [
        '-c',
        "import tests; tests.unittest.main(defaultTest='tests.test_suite')",
    ]

    if opt.is_wheel:
        # Wheel builds run many jobs: keep the suite fast.
        os.environ['PSYCOPG2_TEST_FAST'] = '1'
    else:
        args.append('--verbose')

    os.chdir(opt.package_dir)
    run_python(args)
def step_on_success():
    """Appveyor 'on_success' step: report hashes, upload artifacts if able."""
    print_sha1_hashes()
    if setup_ssh():
        upload_packages()
def print_sha1_hashes():
    """
    Print the packages sha1 so their integrity can be checked upon signing.
    """
    logger.info("artifacts SHA1 hashes:")

    os.chdir(opt.package_dir / 'dist')
    sha1sum = which('sha1sum')
    run_command([sha1sum, '-b', 'psycopg2-*/*'])
def setup_ssh():
    """
    Configure ssh to upload built packages where they can be retrieved.

    Return False if we can't configure it and upload should be skipped.
    """
    # If we are not on the psycopg AppVeyor account, the environment variable
    # REMOTE_KEY will not be decrypted. In that case skip uploading.
    if os.environ['APPVEYOR_ACCOUNT_NAME'] != 'psycopg':
        # logger.warn() is deprecated: use warning()
        logger.warning("skipping artifact upload: you are not psycopg")
        return False

    pkey = os.environ.get('REMOTE_KEY', None)
    if not pkey:
        logger.warning("skipping artifact upload: no remote key")
        return False

    # Write SSH Private Key file from environment variable
    pkey = pkey.replace(' ', '\n')
    with (opt.clone_dir / 'data/id_rsa-psycopg-upload').open('w') as f:
        f.write(
            f"""\
-----BEGIN RSA PRIVATE KEY-----
{pkey}
-----END RSA PRIVATE KEY-----
"""
        )

    # Make a directory to please MinGW's version of ssh
    ensure_dir(r"C:\MinGW\msys\1.0\home\appveyor\.ssh")

    return True
def upload_packages():
    """Upload the built artifacts via rsync over ssh."""
    logger.info("uploading artifacts")
    os.chdir(opt.clone_dir)

    ssh_cmd = r"C:\MinGW\msys\1.0\bin\ssh -F data/ssh_config"
    run_command(
        [r"C:\MinGW\msys\1.0\bin\rsync", "-avr",
         "-e", ssh_cmd,
         "psycopg2/dist/", "upload:"]
    )
def download(url, fn):
    """Download the resource at *url* into the local file *fn*."""
    logger.info("downloading %s", url)
    with open(fn, 'wb') as fo, urlopen(url) as fi:
        # copyfileobj streams in chunks, so arbitrarily large downloads
        # don't need to fit in memory.
        shutil.copyfileobj(fi, fo)

    logger.info("file downloaded: %s", fn)
def file_replace(fn, s1, s2):
    """
    Replace all the occurrences of the string s1 into s2 in the file fn.
    """
    assert os.path.exists(fn)
    with open(fn, 'r+') as f:
        replaced = f.read().replace(s1, s2)
        # Rewrite in place; truncate in case the new content is shorter.
        f.seek(0)
        f.write(replaced)
        f.truncate()
def merge_dir(src, tgt):
    """
    Merge the content of the directory src into the directory tgt

    Reproduce the semantic of "XCOPY /Y /S src/* tgt"
    """
    src = str(src)
    for dirpath, _subdirs, filenames in os.walk(src):
        logger.debug("dirpath %s", dirpath)
        if not filenames:
            continue
        assert dirpath.startswith(src)
        # Mirror the relative sub-path under the target directory.
        relative = dirpath[len(src):].lstrip(os.sep)
        dest_dir = ensure_dir(os.path.join(tgt, relative))
        for filename in filenames:
            copy_file(os.path.join(dirpath, filename), dest_dir)
def bat_call(cmdline):
    """
    Simulate 'CALL' from a batch file

    Execute CALL *cmdline* and export the changed environment to the current
    environment.

    nana-nana-nana-nana...
    """
    # Accept either a string or a sequence; quote args containing spaces.
    if not isinstance(cmdline, str):
        cmdline = map(str, cmdline)
        cmdline = ' '.join(c if ' ' not in c else '"%s"' % c for c in cmdline)

    # Batch script: CALL the command, then dump the resulting environment
    # as JSON so it can be re-imported into this process.
    data = f"""\
CALL {cmdline}
{opt.py_exe} -c "import os, sys, json; \
json.dump(dict(os.environ), sys.stdout, indent=2)"
"""

    logger.debug("preparing file to batcall:\n\n%s", data)

    # NOTE: the NamedTemporaryFile is only used to obtain a unique name;
    # the file is deleted on context exit and re-created by open() below
    # (on Windows the file couldn't be reopened while still held open).
    with NamedTemporaryFile(suffix='.bat') as tmp:
        fn = tmp.name

    with open(fn, "w") as f:
        f.write(data)

    try:
        out = out_command(fn)
        # be vewwy vewwy caweful to print the env var as it might contain
        # secwet things like your pwecious pwivate key.
        # logger.debug("output of command:\n\n%s", out.decode('utf8', 'replace'))

        # The output has some useless crap on stdout, because sure, and json
        # indented so the last { on column 1 is where we have to start parsing

        m = list(re.finditer(b'^{', out, re.MULTILINE))[-1]
        out = out[m.start() :]
        env = json.loads(out)
        # Import only the variables the CALL actually changed.
        for k, v in env.items():
            if os.environ.get(k) != v:
                setenv(k, v)
    finally:
        os.remove(fn)
def ensure_dir(dir):
    """Create directory *dir* (with parents) if missing; return it as a Path."""
    path = dir if isinstance(dir, Path) else Path(dir)
    if not path.is_dir():
        logger.info("creating directory %s", path)
        path.mkdir(parents=True)

    return path
def run_command(cmdline, **kwargs):
    """Run a command, raise on error."""
    if not isinstance(cmdline, str):
        # Convert Path and other objects into plain strings for subprocess.
        cmdline = [str(part) for part in cmdline]
    logger.info("running command: %s", cmdline)
    sp.check_call(cmdline, **kwargs)
def out_command(cmdline, **kwargs):
    """Run a command, return its output, raise on error."""
    if not isinstance(cmdline, str):
        # Convert Path and other objects into plain strings for subprocess.
        cmdline = [str(part) for part in cmdline]
    logger.info("running command: %s", cmdline)
    return sp.check_output(cmdline, **kwargs)
def run_python(args, **kwargs):
    """
    Run a script in the target Python.
    """
    return run_command([opt.py_exe, *args], **kwargs)
def out_python(args, **kwargs):
    """
    Return the output of a script run in the target Python.
    """
    return out_command([opt.py_exe, *args], **kwargs)
def copy_file(src, dst):
    """Copy the file *src* to *dst*, logging the operation."""
    logger.info("copying file %s -> %s", src, dst)
    shutil.copy(src, dst)
def setenv(k, v):
    """Set the environment variable *k* to *v*, logging the change."""
    logger.debug("setting %s=%s", k, v)
    os.environ[k] = v
def which(name):
    """
    Return the full path of a command found on the path
    """
    root, ext = os.path.splitext(name)
    # Without an explicit extension, try the standard executable suffixes.
    candidates = (ext,) if ext else ('.com', '.exe', '.bat', '.cmd')

    # The current directory is searched before the PATH entries.
    search_dirs = ['.'] + os.environ['PATH'].split(os.pathsep)
    for d in search_dirs:
        for suffix in candidates:
            path = os.path.join(d, root + suffix)
            if os.path.isfile(path):
                return path

    raise Exception(f"couldn't find program on path: {name}")
class Options:
    """
    An object exposing the script configuration from env vars and command line.

    Used as the argparse namespace, so command line options (loglevel, step)
    appear as attributes alongside these properties.
    """

    @property
    def py_ver(self):
        """The Python version to build as 2 digits string.

        For large values of 2, occasionally.
        """
        rv = os.environ['PY_VER']
        assert rv in ('37', '38', '39', '310', '311', '312'), rv
        return rv

    @property
    def py_arch(self):
        """The Python architecture to build, 32 or 64."""
        rv = os.environ['PY_ARCH']
        assert rv in ('32', '64'), rv
        return int(rv)

    @property
    def arch_32(self):
        """True if the Python architecture to build is 32 bits."""
        return self.py_arch == 32

    @property
    def arch_64(self):
        """True if the Python architecture to build is 64 bits."""
        return self.py_arch == 64

    @property
    def package_name(self):
        """The package to build: 'psycopg2' or 'psycopg2-binary'."""
        return os.environ.get('CONFIGURATION', 'psycopg2')

    @property
    def package_version(self):
        """The psycopg2 version number to build."""
        with (self.package_dir / 'setup.py').open() as f:
            data = f.read()

        m = re.search(
            r"""^PSYCOPG_VERSION\s*=\s*['"](.*)['"]""", data, re.MULTILINE
        )
        return m.group(1)

    @property
    def is_wheel(self):
        """Are we building the wheel packages or just the extension?"""
        workflow = os.environ["WORKFLOW"]
        return workflow == "packages"

    @property
    def py_dir(self):
        """
        The path to the target python binary to execute.
        """
        dirname = ''.join(
            [r"C:\Python", self.py_ver, '-x64' if self.arch_64 else '']
        )
        return Path(dirname)

    @property
    def py_exe(self):
        """
        The full path of the target python executable.
        """
        return self.py_dir / 'python.exe'

    @property
    def vc_dir(self):
        """
        The path of the Visual C compiler.
        """
        if self.vs_ver == '16.0':
            path = Path(
                r"C:\Program Files (x86)\Microsoft Visual Studio\2019"
                r"\Community\VC\Auxiliary\Build"
            )
        else:
            path = Path(
                r"C:\Program Files (x86)\Microsoft Visual Studio %s\VC"
                % self.vs_ver
            )
        return path

    @property
    def vs_ver(self):
        """The Visual Studio version to use for the target Python version."""
        # https://wiki.python.org/moin/WindowsCompilers
        # https://www.appveyor.com/docs/windows-images-software/#python
        # Py 3.6--3.8 = VS Ver. 14.0 (VS 2015)
        # Py 3.9 = VS Ver. 16.0 (VS 2019)
        vsvers = {
            '37': '14.0',
            '38': '14.0',
            '39': '16.0',
            '310': '16.0',
            '311': '16.0',
            '312': '16.0',
        }
        return vsvers[self.py_ver]

    @property
    def clone_dir(self):
        """The directory where the repository is cloned."""
        return Path(r"C:\Project")

    @property
    def appveyor_pg_dir(self):
        """The directory of the postgres service made available by Appveyor."""
        return Path(os.environ['POSTGRES_DIR'])

    @property
    def pg_data_dir(self):
        """The data dir of the appveyor postgres service."""
        return self.appveyor_pg_dir / 'data'

    @property
    def pg_bin_dir(self):
        """The bin dir of the appveyor postgres service."""
        return self.appveyor_pg_dir / 'bin'

    @property
    def pg_build_dir(self):
        """The directory where to build the postgres libraries for psycopg."""
        return self.cache_arch_dir / 'postgresql'

    @property
    def ssl_build_dir(self):
        """The directory where to build the openssl libraries for psycopg."""
        return self.cache_arch_dir / 'openssl'

    @property
    def cache_arch_dir(self):
        """The cache subdir for this architecture/compiler combination."""
        rv = self.cache_dir / str(self.py_arch) / self.vs_ver
        return ensure_dir(rv)

    @property
    def cache_dir(self):
        """The root of the appveyor build cache."""
        return Path(r"C:\Others")

    @property
    def build_dir(self):
        """The scratch directory where sources are extracted and built."""
        rv = self.cache_arch_dir / 'Builds'
        return ensure_dir(rv)

    @property
    def package_dir(self):
        """The directory containing the package's setup.py."""
        return self.clone_dir

    @property
    def dist_dir(self):
        """The directory where to build packages to distribute."""
        return (
            self.package_dir / 'dist' / (f'psycopg2-{self.package_version}')
        )
def parse_cmdline():
    """Parse the command line; return the options as an Options namespace."""
    parser = ArgumentParser(description=__doc__)

    verbosity = parser.add_mutually_exclusive_group()
    for flags, help_text, level in (
        (('-q', '--quiet'), "Talk less", logging.WARN),
        (('-v', '--verbose'), "Talk more", logging.DEBUG),
    ):
        verbosity.add_argument(
            *flags,
            help=help_text,
            dest='loglevel',
            action='store_const',
            const=level,
            default=logging.INFO,
        )

    # Every callable step_* name in the module is a valid subcommand.
    steps = [
        name[len(STEP_PREFIX):]
        for name, value in list(globals().items())
        if name.startswith(STEP_PREFIX) and callable(value)
    ]
    parser.add_argument(
        'step', choices=steps, help="the appveyor step to execute"
    )

    return parser.parse_args(namespace=Options())
if __name__ == '__main__':
    # Propagate the step's outcome as the process exit status.
    sys.exit(main())

View File

@ -1,8 +1,9 @@
#!/bin/bash #!/bin/bash
# Build a modern version of libpq and depending libs from source on Centos 5, Rocky, Alpine or macOS # Build a modern version of libpq and depending libs from source on Centos 5
set -euo pipefail set -euo pipefail
set -x
# Last release: https://www.postgresql.org/ftp/source/ # Last release: https://www.postgresql.org/ftp/source/
# IMPORTANT! Change the cache key in packages.yml when upgrading libraries # IMPORTANT! Change the cache key in packages.yml when upgrading libraries
@ -11,76 +12,30 @@ postgres_version="${LIBPQ_VERSION}"
# last release: https://www.openssl.org/source/ # last release: https://www.openssl.org/source/
openssl_version="${OPENSSL_VERSION}" openssl_version="${OPENSSL_VERSION}"
# Latest release: https://kerberos.org/dist/ # last release: https://openldap.org/software/download/
krb5_version="1.21.3" ldap_version="2.6.3"
# Latest release: https://openldap.org/software/download/ # last release: https://github.com/cyrusimap/cyrus-sasl/releases
ldap_version="2.6.9"
# Latest release: https://github.com/cyrusimap/cyrus-sasl/releases
sasl_version="2.1.28" sasl_version="2.1.28"
export LIBPQ_BUILD_PREFIX=${LIBPQ_BUILD_PREFIX:-/tmp/libpq.build} export LIBPQ_BUILD_PREFIX=${LIBPQ_BUILD_PREFIX:-/tmp/libpq.build}
case "$(uname)" in if [[ -f "${LIBPQ_BUILD_PREFIX}/lib/libpq.so" ]]; then
Darwin)
ID=macos
library_suffix=dylib
;;
Linux)
source /etc/os-release
library_suffix=so
;;
*)
echo "$0: unexpected Operating system: '$(uname)'" >&2
exit 1
;;
esac
# Install packages required for test and wheels build, regardless of whether
# we will build the libpq or not.
case "$ID" in
alpine)
apk add --no-cache krb5-libs
;;
esac
if [[ -f "${LIBPQ_BUILD_PREFIX}/lib/libpq.${library_suffix}" ]]; then
echo "libpq already available: build skipped" >&2 echo "libpq already available: build skipped" >&2
exit 0 exit 0
fi fi
# Install packages required to build the libpq. source /etc/os-release
case "$ID" in case "$ID" in
centos|rocky) centos)
yum update -y yum update -y
yum install -y flex krb5-devel pam-devel perl perl-IPC-Cmd perl-Time-Piece zlib-devel yum install -y zlib-devel krb5-devel pam-devel
;; ;;
alpine) alpine)
apk upgrade apk upgrade
apk add --no-cache flex krb5-dev linux-pam-dev openldap-dev \ apk add --no-cache zlib-dev krb5-dev linux-pam-dev openldap-dev openssl-dev
openssl-dev zlib-dev
;;
macos)
brew install automake m4 libtool
# If available, libpq seemingly insists on linking against homebrew's
# openssl no matter what so remove it. Since homebrew's curl depends on
# it, force use of system curl.
brew uninstall --force --ignore-dependencies openssl gettext curl
if [ -z "${MACOSX_ARCHITECTURE:-}" ]; then
MACOSX_ARCHITECTURE="$(uname -m)"
fi
# Set the deployment target to be <= to that of the oldest supported Python version.
# e.g. https://www.python.org/downloads/release/python-380/
if [ "$MACOSX_ARCHITECTURE" == "x86_64" ]; then
export MACOSX_DEPLOYMENT_TARGET=10.9
else
export MACOSX_DEPLOYMENT_TARGET=11.0
fi
;; ;;
*) *)
@ -89,79 +44,33 @@ case "$ID" in
;; ;;
esac esac
if [ "$ID" == "centos" ]; then
if [ "$ID" == "macos" ]; then
make_configure_standard_flags=( \
--prefix=${LIBPQ_BUILD_PREFIX} \
"CPPFLAGS=-I${LIBPQ_BUILD_PREFIX}/include/ -arch $MACOSX_ARCHITECTURE" \
"LDFLAGS=-L${LIBPQ_BUILD_PREFIX}/lib -arch $MACOSX_ARCHITECTURE" \
)
else
make_configure_standard_flags=( \
--prefix=${LIBPQ_BUILD_PREFIX} \
CPPFLAGS=-I${LIBPQ_BUILD_PREFIX}/include/ \
"LDFLAGS=-L${LIBPQ_BUILD_PREFIX}/lib -L${LIBPQ_BUILD_PREFIX}/lib64" \
)
fi
if [ "$ID" == "centos" ] || [ "$ID" == "rocky" ] || [ "$ID" == "macos" ]; then
if [[ ! -f "${LIBPQ_BUILD_PREFIX}/openssl.cnf" ]]; then
# Build openssl if needed # Build openssl if needed
openssl_tag="openssl-${openssl_version}" openssl_tag="OpenSSL_${openssl_version//./_}"
openssl_dir="openssl-${openssl_tag}" openssl_dir="openssl-${openssl_tag}"
if [ ! -d "${openssl_dir}" ]; then if [ ! -d "${openssl_dir}" ]; then curl -sL \
curl -fsSL \
https://github.com/openssl/openssl/archive/${openssl_tag}.tar.gz \ https://github.com/openssl/openssl/archive/${openssl_tag}.tar.gz \
| tar xzf - | tar xzf -
pushd "${openssl_dir}" cd "${openssl_dir}"
options=(--prefix=${LIBPQ_BUILD_PREFIX} --openssldir=${LIBPQ_BUILD_PREFIX} \ ./config --prefix=${LIBPQ_BUILD_PREFIX} --openssldir=${LIBPQ_BUILD_PREFIX} \
zlib -fPIC shared) zlib -fPIC shared
if [ -z "${MACOSX_ARCHITECTURE:-}" ]; then make depend
./config $options make
else
./configure "darwin64-$MACOSX_ARCHITECTURE-cc" $options
fi
make -s depend
make -s
else else
pushd "${openssl_dir}" cd "${openssl_dir}"
fi fi
# Install openssl # Install openssl
make install_sw make install_sw
popd cd ..
fi
fi
if [ "$ID" == "macos" ]; then
# Build kerberos if needed
krb5_dir="krb5-${krb5_version}/src"
if [ ! -d "${krb5_dir}" ]; then
curl -fsSL "https://kerberos.org/dist/krb5/${krb5_version%.*}/krb5-${krb5_version}.tar.gz" \
| tar xzf -
pushd "${krb5_dir}"
./configure "${make_configure_standard_flags[@]}"
make -s
else
pushd "${krb5_dir}"
fi
make install
popd
fi fi
if [ "$ID" == "centos" ] || [ "$ID" == "rocky" ] || [ "$ID" == "macos" ]; then if [ "$ID" == "centos" ]; then
if [[ ! -f "${LIBPQ_BUILD_PREFIX}/lib/libsasl2.${library_suffix}" ]]; then
# Build libsasl2 if needed # Build libsasl2 if needed
# The system package (cyrus-sasl-devel) causes an amazing error on i686: # The system package (cyrus-sasl-devel) causes an amazing error on i686:
@ -170,68 +79,59 @@ if [ "$ID" == "centos" ] || [ "$ID" == "rocky" ] || [ "$ID" == "macos" ]; then
sasl_tag="cyrus-sasl-${sasl_version}" sasl_tag="cyrus-sasl-${sasl_version}"
sasl_dir="cyrus-sasl-${sasl_tag}" sasl_dir="cyrus-sasl-${sasl_tag}"
if [ ! -d "${sasl_dir}" ]; then if [ ! -d "${sasl_dir}" ]; then
curl -fsSL \ curl -sL \
https://github.com/cyrusimap/cyrus-sasl/archive/${sasl_tag}.tar.gz \ https://github.com/cyrusimap/cyrus-sasl/archive/${sasl_tag}.tar.gz \
| tar xzf - | tar xzf -
pushd "${sasl_dir}" cd "${sasl_dir}"
if [ "$ID" == "rocky" ]; then
# Fix missing time.h include in multiple files for newer GCC versions
sed -i.bak '/#include "saslint.h"/a\
#include <time.h>' lib/saslutil.c
sed -i.bak '/#include "plugin_common.h"/a\
#include <time.h>' plugins/cram.c
fi
autoreconf -i autoreconf -i
./configure "${make_configure_standard_flags[@]}" --disable-macos-framework ./configure --prefix=${LIBPQ_BUILD_PREFIX} \
make -s CPPFLAGS=-I${LIBPQ_BUILD_PREFIX}/include/ LDFLAGS=-L${LIBPQ_BUILD_PREFIX}/lib
make
else else
pushd "${sasl_dir}" cd "${sasl_dir}"
fi fi
# Install libsasl2 # Install libsasl2
# requires missing nroff to build # requires missing nroff to build
touch saslauthd/saslauthd.8 touch saslauthd/saslauthd.8
make install make install
popd cd ..
fi
fi fi
if [ "$ID" == "centos" ] || [ "$ID" == "rocky" ] || [ "$ID" == "macos" ]; then if [ "$ID" == "centos" ]; then
if [[ ! -f "${LIBPQ_BUILD_PREFIX}/lib/libldap.${library_suffix}" ]]; then
# Build openldap if needed # Build openldap if needed
ldap_tag="${ldap_version}" ldap_tag="${ldap_version}"
ldap_dir="openldap-${ldap_tag}" ldap_dir="openldap-${ldap_tag}"
if [ ! -d "${ldap_dir}" ]; then if [ ! -d "${ldap_dir}" ]; then
curl -fsSL \ curl -sL \
https://www.openldap.org/software/download/OpenLDAP/openldap-release/openldap-${ldap_tag}.tgz \ https://www.openldap.org/software/download/OpenLDAP/openldap-release/openldap-${ldap_tag}.tgz \
| tar xzf - | tar xzf -
pushd "${ldap_dir}" cd "${ldap_dir}"
./configure "${make_configure_standard_flags[@]}" --enable-backends=no --enable-null ./configure --prefix=${LIBPQ_BUILD_PREFIX} --enable-backends=no --enable-null \
CPPFLAGS=-I${LIBPQ_BUILD_PREFIX}/include/ LDFLAGS=-L${LIBPQ_BUILD_PREFIX}/lib
make -s depend make depend
make -s -C libraries/liblutil/ make -C libraries/liblutil/
make -s -C libraries/liblber/ make -C libraries/liblber/
make -s -C libraries/libldap/ make -C libraries/libldap/
else else
pushd "${ldap_dir}" cd "${ldap_dir}"
fi fi
# Install openldap # Install openldap
make -C libraries/liblber/ install make -C libraries/liblber/ install
make -C libraries/libldap/ install make -C libraries/libldap/ install
make -C include/ install make -C include/ install
chmod +x ${LIBPQ_BUILD_PREFIX}/lib/{libldap,liblber}*.${library_suffix}* chmod +x ${LIBPQ_BUILD_PREFIX}/lib/{libldap,liblber}*.so*
popd cd ..
fi
fi fi
@ -239,36 +139,36 @@ fi
postgres_tag="REL_${postgres_version//./_}" postgres_tag="REL_${postgres_version//./_}"
postgres_dir="postgres-${postgres_tag}" postgres_dir="postgres-${postgres_tag}"
if [ ! -d "${postgres_dir}" ]; then if [ ! -d "${postgres_dir}" ]; then
curl -fsSL \ curl -sL \
https://github.com/postgres/postgres/archive/${postgres_tag}.tar.gz \ https://github.com/postgres/postgres/archive/${postgres_tag}.tar.gz \
| tar xzf - | tar xzf -
pushd "${postgres_dir}" cd "${postgres_dir}"
if [ "$ID" != "macos" ]; then # Match the default unix socket dir default with what defined on Ubuntu and
# Match the default unix socket dir default with what defined on Ubuntu and # Red Hat, which seems the most common location
# Red Hat, which seems the most common location sed -i 's|#define DEFAULT_PGSOCKET_DIR .*'\
sed -i 's|#define DEFAULT_PGSOCKET_DIR .*'\
'|#define DEFAULT_PGSOCKET_DIR "/var/run/postgresql"|' \ '|#define DEFAULT_PGSOCKET_DIR "/var/run/postgresql"|' \
src/include/pg_config_manual.h src/include/pg_config_manual.h
fi
export LD_LIBRARY_PATH="${LIBPQ_BUILD_PREFIX}/lib:${LIBPQ_BUILD_PREFIX}/lib64" # Often needed, but currently set by the workflow
# export LD_LIBRARY_PATH="${LIBPQ_BUILD_PREFIX}/lib"
./configure "${make_configure_standard_flags[@]}" --sysconfdir=/etc/postgresql-common \ ./configure --prefix=${LIBPQ_BUILD_PREFIX} --sysconfdir=/etc/postgresql-common \
--with-gssapi --with-openssl --with-pam --with-ldap \ --with-gssapi --with-openssl --with-pam --with-ldap \
--without-readline --without-icu --without-readline --without-icu \
make -s -C src/interfaces/libpq CPPFLAGS=-I${LIBPQ_BUILD_PREFIX}/include/ LDFLAGS=-L${LIBPQ_BUILD_PREFIX}/lib
make -s -C src/bin/pg_config make -C src/interfaces/libpq
make -s -C src/include make -C src/bin/pg_config
make -C src/include
else else
pushd "${postgres_dir}" cd "${postgres_dir}"
fi fi
# Install libpq # Install libpq
make -C src/interfaces/libpq install make -C src/interfaces/libpq install
make -C src/bin/pg_config install make -C src/bin/pg_config install
make -C src/include install make -C src/include install
popd cd ..
find ${LIBPQ_BUILD_PREFIX} -name \*.${library_suffix}.\* -type f -exec strip --strip-unneeded {} \; find ${LIBPQ_BUILD_PREFIX} -name \*.so.\* -type f -exec strip --strip-unneeded {} \;

View File

@ -0,0 +1,117 @@
#!/usr/bin/env python
"""Download packages from appveyor artifacts
"""
import os
import re
import sys
import logging
import datetime as dt
from pathlib import Path
from argparse import ArgumentParser
import requests
logger = logging.getLogger()
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")
API_URL = "https://ci.appveyor.com/api"
REPOS = "psycopg/psycopg2"
WORKFLOW_NAME = "Build packages"
class ScriptError(Exception):
    """Controlled exception raised by the script.

    Raised for anticipated failures (e.g. missing APPVEYOR_TOKEN or an
    unsuccessful build job) with a message meant for the user.
    """
def main():
    """Download the wheel/sdist artifacts of an AppVeyor build.

    Fetches either the build given on the command line or the latest run,
    checks every job succeeded, and saves each job's ``dist/`` artifacts
    under the local ``wheelhouse/`` directory.

    Raises ScriptError when the auth token is missing or a job failed.
    """
    args = parse_cmdline()

    # The AppVeyor API requires a bearer token even for downloads.
    try:
        token = os.environ["APPVEYOR_TOKEN"]
    except KeyError:
        raise ScriptError("please set a APPVEYOR_TOKEN to download artifacts")

    session = requests.Session()
    session.headers["Content-Type"] = "application/json"
    session.headers["Authorization"] = f"Bearer {token}"

    if args.build:
        logger.info("fetching build %s", args.build)
        response = session.get(f"{API_URL}/projects/{REPOS}/build/{args.build}")
    else:
        logger.info("fetching last run")
        response = session.get(f"{API_URL}/projects/{REPOS}")
    response.raise_for_status()
    data = response.json()

    # Drop the fractional seconds: fromisoformat() can't always parse them.
    finished = re.sub(r"\.\d+", "", data["build"]["finished"])
    updated_at = dt.datetime.fromisoformat(finished)
    age = dt.datetime.now(dt.timezone.utc) - updated_at
    logger.info(
        f"found build {data['build']['version']} updated {pretty_interval(age)} ago"
    )
    if age > dt.timedelta(hours=6):
        logger.warning("maybe it's a bit old?")

    for job in data["build"]["jobs"]:
        if job["status"] != "success":
            raise ScriptError(f"status for job {job['jobId']} is {job['status']}")

        logger.info(f"fetching artifacts info for {job['name']}")
        response = session.get(f"{API_URL}/buildjobs/{job['jobId']}/artifacts/")
        response.raise_for_status()

        for artifact in response.json():
            remote_name = artifact["fileName"]
            # Flatten the remote "dist/" prefix into the wheelhouse dir.
            if remote_name.startswith("dist/"):
                local_name = remote_name.split("/", 1)[1]
            else:
                local_name = remote_name
            dest = Path("wheelhouse") / local_name
            logger.info(f"downloading {dest}")
            response = session.get(
                f"{API_URL}/buildjobs/{job['jobId']}/artifacts/{remote_name}"
            )
            response.raise_for_status()
            dest.parent.mkdir(parents=True, exist_ok=True)
            dest.write_bytes(response.content)

    logger.info("now you can run: 'twine upload -s wheelhouse/*'")
def parse_cmdline():
    """Parse and return the command line options."""
    parser = ArgumentParser(description=__doc__)
    parser.add_argument("--build", help="build version to download [default: latest]")
    return parser.parse_args()
def pretty_interval(td):
    """Return a human-readable rendering of the timedelta *td*.

    Seconds are discarded; the result mentions minutes, and hours/days
    only when non-zero, e.g. "2 days, 3 hours, 5 minutes".
    """
    total_minutes, _ = divmod(td.total_seconds(), 60)
    total_hours, minutes = divmod(total_minutes, 60)
    days, hours = divmod(total_hours, 24)
    days, hours, minutes = int(days), int(hours), int(minutes)
    if days:
        return f"{days} days, {hours} hours, {minutes} minutes"
    if hours:
        return f"{hours} hours, {minutes} minutes"
    return f"{minutes} minutes"
if __name__ == "__main__":
    # Run the downloader, mapping controlled errors and Ctrl-C to exit 1.
    try:
        status = main()
    except ScriptError as error:
        logger.error("%s", error)
        status = 1
    except KeyboardInterrupt:
        logger.info("user interrupt")
        status = 1
    sys.exit(status)

View File

@ -1,101 +0,0 @@
#!/usr/bin/env python
"""
We use vcpkg in github actions to build psycopg-binary.
This is a stub to work as `pg_config --libdir` or `pg_config --includedir` to
make it work with vcpkg.
You will need install `vcpkg`, set `VCPKG_ROOT` env, and run `vcpkg install
libpq:x64-windows-release` before using this script.
"""
import os
import sys
import platform
from pathlib import Path
from argparse import ArgumentParser, Namespace, RawDescriptionHelpFormatter
class ScriptError(Exception):
    """Controlled exception raised by the script.

    Caught in main() to print an error message to stderr and exit
    with status 1 instead of a traceback.
    """
def _main() -> None:
    """Emulate the pg_config queries psycopg2's setup needs, using vcpkg paths.

    Raises ScriptError on unsupported platforms, missing env vars, or when
    the expected vcpkg-installed libpq files are not present.
    """
    # This stub only makes sense for the x64-windows vcpkg triplet.
    is_x64_windows = sys.platform == "win32" and platform.machine() == "AMD64"
    if not is_x64_windows:
        raise ScriptError("this script should only be used in x64-windows")

    root = os.environ.get(
        "VCPKG_ROOT", os.environ.get("VCPKG_INSTALLATION_ROOT", "")
    )
    if not root:
        raise ScriptError("VCPKG_ROOT/VCPKG_INSTALLATION_ROOT env var not specified")
    install_root = (Path(root) / "installed/x64-windows-release").resolve()

    args = parse_cmdline()
    if args.libdir:
        libpq_lib = install_root / "lib/libpq.lib"
        if not libpq_lib.exists():
            raise ScriptError(f"libpq library not found: {libpq_lib}")
        print(install_root / "lib")
    elif args.includedir or args.includedir_server:
        # NOTE: on linux, the includedir-server dir contains pg_config.h
        # which we need because it includes the PG_VERSION_NUM macro.
        # In the vcpkg directory this file is in the includedir directory,
        # therefore we return the same value.
        include_dir = install_root / "include/libpq"
        if not include_dir.is_dir():
            raise ScriptError(f"libpq include directory not found: {include_dir}")
        print(install_root / "include")
    elif args.cppflags or args.ldflags:
        # pg_config compatibility: these flags are accepted but empty here.
        print("")
    else:
        raise ScriptError("command not handled")
def parse_cmdline() -> Namespace:
    """Parse and return the command line options.

    Exactly one of the pg_config-style flags must be given; each is a
    store-true switch in a required mutually exclusive group.
    """
    parser = ArgumentParser(
        description=__doc__, formatter_class=RawDescriptionHelpFormatter
    )
    group = parser.add_mutually_exclusive_group(required=True)
    # (flag, help) pairs mirroring the subset of pg_config options we stub.
    options = [
        ("--libdir", "show location of object code libraries"),
        ("--includedir", "show location of C header files of the client interfaces"),
        ("--includedir-server", "show location of C header files for the server"),
        ("--cppflags", "(dummy) show CPPFLAGS value used when PostgreSQL was built"),
        ("--ldflags", "(dummy) show LDFLAGS value used when PostgreSQL was built"),
    ]
    for flag, help_text in options:
        group.add_argument(flag, action="store_true", help=help_text)
    return parser.parse_args()
def main() -> None:
    """Entry point: run _main() and turn ScriptError into exit status 1."""
    try:
        _main()
    except ScriptError as error:
        sys.stderr.write(f"ERROR: {error}.\n")
        sys.exit(1)
# Script entry point when executed directly (rather than via an installed
# console script).
if __name__ == "__main__":
    main()

View File

@ -1,11 +0,0 @@
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
[project]
name = 'pg_config_vcpkg_stub'
version = "0"
description = "see docs string in pg_config_vcpkg_stub for more details"
[project.scripts]
pg_config = 'pg_config_vcpkg_stub:main'

View File

@ -26,7 +26,7 @@ case "$ID" in
done) | sort | uniq done) | sort | uniq
;; ;;
centos|rocky) centos)
echo "TODO!" echo "TODO!"
;; ;;

View File

@ -35,7 +35,7 @@ case "$ID" in
apt-get -y install libpq-dev apt-get -y install libpq-dev
;; ;;
centos|rocky) centos)
"${dir}/build_libpq.sh" > /dev/null "${dir}/build_libpq.sh" > /dev/null
;; ;;

View File

@ -11,12 +11,6 @@ set -x
dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
prjdir="$( cd "${dir}/../.." && pwd )" prjdir="$( cd "${dir}/../.." && pwd )"
# Build dependency libraries
"${prjdir}/scripts/build/build_libpq.sh" > /dev/null
# Show dependency tree
otool -L /tmp/libpq.build/lib/*.dylib
brew install gnu-sed postgresql@${PG_VERSION} brew install gnu-sed postgresql@${PG_VERSION}
brew link --overwrite postgresql@${PG_VERSION} brew link --overwrite postgresql@${PG_VERSION}

View File

@ -1,7 +0,0 @@
@echo on
pip install delvewheel wheel
vcpkg install libpq:x64-windows-release
pipx install .\scripts\build\pg_config_vcpkg_stub\

View File

@ -33,7 +33,7 @@ def main():
file_start = read_base_file(filename) file_start = read_base_file(filename)
# If you add a version to the list fix the docs (in errorcodes.rst) # If you add a version to the list fix the docs (in errorcodes.rst)
classes, errors = fetch_errors("11 12 13 14 15 16 17 18".split()) classes, errors = fetch_errors("11 12 13 14 15 16 17".split())
disambiguate(errors) disambiguate(errors)

View File

@ -29,6 +29,7 @@ for coroutine libraries.
import os import os
import sys import sys
import re
import subprocess import subprocess
from setuptools import setup, Extension from setuptools import setup, Extension
from distutils.command.build_ext import build_ext from distutils.command.build_ext import build_ext
@ -40,7 +41,7 @@ import configparser
# Take a look at https://www.python.org/dev/peps/pep-0440/ # Take a look at https://www.python.org/dev/peps/pep-0440/
# for a consistent versioning pattern. # for a consistent versioning pattern.
PSYCOPG_VERSION = '2.9.11' PSYCOPG_VERSION = '2.9.10'
# note: if you are changing the list of supported Python version please fix # note: if you are changing the list of supported Python version please fix
@ -51,12 +52,12 @@ Intended Audience :: Developers
License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)
Programming Language :: Python Programming Language :: Python
Programming Language :: Python :: 3 Programming Language :: Python :: 3
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11 Programming Language :: Python :: 3.11
Programming Language :: Python :: 3.12 Programming Language :: Python :: 3.12
Programming Language :: Python :: 3.13 Programming Language :: Python :: 3.13
Programming Language :: Python :: 3.14
Programming Language :: Python :: 3 :: Only Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: Implementation :: CPython Programming Language :: Python :: Implementation :: CPython
Programming Language :: C Programming Language :: C
@ -381,8 +382,34 @@ For further information please check the 'doc/src/install.rst' file (also at
if token.startswith("-I"): if token.startswith("-I"):
self.include_dirs.append(token[2:]) self.include_dirs.append(token[2:])
# enable lo64 if Python 64 bits pgversion = pg_config_helper.query("version").split()[1]
if is_py_64():
verre = re.compile(
r"(\d+)(?:\.(\d+))?(?:(?:\.(\d+))|(devel|(?:alpha|beta|rc)\d+))?")
m = verre.match(pgversion)
if m:
pgmajor, pgminor, pgpatch = m.group(1, 2, 3)
# Postgres >= 10 doesn't have pgminor anymore.
pgmajor = int(pgmajor)
if pgmajor >= 10:
pgminor, pgpatch = None, pgminor
if pgminor is None or not pgminor.isdigit():
pgminor = 0
if pgpatch is None or not pgpatch.isdigit():
pgpatch = 0
pgminor = int(pgminor)
pgpatch = int(pgpatch)
else:
sys.stderr.write(
f"Error: could not determine PostgreSQL version from "
f"'{pgversion}'")
sys.exit(1)
define_macros.append(("PG_VERSION_NUM", "%d%02d%02d" %
(pgmajor, pgminor, pgpatch)))
# enable lo64 if libpq >= 9.3 and Python 64 bits
if (pgmajor, pgminor) >= (9, 3) and is_py_64():
define_macros.append(("HAVE_LO64", "1")) define_macros.append(("HAVE_LO64", "1"))
# Inject the flag in the version string already packed up # Inject the flag in the version string already packed up
@ -524,7 +551,7 @@ setup(name="psycopg2",
url="https://psycopg.org/", url="https://psycopg.org/",
license="LGPL with exceptions", license="LGPL with exceptions",
platforms=["any"], platforms=["any"],
python_requires='>=3.9', python_requires='>=3.8',
description=readme.split("\n")[0], description=readme.split("\n")[0],
long_description="\n".join(readme.split("\n")[2:]).lstrip(), long_description="\n".join(readme.split("\n")[2:]).lstrip(),
classifiers=[x for x in classifiers.split("\n") if x], classifiers=[x for x in classifiers.split("\n") if x],

View File

@ -504,7 +504,7 @@ class AsyncTests(ConnectingTestCase):
raise Exception("Unexpected result from poll: %r", state) raise Exception("Unexpected result from poll: %r", state)
polls += 1 polls += 1
self.assert_(polls >= 5, polls) self.assert_(polls >= 8, polls)
def test_poll_noop(self): def test_poll_noop(self):
self.conn.poll() self.conn.poll()

View File

@ -139,11 +139,6 @@ class CursorTests(ConnectingTestCase):
self.assertRaises(psycopg2.ProgrammingError, self.assertRaises(psycopg2.ProgrammingError,
cur.mogrify, "select %(foo, %(bar)", {'foo': 1, 'bar': 2}) cur.mogrify, "select %(foo, %(bar)", {'foo': 1, 'bar': 2})
def test_bad_params_number(self):
cur = self.conn.cursor()
self.assertRaises(IndexError, cur.execute, "select %s, %s", [1])
self.assertRaises(TypeError, cur.execute, "select %s", [1, 2])
def test_cast(self): def test_cast(self):
curs = self.conn.cursor() curs = self.conn.cursor()

View File

@ -152,7 +152,7 @@ class GreenTestCase(ConnectingTestCase):
""") """)
polls = stub.polls.count(POLL_READ) polls = stub.polls.count(POLL_READ)
self.assert_(polls > 6, polls) self.assert_(polls > 8, polls)
class CallbackErrorTestCase(ConnectingTestCase): class CallbackErrorTestCase(ConnectingTestCase):

View File

@ -71,13 +71,10 @@ class NetworkingTestCase(testutils.ConnectingTestCase):
cur.execute("select %s", [ip.ip_interface('::ffff:102:300/128')]) cur.execute("select %s", [ip.ip_interface('::ffff:102:300/128')])
# The texual representation of addresses has changed in Python 3.13 # The texual representation of addresses has changed in Python 3.13
# https://github.com/python/cpython/issues/128840 if sys.version_info >= (3, 13):
if str(ip.ip_interface("::ffff:102:300/128")) == "::ffff:1.2.3.0/128":
self.assertEquals(cur.fetchone()[0], '::ffff:1.2.3.0/128') self.assertEquals(cur.fetchone()[0], '::ffff:1.2.3.0/128')
elif str(ip.ip_interface("::ffff:102:300/128")) == "::ffff:102:300/128":
self.assertEquals(cur.fetchone()[0], '::ffff:102:300/128')
else: else:
assert False, "unexpected" self.assertEquals(cur.fetchone()[0], '::ffff:102:300/128')
@testutils.skip_if_crdb("cidr") @testutils.skip_if_crdb("cidr")
def test_cidr_cast(self): def test_cidr_cast(self):
@ -120,13 +117,10 @@ class NetworkingTestCase(testutils.ConnectingTestCase):
cur.execute("select %s", [ip.ip_network('::ffff:102:300/128')]) cur.execute("select %s", [ip.ip_network('::ffff:102:300/128')])
# The texual representation of addresses has changed in Python 3.13 # The texual representation of addresses has changed in Python 3.13
# https://github.com/python/cpython/issues/128840 if sys.version_info >= (3, 13):
if str(ip.ip_interface("::ffff:102:300/128")) == "::ffff:1.2.3.0/128":
self.assertEquals(cur.fetchone()[0], '::ffff:1.2.3.0/128') self.assertEquals(cur.fetchone()[0], '::ffff:1.2.3.0/128')
elif str(ip.ip_interface("::ffff:102:300/128")) == "::ffff:102:300/128":
self.assertEquals(cur.fetchone()[0], '::ffff:102:300/128')
else: else:
assert False, "unexpected" self.assertEquals(cur.fetchone()[0], '::ffff:102:300/128')
def test_suite(): def test_suite():

View File

@ -31,7 +31,7 @@ from subprocess import Popen
from weakref import ref from weakref import ref
import unittest import unittest
from .testutils import (skip_before_postgres, skip_if_windows, from .testutils import (skip_before_postgres,
ConnectingTestCase, skip_copy_if_green, skip_if_crdb, slow, StringIO) ConnectingTestCase, skip_copy_if_green, skip_if_crdb, slow, StringIO)
import psycopg2 import psycopg2
@ -330,7 +330,6 @@ class ExceptionsTestCase(ConnectingTestCase):
class TestExtensionModule(unittest.TestCase): class TestExtensionModule(unittest.TestCase):
@slow @slow
@skip_if_windows
def test_import_internal(self): def test_import_internal(self):
# check that the internal package can be imported "naked" # check that the internal package can be imported "naked"
# we may break this property if there is a compelling reason to do so, # we may break this property if there is a compelling reason to do so,

View File

@ -1,5 +1,5 @@
[tox] [tox]
envlist = {3.9,3.10,3.11,3.12,3.13,3.14} envlist = {3.8,3.9,3.10,3.11,3.12,3.13}
[testenv] [testenv]
commands = make check commands = make check