Compare commits


No commits in common. "master" and "2.9.9" have entirely different histories.

38 changed files with 1306 additions and 531 deletions

19
.appveyor/cache_rebuild Normal file

@ -0,0 +1,19 @@
This file is a simple placeholder for forcing the appveyor build cache
to invalidate itself since appveyor.yml changes more frequently than
the cache needs updating. Note, the versions list here can be
different than what is indicated in appveyor.yml.
To invalidate the cache, update this file and check it into git.
Currently used modules built in the cache:
- OPENSSL_VERSION: 1.1.1w
- POSTGRES_VERSION: 16.0
NOTE: to zap the cache manually you can also use:
curl -X DELETE -H "Authorization: Bearer $APPVEYOR_TOKEN" -H "Content-Type: application/json" https://ci.appveyor.com/api/projects/psycopg/psycopg2/buildcache
with the token from https://ci.appveyor.com/api-token

90
.appveyor/packages.yml Normal file

@ -0,0 +1,90 @@
version: 2.x.{build}
clone_folder: C:\Project
# We use the configuration to specify the package name
configuration:
- psycopg2
- psycopg2-binary
environment:
matrix:
# For Python versions available on Appveyor, see
# https://www.appveyor.com/docs/windows-images-software/#python
- {PY_VER: "312", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
- {PY_VER: "312", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
- {PY_VER: "311", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
- {PY_VER: "311", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
- {PY_VER: "310", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
- {PY_VER: "310", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
- {PY_VER: "39", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
- {PY_VER: "39", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
- {PY_VER: "38", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015}
- {PY_VER: "38", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015}
- {PY_VER: "37", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015}
- {PY_VER: "37", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015}
WORKFLOW: packages
OPENSSL_VERSION: "1_1_1w"
POSTGRES_VERSION: "16_0"
PSYCOPG2_TESTDB: psycopg2_test
PSYCOPG2_TESTDB_USER: postgres
PSYCOPG2_TESTDB_HOST: localhost
PGUSER: postgres
PGPASSWORD: Password12!
PGSSLMODE: require
# Add CWD to perl library path for PostgreSQL build on VS2019
PERL5LIB: .
# Select according to the service enabled
POSTGRES_DIR: C:\Program Files\PostgreSQL\9.6\
# The python used in the build process, not the one packages are built for
PYEXE: C:\Python36\python.exe
matrix:
fast_finish: false
services:
# Note: if you change this service also change POSTGRES_DIR
- postgresql96
cache:
# Rebuild cache if following file changes
# (See the file to zap the cache manually)
- C:\Others -> .appveyor\cache_rebuild
# Script called before repo cloning
# init:
# Repository gets cloned, Cache is restored
install:
- "%PYEXE% scripts\\build\\appveyor.py install"
# PostgreSQL server starts now
build: "off"
build_script:
- "%PYEXE% scripts\\build\\appveyor.py build_script"
after_build:
- "%PYEXE% scripts\\build\\appveyor.py after_build"
before_test:
- "%PYEXE% scripts\\build\\appveyor.py before_test"
test_script:
- "%PYEXE% scripts\\build\\appveyor.py test_script"
artifacts:
- path: dist\psycopg2-*\*.whl
name: wheel
# vim: set ts=4 sts=4 sw=4:

79
.appveyor/tests.yml Normal file

@ -0,0 +1,79 @@
version: 2.x.{build}
clone_folder: C:\Project
environment:
matrix:
# For Python versions available on Appveyor, see
# https://www.appveyor.com/docs/windows-images-software/#python
- {PY_VER: "311", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
- {PY_VER: "311", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
- {PY_VER: "310", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
- {PY_VER: "310", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
- {PY_VER: "39", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
- {PY_VER: "39", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019}
- {PY_VER: "38", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015}
- {PY_VER: "38", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015}
- {PY_VER: "37", PY_ARCH: "32", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015}
- {PY_VER: "37", PY_ARCH: "64", APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015}
WORKFLOW: tests
OPENSSL_VERSION: "1_1_1w"
POSTGRES_VERSION: "16_0"
PSYCOPG2_TESTDB: psycopg2_test
PSYCOPG2_TESTDB_USER: postgres
PSYCOPG2_TESTDB_HOST: localhost
PGUSER: postgres
PGPASSWORD: Password12!
PGSSLMODE: require
# Add CWD to perl library path for PostgreSQL build on VS2019
PERL5LIB: .
# Select according to the service enabled
POSTGRES_DIR: C:\Program Files\PostgreSQL\9.6\
# The python used in the build process, not the one packages are built for
PYEXE: C:\Python36\python.exe
matrix:
fast_finish: false
services:
# Note: if you change this service also change POSTGRES_DIR
- postgresql96
cache:
# Rebuild cache if following file changes
# (See the file to zap the cache manually)
- C:\Others -> .appveyor\cache_rebuild
# Script called before repo cloning
# init:
# Repository gets cloned, Cache is restored
install:
- "%PYEXE% scripts\\build\\appveyor.py install"
# PostgreSQL server starts now
build: "off"
build_script:
- "%PYEXE% scripts\\build\\appveyor.py build_script"
after_build:
- "%PYEXE% scripts\\build\\appveyor.py after_build"
before_test:
- "%PYEXE% scripts\\build\\appveyor.py before_test"
test_script:
- "%PYEXE% scripts\\build\\appveyor.py test_script"
# vim: set ts=4 sts=4 sw=4:

View File

@ -1,6 +0,0 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"

View File

@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Trigger docs build
uses: peter-evans/repository-dispatch@v3
uses: peter-evans/repository-dispatch@v1
with:
repository: psycopg/psycopg-website
event-type: psycopg2-commit

View File

@ -3,13 +3,9 @@ name: Build packages
on:
- workflow_dispatch
env:
PIP_BREAK_SYSTEM_PACKAGES: "1"
LIBPQ_VERSION: "16.0"
OPENSSL_VERSION: "1.1.1w"
jobs:
sdist: # {{{
build-sdist:
if: true
strategy:
fail-fast: false
@ -21,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repos
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Build sdist
run: ./scripts/build/build_sdist.sh
@ -29,9 +25,8 @@ jobs:
PACKAGE_NAME: ${{ matrix.package_name }}
- name: Upload artifacts
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: sdist-${{ matrix.package_name }}
path: |
dist/*.tar.gz
@ -44,7 +39,7 @@ jobs:
services:
postgresql:
image: postgres:16
image: postgres:13
env:
POSTGRES_PASSWORD: password
ports:
@ -56,34 +51,37 @@ jobs:
--health-timeout 5s
--health-retries 5
# }}}
linux: # {{{
build-linux:
if: true
env:
LIBPQ_VERSION: "16.0"
OPENSSL_VERSION: "1.1.1w"
strategy:
fail-fast: false
matrix:
platform: [manylinux, musllinux]
arch: [x86_64, i686, aarch64, ppc64le]
pyver: [cp38, cp39, cp310, cp311, cp312, cp313]
pyver: [cp37, cp38, cp39, cp310, cp311, cp312]
runs-on: ubuntu-latest
steps:
- name: Checkout repos
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Set up QEMU for multi-arch build
uses: docker/setup-qemu-action@v3
uses: docker/setup-qemu-action@v2
- name: Cache libpq build
uses: actions/cache@v4
uses: actions/cache@v3
with:
path: /tmp/libpq.build
key: libpq-${{ env.LIBPQ_VERSION }}-${{ matrix.platform }}-${{ matrix.arch }}
- name: Build wheels
uses: pypa/cibuildwheel@v2.23.2
uses: pypa/cibuildwheel@v2.16.1
env:
CIBW_MANYLINUX_X86_64_IMAGE: manylinux2014
CIBW_MANYLINUX_I686_IMAGE: manylinux2014
@ -110,14 +108,13 @@ jobs:
PSYCOPG2_TESTDB_PASSWORD=password
PSYCOPG2_TEST_FAST=1
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: linux-${{matrix.pyver}}-${{matrix.platform}}_${{matrix.arch}}
path: ./wheelhouse/*.whl
services:
postgresql:
image: postgres:16
image: postgres:14
env:
POSTGRES_PASSWORD: password
ports:
@ -129,9 +126,8 @@ jobs:
--health-timeout 5s
--health-retries 5
# }}}
macos: # {{{
build-macos:
runs-on: macos-latest
if: true
@ -139,25 +135,18 @@ jobs:
fail-fast: false
matrix:
# These archs require an Apple M1 runner: [arm64, universal2]
arch: [x86_64, arm64]
pyver: [cp39, cp310, cp311, cp312, cp313]
arch: [x86_64]
pyver: [cp37, cp38, cp39, cp310, cp311, cp312]
steps:
- name: Checkout repos
uses: actions/checkout@v4
- name: Cache libpq build
uses: actions/cache@v4
with:
path: /tmp/libpq.build
key: libpq-${{ env.LIBPQ_VERSION }}-macos-${{ matrix.arch }}
uses: actions/checkout@v3
- name: Build wheels
uses: pypa/cibuildwheel@v2.23.2
uses: pypa/cibuildwheel@v2.16.1
env:
CIBW_BUILD: ${{matrix.pyver}}-macosx_${{matrix.arch}}
CIBW_ARCHS_MACOS: ${{matrix.arch}}
MACOSX_ARCHITECTURE: ${{matrix.arch}}
CIBW_ARCHS_MACOS: x86_64
CIBW_BEFORE_ALL_MACOS: ./scripts/build/wheel_macos_before_all.sh
CIBW_TEST_COMMAND: >-
export PYTHONPATH={project} &&
@ -166,101 +155,10 @@ jobs:
PG_VERSION=16
PACKAGE_NAME=psycopg2-binary
PSYCOPG2_TESTDB=postgres
PATH="/tmp/libpq.build/bin:$PATH"
PSYCOPG2_TEST_FAST=1
PATH="/usr/local/opt/postgresql@${PG_VERSION}/bin:$PATH"
- name: Upload artifacts
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: macos-${{matrix.pyver}}-macos-${{matrix.arch}}
path: ./wheelhouse/*.whl
# }}}
windows: # {{{
runs-on: windows-latest
if: true
strategy:
fail-fast: false
matrix:
arch: [win_amd64]
pyver: [cp38, cp39, cp310, cp311, cp312, cp313]
package_name: [psycopg2, psycopg2-binary]
defaults:
run:
shell: bash
steps:
# there are some other libpq in PATH
- name: Drop spurious libpq in the path
run: rm -rf c:/tools/php C:/Strawberry/c/bin
- name: Checkout repo
uses: actions/checkout@v4
- name: Start PostgreSQL service for test
run: |
$PgSvc = Get-Service "postgresql*"
Set-Service $PgSvc.Name -StartupType manual
$PgSvc.Start()
shell: powershell
- name: Export GitHub Actions cache environment variables
uses: actions/github-script@v7
with:
script: |
const path = require('path')
core.exportVariable('ACTIONS_CACHE_URL', process.env.ACTIONS_CACHE_URL || '');
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
core.addPath(path.join(process.env.VCPKG_INSTALLATION_ROOT, 'installed/x64-windows-release/lib'));
core.addPath(path.join(process.env.VCPKG_INSTALLATION_ROOT, 'installed/x64-windows-release/bin'));
- name: Create the binary package source tree
run: >-
sed -i 's/^setup(name="psycopg2"/setup(name="${{matrix.package_name}}"/'
setup.py
if: ${{ matrix.package_name != 'psycopg2' }}
- name: Build wheels
uses: pypa/cibuildwheel@v2.23.2
env:
VCPKG_BINARY_SOURCES: "clear;x-gha,readwrite" # cache vcpkg
CIBW_BUILD: ${{matrix.pyver}}-${{matrix.arch}}
CIBW_ARCHS_WINDOWS: AMD64 x86
CIBW_BEFORE_BUILD_WINDOWS: '.\scripts\build\wheel_win32_before_build.bat'
CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: >-
delvewheel repair -w {dest_dir}
--no-mangle "libiconv-2.dll;libwinpthread-1.dll" {wheel}
CIBW_TEST_COMMAND: >-
set PYTHONPATH={project} &&
python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')"
# Note: no fast test because we don't run Windows tests
CIBW_ENVIRONMENT_WINDOWS: >-
PSYCOPG2_TESTDB=postgres
PSYCOPG2_TESTDB_USER=postgres
PSYCOPG2_TESTDB_HOST=localhost
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: windows-${{ matrix.package_name }}-${{matrix.pyver}}-${{matrix.arch}}
path: ./wheelhouse/*.whl
# }}}
merge: # {{{
runs-on: ubuntu-latest
needs:
- sdist
- linux
- macos
- windows
steps:
- name: Merge Artifacts
uses: actions/upload-artifact/merge@v4
with:
name: psycopg2-artifacts
delete-merged: true
# }}}

View File

@ -1,35 +1,32 @@
name: Tests
env:
PIP_BREAK_SYSTEM_PACKAGES: "1"
on:
push:
pull_request:
jobs:
linux:
tests:
name: Unit tests run
runs-on: ubuntu-latest
if: true
strategy:
fail-fast: false
matrix:
include:
- {python: "3.7", postgres: "10"}
- {python: "3.8", postgres: "12"}
- {python: "3.9", postgres: "13"}
- {python: "3.10", postgres: "14"}
- {python: "3.11", postgres: "15"}
- {python: "3.12", postgres: "16"}
- {python: "3.13", postgres: "17"}
# Opposite extremes of the supported Py/PG range, other architecture
- {python: "3.8", postgres: "17", architecture: "x86"}
- {python: "3.9", postgres: "16", architecture: "x86"}
- {python: "3.10", postgres: "15", architecture: "x86"}
- {python: "3.11", postgres: "14", architecture: "x86"}
- {python: "3.12", postgres: "13", architecture: "x86"}
- {python: "3.13", postgres: "12", architecture: "x86"}
- {python: "3.7", postgres: "16", architecture: "x86"}
- {python: "3.8", postgres: "15", architecture: "x86"}
- {python: "3.9", postgres: "14", architecture: "x86"}
- {python: "3.10", postgres: "13", architecture: "x86"}
- {python: "3.11", postgres: "11", architecture: "x86"}
- {python: "3.12", postgres: "10", architecture: "x86"}
env:
PSYCOPG2_TESTDB: postgres
@ -52,7 +49,7 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
# Can enable to test an unreleased libpq version.
- name: install libpq 16
@ -69,7 +66,7 @@ jobs:
- name: Install tox
run: pip install "tox < 4"
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python }}
- name: Run tests

10
NEWS

@ -1,16 +1,6 @@
Current release
---------------
What's new in psycopg 2.9.10
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Add support for Python 3.13.
- Receive notifications on commit (:ticket:`#1728`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
PostgreSQL 17.
- Drop support for Python 3.7.
What's new in psycopg 2.9.9
^^^^^^^^^^^^^^^^^^^^^^^^^^^

View File

@ -17,18 +17,6 @@ flexible objects adaptation system.
Psycopg 2 is both Unicode and Python 3 friendly.
.. Note::
The psycopg2 package is still widely used and actively maintained, but it
is not expected to receive new features.
`Psycopg 3`__ is the evolution of psycopg2 and is where `new features are
being developed`__: if you are starting a new project you should probably
start from 3!
.. __: https://pypi.org/project/psycopg/
.. __: https://www.psycopg.org/psycopg3/docs/index.html
Documentation
-------------
@ -73,8 +61,13 @@ production it is advised to use the package built from sources.
.. _install: https://www.psycopg.org/docs/install.html#install-from-source
.. _faq: https://www.psycopg.org/docs/faq.html#faq-compile
:Build status: |gh-actions|
:Linux/OSX: |gh-actions|
:Windows: |appveyor|
.. |gh-actions| image:: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml/badge.svg
:target: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml
:alt: Build status
:alt: Linux and OSX build status
.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/github/psycopg/psycopg2?branch=master&svg=true
:target: https://ci.appveyor.com/project/psycopg/psycopg2/branch/master
:alt: Windows build status

View File

@ -16,9 +16,10 @@ How to make a psycopg2 release
$ export VERSION=2.8.4
- Push psycopg2 to master or to the maint branch. Make sure tests on `GitHub
Actions`__.
Actions`__ and AppVeyor__ pass.
.. __: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml
.. __: https://ci.appveyor.com/project/psycopg/psycopg2
- Create a signed tag with the content of the relevant NEWS bit and push it.
E.g.::
@ -40,10 +41,19 @@ How to make a psycopg2 release
- On GitHub Actions run manually a `package build workflow`__.
- On Appveyor change the `build settings`__ and replace the custom
configuration file name from ``.appveyor/tests.yml`` to
``.appveyor/packages.yml`` (yeah, that sucks a bit. Remember to put it
back to testing).
.. __: https://github.com/psycopg/psycopg2/actions/workflows/packages.yml
.. __: https://ci.appveyor.com/project/psycopg/psycopg2/settings
- When the workflows have finished download the packages from the job
artifacts.
artifacts. For Appveyor you can use the ``download_packages_appveyor.py``
scripts from the ``scripts/build`` directory. They will be saved in a
``wheelhouse/psycopg2-${VERSION}`` directory. For Github just download it
from the web interface (it's a single file).
- Only for stable packages: upload the signed packages on PyPI::

View File

@ -18,8 +18,6 @@
.. versionchanged:: 2.9.4 added errors introduced in PostgreSQL 15
.. versionchanged:: 2.9.10 added errors introduced in PostgreSQL 17
This module exposes the classes psycopg raises upon receiving an error from
the database with a :sql:`SQLSTATE` value attached (available in the
`~psycopg2.Error.pgcode` attribute). The content of the module is generated
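Not part of the diff above: a minimal sketch of how these generated error classes are typically used, assuming a database reachable through the standard libpq environment variables. The query and the specific DivisionByZero class are illustrative only.
import psycopg2
from psycopg2 import errors

conn = psycopg2.connect("")  # connection parameters taken from the environment
try:
    with conn.cursor() as cur:
        cur.execute("SELECT 1/0")      # provoke an error on purpose
except errors.DivisionByZero as exc:
    print(exc.pgcode)                  # '22012', the SQLSTATE behind the class
finally:
    conn.close()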

View File

@ -131,8 +131,8 @@ The current `!psycopg2` implementation supports:
..
NOTE: keep consistent with setup.py and the /features/ page.
- Python versions from 3.8 to 3.13
- PostgreSQL server versions from 7.4 to 17
- Python versions from 3.7 to 3.12
- PostgreSQL server versions from 7.4 to 16
- PostgreSQL client library version from 9.1
.. note::

View File

@ -814,7 +814,7 @@ is rolled back.
When a cursor exits the ``with`` block it is closed, releasing any resource
eventually associated with it. The state of the transaction is not affected.
A connection can be used in more than one ``with`` statement
A connection can be used in more than a ``with`` statement
and each ``with`` block is effectively wrapped in a separate transaction::
conn = psycopg2.connect(DSN)
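The example block is truncated in the hunk above; what follows is a hedged sketch of the pattern being described, reusing one connection across two ``with`` blocks (the table and column names are hypothetical, DSN is assumed to be defined as in the surrounding docs).
conn = psycopg2.connect(DSN)

with conn:
    with conn.cursor() as curs:
        curs.execute("INSERT INTO test (num) VALUES (1)")  # committed when the block exits

with conn:
    with conn.cursor() as curs:
        curs.execute("INSERT INTO test (num) VALUES (2)")  # runs in a separate transaction

conn.close()  # leaving the blocks does not close the connection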
@ -1053,7 +1053,7 @@ using the |lo_import|_ and |lo_export|_ libpq functions.
If Psycopg was built with 64 bits large objects support (i.e. the first
two conditions above are verified), the `psycopg2.__version__` constant
will contain the ``lo64`` flag. If any of the condition is not met
will contain the ``lo64`` flag. If any of the contition is not met
several `!lobject` methods will fail if the arguments exceed 2GB.
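A short sketch, not part of the diff, of checking for the flag mentioned above; the example version string is only indicative.
import psycopg2

print(psycopg2.__version__)          # e.g. "2.9.9 (dt dec pq3 ext lo64)"
if "lo64" in psycopg2.__version__:
    print("built with 64-bit large object support")
else:
    print("large object offsets limited to 2GB")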

View File

@ -256,7 +256,6 @@ HELD_CURSOR_REQUIRES_SAME_ISOLATION_LEVEL = '25008'
NO_ACTIVE_SQL_TRANSACTION = '25P01'
IN_FAILED_SQL_TRANSACTION = '25P02'
IDLE_IN_TRANSACTION_SESSION_TIMEOUT = '25P03'
TRANSACTION_TIMEOUT = '25P04'
# Class 26 - Invalid SQL Statement Name
INVALID_SQL_STATEMENT_NAME = '26000'

View File

@ -1344,11 +1344,6 @@ conn_set_session(connectionObject *self, int autocommit,
}
}
Py_BLOCK_THREADS;
conn_notifies_process(self);
conn_notice_process(self);
Py_UNBLOCK_THREADS;
if (autocommit != SRV_STATE_UNCHANGED) {
self->autocommit = autocommit;
}
@ -1413,11 +1408,6 @@ conn_set_client_encoding(connectionObject *self, const char *pgenc)
goto endlock;
}
Py_BLOCK_THREADS;
conn_notifies_process(self);
conn_notice_process(self);
Py_UNBLOCK_THREADS;
endlock:
pthread_mutex_unlock(&self->lock);
Py_END_ALLOW_THREADS;

View File

@ -64,7 +64,7 @@ psyco_lobj_close(lobjectObject *self, PyObject *args)
/* write method - write data to the lobject */
#define psyco_lobj_write_doc \
"write(str | bytes) -- Write a string or bytes to the large object."
"write(str) -- Write a string to the large object."
static PyObject *
psyco_lobj_write(lobjectObject *self, PyObject *args)

View File

@ -412,7 +412,6 @@ pq_commit(connectionObject *conn)
}
Py_BLOCK_THREADS;
conn_notifies_process(conn);
conn_notice_process(conn);
Py_UNBLOCK_THREADS;
@ -469,7 +468,6 @@ pq_abort(connectionObject *conn)
retvalue = pq_abort_locked(conn, &_save);
Py_BLOCK_THREADS;
conn_notifies_process(conn);
conn_notice_process(conn);
Py_UNBLOCK_THREADS;
@ -540,7 +538,6 @@ pq_reset(connectionObject *conn)
Py_BLOCK_THREADS;
conn_notice_process(conn);
conn_notifies_process(conn);
Py_UNBLOCK_THREADS;
pthread_mutex_unlock(&conn->lock);

View File

@ -27,7 +27,6 @@
#ifndef PSYCOPG_H
#define PSYCOPG_H 1
#include <pg_config.h>
#if PG_VERSION_NUM < 90100
#error "Psycopg requires PostgreSQL client library (libpq) >= 9.1"
#endif

View File

@ -27,8 +27,8 @@
#ifndef PSYCOPG_PYTHON_H
#define PSYCOPG_PYTHON_H 1
#if PY_VERSION_HEX < 0x03080000
#error "psycopg requires Python 3.8"
#if PY_VERSION_HEX < 0x03070000
#error "psycopg requires Python 3.7"
#endif
#include <structmember.h>

View File

@ -144,7 +144,6 @@
{"25P01", "NoActiveSqlTransaction"},
{"25P02", "InFailedSqlTransaction"},
{"25P03", "IdleInTransactionSessionTimeout"},
{"25P04", "TransactionTimeout"},
/* Class 26 - Invalid SQL Statement Name */
{"26000", "InvalidSqlStatementName"},

View File

@ -392,10 +392,7 @@ psyco_set_error(PyObject *exc, cursorObject *curs, const char *msg)
static int
psyco_is_main_interp(void)
{
#if PY_VERSION_HEX >= 0x030d0000
/* tested with Python 3.13.0a6 */
return PyInterpreterState_Get() == PyInterpreterState_Main();
#elif PY_VERSION_HEX >= 0x03080000
#if PY_VERSION_HEX >= 0x03080000
/* tested with Python 3.8.0a2 */
return _PyInterpreterState_Get() == PyInterpreterState_Main();
#else

849
scripts/build/appveyor.py Executable file

@ -0,0 +1,849 @@
#!/usr/bin/env python3
"""
Build steps for the windows binary packages.
The script is designed to be called by appveyor. Subcommands map the steps in
'appveyor.yml'.
"""
import re
import os
import sys
import json
import shutil
import logging
import subprocess as sp
from glob import glob
from pathlib import Path
from zipfile import ZipFile
from argparse import ArgumentParser
from tempfile import NamedTemporaryFile
from urllib.request import urlopen
opt = None
STEP_PREFIX = 'step_'
logger = logging.getLogger()
logging.basicConfig(
level=logging.INFO, format='%(asctime)s %(levelname)s %(message)s'
)
def main():
global opt
opt = parse_cmdline()
logger.setLevel(opt.loglevel)
cmd = globals()[STEP_PREFIX + opt.step]
cmd()
def setup_build_env():
"""
Set the environment variables according to the build environment
"""
setenv('VS_VER', opt.vs_ver)
path = [
str(opt.py_dir),
str(opt.py_dir / 'Scripts'),
r'C:\Strawberry\Perl\bin',
r'C:\Program Files\Git\mingw64\bin',
str(opt.ssl_build_dir / 'bin'),
os.environ['PATH'],
]
setenv('PATH', os.pathsep.join(path))
logger.info("Configuring compiler")
bat_call([opt.vc_dir / "vcvarsall.bat", 'x86' if opt.arch_32 else 'amd64'])
def python_info():
logger.info("Python Information")
run_python(['--version'], stderr=sp.STDOUT)
run_python(
['-c', "import sys; print('64bit: %s' % (sys.maxsize > 2**32))"]
)
def step_install():
python_info()
configure_sdk()
configure_postgres()
if opt.is_wheel:
install_wheel_support()
def install_wheel_support():
"""
Install an up-to-date pip wheel package to build wheels.
"""
run_python("-m pip install --upgrade pip".split())
run_python("-m pip install wheel".split())
def configure_sdk():
# The program rc.exe on 64bit with some versions look in the wrong path
# location when building postgresql. This cheats by copying the x64 bit
# files to that location.
if opt.arch_64:
for fn in glob(
r'C:\Program Files\Microsoft SDKs\Windows\v7.0\Bin\x64\rc*'
):
copy_file(
fn, r"C:\Program Files (x86)\Microsoft SDKs\Windows\v7.0A\Bin"
)
def configure_postgres():
"""
Set up PostgreSQL config before the service starts.
"""
logger.info("Configuring Postgres")
with (opt.pg_data_dir / 'postgresql.conf').open('a') as f:
# allow > 1 prepared transactions for test cases
print("max_prepared_transactions = 10", file=f)
print("ssl = on", file=f)
# Create openssl certificate to allow ssl connection
cwd = os.getcwd()
os.chdir(opt.pg_data_dir)
run_openssl(
'req -new -x509 -days 365 -nodes -text '
'-out server.crt -keyout server.key -subj /CN=initd.org'.split()
)
run_openssl(
'req -new -nodes -text -out root.csr -keyout root.key '
'-subj /CN=initd.org'.split()
)
run_openssl(
'x509 -req -in root.csr -text -days 3650 -extensions v3_ca '
'-signkey root.key -out root.crt'.split()
)
run_openssl(
'req -new -nodes -text -out server.csr -keyout server.key '
'-subj /CN=initd.org'.split()
)
run_openssl(
'x509 -req -in server.csr -text -days 365 -CA root.crt '
'-CAkey root.key -CAcreateserial -out server.crt'.split()
)
os.chdir(cwd)
def run_openssl(args):
"""Run the appveyor-installed openssl with some args."""
# https://www.appveyor.com/docs/windows-images-software/
openssl = Path(r"C:\OpenSSL-v111-Win64") / 'bin' / 'openssl'
return run_command([openssl] + args)
def step_build_script():
setup_build_env()
build_openssl()
build_libpq()
build_psycopg()
if opt.is_wheel:
build_binary_packages()
def build_openssl():
top = opt.ssl_build_dir
if (top / 'lib' / 'libssl.lib').exists():
return
logger.info("Building OpenSSL")
# Setup directories for building OpenSSL libraries
ensure_dir(top / 'include' / 'openssl')
ensure_dir(top / 'lib')
# Setup OpenSSL Environment Variables based on processor architecture
if opt.arch_32:
target = 'VC-WIN32'
setenv('VCVARS_PLATFORM', 'x86')
else:
target = 'VC-WIN64A'
setenv('VCVARS_PLATFORM', 'amd64')
setenv('CPU', 'AMD64')
ver = os.environ['OPENSSL_VERSION']
# Download OpenSSL source
zipname = f'OpenSSL_{ver}.zip'
zipfile = opt.cache_dir / zipname
if not zipfile.exists():
download(
f"https://github.com/openssl/openssl/archive/{zipname}", zipfile
)
with ZipFile(zipfile) as z:
z.extractall(path=opt.build_dir)
sslbuild = opt.build_dir / f"openssl-OpenSSL_{ver}"
os.chdir(sslbuild)
run_command(
['perl', 'Configure', target, 'no-asm']
+ ['no-shared', 'no-zlib', f'--prefix={top}', f'--openssldir={top}']
)
run_command("nmake build_libs install_sw".split())
assert (top / 'lib' / 'libssl.lib').exists()
os.chdir(opt.clone_dir)
shutil.rmtree(sslbuild)
def build_libpq():
top = opt.pg_build_dir
if (top / 'lib' / 'libpq.lib').exists():
return
logger.info("Building libpq")
# Setup directories for building PostgreSQL libraries
ensure_dir(top / 'include')
ensure_dir(top / 'lib')
ensure_dir(top / 'bin')
ver = os.environ['POSTGRES_VERSION']
# Download PostgreSQL source
zipname = f'postgres-REL_{ver}.zip'
zipfile = opt.cache_dir / zipname
if not zipfile.exists():
download(
f"https://github.com/postgres/postgres/archive/REL_{ver}.zip",
zipfile,
)
with ZipFile(zipfile) as z:
z.extractall(path=opt.build_dir)
pgbuild = opt.build_dir / f"postgres-REL_{ver}"
os.chdir(pgbuild)
# Setup build config file (config.pl)
os.chdir("src/tools/msvc")
with open("config.pl", 'w') as f:
print(
"""\
$config->{ldap} = 0;
$config->{openssl} = "%s";
1;
"""
% str(opt.ssl_build_dir).replace('\\', '\\\\'),
file=f,
)
# Hack the Mkvcbuild.pm file so we build the lib version of libpq
file_replace('Mkvcbuild.pm', "'libpq', 'dll'", "'libpq', 'lib'")
# Build libpgport, libpgcommon, libpq
run_command([which("build"), "libpgport"])
run_command([which("build"), "libpgcommon"])
run_command([which("build"), "libpq"])
# Install includes
with (pgbuild / "src/backend/parser/gram.h").open("w") as f:
print("", file=f)
# Copy over built libraries
file_replace("Install.pm", "qw(Install)", "qw(Install CopyIncludeFiles)")
run_command(
["perl", "-MInstall=CopyIncludeFiles", "-e"]
+ [f"chdir('../../..'); CopyIncludeFiles('{top}')"]
)
for lib in ('libpgport', 'libpgcommon', 'libpq'):
copy_file(pgbuild / f'Release/{lib}/{lib}.lib', top / 'lib')
# Prepare local include directory for building from
for dir in ('win32', 'win32_msvc'):
merge_dir(pgbuild / f"src/include/port/{dir}", pgbuild / "src/include")
# Build pg_config in place
os.chdir(pgbuild / 'src/bin/pg_config')
run_command(
['cl', 'pg_config.c', '/MT', '/nologo', fr'/I{pgbuild}\src\include']
+ ['/link', fr'/LIBPATH:{top}\lib']
+ ['libpgcommon.lib', 'libpgport.lib', 'advapi32.lib']
+ ['/NODEFAULTLIB:libcmt.lib']
+ [fr'/OUT:{top}\bin\pg_config.exe']
)
assert (top / 'lib' / 'libpq.lib').exists()
assert (top / 'bin' / 'pg_config.exe').exists()
os.chdir(opt.clone_dir)
shutil.rmtree(pgbuild)
def build_psycopg():
os.chdir(opt.package_dir)
patch_package_name()
add_pg_config_path()
run_python(
["setup.py", "build_ext", "--have-ssl"]
+ ["-l", "libpgcommon libpgport"]
+ ["-L", opt.ssl_build_dir / 'lib']
+ ['-I', opt.ssl_build_dir / 'include']
)
run_python(["setup.py", "build_py"])
def patch_package_name():
"""Change the psycopg2 package name in the setup.py if required."""
if opt.package_name == 'psycopg2':
return
logger.info("changing package name to %s", opt.package_name)
with (opt.package_dir / 'setup.py').open() as f:
data = f.read()
# Replace the name of the package with what desired
rex = re.compile(r"""name=["']psycopg2["']""")
assert len(rex.findall(data)) == 1, rex.findall(data)
data = rex.sub(f'name="{opt.package_name}"', data)
with (opt.package_dir / 'setup.py').open('w') as f:
f.write(data)
def build_binary_packages():
"""Create wheel binary packages."""
os.chdir(opt.package_dir)
add_pg_config_path()
# Build .whl packages
run_python(['setup.py', 'bdist_wheel', "-d", opt.dist_dir])
def step_after_build():
if not opt.is_wheel:
install_built_package()
else:
install_binary_package()
def install_built_package():
"""Install the package just built by setup build."""
os.chdir(opt.package_dir)
# Install the psycopg just built
add_pg_config_path()
run_python(["setup.py", "install"])
shutil.rmtree("psycopg2.egg-info")
def install_binary_package():
"""Install the package from a packaged wheel."""
run_python(
['-m', 'pip', 'install', '--no-index', '-f', opt.dist_dir]
+ [opt.package_name]
)
def add_pg_config_path():
"""Allow finding in the path the pg_config just built."""
pg_path = str(opt.pg_build_dir / 'bin')
if pg_path not in os.environ['PATH'].split(os.pathsep):
setenv('PATH', os.pathsep.join([pg_path, os.environ['PATH']]))
def step_before_test():
print_psycopg2_version()
# Create and setup PostgreSQL database for the tests
run_command([opt.pg_bin_dir / 'createdb', os.environ['PSYCOPG2_TESTDB']])
run_command(
[opt.pg_bin_dir / 'psql', '-d', os.environ['PSYCOPG2_TESTDB']]
+ ['-c', "CREATE EXTENSION hstore"]
)
def print_psycopg2_version():
"""Print psycopg2 and libpq versions installed."""
for expr in (
'psycopg2.__version__',
'psycopg2.__libpq_version__',
'psycopg2.extensions.libpq_version()',
):
out = out_python(['-c', f"import psycopg2; print({expr})"])
logger.info("built %s: %s", expr, out.decode('ascii'))
def step_test_script():
check_libpq_version()
run_test_suite()
def check_libpq_version():
"""
Fail if the package installed is not using the expected libpq version.
"""
want_ver = tuple(map(int, os.environ['POSTGRES_VERSION'].split('_')))
want_ver = "%d%04d" % want_ver
got_ver = (
out_python(
['-c']
+ ["import psycopg2; print(psycopg2.extensions.libpq_version())"]
)
.decode('ascii')
.rstrip()
)
assert want_ver == got_ver, f"libpq version mismatch: {want_ver!r} != {got_ver!r}"
def run_test_suite():
# Remove this var, which would make a badly configured OpenSSL 1.1 work
os.environ.pop('OPENSSL_CONF', None)
# Run the unit test
args = [
'-c',
"import tests; tests.unittest.main(defaultTest='tests.test_suite')",
]
if opt.is_wheel:
os.environ['PSYCOPG2_TEST_FAST'] = '1'
else:
args.append('--verbose')
os.chdir(opt.package_dir)
run_python(args)
def step_on_success():
print_sha1_hashes()
if setup_ssh():
upload_packages()
def print_sha1_hashes():
"""
Print the packages sha1 so their integrity can be checked upon signing.
"""
logger.info("artifacts SHA1 hashes:")
os.chdir(opt.package_dir / 'dist')
run_command([which('sha1sum'), '-b', 'psycopg2-*/*'])
def setup_ssh():
"""
Configure ssh to upload built packages where they can be retrieved.
Return False if we can't configure and the upload should be skipped.
"""
# If we are not on the psycopg AppVeyor account, the environment variable
# REMOTE_KEY will not be decrypted. In that case skip uploading.
if os.environ['APPVEYOR_ACCOUNT_NAME'] != 'psycopg':
logger.warn("skipping artifact upload: you are not psycopg")
return False
pkey = os.environ.get('REMOTE_KEY', None)
if not pkey:
logger.warn("skipping artifact upload: no remote key")
return False
# Write SSH Private Key file from environment variable
pkey = pkey.replace(' ', '\n')
with (opt.clone_dir / 'data/id_rsa-psycopg-upload').open('w') as f:
f.write(
f"""\
-----BEGIN RSA PRIVATE KEY-----
{pkey}
-----END RSA PRIVATE KEY-----
"""
)
# Make a directory to please MinGW's version of ssh
ensure_dir(r"C:\MinGW\msys\1.0\home\appveyor\.ssh")
return True
def upload_packages():
# Upload built artifacts
logger.info("uploading artifacts")
os.chdir(opt.clone_dir)
run_command(
[r"C:\MinGW\msys\1.0\bin\rsync", "-avr"]
+ ["-e", r"C:\MinGW\msys\1.0\bin\ssh -F data/ssh_config"]
+ ["psycopg2/dist/", "upload:"]
)
def download(url, fn):
"""Download a file locally"""
logger.info("downloading %s", url)
with open(fn, 'wb') as fo, urlopen(url) as fi:
while 1:
data = fi.read(8192)
if not data:
break
fo.write(data)
logger.info("file downloaded: %s", fn)
def file_replace(fn, s1, s2):
"""
Replace all occurrences of the string s1 with s2 in the file fn.
"""
assert os.path.exists(fn)
with open(fn, 'r+') as f:
data = f.read()
f.seek(0)
f.write(data.replace(s1, s2))
f.truncate()
def merge_dir(src, tgt):
"""
Merge the content of the directory src into the directory tgt
Reproduce the semantic of "XCOPY /Y /S src/* tgt"
"""
src = str(src)
for dp, _dns, fns in os.walk(src):
logger.debug("dirpath %s", dp)
if not fns:
continue
assert dp.startswith(src)
subdir = dp[len(src) :].lstrip(os.sep)
tgtdir = ensure_dir(os.path.join(tgt, subdir))
for fn in fns:
copy_file(os.path.join(dp, fn), tgtdir)
def bat_call(cmdline):
"""
Simulate 'CALL' from a batch file
Execute CALL *cmdline* and export the changed environment to the current
environment.
nana-nana-nana-nana...
"""
if not isinstance(cmdline, str):
cmdline = map(str, cmdline)
cmdline = ' '.join(c if ' ' not in c else '"%s"' % c for c in cmdline)
data = f"""\
CALL {cmdline}
{opt.py_exe} -c "import os, sys, json; \
json.dump(dict(os.environ), sys.stdout, indent=2)"
"""
logger.debug("preparing file to batcall:\n\n%s", data)
with NamedTemporaryFile(suffix='.bat') as tmp:
fn = tmp.name
with open(fn, "w") as f:
f.write(data)
try:
out = out_command(fn)
# be vewwy vewwy caweful to print the env var as it might contain
# secwet things like your pwecious pwivate key.
# logger.debug("output of command:\n\n%s", out.decode('utf8', 'replace'))
# The output has some useless crap on stdout, because sure, and json
# indented so the last { on column 1 is where we have to start parsing
m = list(re.finditer(b'^{', out, re.MULTILINE))[-1]
out = out[m.start() :]
env = json.loads(out)
for k, v in env.items():
if os.environ.get(k) != v:
setenv(k, v)
finally:
os.remove(fn)
def ensure_dir(dir):
if not isinstance(dir, Path):
dir = Path(dir)
if not dir.is_dir():
logger.info("creating directory %s", dir)
dir.mkdir(parents=True)
return dir
def run_command(cmdline, **kwargs):
"""Run a command, raise on error."""
if not isinstance(cmdline, str):
cmdline = list(map(str, cmdline))
logger.info("running command: %s", cmdline)
sp.check_call(cmdline, **kwargs)
def out_command(cmdline, **kwargs):
"""Run a command, return its output, raise on error."""
if not isinstance(cmdline, str):
cmdline = list(map(str, cmdline))
logger.info("running command: %s", cmdline)
data = sp.check_output(cmdline, **kwargs)
return data
def run_python(args, **kwargs):
"""
Run a script in the target Python.
"""
return run_command([opt.py_exe] + args, **kwargs)
def out_python(args, **kwargs):
"""
Return the output of a script run in the target Python.
"""
return out_command([opt.py_exe] + args, **kwargs)
def copy_file(src, dst):
logger.info("copying file %s -> %s", src, dst)
shutil.copy(src, dst)
def setenv(k, v):
logger.debug("setting %s=%s", k, v)
os.environ[k] = v
def which(name):
"""
Return the full path of a command found on the path
"""
base, ext = os.path.splitext(name)
if not ext:
exts = ('.com', '.exe', '.bat', '.cmd')
else:
exts = (ext,)
for dir in ['.'] + os.environ['PATH'].split(os.pathsep):
for ext in exts:
fn = os.path.join(dir, base + ext)
if os.path.isfile(fn):
return fn
raise Exception(f"couldn't find program on path: {name}")
class Options:
"""
An object exposing the script configuration from env vars and command line.
"""
@property
def py_ver(self):
"""The Python version to build as 2 digits string.
For large values of 2, occasionally.
"""
rv = os.environ['PY_VER']
assert rv in ('37', '38', '39', '310', '311', "312"), rv
return rv
@property
def py_arch(self):
"""The Python architecture to build, 32 or 64."""
rv = os.environ['PY_ARCH']
assert rv in ('32', '64'), rv
return int(rv)
@property
def arch_32(self):
"""True if the Python architecture to build is 32 bits."""
return self.py_arch == 32
@property
def arch_64(self):
"""True if the Python architecture to build is 64 bits."""
return self.py_arch == 64
@property
def package_name(self):
return os.environ.get('CONFIGURATION', 'psycopg2')
@property
def package_version(self):
"""The psycopg2 version number to build."""
with (self.package_dir / 'setup.py').open() as f:
data = f.read()
m = re.search(
r"""^PSYCOPG_VERSION\s*=\s*['"](.*)['"]""", data, re.MULTILINE
)
return m.group(1)
@property
def is_wheel(self):
"""Are we building the wheel packages or just the extension?"""
workflow = os.environ["WORKFLOW"]
return workflow == "packages"
@property
def py_dir(self):
"""
The path to the target python binary to execute.
"""
dirname = ''.join(
[r"C:\Python", self.py_ver, '-x64' if self.arch_64 else '']
)
return Path(dirname)
@property
def py_exe(self):
"""
The full path of the target python executable.
"""
return self.py_dir / 'python.exe'
@property
def vc_dir(self):
"""
The path of the Visual C compiler.
"""
if self.vs_ver == '16.0':
path = Path(
r"C:\Program Files (x86)\Microsoft Visual Studio\2019"
r"\Community\VC\Auxiliary\Build"
)
else:
path = Path(
r"C:\Program Files (x86)\Microsoft Visual Studio %s\VC"
% self.vs_ver
)
return path
@property
def vs_ver(self):
# https://wiki.python.org/moin/WindowsCompilers
# https://www.appveyor.com/docs/windows-images-software/#python
# Py 3.6--3.8 = VS Ver. 14.0 (VS 2015)
# Py 3.9 = VS Ver. 16.0 (VS 2019)
vsvers = {
'37': '14.0',
'38': '14.0',
'39': '16.0',
'310': '16.0',
'311': '16.0',
'312': '16.0',
}
return vsvers[self.py_ver]
@property
def clone_dir(self):
"""The directory where the repository is cloned."""
return Path(r"C:\Project")
@property
def appveyor_pg_dir(self):
"""The directory of the postgres service made available by Appveyor."""
return Path(os.environ['POSTGRES_DIR'])
@property
def pg_data_dir(self):
"""The data dir of the appveyor postgres service."""
return self.appveyor_pg_dir / 'data'
@property
def pg_bin_dir(self):
"""The bin dir of the appveyor postgres service."""
return self.appveyor_pg_dir / 'bin'
@property
def pg_build_dir(self):
"""The directory where to build the postgres libraries for psycopg."""
return self.cache_arch_dir / 'postgresql'
@property
def ssl_build_dir(self):
"""The directory where to build the openssl libraries for psycopg."""
return self.cache_arch_dir / 'openssl'
@property
def cache_arch_dir(self):
rv = self.cache_dir / str(self.py_arch) / self.vs_ver
return ensure_dir(rv)
@property
def cache_dir(self):
return Path(r"C:\Others")
@property
def build_dir(self):
rv = self.cache_arch_dir / 'Builds'
return ensure_dir(rv)
@property
def package_dir(self):
return self.clone_dir
@property
def dist_dir(self):
"""The directory where to build packages to distribute."""
return (
self.package_dir / 'dist' / (f'psycopg2-{self.package_version}')
)
def parse_cmdline():
parser = ArgumentParser(description=__doc__)
g = parser.add_mutually_exclusive_group()
g.add_argument(
'-q',
'--quiet',
help="Talk less",
dest='loglevel',
action='store_const',
const=logging.WARN,
default=logging.INFO,
)
g.add_argument(
'-v',
'--verbose',
help="Talk more",
dest='loglevel',
action='store_const',
const=logging.DEBUG,
default=logging.INFO,
)
steps = [
n[len(STEP_PREFIX) :]
for n in globals()
if n.startswith(STEP_PREFIX) and callable(globals()[n])
]
parser.add_argument(
'step', choices=steps, help="the appveyor step to execute"
)
opt = parser.parse_args(namespace=Options())
return opt
if __name__ == '__main__':
sys.exit(main())

View File

@ -1,6 +1,6 @@
#!/bin/bash
# Build a modern version of libpq and depending libs from source on Centos 5, Alpine or macOS
# Build a modern version of libpq and depending libs from source on Centos 5
set -euo pipefail
set -x
@ -12,39 +12,21 @@ postgres_version="${LIBPQ_VERSION}"
# last release: https://www.openssl.org/source/
openssl_version="${OPENSSL_VERSION}"
# last release: https://kerberos.org/dist/
krb5_version="1.21.3"
# last release: https://openldap.org/software/download/
ldap_version="2.6.8"
ldap_version="2.6.3"
# last release: https://github.com/cyrusimap/cyrus-sasl/releases
sasl_version="2.1.28"
export LIBPQ_BUILD_PREFIX=${LIBPQ_BUILD_PREFIX:-/tmp/libpq.build}
case "$(uname)" in
Darwin)
ID=macos
library_suffix=dylib
;;
Linux)
source /etc/os-release
library_suffix=so
;;
*)
echo "$0: unexpected Operating system: '$(uname)'" >&2
exit 1
;;
esac
if [[ -f "${LIBPQ_BUILD_PREFIX}/lib/libpq.${library_suffix}" ]]; then
if [[ -f "${LIBPQ_BUILD_PREFIX}/lib/libpq.so" ]]; then
echo "libpq already available: build skipped" >&2
exit 0
fi
source /etc/os-release
case "$ID" in
centos)
yum update -y
@ -53,25 +35,7 @@ case "$ID" in
alpine)
apk upgrade
apk add --no-cache zlib-dev krb5-dev linux-pam-dev openldap-dev openssl-dev
;;
macos)
brew install automake m4 libtool
# If available, libpq seemingly insists on linking against homebrew's
# openssl no matter what so remove it. Since homebrew's curl depends on
# it, force use of system curl.
brew uninstall --force --ignore-dependencies openssl gettext curl
if [ -z "${MACOSX_ARCHITECTURE:-}" ]; then
MACOSX_ARCHITECTURE="$(uname -m)"
fi
# Set the deployment target to be <= to that of the oldest supported Python version.
# e.g. https://www.python.org/downloads/release/python-380/
if [ "$MACOSX_ARCHITECTURE" == "x86_64" ]; then
export MACOSX_DEPLOYMENT_TARGET=10.9
else
export MACOSX_DEPLOYMENT_TARGET=11.0
fi
apk add --no-cache zlib-dev krb5-dev linux-pam-dev openldap-dev
;;
*)
@ -80,77 +44,33 @@ case "$ID" in
;;
esac
if [ "$ID" == "macos" ]; then
make_configure_standard_flags=( \
--prefix=${LIBPQ_BUILD_PREFIX} \
"CPPFLAGS=-I${LIBPQ_BUILD_PREFIX}/include/ -arch $MACOSX_ARCHITECTURE" \
"LDFLAGS=-L${LIBPQ_BUILD_PREFIX}/lib -arch $MACOSX_ARCHITECTURE" \
)
else
make_configure_standard_flags=( \
--prefix=${LIBPQ_BUILD_PREFIX} \
CPPFLAGS=-I${LIBPQ_BUILD_PREFIX}/include/ \
LDFLAGS=-L${LIBPQ_BUILD_PREFIX}/lib \
)
fi
if [ "$ID" == "centos" ] || [ "$ID" == "macos" ]; then
if [ "$ID" == "centos" ]; then
# Build openssl if needed
openssl_tag="OpenSSL_${openssl_version//./_}"
openssl_dir="openssl-${openssl_tag}"
if [ ! -d "${openssl_dir}" ]; then
curl -sL \
if [ ! -d "${openssl_dir}" ]; then curl -sL \
https://github.com/openssl/openssl/archive/${openssl_tag}.tar.gz \
| tar xzf -
pushd "${openssl_dir}"
options=(--prefix=${LIBPQ_BUILD_PREFIX} --openssldir=${LIBPQ_BUILD_PREFIX} \
zlib -fPIC shared)
if [ -z "${MACOSX_ARCHITECTURE:-}" ]; then
./config $options
else
./configure "darwin64-$MACOSX_ARCHITECTURE-cc" $options
fi
cd "${openssl_dir}"
./config --prefix=${LIBPQ_BUILD_PREFIX} --openssldir=${LIBPQ_BUILD_PREFIX} \
zlib -fPIC shared
make depend
make
else
pushd "${openssl_dir}"
cd "${openssl_dir}"
fi
# Install openssl
make install_sw
popd
cd ..
fi
if [ "$ID" == "macos" ]; then
# Build kerberos if needed
krb5_dir="krb5-${krb5_version}/src"
if [ ! -d "${krb5_dir}" ]; then
curl -sL "https://kerberos.org/dist/krb5/${krb5_version%.*}/krb5-${krb5_version}.tar.gz" \
| tar xzf -
pushd "${krb5_dir}"
./configure "${make_configure_standard_flags[@]}"
make
else
pushd "${krb5_dir}"
fi
make install
popd
fi
if [ "$ID" == "centos" ] || [ "$ID" == "macos" ]; then
if [ "$ID" == "centos" ]; then
# Build libsasl2 if needed
# The system package (cyrus-sasl-devel) causes an amazing error on i686:
@ -163,25 +83,26 @@ if [ "$ID" == "centos" ] || [ "$ID" == "macos" ]; then
https://github.com/cyrusimap/cyrus-sasl/archive/${sasl_tag}.tar.gz \
| tar xzf -
pushd "${sasl_dir}"
cd "${sasl_dir}"
autoreconf -i
./configure "${make_configure_standard_flags[@]}" --disable-macos-framework
./configure --prefix=${LIBPQ_BUILD_PREFIX} \
CPPFLAGS=-I${LIBPQ_BUILD_PREFIX}/include/ LDFLAGS=-L${LIBPQ_BUILD_PREFIX}/lib
make
else
pushd "${sasl_dir}"
cd "${sasl_dir}"
fi
# Install libsasl2
# requires missing nroff to build
touch saslauthd/saslauthd.8
make install
popd
cd ..
fi
if [ "$ID" == "centos" ] || [ "$ID" == "macos" ]; then
if [ "$ID" == "centos" ]; then
# Build openldap if needed
ldap_tag="${ldap_version}"
@ -191,24 +112,25 @@ if [ "$ID" == "centos" ] || [ "$ID" == "macos" ]; then
https://www.openldap.org/software/download/OpenLDAP/openldap-release/openldap-${ldap_tag}.tgz \
| tar xzf -
pushd "${ldap_dir}"
cd "${ldap_dir}"
./configure "${make_configure_standard_flags[@]}" --enable-backends=no --enable-null
./configure --prefix=${LIBPQ_BUILD_PREFIX} --enable-backends=no --enable-null \
CPPFLAGS=-I${LIBPQ_BUILD_PREFIX}/include/ LDFLAGS=-L${LIBPQ_BUILD_PREFIX}/lib
make depend
make -C libraries/liblutil/
make -C libraries/liblber/
make -C libraries/libldap/
else
pushd "${ldap_dir}"
cd "${ldap_dir}"
fi
# Install openldap
make -C libraries/liblber/ install
make -C libraries/libldap/ install
make -C include/ install
chmod +x ${LIBPQ_BUILD_PREFIX}/lib/{libldap,liblber}*.${library_suffix}*
popd
chmod +x ${LIBPQ_BUILD_PREFIX}/lib/{libldap,liblber}*.so*
cd ..
fi
@ -221,33 +143,32 @@ if [ ! -d "${postgres_dir}" ]; then
https://github.com/postgres/postgres/archive/${postgres_tag}.tar.gz \
| tar xzf -
pushd "${postgres_dir}"
cd "${postgres_dir}"
if [ "$ID" != "macos" ]; then
# Match the default unix socket dir default with what defined on Ubuntu and
# Red Hat, which seems the most common location
sed -i 's|#define DEFAULT_PGSOCKET_DIR .*'\
# Match the default unix socket dir default with what defined on Ubuntu and
# Red Hat, which seems the most common location
sed -i 's|#define DEFAULT_PGSOCKET_DIR .*'\
'|#define DEFAULT_PGSOCKET_DIR "/var/run/postgresql"|' \
src/include/pg_config_manual.h
fi
src/include/pg_config_manual.h
# Often needed, but currently set by the workflow
# export LD_LIBRARY_PATH="${LIBPQ_BUILD_PREFIX}/lib"
./configure "${make_configure_standard_flags[@]}" --sysconfdir=/etc/postgresql-common \
./configure --prefix=${LIBPQ_BUILD_PREFIX} --sysconfdir=/etc/postgresql-common \
--with-gssapi --with-openssl --with-pam --with-ldap \
--without-readline --without-icu
--without-readline --without-icu \
CPPFLAGS=-I${LIBPQ_BUILD_PREFIX}/include/ LDFLAGS=-L${LIBPQ_BUILD_PREFIX}/lib
make -C src/interfaces/libpq
make -C src/bin/pg_config
make -C src/include
else
pushd "${postgres_dir}"
cd "${postgres_dir}"
fi
# Install libpq
make -C src/interfaces/libpq install
make -C src/bin/pg_config install
make -C src/include install
popd
cd ..
find ${LIBPQ_BUILD_PREFIX} -name \*.${library_suffix}.\* -type f -exec strip --strip-unneeded {} \;
find ${LIBPQ_BUILD_PREFIX} -name \*.so.\* -type f -exec strip --strip-unneeded {} \;

View File

@ -10,7 +10,7 @@
set -euo pipefail
# set -x
python_versions="3.8.18 3.9.18 3.10.13 3.11.6 3.12.0"
python_versions="3.8.10 3.9.13 3.10.5 3.11.0 3.12.0"
pg_version=16
function log {
@ -89,7 +89,7 @@ log "building wheels"
# Build the binary packages
export CIBW_PLATFORM=macos
export CIBW_ARCHS=arm64
export CIBW_BUILD='cp{38,39,310,311,312}-*'
export CIBW_BUILD='cp{38,39,310,311}-*'
export CIBW_TEST_COMMAND='python -c "import tests; tests.unittest.main(defaultTest=\"tests.test_suite\")"'
export PSYCOPG2_TESTDB=postgres

View File

@ -0,0 +1,117 @@
#!/usr/bin/env python
"""Download packages from appveyor artifacts
"""
import os
import re
import sys
import logging
import datetime as dt
from pathlib import Path
from argparse import ArgumentParser
import requests
logger = logging.getLogger()
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")
API_URL = "https://ci.appveyor.com/api"
REPOS = "psycopg/psycopg2"
WORKFLOW_NAME = "Build packages"
class ScriptError(Exception):
"""Controlled exception raised by the script."""
def main():
opt = parse_cmdline()
try:
token = os.environ["APPVEYOR_TOKEN"]
except KeyError:
raise ScriptError("please set a APPVEYOR_TOKEN to download artifacts")
s = requests.Session()
s.headers["Content-Type"] = "application/json"
s.headers["Authorization"] = f"Bearer {token}"
if opt.build:
logger.info("fetching build %s", opt.build)
resp = s.get(f"{API_URL}/projects/{REPOS}/build/{opt.build}")
else:
logger.info("fetching last run")
resp = s.get(f"{API_URL}/projects/{REPOS}")
resp.raise_for_status()
data = resp.json()
updated_at = dt.datetime.fromisoformat(
re.sub(r"\.\d+", "", data["build"]["finished"])
)
now = dt.datetime.now(dt.timezone.utc)
age = now - updated_at
logger.info(
f"found build {data['build']['version']} updated {pretty_interval(age)} ago"
)
if age > dt.timedelta(hours=6):
logger.warning("maybe it's a bit old?")
jobs = data["build"]["jobs"]
for job in jobs:
if job["status"] != "success":
raise ScriptError(f"status for job {job['jobId']} is {job['status']}")
logger.info(f"fetching artifacts info for {job['name']}")
resp = s.get(f"{API_URL}/buildjobs/{job['jobId']}/artifacts/")
resp.raise_for_status()
afs = resp.json()
for af in afs:
fn = af["fileName"]
if fn.startswith("dist/"):
fn = fn.split("/", 1)[1]
dest = Path("wheelhouse") / fn
logger.info(f"downloading {dest}")
resp = s.get(
f"{API_URL}/buildjobs/{job['jobId']}/artifacts/{af['fileName']}"
)
resp.raise_for_status()
if not dest.parent.exists():
dest.parent.mkdir(parents=True)
with dest.open("wb") as f:
f.write(resp.content)
logger.info("now you can run: 'twine upload -s wheelhouse/*'")
def parse_cmdline():
parser = ArgumentParser(description=__doc__)
parser.add_argument("--build", help="build version to download [default: latest]")
opt = parser.parse_args()
return opt
def pretty_interval(td):
secs = td.total_seconds()
mins, secs = divmod(secs, 60)
hours, mins = divmod(mins, 60)
days, hours = divmod(hours, 24)
if days:
return f"{int(days)} days, {int(hours)} hours, {int(mins)} minutes"
elif hours:
return f"{int(hours)} hours, {int(mins)} minutes"
else:
return f"{int(mins)} minutes"
if __name__ == "__main__":
try:
sys.exit(main())
except ScriptError as e:
logger.error("%s", e)
sys.exit(1)
except KeyboardInterrupt:
logger.info("user interrupt")
sys.exit(1)

View File

@ -1,101 +0,0 @@
#!/usr/bin/env python
"""
We use vcpkg in github actions to build psycopg-binary.
This is a stub to work as `pg_config --libdir` or `pg_config --includedir` to
make it work with vcpkg.
You will need to install `vcpkg`, set the `VCPKG_ROOT` env var, and run `vcpkg install
libpq:x64-windows-release` before using this script.
"""
import os
import sys
import platform
from pathlib import Path
from argparse import ArgumentParser, Namespace, RawDescriptionHelpFormatter
class ScriptError(Exception):
"""Controlled exception raised by the script."""
def _main() -> None:
# only x64-windows
if not (sys.platform == "win32" and platform.machine() == "AMD64"):
raise ScriptError("this script should only be used in x64-windows")
vcpkg_root = os.environ.get(
"VCPKG_ROOT", os.environ.get("VCPKG_INSTALLATION_ROOT", "")
)
if not vcpkg_root:
raise ScriptError("VCPKG_ROOT/VCPKG_INSTALLATION_ROOT env var not specified")
vcpkg_platform_root = (Path(vcpkg_root) / "installed/x64-windows-release").resolve()
args = parse_cmdline()
if args.libdir:
if not (f := vcpkg_platform_root / "lib/libpq.lib").exists():
raise ScriptError(f"libpq library not found: {f}")
print(vcpkg_platform_root.joinpath("lib"))
elif args.includedir or args.includedir_server:
# NOTE: on linux, the includedir-server dir contains pg_config.h
# which we need because it includes the PG_VERSION_NUM macro.
# In the vcpkg directory this file is in the includedir directory,
# therefore we return the same value.
if not (d := vcpkg_platform_root / "include/libpq").is_dir():
raise ScriptError(f"libpq include directory not found: {d}")
print(vcpkg_platform_root.joinpath("include"))
elif args.cppflags or args.ldflags:
print("")
else:
raise ScriptError("command not handled")
def parse_cmdline() -> Namespace:
parser = ArgumentParser(
description=__doc__, formatter_class=RawDescriptionHelpFormatter
)
g = parser.add_mutually_exclusive_group(required=True)
g.add_argument(
"--libdir",
action="store_true",
help="show location of object code libraries",
)
g.add_argument(
"--includedir",
action="store_true",
help="show location of C header files of the client interfaces",
)
g.add_argument(
"--includedir-server",
action="store_true",
help="show location of C header files for the server",
)
g.add_argument(
"--cppflags",
action="store_true",
help="(dummy) show CPPFLAGS value used when PostgreSQL was built",
)
g.add_argument(
"--ldflags",
action="store_true",
help="(dummy) show LDFLAGS value used when PostgreSQL was built",
)
opt = parser.parse_args()
return opt
def main() -> None:
try:
_main()
except ScriptError as e:
print(f"ERROR: {e}.", file=sys.stderr)
sys.exit(1)
if __name__ == "__main__":
main()

View File

@ -1,11 +0,0 @@
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
[project]
name = 'pg_config_vcpkg_stub'
version = "0"
description = "see docs string in pg_config_vcpkg_stub for more details"
[project.scripts]
pg_config = 'pg_config_vcpkg_stub:main'

View File

@ -11,17 +11,10 @@ set -x
dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
prjdir="$( cd "${dir}/../.." && pwd )"
# Build dependency libraries
"${prjdir}/scripts/build/build_libpq.sh"
# Show dependency tree
otool -L /tmp/libpq.build/lib/*.dylib
brew install gnu-sed postgresql@${PG_VERSION}
brew link --overwrite postgresql@${PG_VERSION}
# Start the database for testing
brew services start postgresql@${PG_VERSION}
brew services start postgresql
# Wait for postgres to come up
for i in $(seq 10 -1 0); do

View File

@ -1,7 +0,0 @@
@echo on
pip install delvewheel wheel
vcpkg install libpq:x64-windows-release
pipx install .\scripts\build\pg_config_vcpkg_stub\

View File

@ -19,7 +19,6 @@ The script can be run at a new PostgreSQL release to refresh the module.
import re
import sys
import time
from urllib.request import urlopen
from collections import defaultdict
@ -33,7 +32,8 @@ def main():
file_start = read_base_file(filename)
# If you add a version to the list fix the docs (in errorcodes.rst)
classes, errors = fetch_errors("11 12 13 14 15 16 17".split())
classes, errors = fetch_errors(
'9.1 9.2 9.3 9.4 9.5 9.6 10 11 12 13 14 15'.split())
disambiguate(errors)
@ -90,8 +90,8 @@ def parse_errors_txt(url):
errors_txt_url = \
"https://raw.githubusercontent.com/postgres/postgres/refs/heads/%s" \
"/src/backend/utils/errcodes.txt"
"http://git.postgresql.org/gitweb/?p=postgresql.git;a=blob_plain;" \
"f=src/backend/utils/errcodes.txt;hb=%s"
def fetch_errors(versions):

View File

@ -29,7 +29,8 @@ def main():
os.path.dirname(__file__), "../psycopg/sqlstate_errors.h")
# If you add a version to the list fix the docs (in errors.rst)
classes, errors = fetch_errors("11 12 13 14 15 16 17".split())
classes, errors = fetch_errors(
'9.1 9.2 9.3 9.4 9.5 9.6 10 11 12 13 14 15'.split())
f = open(filename, "w")
print("/*\n * Autogenerated by 'scripts/make_errors.py'.\n */\n", file=f)
@ -73,8 +74,8 @@ def parse_errors_txt(url):
errors_txt_url = \
"https://raw.githubusercontent.com/postgres/postgres/refs/heads/%s" \
"/src/backend/utils/errcodes.txt"
"http://git.postgresql.org/gitweb/?p=postgresql.git;a=blob_plain;" \
"f=src/backend/utils/errcodes.txt;hb=%s"
def fetch_errors(versions):

View File

@ -29,6 +29,7 @@ for coroutine libraries.
import os
import sys
import re
import subprocess
from setuptools import setup, Extension
from distutils.command.build_ext import build_ext
@ -40,7 +41,7 @@ import configparser
# Take a look at https://www.python.org/dev/peps/pep-0440/
# for a consistent versioning pattern.
PSYCOPG_VERSION = '2.9.10'
PSYCOPG_VERSION = '2.9.9'
# note: if you are changing the list of supported Python version please fix
@ -51,12 +52,12 @@ Intended Audience :: Developers
License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)
Programming Language :: Python
Programming Language :: Python :: 3
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Programming Language :: Python :: 3.12
Programming Language :: Python :: 3.13
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: Implementation :: CPython
Programming Language :: C
@ -381,8 +382,34 @@ For further information please check the 'doc/src/install.rst' file (also at
if token.startswith("-I"):
self.include_dirs.append(token[2:])
# enable lo64 if Python 64 bits
if is_py_64():
pgversion = pg_config_helper.query("version").split()[1]
verre = re.compile(
r"(\d+)(?:\.(\d+))?(?:(?:\.(\d+))|(devel|(?:alpha|beta|rc)\d+))?")
m = verre.match(pgversion)
if m:
pgmajor, pgminor, pgpatch = m.group(1, 2, 3)
# Postgres >= 10 doesn't have pgminor anymore.
pgmajor = int(pgmajor)
if pgmajor >= 10:
pgminor, pgpatch = None, pgminor
if pgminor is None or not pgminor.isdigit():
pgminor = 0
if pgpatch is None or not pgpatch.isdigit():
pgpatch = 0
pgminor = int(pgminor)
pgpatch = int(pgpatch)
else:
sys.stderr.write(
f"Error: could not determine PostgreSQL version from "
f"'{pgversion}'")
sys.exit(1)
define_macros.append(("PG_VERSION_NUM", "%d%02d%02d" %
(pgmajor, pgminor, pgpatch)))
# enable lo64 if libpq >= 9.3 and Python 64 bits
if (pgmajor, pgminor) >= (9, 3) and is_py_64():
define_macros.append(("HAVE_LO64", "1"))
# Inject the flag in the version string already packed up
@ -524,7 +551,7 @@ setup(name="psycopg2",
url="https://psycopg.org/",
license="LGPL with exceptions",
platforms=["any"],
python_requires='>=3.8',
python_requires='>=3.7',
description=readme.split("\n")[0],
long_description="\n".join(readme.split("\n")[2:]).lstrip(),
classifiers=[x for x in classifiers.split("\n") if x],
@ -535,7 +562,6 @@ setup(name="psycopg2",
ext_modules=ext,
project_urls={
'Homepage': 'https://psycopg.org/',
'Changes': 'https://www.psycopg.org/docs/news.html',
'Documentation': 'https://www.psycopg.org/docs/',
'Code': 'https://github.com/psycopg/psycopg2',
'Issue Tracker': 'https://github.com/psycopg/psycopg2/issues',

View File

@ -504,7 +504,7 @@ class AsyncTests(ConnectingTestCase):
raise Exception("Unexpected result from poll: %r", state)
polls += 1
self.assert_(polls >= 5, polls)
self.assert_(polls >= 8, polls)
def test_poll_noop(self):
self.conn.poll()

View File

@ -152,7 +152,7 @@ class GreenTestCase(ConnectingTestCase):
""")
polls = stub.polls.count(POLL_READ)
self.assert_(polls > 6, polls)
self.assert_(polls > 8, polls)
class CallbackErrorTestCase(ConnectingTestCase):

View File

@ -18,7 +18,6 @@
from . import testutils
import unittest
import sys
import psycopg2
import psycopg2.extras
@ -69,12 +68,7 @@ class NetworkingTestCase(testutils.ConnectingTestCase):
self.assertEquals(cur.fetchone()[0], '127.0.0.1/24')
cur.execute("select %s", [ip.ip_interface('::ffff:102:300/128')])
# The textual representation of addresses has changed in Python 3.13
if sys.version_info >= (3, 13):
self.assertEquals(cur.fetchone()[0], '::ffff:1.2.3.0/128')
else:
self.assertEquals(cur.fetchone()[0], '::ffff:102:300/128')
self.assertEquals(cur.fetchone()[0], '::ffff:102:300/128')
@testutils.skip_if_crdb("cidr")
def test_cidr_cast(self):
@ -115,12 +109,7 @@ class NetworkingTestCase(testutils.ConnectingTestCase):
self.assertEquals(cur.fetchone()[0], '127.0.0.0/24')
cur.execute("select %s", [ip.ip_network('::ffff:102:300/128')])
# The textual representation of addresses has changed in Python 3.13
if sys.version_info >= (3, 13):
self.assertEquals(cur.fetchone()[0], '::ffff:1.2.3.0/128')
else:
self.assertEquals(cur.fetchone()[0], '::ffff:102:300/128')
self.assertEquals(cur.fetchone()[0], '::ffff:102:300/128')
def test_suite():

View File

@ -31,7 +31,7 @@ from subprocess import Popen
from weakref import ref
import unittest
from .testutils import (skip_before_postgres, skip_if_windows,
from .testutils import (skip_before_postgres,
ConnectingTestCase, skip_copy_if_green, skip_if_crdb, slow, StringIO)
import psycopg2
@ -330,7 +330,6 @@ class ExceptionsTestCase(ConnectingTestCase):
class TestExtensionModule(unittest.TestCase):
@slow
@skip_if_windows
def test_import_internal(self):
# check that the internal package can be imported "naked"
# we may break this property if there is a compelling reason to do so,

View File

@ -23,15 +23,13 @@
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
import os
import unittest
from collections import deque
from functools import partial
import psycopg2
from psycopg2 import extensions
from psycopg2.extensions import Notify
from .testutils import ConnectingTestCase, skip_if_crdb, skip_if_windows, slow
from .testutils import ConnectingTestCase, skip_if_crdb, slow
from .testconfig import dsn
import sys
@ -76,9 +74,7 @@ conn.close()
module=psycopg2.__name__,
dsn=dsn, sec=sec, name=name, payload=payload))
env = os.environ.copy()
env.pop("PSYCOPG_DEBUG", None)
return Popen([sys.executable, '-c', script], stdout=PIPE, env=env)
return Popen([sys.executable, '-c', script], stdout=PIPE)
@slow
def test_notifies_received_on_poll(self):
@ -130,52 +126,6 @@ conn.close()
self.assertEqual(pid, self.conn.notifies[0][0])
self.assertEqual('foo', self.conn.notifies[0][1])
def _test_notifies_received_on_operation(self, operation, execute_query=True):
self.listen('foo')
self.conn.commit()
if execute_query:
self.conn.cursor().execute('select 1;')
pid = int(self.notify('foo').communicate()[0])
self.assertEqual(0, len(self.conn.notifies))
operation()
self.assertEqual(1, len(self.conn.notifies))
self.assertEqual(pid, self.conn.notifies[0][0])
self.assertEqual('foo', self.conn.notifies[0][1])
@slow
@skip_if_windows
def test_notifies_received_on_commit(self):
self._test_notifies_received_on_operation(self.conn.commit)
@slow
@skip_if_windows
def test_notifies_received_on_rollback(self):
self._test_notifies_received_on_operation(self.conn.rollback)
@slow
@skip_if_windows
def test_notifies_received_on_reset(self):
self._test_notifies_received_on_operation(self.conn.reset, execute_query=False)
@slow
@skip_if_windows
def test_notifies_received_on_set_session(self):
self._test_notifies_received_on_operation(
partial(self.conn.set_session, autocommit=True, readonly=True),
execute_query=False,
)
@slow
@skip_if_windows
def test_notifies_received_on_set_client_encoding(self):
self._test_notifies_received_on_operation(
partial(
self.conn.set_client_encoding,
'LATIN1' if self.conn.encoding != 'LATIN1' else 'UTF8'
),
execute_query=False,
)
@slow
def test_notify_object(self):
self.autocommit(self.conn)

View File

@ -1,5 +1,5 @@
[tox]
envlist = {3.8,3.9,3.10,3.11,3.12,3.13}
envlist = {3.7,3.8,3.9,3.10,3.11,3.12}
[testenv]
commands = make check