Mirror of https://github.com/psycopg/psycopg2.git (synced 2025-07-31 18:40:09 +03:00)

Compare commits
No commits in common. "master" and "2_8_6" have entirely different histories.
81  .appveyor.yml  Normal file

@@ -0,0 +1,81 @@
version : 2.x.{build}

clone_folder: C:\Project

environment:
global:
# MSVC Express 2008's setenv.cmd failes if /E:ON and /V:ON are not
# enabled in the batch script interpreter
CMD_IN_ENV: cmd /E:ON /V:ON /C .\appveyor\run_with_env.cmd

matrix:
# For Python versions available on Appveyor, see
# https://www.appveyor.com/docs/windows-images-software/#python
- {PY_VER: "27", PY_ARCH: "32"}
- {PY_VER: "27", PY_ARCH: "64"}
- {PY_VER: "38", PY_ARCH: "32"}
- {PY_VER: "38", PY_ARCH: "64"}
- {PY_VER: "37", PY_ARCH: "32"}
- {PY_VER: "37", PY_ARCH: "64"}
- {PY_VER: "36", PY_ARCH: "32"}
- {PY_VER: "36", PY_ARCH: "64"}
- {PY_VER: "35", PY_ARCH: "32"}
- {PY_VER: "35", PY_ARCH: "64"}
- {PY_VER: "34", PY_ARCH: "32"}
- {PY_VER: "34", PY_ARCH: "64"}

OPENSSL_VERSION: "1_1_1g"
POSTGRES_VERSION: "11_4"

PSYCOPG2_TESTDB: psycopg2_test
PSYCOPG2_TESTDB_USER: postgres
PSYCOPG2_TESTDB_HOST: localhost

PGUSER: postgres
PGPASSWORD: Password12!
PGSSLMODE: require

# Select according to the service enabled
POSTGRES_DIR: C:\Program Files\PostgreSQL\9.6\

# The python used in the build process, not the one packages are built for
PYEXE: C:\Python36\python.exe

matrix:
fast_finish: false

services:
# Note: if you change this service also change POSTGRES_DIR
- postgresql96

cache:
# Rebuild cache if following file changes
# (See the file to zap the cache manually)
- C:\Others -> scripts\appveyor.cache_rebuild

# Script called before repo cloning
# init:

# Repository gets cloned, Cache is restored

install:
- "%PYEXE% scripts\\appveyor.py install"

# PostgreSQL server starts now

build: off

build_script:
- "%PYEXE% scripts\\appveyor.py build_script"

after_build:
- "%PYEXE% scripts\\appveyor.py after_build"

before_test:
- "%PYEXE% scripts\\appveyor.py before_test"

test_script:
- "%PYEXE% scripts\\appveyor.py test_script"

# vim: set ts=4 sts=4 sw=4:
@@ -1,23 +0,0 @@
---
name: Problem installing psycopg2
about: Report a case in which psycopg2 failed to install on your platform
title: ''
labels: ''
assignees: ''

---

**This is a bug tracker**
If you have a question, such has "how do you do X with Python/PostgreSQL/psycopg2" please [write to the mailing list](https://lists.postgresql.org/manage/) or [open a question](https://github.com/psycopg/psycopg2/discussions) instead.

**Before opening this ticket, please confirm that:**
- [ ] I am running the latest version of pip, i.e. typing ``pip --version`` you get [this version](https://pypi.org/project/pip/).
- [ ] I have read the [installation documentation](https://www.psycopg.org/docs/install.html) and the [frequently asked questions](https://www.psycopg.org/docs/faq.html)
- [ ] If install failed, I typed `pg_config` on the command line and I obtained an output instead of an error.

**Please complete the following information:**
- OS:
- Psycopg version:
- Python version:
- PostgreSQL version:
- pip version
27  .github/ISSUE_TEMPLATE/problem-using-psycopg2.md  vendored

@@ -1,27 +0,0 @@
---
name: Problem using psycopg2
about: Report a case in which psycopg2 is not working as expected
title: ''
labels: ''
assignees: ''

---

**This is a bug tracker**
If you have a question, such has "how do you do X with Python/PostgreSQL/psycopg2" please [write to the mailing list](https://lists.postgresql.org/manage/) or [open a question](https://github.com/psycopg/psycopg2/discussions) instead.

**Please complete the following information:**
- OS:
- Psycopg version:
- Python version:
- PostgreSQL version:
- pip version

**Describe the bug**
Please let us know:

1: what you did
2: what you expected to happen
3: what happened instead

If possible, provide a script reproducing the issue.
6  .github/dependabot.yml  vendored

@@ -1,6 +0,0 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"
18  .github/workflows/docs.yml  vendored

@@ -1,18 +0,0 @@
name: Build documentation

on:
push:
branches:
# This should match the DOC_BRANCH value in the psycopg-website Makefile
- master

jobs:
docs:
runs-on: ubuntu-latest
steps:
- name: Trigger docs build
uses: peter-evans/repository-dispatch@v3
with:
repository: psycopg/psycopg-website
event-type: psycopg2-commit
token: ${{ secrets.ACCESS_TOKEN }}
266  .github/workflows/packages.yml  vendored

@@ -1,266 +0,0 @@
---
name: Build packages
on:
- workflow_dispatch

env:
PIP_BREAK_SYSTEM_PACKAGES: "1"
LIBPQ_VERSION: "16.0"
OPENSSL_VERSION: "1.1.1w"

jobs:
sdist: # {{{
if: true
strategy:
fail-fast: false
matrix:
include:
- package_name: psycopg2
- package_name: psycopg2-binary

runs-on: ubuntu-latest
steps:
- name: Checkout repos
uses: actions/checkout@v4

- name: Build sdist
run: ./scripts/build/build_sdist.sh
env:
PACKAGE_NAME: ${{ matrix.package_name }}

- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: sdist-${{ matrix.package_name }}
path: |
dist/*.tar.gz

env:
PSYCOPG2_TESTDB: postgres
PSYCOPG2_TESTDB_HOST: 172.17.0.1
PSYCOPG2_TESTDB_USER: postgres
PSYCOPG2_TESTDB_PASSWORD: password
PSYCOPG2_TEST_FAST: 1

services:
postgresql:
image: postgres:16
env:
POSTGRES_PASSWORD: password
ports:
- 5432:5432
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5

# }}}

linux: # {{{
if: true

strategy:
fail-fast: false
matrix:
platform: [manylinux, musllinux]
arch: [x86_64, i686, aarch64, ppc64le]
pyver: [cp38, cp39, cp310, cp311, cp312, cp313]

runs-on: ubuntu-latest
steps:
- name: Checkout repos
uses: actions/checkout@v4

- name: Set up QEMU for multi-arch build
uses: docker/setup-qemu-action@v3

- name: Cache libpq build
uses: actions/cache@v4
with:
path: /tmp/libpq.build
key: libpq-${{ env.LIBPQ_VERSION }}-${{ matrix.platform }}-${{ matrix.arch }}

- name: Build wheels
uses: pypa/cibuildwheel@v2.23.3
env:
CIBW_MANYLINUX_X86_64_IMAGE: manylinux2014
CIBW_MANYLINUX_I686_IMAGE: manylinux2014
CIBW_MANYLINUX_AARCH64_IMAGE: manylinux2014
CIBW_MANYLINUX_PPC64LE_IMAGE: manylinux2014
CIBW_BUILD: ${{matrix.pyver}}-${{matrix.platform}}_${{matrix.arch}}
CIBW_ARCHS_LINUX: auto aarch64 ppc64le
CIBW_BEFORE_ALL_LINUX: ./scripts/build/wheel_linux_before_all.sh
CIBW_REPAIR_WHEEL_COMMAND: >-
./scripts/build/strip_wheel.sh {wheel}
&& auditwheel repair -w {dest_dir} {wheel}
CIBW_TEST_COMMAND: >-
export PYTHONPATH={project} &&
python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')"
CIBW_ENVIRONMENT_PASS_LINUX: LIBPQ_VERSION OPENSSL_VERSION
CIBW_ENVIRONMENT: >-
PACKAGE_NAME=psycopg2-binary
LIBPQ_BUILD_PREFIX=/host/tmp/libpq.build
PATH="$LIBPQ_BUILD_PREFIX/bin:$PATH"
LD_LIBRARY_PATH="$LIBPQ_BUILD_PREFIX/lib:$LIBPQ_BUILD_PREFIX/lib64"
PSYCOPG2_TESTDB=postgres
PSYCOPG2_TESTDB_HOST=172.17.0.1
PSYCOPG2_TESTDB_USER=postgres
PSYCOPG2_TESTDB_PASSWORD=password
PSYCOPG2_TEST_FAST=1

- uses: actions/upload-artifact@v4
with:
name: linux-${{matrix.pyver}}-${{matrix.platform}}_${{matrix.arch}}
path: ./wheelhouse/*.whl

services:
postgresql:
image: postgres:16
env:
POSTGRES_PASSWORD: password
ports:
- 5432:5432
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5

# }}}

macos: # {{{
runs-on: macos-latest
if: true

strategy:
fail-fast: false
matrix:
# These archs require an Apple M1 runner: [arm64, universal2]
arch: [x86_64, arm64]
pyver: [cp39, cp310, cp311, cp312, cp313]

steps:
- name: Checkout repos
uses: actions/checkout@v4

- name: Cache libpq build
uses: actions/cache@v4
with:
path: /tmp/libpq.build
key: libpq-${{ env.LIBPQ_VERSION }}-macos-${{ matrix.arch }}

- name: Build wheels
uses: pypa/cibuildwheel@v2.23.3
env:
CIBW_BUILD: ${{matrix.pyver}}-macosx_${{matrix.arch}}
CIBW_ARCHS_MACOS: ${{matrix.arch}}
MACOSX_ARCHITECTURE: ${{matrix.arch}}
CIBW_BEFORE_ALL_MACOS: ./scripts/build/wheel_macos_before_all.sh
CIBW_TEST_COMMAND: >-
export PYTHONPATH={project} &&
python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')"
CIBW_ENVIRONMENT: >-
PG_VERSION=16
PACKAGE_NAME=psycopg2-binary
PSYCOPG2_TESTDB=postgres
PATH="/tmp/libpq.build/bin:$PATH"

- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: macos-${{matrix.pyver}}-macos-${{matrix.arch}}
path: ./wheelhouse/*.whl

# }}}

windows: # {{{
runs-on: windows-latest
if: true

strategy:
fail-fast: false
matrix:
arch: [win_amd64]
pyver: [cp38, cp39, cp310, cp311, cp312, cp313]
package_name: [psycopg2, psycopg2-binary]

defaults:
run:
shell: bash

steps:
# there are some other libpq in PATH
- name: Drop spurious libpq in the path
run: rm -rf c:/tools/php C:/Strawberry/c/bin

- name: Checkout repo
uses: actions/checkout@v4

- name: Start PostgreSQL service for test
run: |
$PgSvc = Get-Service "postgresql*"
Set-Service $PgSvc.Name -StartupType manual
$PgSvc.Start()
shell: powershell

- name: Export GitHub Actions cache environment variables
uses: actions/github-script@v7
with:
script: |
const path = require('path')
core.exportVariable('ACTIONS_CACHE_URL', process.env.ACTIONS_CACHE_URL || '');
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
core.addPath(path.join(process.env.VCPKG_INSTALLATION_ROOT, 'installed/x64-windows-release/lib'));
core.addPath(path.join(process.env.VCPKG_INSTALLATION_ROOT, 'installed/x64-windows-release/bin'));

- name: Create the binary package source tree
run: >-
sed -i 's/^setup(name="psycopg2"/setup(name="${{matrix.package_name}}"/'
setup.py
if: ${{ matrix.package_name != 'psycopg2' }}

- name: Build wheels
uses: pypa/cibuildwheel@v2.23.3
env:
VCPKG_BINARY_SOURCES: "clear;x-gha,readwrite" # cache vcpkg
CIBW_BUILD: ${{matrix.pyver}}-${{matrix.arch}}
CIBW_ARCHS_WINDOWS: AMD64 x86
CIBW_BEFORE_BUILD_WINDOWS: '.\scripts\build\wheel_win32_before_build.bat'
CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: >-
delvewheel repair -w {dest_dir}
--no-mangle "libiconv-2.dll;libwinpthread-1.dll" {wheel}
CIBW_TEST_COMMAND: >-
set PYTHONPATH={project} &&
python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')"
# Note: no fast test because we don't run Windows tests
CIBW_ENVIRONMENT_WINDOWS: >-
PSYCOPG2_TESTDB=postgres
PSYCOPG2_TESTDB_USER=postgres
PSYCOPG2_TESTDB_HOST=localhost

- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: windows-${{ matrix.package_name }}-${{matrix.pyver}}-${{matrix.arch}}
path: ./wheelhouse/*.whl

# }}}

merge: # {{{
runs-on: ubuntu-latest
needs:
- sdist
- linux
- macos
- windows
steps:
- name: Merge Artifacts
uses: actions/upload-artifact/merge@v4
with:
name: psycopg2-artifacts
delete-merged: true

# }}}
79  .github/workflows/tests.yml  vendored

@@ -1,79 +0,0 @@
name: Tests

env:
PIP_BREAK_SYSTEM_PACKAGES: "1"

on:
push:
pull_request:

jobs:
linux:
runs-on: ubuntu-latest
if: true

strategy:
fail-fast: false
matrix:
include:
- {python: "3.8", postgres: "12"}
- {python: "3.9", postgres: "13"}
- {python: "3.10", postgres: "14"}
- {python: "3.11", postgres: "15"}
- {python: "3.12", postgres: "16"}
- {python: "3.13", postgres: "17"}

# Opposite extremes of the supported Py/PG range, other architecture
- {python: "3.8", postgres: "17", architecture: "x86"}
- {python: "3.9", postgres: "16", architecture: "x86"}
- {python: "3.10", postgres: "15", architecture: "x86"}
- {python: "3.11", postgres: "14", architecture: "x86"}
- {python: "3.12", postgres: "13", architecture: "x86"}
- {python: "3.13", postgres: "12", architecture: "x86"}

env:
PSYCOPG2_TESTDB: postgres
PSYCOPG2_TESTDB_HOST: 127.0.0.1
PSYCOPG2_TESTDB_USER: postgres
PSYCOPG2_TESTDB_PASSWORD: password

services:
postgresql:
image: postgres:${{ matrix.postgres }}
env:
POSTGRES_PASSWORD: password
ports:
- 5432:5432
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5

steps:
- uses: actions/checkout@v4

# Can enable to test an unreleased libpq version.
- name: install libpq 16
if: false
run: |
set -x
rel=$(lsb_release -c -s)
echo "deb http://apt.postgresql.org/pub/repos/apt ${rel}-pgdg main 16" \
| sudo tee -a /etc/apt/sources.list.d/pgdg.list
sudo apt-get -qq update
pqver=$(apt-cache show libpq5 | grep ^Version: | head -1 \
| awk '{print $2}')
sudo apt-get -qq -y install "libpq-dev=${pqver}" "libpq5=${pqver}"

- name: Install tox
run: pip install "tox < 4"
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python }}
- name: Run tests
env:
MATRIX_PYTHON: ${{ matrix.python }}
run: tox -e ${MATRIX_PYTHON%-dev}
timeout-minutes: 5
4  .gitignore  vendored

@@ -6,7 +6,7 @@ MANIFEST
*.sw[po]
*.egg-info/
dist/*
/build
build/*
env
env?
.idea

@@ -14,5 +14,3 @@ env?
.vscode/
/rel
/wheels
/packages
/wheelhouse
38  .travis.yml  Normal file

@@ -0,0 +1,38 @@
# Travis CI configuration file for psycopg2

language: python

dist: bionic

arch:
- amd64
- arm64

python:
- 3.5
- 3.6
- 3.7
- 3.8

matrix:
include:
- python: 2.7

install:
- sudo apt-get install -y bc
- pip install -U pip setuptools wheel
- pip install .
- rm -rf psycopg2.egg-info
- sudo scripts/travis_prepare.sh

script:
- scripts/travis_test.sh

deploy:
- provider: script
script: bash scripts/travis_update_docs.sh
on:
branch: master

notifications:
email: false
149  NEWS

@@ -1,133 +1,6 @@
Current release
---------------

What's new in psycopg 2.9.10
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Add support for Python 3.13.
- Receive notifications on commit (:ticket:`#1728`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
  PostgreSQL 17.
- Drop support for Python 3.7.


What's new in psycopg 2.9.9
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Add support for Python 3.12.
- Drop support for Python 3.6.


What's new in psycopg 2.9.8
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Wheel package bundled with PostgreSQL 16 libpq in order to add support for
  recent features, such as ``sslcertmode``.


What's new in psycopg 2.9.7
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Fix propagation of exceptions raised during module initialization
  (:ticket:`#1598`).
- Fix building when pg_config returns an empty string (:ticket:`#1599`).
- Wheel package bundled with OpenSSL 1.1.1v.


What's new in psycopg 2.9.6
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Package manylinux 2014 for aarch64 and ppc64le platforms, in order to
  include libpq 15 in the binary package (:ticket:`#1396`).
- Wheel package bundled with OpenSSL 1.1.1t.


What's new in psycopg 2.9.5
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Add support for Python 3.11.
- Add support for rowcount in MERGE statements in binary packages
  (:ticket:`#1497`).
- Wheel package bundled with OpenSSL 1.1.1r and PostgreSQL 15 libpq.


What's new in psycopg 2.9.4
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Fix `~psycopg2.extras.register_composite()`,
  `~psycopg2.extras.register_range()` with customized :sql:`search_path`
  (:ticket:`#1487`).
- Handle correctly composite types with names or in schemas requiring escape.
- Find ``pg_service.conf`` file in the ``/etc/postgresql-common`` directory in
  binary packages (:ticket:`#1365`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
  PostgreSQL 15.
- Wheel package bundled with OpenSSL 1.1.1q and PostgreSQL 14.4 libpq.


What's new in psycopg 2.9.3
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Alpine (musl) wheels now available (:ticket:`#1392`).
- macOS arm64 (Apple M1) wheels now available (:ticket:`1482`).


What's new in psycopg 2.9.2
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Raise `ValueError` for dates >= Y10k (:ticket:`#1307`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
  PostgreSQL 14.
- Add preliminary support for Python 3.11 (:tickets:`#1376, #1386`).
- Wheel package bundled with OpenSSL 1.1.1l and PostgreSQL 14.1 libpq
  (:ticket:`#1388`).


What's new in psycopg 2.9.1
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Fix regression with named `~psycopg2.sql.Placeholder` (:ticket:`#1291`).


What's new in psycopg 2.9
-------------------------

- ``with connection`` starts a transaction on autocommit transactions too
  (:ticket:`#941`).
- Timezones with fractional minutes are supported on Python 3.7 and following
  (:ticket:`#1272`).
- Escape table and column names in `~cursor.copy_from()` and
  `~cursor.copy_to()`.
- Connection exceptions with sqlstate ``08XXX`` reclassified as
  `~psycopg2.OperationalError` (a subclass of the previously used
  `~psycopg2.DatabaseError`) (:ticket:`#1148`).
- Include library dirs required from libpq to work around MacOS build problems
  (:ticket:`#1200`).

Other changes:

- Dropped support for Python 2.7, 3.4, 3.5 (:tickets:`#1198, #1000, #1197`).
- Dropped support for mx.DateTime.
- Use `datetime.timezone` objects by default in datetime objects instead of
  `~psycopg2.tz.FixedOffsetTimezone`.
- The `psycopg2.tz` module is deprecated and scheduled to be dropped in the
  next major release.
- Provide :pep:`599` wheels packages (manylinux2014 tag) for i686 and x86_64
  platforms.
- Provide :pep:`600` wheels packages (manylinux_2_24 tag) for aarch64 and
  ppc64le platforms.
- Wheel package bundled with OpenSSL 1.1.1k and PostgreSQL 13.3 libpq.
- Build system for Linux/MacOS binary packages moved to GitHub Actions.


What's new in psycopg 2.8.7
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Accept empty params as `~psycopg2.connect()` (:ticket:`#1250`).
- Fix attributes refcount in `Column` initialisation (:ticket:`#1252`).
- Allow re-initialisation of static variables in the C module (:ticket:`#1267`).


What's new in psycopg 2.8.6
^^^^^^^^^^^^^^^^^^^^^^^^^^^

@@ -135,12 +8,12 @@ What's new in psycopg 2.8.6
  (:ticket:`#1101`).
- Fixed search of mxDateTime headers in virtualenvs (:ticket:`#996`).
- Added missing values from errorcodes (:ticket:`#1133`).
- `cursor.query` reports the query of the last :sql:`COPY` operation too
- `cursor.query` reports the query of the last :sql:`COPY` opearation too
  (:ticket:`#1141`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
  PostgreSQL 13.
- Added wheel packages for ARM architecture (:ticket:`#1125`).
- Wheel package bundled with OpenSSL 1.1.1g.
- Wheel package compiled against OpenSSL 1.1.1g.


What's new in psycopg 2.8.5

@@ -169,7 +42,7 @@ What's new in psycopg 2.8.4
  and `~psycopg2.extensions.Column.type_code` (:ticket:`#961`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
  PostgreSQL 12.
- Wheel package bundled with OpenSSL 1.1.1d and PostgreSQL at least 11.4.
- Wheel package compiled against OpenSSL 1.1.1d and PostgreSQL at least 11.4.


What's new in psycopg 2.8.3

@@ -258,7 +131,7 @@ Other changes:
  source files are now compatible with Python 2 & 3 as is.
- The `!psycopg2.test` package is no longer installed by ``python setup.py
  install``.
- Wheel package bundled with OpenSSL 1.0.2r and PostgreSQL 11.2 libpq.
- Wheel package compiled against OpenSSL 1.0.2r and PostgreSQL 11.2 libpq.


What's new in psycopg 2.7.7

@@ -266,14 +139,14 @@ What's new in psycopg 2.7.7

- Cleanup of the cursor results assignment code, which might have solved
  double free and inconsistencies in concurrent usage (:tickets:`#346, #384`).
- Wheel package bundled with OpenSSL 1.0.2q.
- Wheel package compiled against OpenSSL 1.0.2q.


What's new in psycopg 2.7.6.1
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Fixed binary package broken on OS X 10.12 (:ticket:`#807`).
- Wheel package bundled with PostgreSQL 11.1 libpq.
- Wheel package compiled against PostgreSQL 11.1 libpq.


What's new in psycopg 2.7.6

@@ -290,7 +163,7 @@ What's new in psycopg 2.7.6
- `~psycopg2.extras.execute_values()` accepts `~psycopg2.sql.Composable`
  objects (:ticket:`#794`).
- `~psycopg2.errorcodes` map updated to PostgreSQL 11.
- Wheel package bundled with PostgreSQL 10.5 libpq and OpenSSL 1.0.2p.
- Wheel package compiled against PostgreSQL 10.5 libpq and OpenSSL 1.0.2p.


What's new in psycopg 2.7.5

@@ -304,7 +177,7 @@ What's new in psycopg 2.7.5
- Maybe fixed building on MSYS2 (as reported in :ticket:`#658`).
- Allow string subclasses in connection and other places (:ticket:`#679`).
- Don't raise an exception closing an unused named cursor (:ticket:`#716`).
- Wheel package bundled with PostgreSQL 10.4 libpq and OpenSSL 1.0.2o.
- Wheel package compiled against PostgreSQL 10.4 libpq and OpenSSL 1.0.2o.


What's new in psycopg 2.7.4

@@ -326,7 +199,7 @@ What's new in psycopg 2.7.4
- Fixed `~cursor.rowcount` after `~cursor.executemany()` with :sql:`RETURNING`
  statements (:ticket:`#633`).
- Fixed compatibility problem with pypy3 (:ticket:`#649`).
- Wheel packages bundled with PostgreSQL 10.1 libpq and OpenSSL 1.0.2n.
- Wheel packages compiled against PostgreSQL 10.1 libpq and OpenSSL 1.0.2n.
- Wheel packages for Python 2.6 no more available (support dropped from
  wheel building infrastructure).

@@ -334,7 +207,7 @@ What's new in psycopg 2.7.4
What's new in psycopg 2.7.3.2
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Wheel package bundled with PostgreSQL 10.0 libpq and OpenSSL 1.0.2l
- Wheel package compiled against PostgreSQL 10.0 libpq and OpenSSL 1.0.2l
  (:tickets:`#601, #602`).


@@ -407,7 +280,7 @@ New features:
  them together.
- Added `~psycopg2.__libpq_version__` and
  `~psycopg2.extensions.libpq_version()` to inspect the version of the
  ``libpq`` library the module was bundled with
  ``libpq`` library the module was compiled/loaded with
  (:tickets:`#35, #323`).
- The attributes `~connection.notices` and `~connection.notifies` can be
  customized replacing them with any object exposing an `!append()` method
25  README.rst

@@ -17,18 +17,6 @@ flexible objects adaptation system.

Psycopg 2 is both Unicode and Python 3 friendly.

.. Note::

    The psycopg2 package is still widely used and actively maintained, but it
    is not expected to receive new features.

    `Psycopg 3`__ is the evolution of psycopg2 and is where `new features are
    being developed`__: if you are starting a new project you should probably
    start from 3!

    .. __: https://pypi.org/project/psycopg/
    .. __: https://www.psycopg.org/psycopg3/docs/index.html


Documentation
-------------

@@ -73,8 +61,13 @@ production it is advised to use the package built from sources.
.. _install: https://www.psycopg.org/docs/install.html#install-from-source
.. _faq: https://www.psycopg.org/docs/faq.html#faq-compile

:Build status: |gh-actions|
:Linux/OSX: |travis|
:Windows: |appveyor|

.. |gh-actions| image:: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml/badge.svg
    :target: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml
    :alt: Build status
.. |travis| image:: https://travis-ci.org/psycopg/psycopg2.svg?branch=master
    :target: https://travis-ci.org/psycopg/psycopg2
    :alt: Linux and OSX build status

.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/github/psycopg/psycopg2?branch=master&svg=true
    :target: https://ci.appveyor.com/project/psycopg/psycopg2/branch/master
    :alt: Windows build status
@@ -1,7 +1,7 @@
GNU LESSER GENERAL PUBLIC LICENSE
Version 3, 29 June 2007

Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
@@ -8,7 +8,7 @@ check: doctest
# It is not clean by 'make clean'

PYTHON := python$(PYTHON_VERSION)
PYTHON_VERSION ?= $(shell $(PYTHON) -c 'import sys; print("%d.%d" % sys.version_info[:2])')
PYTHON_VERSION ?= $(shell $(PYTHON) -c 'import sys; print ("%d.%d" % sys.version_info[:2])')
BUILD_DIR = $(shell pwd)/../build/lib.$(PYTHON_VERSION)

SPHINXBUILD ?= $$(pwd)/env/bin/sphinx-build

@@ -19,7 +19,7 @@ html: package src/sqlstate_errors.rst
cp -r src/_build/html .

src/sqlstate_errors.rst: ../psycopg/sqlstate_errors.h $(BUILD_DIR)
./env/bin/python src/tools/make_sqlstate_docs.py $< > $@
env/bin/python src/tools/make_sqlstate_docs.py $< > $@

$(BUILD_DIR):
$(MAKE) PYTHON=$(PYTHON) -C .. package

@@ -33,7 +33,7 @@ clean:
rm -rf html src/sqlstate_errors.rst

env: requirements.txt
$(PYTHON) -m venv env
virtualenv -p $(PYTHON) env
./env/bin/pip install -r requirements.txt
echo "$$(pwd)/../build/lib.$(PYTHON_VERSION)" \
> env/lib/python$(PYTHON_VERSION)/site-packages/psycopg.pth
@@ -15,16 +15,28 @@ How to make a psycopg2 release

    $ export VERSION=2.8.4

- Push psycopg2 to master or to the maint branch. Make sure tests on `GitHub
  Actions`__.
- In the `Travis settings`__ you may want to be sure that the variables
  ``TEST_PAST`` and ``TEST_FUTURE`` are set to 1 to check all
  the supported postgres version.

  .. __: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml
  .. __: https://travis-ci.org/psycopg/psycopg2/settings

- Push psycopg2 to master or to the maint branch. Make sure tests on Travis__
  and AppVeyor__ pass.

  .. __: https://travis-ci.org/psycopg/psycopg2
  .. __: https://ci.appveyor.com/project/psycopg/psycopg2

- For an extra test merge or rebase the `test_i686`__ branch on the commit to
  release and push it too: this will test with Python 32 bits and debug
  versions.

  .. __: https://github.com/psycopg/psycopg2/tree/test_i686

- Create a signed tag with the content of the relevant NEWS bit and push it.
  E.g.::

    # Tag name will be 2_8_4
    $ git tag -a -s ${VERSION//\./_}
    $ git tag -a -s 2_8_4

    Psycopg 2.8.4 released

@@ -36,18 +48,34 @@ How to make a psycopg2 release
    - Fixed bug blah (:ticket:`#42`).
    ...

- Create the packages:
- Update the `psycopg2-wheels`_ submodule to the tag version and push. This
  will build the packages on `Travis CI`__ and `AppVeyor`__ and upload them to
  https://upload.psycopg.org/.

  - On GitHub Actions run manually a `package build workflow`__.
  .. _psycopg2-wheels: https://github.com/psycopg/psycopg2-wheels
  .. __: https://travis-ci.org/psycopg/psycopg2-wheels
  .. __: https://ci.appveyor.com/project/psycopg/psycopg2-wheels

  .. __: https://github.com/psycopg/psycopg2/actions/workflows/packages.yml
- Download the packages generated (this assumes ssh configured properly)::

  - When the workflows have finished download the packages from the job
    artifacts.
    $ rsync -arv psycopg-upload:psycopg2-${VERSION} .

- Only for stable packages: upload the signed packages on PyPI::
- Sign the packages and upload the signatures back::

    $ twine upload -s wheelhouse/psycopg2-${VERSION}/*
    $ for f in psycopg2-${VERSION}/*.{exe,tar.gz,whl}; do \
        gpg --armor --detach-sign $f;
      done

    $ rsync -arv psycopg2-${VERSION} psycopg-upload:

- Remove the ``.exe`` from the dir, because we don't want to upload them on
  PyPI::

    $ rm -v psycopg2-${VERSION}/*.exe{,.asc}

- Only for stable packages: upload the packages and signatures on PyPI::

    $ twine upload psycopg2-${VERSION}/*

- Create a release and release notes in the psycopg website, announce to
  psycopg and pgsql-announce mailing lists.

@@ -60,7 +88,7 @@ Releasing test packages

Test packages may be uploaded on the `PyPI testing site`__ using::

    $ twine upload -s -r testpypi wheelhouse/psycopg2-${VERSION}/*
    $ twine upload -r testpypi psycopg2-${VERSION}/*

assuming `proper configuration`__ of ``~/.pypirc``.
@@ -1,2 +0,0 @@
Sphinx
sphinx-better-theme
@@ -1,50 +1,8 @@
#
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
#    pip-compile requirements.in
#
alabaster==0.7.13
    # via sphinx
babel==2.12.1
    # via sphinx
certifi>=2023.7.22
    # via requests
charset-normalizer==3.1.0
    # via requests
docutils==0.19
    # via sphinx
idna==3.4
    # via requests
imagesize==1.4.1
    # via sphinx
jinja2==3.1.2
    # via sphinx
markupsafe==2.1.2
    # via jinja2
packaging==23.1
    # via sphinx
pygments==2.15.0
    # via sphinx
requests==2.31.0
    # via sphinx
snowballstemmer==2.2.0
    # via sphinx
sphinx==6.1.3
    # via -r requirements.in
sphinx-better-theme==0.1.5
    # via -r requirements.in
sphinxcontrib-applehelp==1.0.4
    # via sphinx
sphinxcontrib-devhelp==1.0.2
    # via sphinx
sphinxcontrib-htmlhelp==2.0.1
    # via sphinx
sphinxcontrib-jsmath==1.0.1
    # via sphinx
sphinxcontrib-qthelp==1.0.3
    # via sphinx
sphinxcontrib-serializinghtml==1.1.5
    # via sphinx
urllib3==1.26.17
    # via requests
# Packages only needed to build the docs
Pygments>=2.2,<2.3
Sphinx>=1.6,<=1.7
sphinx-better-theme>=0.1.5,<0.2

# 0.15.2 affected by https://sourceforge.net/p/docutils/bugs/353/
# Can update to 0.16 after release (currently in rc) but must update Sphinx too
docutils<0.15
@@ -226,7 +226,7 @@ read:

    >>> cur.execute("SELECT '(10.2,20.3)'::point")
    >>> point = cur.fetchone()[0]
    >>> print(type(point), point.x, point.y)
    >>> print type(point), point.x, point.y
    <class 'Point'> 10.2 20.3

A typecaster created by `!new_type()` can be also used with

@@ -284,15 +284,15 @@ something to read::
    curs = conn.cursor()
    curs.execute("LISTEN test;")

    print("Waiting for notifications on channel 'test'")
    print "Waiting for notifications on channel 'test'"
    while True:
        if select.select([conn],[],[],5) == ([],[],[]):
            print("Timeout")
            print "Timeout"
        else:
            conn.poll()
            while conn.notifies:
                notify = conn.notifies.pop(0)
                print("Got NOTIFY:", notify.pid, notify.channel, notify.payload)
                print "Got NOTIFY:", notify.pid, notify.channel, notify.payload

Running the script and executing a command such as :sql:`NOTIFY test, 'hello'`
in a separate :program:`psql` shell, the output may look similar to:
@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
#
# Psycopg documentation build configuration file, created by
# sphinx-quickstart on Sun Feb 7 13:48:41 2010.

@@ -47,9 +48,9 @@ source_suffix = '.rst'
master_doc = 'index'

# General information about the project.
project = 'Psycopg'
project = u'Psycopg'
copyright = (
    '2001-2021, Federico Di Gregorio, Daniele Varrazzo, The Psycopg Team'
    u'2001-2020, Federico Di Gregorio, Daniele Varrazzo, The Psycopg Team'
)

# The version info for the project you're documenting, acts as replacement for

@@ -128,6 +129,8 @@ rst_epilog = """
.. _transaction isolation level:
    https://www.postgresql.org/docs/current/static/transaction-iso.html

.. _mx.DateTime: https://www.egenix.com/products/python/mxBase/mxDateTime/

.. |MVCC| replace:: :abbr:`MVCC (Multiversion concurrency control)`
"""

@@ -232,8 +235,8 @@ latex_documents = [
    (
        'index',
        'psycopg.tex',
        'Psycopg Documentation',
        'Federico Di Gregorio',
        u'Psycopg Documentation',
        u'Federico Di Gregorio',
        'manual',
    )
]

@@ -255,7 +258,6 @@ latex_documents = [
# If false, no module index is generated.
# latex_use_modindex = True

toc_object_entries = False

doctest_global_setup = """
@@ -124,7 +124,7 @@ The ``cursor`` class
.. attribute:: name

    Read-only attribute containing the name of the cursor if it was
    created as named cursor by `connection.cursor()`, or `!None` if
    creates as named cursor by `connection.cursor()`, or `!None` if
    it is a client side cursor. See :ref:`server-side-cursors`.

.. extension::

@@ -208,14 +208,6 @@ The ``cursor`` class
    Parameters are bounded to the query using the same rules described in
    the `~cursor.execute()` method.

    .. code:: python

        >>> nums = ((1,), (5,), (10,))
        >>> cur.executemany("INSERT INTO test (num) VALUES (%s)", nums)

        >>> tuples = ((123, "foo"), (42, "bar"), (23, "baz"))
        >>> cur.executemany("INSERT INTO test (num, data) VALUES (%s, %s)", tuples)

    .. warning::
        In its current implementation this method is not faster than
        executing `~cursor.execute()` in a loop. For better performance

@@ -240,16 +232,6 @@ The ``cursor`` class
    .. versionchanged:: 2.7
        added support for named arguments.

    .. note::

        `!callproc()` can only be used with PostgreSQL functions__, not
        with the procedures__ introduced in PostgreSQL 11, which require
        the :sql:`CALL` statement to run. Please use a normal
        `execute()` to run them.

        .. __: https://www.postgresql.org/docs/current/sql-createfunction.html
        .. __: https://www.postgresql.org/docs/current/sql-createprocedure.html

.. method:: mogrify(operation [, parameters])

    Return a query string after arguments binding. The string returned is

@@ -292,7 +274,7 @@ The ``cursor`` class

    >>> cur.execute("SELECT * FROM test;")
    >>> for record in cur:
    ...     print(record)
    ...     print record
    ...
    (1, 100, "abc'def")
    (2, None, 'dada')

@@ -516,10 +498,8 @@ The ``cursor`` class

    The time zone factory used to handle data types such as
    :sql:`TIMESTAMP WITH TIME ZONE`. It should be a `~datetime.tzinfo`
    object. Default is `datetime.timezone`.

    .. versionchanged:: 2.9
        previosly the default factory was `psycopg2.tz.FixedOffsetTimezone`.
    object. A few implementations are available in the `psycopg2.tz`
    module.

.. method:: nextset()

@@ -570,6 +550,13 @@ The ``cursor`` class
    >>> cur.fetchall()
    [(6, 42, 'foo'), (7, 74, 'bar')]

    .. note:: the name of the table is not quoted: if the table name
        contains uppercase letters or special characters it must be quoted
        with double quotes::

            cur.copy_from(f, '"TABLE"')

    .. versionchanged:: 2.0.6
        added the *columns* parameter.

@@ -578,11 +565,6 @@ The ``cursor`` class
    are encoded in the connection `~connection.encoding` when sent to
    the backend.

    .. versionchanged:: 2.9
        the table and fields names are now quoted. If you need to specify
        a schema-qualified table please use `copy_expert()`.

.. method:: copy_to(file, table, sep='\\t', null='\\\\N', columns=None)

    Write the content of the table named *table* *to* the file-like

@@ -604,6 +586,12 @@ The ``cursor`` class
    2|\N|dada
    ...

    .. note:: the name of the table is not quoted: if the table name
        contains uppercase letters or special characters it must be quoted
        with double quotes::

            cur.copy_to(f, '"TABLE"')

    .. versionchanged:: 2.0.6
        added the *columns* parameter.

@@ -612,10 +600,6 @@ The ``cursor`` class
    are decoded in the connection `~connection.encoding` when read
    from the backend.

    .. versionchanged:: 2.9
        the table and fields names are now quoted. If you need to specify
        a schema-qualified table please use `copy_expert()`.

.. method:: copy_expert(sql, file, size=8192)
@@ -50,7 +50,7 @@ An example of the available constants defined in the module:
'42P01'

Constants representing all the error values defined by PostgreSQL versions
between 8.1 and 15 are included in the module.
between 8.1 and 13 are included in the module.


.. autofunction:: lookup(code)
@@ -14,17 +14,11 @@

.. versionchanged:: 2.8.6 added errors introduced in PostgreSQL 13

.. versionchanged:: 2.9.2 added errors introduced in PostgreSQL 14

.. versionchanged:: 2.9.4 added errors introduced in PostgreSQL 15

.. versionchanged:: 2.9.10 added errors introduced in PostgreSQL 17

This module exposes the classes psycopg raises upon receiving an error from
the database with a :sql:`SQLSTATE` value attached (available in the
`~psycopg2.Error.pgcode` attribute). The content of the module is generated
from the PostgreSQL source code and includes classes for every error defined
by PostgreSQL in versions between 9.1 and 15.
by PostgreSQL in versions between 9.1 and 13.

Every class in the module is named after what referred as "condition name" `in
the documentation`__, converted to CamelCase: e.g. the error 22012,
@@ -453,6 +453,13 @@ deal with Python objects adaptation:

    Specialized adapters for Python datetime objects.

.. class:: DateFromMx
           TimeFromMx
           TimestampFromMx
           IntervalFromMx

    Specialized adapters for `mx.DateTime`_ objects.

.. data:: adapters

    Dictionary of the currently registered object adapters. Use

@@ -997,6 +1004,20 @@ from the database. See :ref:`unicode-handling` for details.
    Typecasters to convert time-related data types to Python `!datetime`
    objects.

.. data:: MXDATE
          MXDATETIME
          MXDATETIMETZ
          MXINTERVAL
          MXTIME
          MXDATEARRAY
          MXDATETIMEARRAY
          MXDATETIMETZARRAY
          MXINTERVALARRAY
          MXTIMEARRAY

    Typecasters to convert time-related data types to `mx.DateTime`_ objects.
    Only available if Psycopg was compiled with `!mx` support.

.. versionchanged:: 2.2
    previously the `DECIMAL` typecaster and the specific time-related
    typecasters (`!PY*` and `!MX*`) were not exposed by the `extensions`
@@ -445,9 +445,7 @@ The individual messages in the replication stream are represented by

    If the *reply* or *force* parameters are not set, this method will
    just update internal structures without sending the feedback message
    to the server. The library sends feedback message automatically
    when *status_interval* timeout is reached. For this to work, you must
    call `send_feedback()` on the same Cursor that you called `start_replication()`
    on (the one in `message.cursor`) or your feedback will be lost.
    when *status_interval* timeout is reached.

    .. versionchanged:: 2.8.3
        added the *force* parameter.

@@ -1029,14 +1027,6 @@ parameters. By reducing the number of server roundtrips the performance can be

.. autofunction:: execute_batch

    .. code:: python

        >>> nums = ((1,), (5,), (10,))
        >>> execute_batch(cur, "INSERT INTO test (num) VALUES (%s)", nums)

        >>> tuples = ((123, "foo"), (42, "bar"), (23, "baz"))
        >>> execute_batch(cur, "INSERT INTO test (num, data) VALUES (%s, %s)", tuples)

    .. versionadded:: 2.7

    .. note::
@@ -180,7 +180,7 @@ Psycopg automatically converts PostgreSQL :sql:`json` data into Python objects.

Psycopg converts :sql:`json` values into Python objects but :sql:`jsonb` values are returned as strings. Can :sql:`jsonb` be converted automatically?
    Automatic conversion of :sql:`jsonb` values is supported from Psycopg
    release 2.5.4. For previous versions you can register the :sql:`json`
    typecaster on the :sql:`jsonb` oids (which are known and not supposed to
    typecaster on the :sql:`jsonb` oids (which are known and not suppsed to
    change in future PostgreSQL versions)::

        psycopg2.extras.register_json(oid=3802, array_oid=3807, globally=True)
@@ -31,23 +31,23 @@ wheel_ package available on PyPI_:

This will install a pre-compiled binary version of the module which does not
require the build or runtime prerequisites described below. Make sure to use
an up-to-date version of :program:`pip` (you can upgrade it using something
an up-date-date version of :program:`pip` (you can upgrade it using something
like ``pip install -U pip``).

You may then import the ``psycopg2`` package, as usual:
You may then import the ``psycopg`` package, as usual:

.. code-block:: python

    import psycopg2
    import psycopg

    # Connect to your postgres DB
    conn = psycopg2.connect("dbname=test user=postgres")
    conn = psycopg.connect("dbname=test user=postgres")

    # Open a cursor to perform database operations
    cur = conn.cursor()

    # Execute a query
    cur.execute("SELECT * FROM my_data")
    cur.execute("SELECT * FROM my_data");

    # Retrieve query results
    records = cur.fetchall()

@@ -131,17 +131,11 @@ The current `!psycopg2` implementation supports:
..
    NOTE: keep consistent with setup.py and the /features/ page.

- Python versions from 3.8 to 3.13
- PostgreSQL server versions from 7.4 to 17
- Python version 2.7
- Python 3 versions from 3.4 to 3.8
- PostgreSQL server versions from 7.4 to 12
- PostgreSQL client library version from 9.1

.. note::

    Not all the psycopg2 versions support all the supported Python versions.

    Please see the :ref:`release notes <news>` to verify when the support for
    a new Python version was added and when the support for an old Python
    version was removed.


.. _build-prerequisites:

@@ -205,7 +199,7 @@ self-contained wheel package, it will need the libpq_ library at runtime
(usually distributed in a ``libpq.so`` or ``libpq.dll`` file). `!psycopg2`
relies on the host OS to find the library if the library is installed in a
standard location there is usually no problem; if the library is in a
non-standard location you will have to tell Psycopg how to find it,
non-standard location you will have to tell somehow Psycopg how to find it,
which is OS-dependent (for instance setting a suitable
:envvar:`LD_LIBRARY_PATH` on Linux).

@@ -237,6 +231,7 @@ If you have less standard requirements such as:

- creating a :ref:`debug build <debug-build>`,
- using :program:`pg_config` not in the :envvar:`PATH`,
- supporting ``mx.DateTime``,

then take a look at the ``setup.cfg`` file.
@@ -168,7 +168,7 @@ available through the following exceptions:

    >>> e.pgcode
    '42P01'
    >>> print(e.pgerror)
    >>> print e.pgerror
    ERROR: relation "barf" does not exist
    LINE 1: SELECT * FROM barf
    ^

@@ -184,7 +184,7 @@ available through the following exceptions:

    >>> try:
    ...     cur.execute("SELECT * FROM barf")
    ... except psycopg2.Error as e:
    ... except psycopg2.Error, e:
    ...     pass

    >>> e.diag.severity
@@ -2,8 +2,6 @@
    single: Release notes
    single: News

.. _news:

Release notes
=============
@@ -33,7 +33,7 @@ name should be escaped using `~psycopg2.extensions.quote_ident()`::

    # This works, but it is not optimal
    table_name = 'my_table'
    cur.execute(
        "insert into %s values (%%s, %%s)" % ext.quote_ident(table_name, cur),
        "insert into %s values (%%s, %%s)" % ext.quote_ident(table_name),
        [10, 20])

This is now safe, but it somewhat ad-hoc. In case, for some reason, it is
@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
"""
extension
~~~~~~~~~
@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
"""
sql role
~~~~~~~~
@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
"""
ticket role
~~~~~~~~~~~
@@ -2,6 +2,7 @@
"""Create the docs table of the sqlstate errors.
"""

from __future__ import print_function

import re
import sys

@@ -25,8 +26,8 @@ def main():
    for k in sorted(sqlstate_errors):
        exc = sqlstate_errors[k]
        lines.append(Line(
            f"``{k}``", f"`!{exc.__name__}`",
            f"`!{get_base_exception(exc).__name__}`", k))
            "``%s``" % k, "`!%s`" % exc.__name__,
            "`!%s`" % get_base_exception(exc).__name__, k))

    widths = [max(len(l[c]) for l in lines) for c in range(3)]
    h = Line(*(['=' * w for w in widths] + [None]))

@@ -39,7 +40,7 @@ def main():
    for l in lines:
        cls = l.sqlstate[:2] if l.sqlstate else None
        if cls and cls != sqlclass:
            print(f"**Class {cls}**: {sqlclasses[cls]}")
            print("**Class %s**: %s" % (cls, sqlclasses[cls]))
            print(h1)
            sqlclass = cls
@@ -5,10 +5,6 @@

.. module:: psycopg2.tz

.. deprecated:: 2.9
    The module will be dropped in psycopg 2.10. Use `datetime.timezone`
    instead.

This module holds two different tzinfo implementations that can be used as the
`tzinfo` argument to `~datetime.datetime` constructors, directly passed to
Psycopg functions or used to set the `cursor.tzinfo_factory` attribute in
@ -407,7 +407,7 @@ defined on the database connection (the `PostgreSQL encoding`__, available in
|
|||
`connection.encoding`, is translated into a `Python encoding`__ using the
|
||||
`~psycopg2.extensions.encodings` mapping)::
|
||||
|
||||
>>> print(u, type(u))
|
||||
>>> print u, type(u)
|
||||
àèìòù€ <type 'unicode'>
|
||||
|
||||
>>> cur.execute("INSERT INTO test (num, data) VALUES (%s,%s);", (74, u))
|
||||
|
@ -418,19 +418,19 @@ defined on the database connection (the `PostgreSQL encoding`__, available in
|
|||
When reading data from the database, in Python 2 the strings returned are
|
||||
usually 8 bit `!str` objects encoded in the database client encoding::
|
||||
|
||||
>>> print(conn.encoding)
|
||||
>>> print conn.encoding
|
||||
UTF8
|
||||
|
||||
>>> cur.execute("SELECT data FROM test WHERE num = 74")
|
||||
>>> x = cur.fetchone()[0]
|
||||
>>> print(x, type(x), repr(x))
|
||||
>>> print x, type(x), repr(x)
|
||||
àèìòù€ <type 'str'> '\xc3\xa0\xc3\xa8\xc3\xac\xc3\xb2\xc3\xb9\xe2\x82\xac'
|
||||
|
||||
>>> conn.set_client_encoding('LATIN9')
|
||||
|
||||
>>> cur.execute("SELECT data FROM test WHERE num = 74")
|
||||
>>> x = cur.fetchone()[0]
|
||||
>>> print(type(x), repr(x))
|
||||
>>> print type(x), repr(x)
|
||||
<type 'str'> '\xe0\xe8\xec\xf2\xf9\xa4'
|
||||
|
||||
In Python 3 instead the strings are automatically *decoded* in the connection
|
||||
|
@ -442,7 +442,7 @@ In Python 2 you must register a :ref:`typecaster
|
|||
|
||||
>>> cur.execute("SELECT data FROM test WHERE num = 74")
|
||||
>>> x = cur.fetchone()[0]
|
||||
>>> print(x, type(x), repr(x))
|
||||
>>> print x, type(x), repr(x)
|
||||
àèìòù€ <type 'unicode'> u'\xe0\xe8\xec\xf2\xf9\u20ac'
|
||||
|
||||
In the above example, the `~psycopg2.extensions.UNICODE` typecaster is
|
||||
|
@ -540,6 +540,7 @@ or `!memoryview` (in Python 3).
|
|||
single: Date objects; Adaptation
|
||||
single: Time objects; Adaptation
|
||||
single: Interval objects; Adaptation
|
||||
single: mx.DateTime; Adaptation
|
||||
|
||||
.. _adapt-date:
|
||||
|
||||
|
@ -549,7 +550,8 @@ Date/Time objects adaptation
|
|||
Python builtin `~datetime.datetime`, `~datetime.date`,
|
||||
`~datetime.time`, `~datetime.timedelta` are converted into PostgreSQL's
|
||||
:sql:`timestamp[tz]`, :sql:`date`, :sql:`time[tz]`, :sql:`interval` data types.
|
||||
Time zones are supported too.
|
||||
Time zones are supported too. The Egenix `mx.DateTime`_ objects are adapted
|
||||
the same way::
|
||||
|
||||
>>> dt = datetime.datetime.now()
|
||||
>>> dt
|
||||
|
@ -574,39 +576,29 @@ Time zones handling
'''''''''''''''''''

The PostgreSQL type :sql:`timestamp with time zone` (a.k.a.
:sql:`timestamptz`) is converted into Python `~datetime.datetime` objects.
:sql:`timestamptz`) is converted into Python `~datetime.datetime` objects with
a `~datetime.datetime.tzinfo` attribute set to a
`~psycopg2.tz.FixedOffsetTimezone` instance.

    >>> cur.execute("SET TIME ZONE 'Europe/Rome'")  # UTC + 1 hour
    >>> cur.execute("SELECT '2010-01-01 10:30:45'::timestamptz")
    >>> cur.fetchone()[0]
    datetime.datetime(2010, 1, 1, 10, 30, 45,
        tzinfo=datetime.timezone(datetime.timedelta(seconds=3600)))
    >>> cur.execute("SET TIME ZONE 'Europe/Rome';")  # UTC + 1 hour
    >>> cur.execute("SELECT '2010-01-01 10:30:45'::timestamptz;")
    >>> cur.fetchone()[0].tzinfo
    psycopg2.tz.FixedOffsetTimezone(offset=60, name=None)

.. note::

    Note that only time zones with an integer number of minutes are supported:
    this is a limitation of the Python `datetime` module. A few historical time
    zones had seconds in the UTC offset: these time zones will have the offset
    rounded to the nearest minute, with an error of up to 30 seconds.

    Before Python 3.7, the `datetime` module only supported timezones with an
    integer number of minutes. A few historical time zones had seconds in the
    UTC offset: these time zones will have the offset rounded to the nearest
    minute, with an error of up to 30 seconds, on Python versions before 3.7.

        >>> cur.execute("SET TIME ZONE 'Asia/Calcutta'")  # offset was +5:21:10
        >>> cur.execute("SELECT '1900-01-01 10:30:45'::timestamptz")
        >>> cur.fetchone()[0].tzinfo
        # On Python 3.6: 5h, 21m
        datetime.timezone(datetime.timedelta(0, 19260))
        # On Python 3.7 and following: 5h, 21m, 10s
        datetime.timezone(datetime.timedelta(seconds=19270))
        >>> cur.execute("SET TIME ZONE 'Asia/Calcutta';")  # offset was +5:53:20
        >>> cur.execute("SELECT '1930-01-01 10:30:45'::timestamptz;")
        >>> cur.fetchone()[0].tzinfo
        psycopg2.tz.FixedOffsetTimezone(offset=353, name=None)

.. versionchanged:: 2.2.2
    timezones with seconds are supported (with rounding). Previously such
    timezones raised an error.

.. versionchanged:: 2.9
    timezones with seconds are supported without rounding.

.. versionchanged:: 2.9
    use `datetime.timezone` as default tzinfo object instead of
    `~psycopg2.tz.FixedOffsetTimezone`.
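
An illustrative sketch (assuming the open connection "conn" used above): the
tzinfo returned by psycopg2 is a fixed UTC offset, so convert through
`~datetime.datetime.astimezone` if you need the value in another offset::

    import datetime

    cur = conn.cursor()
    cur.execute("SET TIME ZONE 'UTC'")
    cur.execute("SELECT '2010-01-01 10:30:45+01'::timestamptz")
    ts = cur.fetchone()[0]
    print(ts)    # 2010-01-01 09:30:45+00:00, with a fixed-offset tzinfo
    print(ts.astimezone(datetime.timezone(datetime.timedelta(hours=1))))
    # 2010-01-01 10:30:45+01:00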
.. index::
    double: Date objects; Infinite

@ -758,10 +750,10 @@ until a call to the `~connection.rollback()` method.

The connection is responsible for terminating its transaction, calling either
the `~connection.commit()` or `~connection.rollback()` method. Committed
changes are immediately made persistent in the database. If the connection
is closed (using the `~connection.close()` method) or destroyed (using `!del`
or by letting it fall out of scope) while a transaction is in progress, the
server will discard the transaction. However doing so is not advisable:
middleware such as PgBouncer_ may see the connection closed uncleanly and
dispose of it.
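
A minimal sketch of this pattern (assuming the same "test" database as in the
earlier examples): commit on success, roll back on error, and always close the
connection cleanly::

    import psycopg2

    conn = psycopg2.connect("dbname=test")   # hypothetical DSN
    try:
        cur = conn.cursor()
        cur.execute("INSERT INTO test (num, data) VALUES (%s, %s)", (42, "hello"))
        conn.commit()        # make the change persistent
    except Exception:
        conn.rollback()      # discard the failed transaction
        raise
    finally:
        conn.close()         # close cleanly so pooling middleware is not confused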
@ -795,8 +787,6 @@ the details.

.. index::
    single: with statement

.. _with:

``with`` statement
^^^^^^^^^^^^^^^^^^

@ -814,7 +804,7 @@ is rolled back.

When a cursor exits the ``with`` block it is closed, releasing any resource
possibly associated with it. The state of the transaction is not affected.

A connection can be used in more than one ``with`` statement
and each ``with`` block is effectively wrapped in a separate transaction::

    conn = psycopg2.connect(DSN)
@ -842,9 +832,6 @@ and each ``with`` block is effectively wrapped in a separate transaction::

    finally:
        conn.close()

.. versionchanged:: 2.9
    ``with connection`` starts a transaction also on autocommit connections.
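
A sketch of the pattern described above (assuming the same "test" database):
each ``with conn`` block commits on success and rolls back on error, while the
connection itself stays open until `!close()` is called::

    import psycopg2

    conn = psycopg2.connect("dbname=test")   # hypothetical DSN
    try:
        with conn:
            with conn.cursor() as cur:
                cur.execute("INSERT INTO test (num, data) VALUES (%s, %s)", (1, "a"))
        # first transaction committed here

        with conn:
            with conn.cursor() as cur:
                cur.execute("SELECT count(*) FROM test")
                print(cur.fetchone()[0])
        # second transaction committed here
    finally:
        conn.close()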
.. index::
    pair: Server side; Cursor

@ -860,7 +847,7 @@ Server side cursors

When a database query is executed, the Psycopg `cursor` usually fetches
all the records returned by the backend, transferring them to the client
process. If the query returns a huge amount of data, a proportionally large
amount of memory will be allocated by the client.

If the dataset is too large to be practically handled on the client side, it is
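
A sketch of a server-side (named) cursor (assuming the same "test" database):
naming the cursor keeps the result set on the server and fetches it in
batches, so client memory stays bounded::

    import psycopg2

    conn = psycopg2.connect("dbname=test")   # hypothetical DSN
    with conn:
        with conn.cursor(name="big_scan") as cur:   # named => server-side cursor
            cur.itersize = 2000        # rows fetched per network round trip
            cur.execute("SELECT * FROM test")
            for row in cur:
                pass                   # process one row at a time
    conn.close()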
@ -1052,8 +1039,8 @@ using the |lo_import|_ and |lo_export|_ libpq functions.

(`~connection.server_version` must be >= ``90300``).

If Psycopg was built with 64 bits large objects support (i.e. the first
two conditions above are verified), the `psycopg2.__version__` constant
will contain the ``lo64`` flag. If any of the conditions is not met,
several `!lobject` methods will fail if the arguments exceed 2GB.
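
A minimal large-object sketch (assuming the same "test" database): create a
large object, write it, then read it back within the same transaction::

    import psycopg2

    conn = psycopg2.connect("dbname=test")   # hypothetical DSN
    lo = conn.lobject(0, "wb")        # oid 0 lets the server assign a new oid
    lo.write(b"some binary payload")
    oid = lo.oid
    lo.close()

    lo = conn.lobject(oid, "rb")
    print(lo.read())                  # b'some binary payload'
    lo.close()
    conn.commit()
    conn.close()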
|
|
@ -19,7 +19,7 @@ Homepage: https://psycopg.org/
|
|||
# psycopg/__init__.py - initialization of the psycopg module
|
||||
#
|
||||
# Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
# Copyright (C) 2020 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -61,6 +61,8 @@ from psycopg2._psycopg import ( # noqa
|
|||
__version__, __libpq_version__,
|
||||
)
|
||||
|
||||
from psycopg2 import tz # noqa
|
||||
|
||||
|
||||
# Register default adapters.
|
||||
|
||||
|
@ -118,6 +120,9 @@ def connect(dsn=None, connection_factory=None, cursor_factory=None, **kwargs):
|
|||
if 'async_' in kwargs:
|
||||
kwasync['async_'] = kwargs.pop('async_')
|
||||
|
||||
if dsn is None and not kwargs:
|
||||
raise TypeError('missing dsn and no parameters')
|
||||
|
||||
dsn = _ext.make_dsn(dsn, **kwargs)
|
||||
conn = _connect(dsn, connection_factory=connection_factory, **kwasync)
|
||||
if cursor_factory is not None:
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
# psycopg/_ipaddress.py - Ipaddres-based network types adaptation
|
||||
#
|
||||
# Copyright (C) 2016-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
# Copyright (C) 2020 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -26,6 +26,7 @@
|
|||
|
||||
from psycopg2.extensions import (
|
||||
new_type, new_array_type, register_type, register_adapter, QuotedString)
|
||||
from psycopg2.compat import text_type
|
||||
|
||||
# The module is imported on register_ipaddress
|
||||
ipaddress = None
|
||||
|
@ -77,13 +78,13 @@ def cast_interface(s, cur=None):
|
|||
if s is None:
|
||||
return None
|
||||
# Py2 version force the use of unicode. meh.
|
||||
return ipaddress.ip_interface(str(s))
|
||||
return ipaddress.ip_interface(text_type(s))
|
||||
|
||||
|
||||
def cast_network(s, cur=None):
|
||||
if s is None:
|
||||
return None
|
||||
return ipaddress.ip_network(str(s))
|
||||
return ipaddress.ip_network(text_type(s))
|
||||
|
||||
|
||||
def adapt_ipaddress(obj):
|
||||
|
|
19
lib/_json.py
|
@ -8,7 +8,7 @@ extensions importing register_json from extras.
|
|||
# psycopg/_json.py - Implementation of the JSON adaptation objects
|
||||
#
|
||||
# Copyright (C) 2012-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
# Copyright (C) 2020 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -32,6 +32,7 @@ import json
|
|||
|
||||
from psycopg2._psycopg import ISQLQuote, QuotedString
|
||||
from psycopg2._psycopg import new_type, new_array_type, register_type
|
||||
from psycopg2.compat import PY2
|
||||
|
||||
|
||||
# oids from PostgreSQL 9.2
|
||||
|
@ -43,7 +44,7 @@ JSONB_OID = 3802
|
|||
JSONBARRAY_OID = 3807
|
||||
|
||||
|
||||
class Json:
|
||||
class Json(object):
|
||||
"""
|
||||
An `~psycopg2.extensions.ISQLQuote` wrapper to adapt a Python object to
|
||||
:sql:`json` data type.
|
||||
|
@ -81,9 +82,13 @@ class Json:
|
|||
qs.prepare(self._conn)
|
||||
return qs.getquoted()
|
||||
|
||||
def __str__(self):
|
||||
# getquoted is binary
|
||||
return self.getquoted().decode('ascii', 'replace')
|
||||
if PY2:
|
||||
def __str__(self):
|
||||
return self.getquoted()
|
||||
else:
|
||||
def __str__(self):
|
||||
# getquoted is binary in Py3
|
||||
return self.getquoted().decode('ascii', 'replace')
|
||||
|
||||
|
||||
def register_json(conn_or_curs=None, globally=False, loads=None,
|
||||
|
@ -163,7 +168,7 @@ def _create_json_typecasters(oid, array_oid, loads=None, name='JSON'):
|
|||
|
||||
JSON = new_type((oid, ), name, typecast_json)
|
||||
if array_oid is not None:
|
||||
JSONARRAY = new_array_type((array_oid, ), f"{name}ARRAY", JSON)
|
||||
JSONARRAY = new_array_type((array_oid, ), "%sARRAY" % name, JSON)
|
||||
else:
|
||||
JSONARRAY = None
|
||||
|
||||
|
@ -194,6 +199,6 @@ def _get_json_oids(conn_or_curs, name='json'):
|
|||
conn.rollback()
|
||||
|
||||
if not r:
|
||||
raise conn.ProgrammingError(f"{name} data type not found")
|
||||
raise conn.ProgrammingError("%s data type not found" % name)
|
||||
|
||||
return r
|
||||
|
|
104
lib/_lru_cache.py
Normal file
|
@ -0,0 +1,104 @@
|
|||
"""
|
||||
LRU cache implementation for Python 2.7
|
||||
|
||||
Ported from http://code.activestate.com/recipes/578078/ and simplified for our
|
||||
use (only support maxsize > 0 and positional arguments).
|
||||
"""
|
||||
|
||||
from collections import namedtuple
|
||||
from functools import update_wrapper
|
||||
from threading import RLock
|
||||
|
||||
_CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])
|
||||
|
||||
|
||||
def lru_cache(maxsize=100):
|
||||
"""Least-recently-used cache decorator.
|
||||
|
||||
Arguments to the cached function must be hashable.
|
||||
|
||||
See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
|
||||
|
||||
"""
|
||||
def decorating_function(user_function):
|
||||
|
||||
cache = dict()
|
||||
stats = [0, 0] # make statistics updateable non-locally
|
||||
HITS, MISSES = 0, 1 # names for the stats fields
|
||||
cache_get = cache.get # bound method to lookup key or return None
|
||||
_len = len # localize the global len() function
|
||||
lock = RLock() # linkedlist updates aren't threadsafe
|
||||
root = [] # root of the circular doubly linked list
|
||||
root[:] = [root, root, None, None] # initialize by pointing to self
|
||||
nonlocal_root = [root] # make updateable non-locally
|
||||
PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
|
||||
|
||||
assert maxsize and maxsize > 0, "maxsize %s not supported" % maxsize
|
||||
|
||||
def wrapper(*args):
|
||||
# size limited caching that tracks accesses by recency
|
||||
key = args
|
||||
with lock:
|
||||
link = cache_get(key)
|
||||
if link is not None:
|
||||
# record recent use of the key by moving it to the
|
||||
# front of the list
|
||||
root, = nonlocal_root
|
||||
link_prev, link_next, key, result = link
|
||||
link_prev[NEXT] = link_next
|
||||
link_next[PREV] = link_prev
|
||||
last = root[PREV]
|
||||
last[NEXT] = root[PREV] = link
|
||||
link[PREV] = last
|
||||
link[NEXT] = root
|
||||
stats[HITS] += 1
|
||||
return result
|
||||
result = user_function(*args)
|
||||
with lock:
|
||||
root, = nonlocal_root
|
||||
if key in cache:
|
||||
# getting here means that this same key was added to the
|
||||
# cache while the lock was released. since the link
|
||||
# update is already done, we need only return the
|
||||
# computed result and update the count of misses.
|
||||
pass
|
||||
elif _len(cache) >= maxsize:
|
||||
# use the old root to store the new key and result
|
||||
oldroot = root
|
||||
oldroot[KEY] = key
|
||||
oldroot[RESULT] = result
|
||||
# empty the oldest link and make it the new root
|
||||
root = nonlocal_root[0] = oldroot[NEXT]
|
||||
oldkey = root[KEY]
|
||||
# oldvalue = root[RESULT]
|
||||
root[KEY] = root[RESULT] = None
|
||||
# now update the cache dictionary for the new links
|
||||
del cache[oldkey]
|
||||
cache[key] = oldroot
|
||||
else:
|
||||
# put result in a new link at the front of the list
|
||||
last = root[PREV]
|
||||
link = [last, root, key, result]
|
||||
last[NEXT] = root[PREV] = cache[key] = link
|
||||
stats[MISSES] += 1
|
||||
return result
|
||||
|
||||
def cache_info():
|
||||
"""Report cache statistics"""
|
||||
with lock:
|
||||
return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))
|
||||
|
||||
def cache_clear():
|
||||
"""Clear the cache and cache statistics"""
|
||||
with lock:
|
||||
cache.clear()
|
||||
root = nonlocal_root[0]
|
||||
root[:] = [root, root, None, None]
|
||||
stats[:] = [0, 0]
|
||||
|
||||
wrapper.__wrapped__ = user_function
|
||||
wrapper.cache_info = cache_info
|
||||
wrapper.cache_clear = cache_clear
|
||||
return update_wrapper(wrapper, user_function)
|
||||
|
||||
return decorating_function
|
|
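
A hedged usage sketch for the backport above: on Python 3 the standard
functools.lru_cache offers the same interface for this case, so the example
below runs unchanged against either implementation::

    from functools import lru_cache   # the backport mirrors this interface

    @lru_cache(maxsize=100)
    def fib(n):
        # positional, hashable arguments only, as the backport requires
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    print(fib(30))            # 832040
    print(fib.cache_info())   # hits/misses/maxsize/currsize statistics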
@ -5,7 +5,7 @@
|
|||
# psycopg/_range.py - Implementation of the Range type and adaptation
|
||||
#
|
||||
# Copyright (C) 2012-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
# Copyright (C) 2020 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -30,9 +30,10 @@ import re
|
|||
from psycopg2._psycopg import ProgrammingError, InterfaceError
|
||||
from psycopg2.extensions import ISQLQuote, adapt, register_adapter
|
||||
from psycopg2.extensions import new_type, new_array_type, register_type
|
||||
from psycopg2.compat import string_types
|
||||
|
||||
|
||||
class Range:
|
||||
class Range(object):
|
||||
"""Python representation for a PostgreSQL |range|_ type.
|
||||
|
||||
:param lower: lower bound for the range. `!None` means unbound
|
||||
|
@ -47,7 +48,7 @@ class Range:
|
|||
def __init__(self, lower=None, upper=None, bounds='[)', empty=False):
|
||||
if not empty:
|
||||
if bounds not in ('[)', '(]', '()', '[]'):
|
||||
raise ValueError(f"bound flags not valid: {bounds!r}")
|
||||
raise ValueError("bound flags not valid: %r" % bounds)
|
||||
|
||||
self._lower = lower
|
||||
self._upper = upper
|
||||
|
@ -57,9 +58,9 @@ class Range:
|
|||
|
||||
def __repr__(self):
|
||||
if self._bounds is None:
|
||||
return f"{self.__class__.__name__}(empty=True)"
|
||||
return "%s(empty=True)" % self.__class__.__name__
|
||||
else:
|
||||
return "{}({!r}, {!r}, {!r})".format(self.__class__.__name__,
|
||||
return "%s(%r, %r, %r)" % (self.__class__.__name__,
|
||||
self._lower, self._upper, self._bounds)
|
||||
|
||||
def __str__(self):
|
||||
|
@ -143,6 +144,10 @@ class Range:
|
|||
def __bool__(self):
|
||||
return self._bounds is not None
|
||||
|
||||
def __nonzero__(self):
|
||||
# Python 2 compatibility
|
||||
return type(self).__bool__(self)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, Range):
|
||||
return False
|
||||
|
@ -234,7 +239,7 @@ def register_range(pgrange, pyrange, conn_or_curs, globally=False):
|
|||
return caster
|
||||
|
||||
|
||||
class RangeAdapter:
|
||||
class RangeAdapter(object):
|
||||
"""`ISQLQuote` adapter for `Range` subclasses.
|
||||
|
||||
This is an abstract class: concrete classes must set a `name` class
|
||||
|
@ -282,7 +287,7 @@ class RangeAdapter:
|
|||
+ b", '" + r._bounds.encode('utf8') + b"')"
|
||||
|
||||
|
||||
class RangeCaster:
|
||||
class RangeCaster(object):
|
||||
"""Helper class to convert between `Range` and PostgreSQL range types.
|
||||
|
||||
Objects of this class are usually created by `register_range()`. Manual
|
||||
|
@ -310,7 +315,7 @@ class RangeCaster:
|
|||
# an implementation detail and is not documented. It is currently used
|
||||
# for the numeric ranges.
|
||||
self.adapter = None
|
||||
if isinstance(pgrange, str):
|
||||
if isinstance(pgrange, string_types):
|
||||
self.adapter = type(pgrange, (RangeAdapter,), {})
|
||||
self.adapter.name = pgrange
|
||||
else:
|
||||
|
@ -327,7 +332,7 @@ class RangeCaster:
|
|||
|
||||
self.range = None
|
||||
try:
|
||||
if isinstance(pyrange, str):
|
||||
if isinstance(pyrange, string_types):
|
||||
self.range = type(pyrange, (Range,), {})
|
||||
if issubclass(pyrange, Range) and pyrange is not Range:
|
||||
self.range = pyrange
|
||||
|
@ -363,54 +368,33 @@ class RangeCaster:
|
|||
schema = 'public'
|
||||
|
||||
# get the type oid and attributes
|
||||
curs.execute("""\
|
||||
select rngtypid, rngsubtype, typarray
|
||||
try:
|
||||
curs.execute("""\
|
||||
select rngtypid, rngsubtype,
|
||||
(select typarray from pg_type where oid = rngtypid)
|
||||
from pg_range r
|
||||
join pg_type t on t.oid = rngtypid
|
||||
join pg_namespace ns on ns.oid = typnamespace
|
||||
where typname = %s and ns.nspname = %s;
|
||||
""", (tname, schema))
|
||||
rec = curs.fetchone()
|
||||
|
||||
if not rec:
|
||||
# The above algorithm doesn't work for customized seach_path
|
||||
# (#1487) The implementation below works better, but, to guarantee
|
||||
# backwards compatibility, use it only if the original one failed.
|
||||
try:
|
||||
savepoint = False
|
||||
# Because we executed statements earlier, we are either INTRANS
|
||||
# or we are IDLE only if the transaction is autocommit, in
|
||||
# which case we don't need the savepoint anyway.
|
||||
if conn.status == STATUS_IN_TRANSACTION:
|
||||
curs.execute("SAVEPOINT register_type")
|
||||
savepoint = True
|
||||
except ProgrammingError:
|
||||
if not conn.autocommit:
|
||||
conn.rollback()
|
||||
raise
|
||||
else:
|
||||
rec = curs.fetchone()
|
||||
|
||||
curs.execute("""\
|
||||
SELECT rngtypid, rngsubtype, typarray, typname, nspname
|
||||
from pg_range r
|
||||
join pg_type t on t.oid = rngtypid
|
||||
join pg_namespace ns on ns.oid = typnamespace
|
||||
WHERE t.oid = %s::regtype
|
||||
""", (name, ))
|
||||
except ProgrammingError:
|
||||
pass
|
||||
else:
|
||||
rec = curs.fetchone()
|
||||
if rec:
|
||||
tname, schema = rec[3:]
|
||||
finally:
|
||||
if savepoint:
|
||||
curs.execute("ROLLBACK TO SAVEPOINT register_type")
|
||||
|
||||
# revert the status of the connection as before the command
|
||||
if conn_status != STATUS_IN_TRANSACTION and not conn.autocommit:
|
||||
conn.rollback()
|
||||
# revert the status of the connection as before the command
|
||||
if (conn_status != STATUS_IN_TRANSACTION
|
||||
and not conn.autocommit):
|
||||
conn.rollback()
|
||||
|
||||
if not rec:
|
||||
raise ProgrammingError(
|
||||
f"PostgreSQL range '{name}' not found")
|
||||
"PostgreSQL type '%s' not found" % name)
|
||||
|
||||
type, subtype, array = rec[:3]
|
||||
type, subtype, array = rec
|
||||
|
||||
return RangeCaster(name, pyrange,
|
||||
oid=type, subtype_oid=subtype, array_oid=array)
|
||||
|
@ -440,7 +424,7 @@ WHERE t.oid = %s::regtype
|
|||
|
||||
m = self._re_range.match(s)
|
||||
if m is None:
|
||||
raise InterfaceError(f"failed to parse range: '{s}'")
|
||||
raise InterfaceError("failed to parse range: '%s'" % s)
|
||||
|
||||
lower = m.group(3)
|
||||
if lower is None:
|
||||
|
@ -520,7 +504,8 @@ class NumberRangeAdapter(RangeAdapter):
|
|||
else:
|
||||
upper = ''
|
||||
|
||||
return (f"'{r._bounds[0]}{lower},{upper}{r._bounds[1]}'").encode('ascii')
|
||||
return ("'%s%s,%s%s'" % (
|
||||
r._bounds[0], lower, upper, r._bounds[1])).encode('ascii')
|
||||
|
||||
|
||||
# TODO: probably won't work with infs, nans and other tricky cases.
|
||||
|
|
19
lib/compat.py
Normal file
|
@ -0,0 +1,19 @@
|
|||
import sys
|
||||
|
||||
__all__ = ['string_types', 'text_type', 'lru_cache']
|
||||
|
||||
if sys.version_info[0] == 2:
|
||||
# Python 2
|
||||
PY2 = True
|
||||
PY3 = False
|
||||
string_types = basestring,
|
||||
text_type = unicode
|
||||
from ._lru_cache import lru_cache
|
||||
|
||||
else:
|
||||
# Python 3
|
||||
PY2 = False
|
||||
PY3 = True
|
||||
string_types = str,
|
||||
text_type = str
|
||||
from functools import lru_cache
|
|
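
An illustrative sketch of how these shims are meant to be consumed inside the
package (psycopg2.compat is an internal module of the 2.8 series, not a public
API; the names below simply mirror the listing)::

    from psycopg2.compat import string_types, text_type, lru_cache

    def ensure_text(value):
        # Accept str and unicode on Python 2, plain str on Python 3.
        if isinstance(value, string_types):
            return text_type(value)
        raise TypeError("expected a string, got %r" % (value,))

    @lru_cache(maxsize=32)
    def cached_upper(key):
        return key.upper()

    print(ensure_text("hello"), cached_upper("abc"))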
@ -1,11 +1,11 @@
|
|||
"""Error codes for PostgreSQL
|
||||
"""Error codes for PostgresSQL
|
||||
|
||||
This module contains symbolic names for all PostgreSQL error codes.
|
||||
"""
|
||||
# psycopg2/errorcodes.py - PostgreSQL error codes
|
||||
#
|
||||
# Copyright (C) 2006-2019 Johan Dahlin <jdahlin@async.com.br>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
# Copyright (C) 2020 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -223,7 +223,6 @@ SQL_JSON_OBJECT_NOT_FOUND = '2203C'
|
|||
TOO_MANY_JSON_ARRAY_ELEMENTS = '2203D'
|
||||
TOO_MANY_JSON_OBJECT_MEMBERS = '2203E'
|
||||
SQL_JSON_SCALAR_REQUIRED = '2203F'
|
||||
SQL_JSON_ITEM_CANNOT_BE_CAST_TO_TARGET_TYPE = '2203G'
|
||||
FLOATING_POINT_EXCEPTION = '22P01'
|
||||
INVALID_TEXT_REPRESENTATION = '22P02'
|
||||
INVALID_BINARY_REPRESENTATION = '22P03'
|
||||
|
@ -256,7 +255,6 @@ HELD_CURSOR_REQUIRES_SAME_ISOLATION_LEVEL = '25008'
|
|||
NO_ACTIVE_SQL_TRANSACTION = '25P01'
|
||||
IN_FAILED_SQL_TRANSACTION = '25P02'
|
||||
IDLE_IN_TRANSACTION_SESSION_TIMEOUT = '25P03'
|
||||
TRANSACTION_TIMEOUT = '25P04'
|
||||
|
||||
# Class 26 - Invalid SQL Statement Name
|
||||
INVALID_SQL_STATEMENT_NAME = '26000'
|
||||
|
@ -393,7 +391,6 @@ ADMIN_SHUTDOWN = '57P01'
|
|||
CRASH_SHUTDOWN = '57P02'
|
||||
CANNOT_CONNECT_NOW = '57P03'
|
||||
DATABASE_DROPPED = '57P04'
|
||||
IDLE_SESSION_TIMEOUT = '57P05'
|
||||
|
||||
# Class 58 - System Error (errors external to PostgreSQL itself)
|
||||
SYSTEM_ERROR = '58000'
|
||||
|
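
A sketch of how these symbolic names are typically used (assuming the "conn"
connection from the documentation examples)::

    import psycopg2
    from psycopg2 import errorcodes

    try:
        with conn.cursor() as cur:
            cur.execute("SELECT 1/0")
    except psycopg2.Error as exc:
        if exc.pgcode == errorcodes.DIVISION_BY_ZERO:
            print("caught a division by zero (SQLSTATE 22012)")
        conn.rollback()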
|
|
@ -4,7 +4,7 @@
|
|||
# psycopg/errors.py - SQLSTATE and DB-API exceptions
|
||||
#
|
||||
# Copyright (C) 2018-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
# Copyright (C) 2020 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
|
|
@ -13,7 +13,7 @@ This module holds all the extensions to the DBAPI-2.0 provided by psycopg.
|
|||
# psycopg/extensions.py - DBAPI-2.0 extensions specific to psycopg
|
||||
#
|
||||
# Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
# Copyright (C) 2020 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -42,6 +42,14 @@ from psycopg2._psycopg import ( # noqa
|
|||
ROWIDARRAY, STRINGARRAY, TIME, TIMEARRAY, UNICODE, UNICODEARRAY,
|
||||
AsIs, Binary, Boolean, Float, Int, QuotedString, )
|
||||
|
||||
try:
|
||||
from psycopg2._psycopg import ( # noqa
|
||||
MXDATE, MXDATETIME, MXDATETIMETZ, MXINTERVAL, MXTIME, MXDATEARRAY,
|
||||
MXDATETIMEARRAY, MXDATETIMETZARRAY, MXINTERVALARRAY, MXTIMEARRAY,
|
||||
DateFromMx, TimeFromMx, TimestampFromMx, IntervalFromMx, )
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
from psycopg2._psycopg import ( # noqa
|
||||
PYDATE, PYDATETIME, PYDATETIMETZ, PYINTERVAL, PYTIME, PYDATEARRAY,
|
||||
PYDATETIMEARRAY, PYDATETIMETZARRAY, PYINTERVALARRAY, PYTIMEARRAY,
|
||||
|
@ -98,7 +106,7 @@ def register_adapter(typ, callable):
|
|||
|
||||
|
||||
# The SQL_IN class is the official adapter for tuples starting from 2.0.6.
|
||||
class SQL_IN:
|
||||
class SQL_IN(object):
|
||||
"""Adapt any iterable to an SQL quotable object."""
|
||||
def __init__(self, seq):
|
||||
self._seq = seq
|
||||
|
@ -122,7 +130,7 @@ class SQL_IN:
|
|||
return str(self.getquoted())
|
||||
|
||||
|
||||
class NoneAdapter:
|
||||
class NoneAdapter(object):
|
||||
"""Adapt None to NULL.
|
||||
|
||||
This adapter is not used normally as a fast path in mogrify uses NULL,
|
||||
|
@ -160,7 +168,7 @@ def make_dsn(dsn=None, **kwargs):
|
|||
tmp.update(kwargs)
|
||||
kwargs = tmp
|
||||
|
||||
dsn = " ".join(["{}={}".format(k, _param_escape(str(v)))
|
||||
dsn = " ".join(["%s=%s" % (k, _param_escape(str(v)))
|
||||
for (k, v) in kwargs.items()])
|
||||
|
||||
# verify that the returned dsn is valid
|
||||
|
|
210
lib/extras.py
|
@ -6,7 +6,7 @@ and classes until a better place in the distribution is found.
|
|||
# psycopg/extras.py - miscellaneous extra goodies for psycopg
|
||||
#
|
||||
# Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
# Copyright (C) 2020 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -38,7 +38,7 @@ from psycopg2 import extensions as _ext
|
|||
from .extensions import cursor as _cursor
|
||||
from .extensions import connection as _connection
|
||||
from .extensions import adapt as _A, quote_ident
|
||||
from functools import lru_cache
|
||||
from .compat import PY2, PY3, lru_cache
|
||||
|
||||
from psycopg2._psycopg import ( # noqa
|
||||
REPLICATION_PHYSICAL, REPLICATION_LOGICAL,
|
||||
|
@ -72,47 +72,47 @@ class DictCursorBase(_cursor):
|
|||
else:
|
||||
raise NotImplementedError(
|
||||
"DictCursorBase can't be instantiated without a row factory.")
|
||||
super().__init__(*args, **kwargs)
|
||||
super(DictCursorBase, self).__init__(*args, **kwargs)
|
||||
self._query_executed = False
|
||||
self._prefetch = False
|
||||
self.row_factory = row_factory
|
||||
|
||||
def fetchone(self):
|
||||
if self._prefetch:
|
||||
res = super().fetchone()
|
||||
res = super(DictCursorBase, self).fetchone()
|
||||
if self._query_executed:
|
||||
self._build_index()
|
||||
if not self._prefetch:
|
||||
res = super().fetchone()
|
||||
res = super(DictCursorBase, self).fetchone()
|
||||
return res
|
||||
|
||||
def fetchmany(self, size=None):
|
||||
if self._prefetch:
|
||||
res = super().fetchmany(size)
|
||||
res = super(DictCursorBase, self).fetchmany(size)
|
||||
if self._query_executed:
|
||||
self._build_index()
|
||||
if not self._prefetch:
|
||||
res = super().fetchmany(size)
|
||||
res = super(DictCursorBase, self).fetchmany(size)
|
||||
return res
|
||||
|
||||
def fetchall(self):
|
||||
if self._prefetch:
|
||||
res = super().fetchall()
|
||||
res = super(DictCursorBase, self).fetchall()
|
||||
if self._query_executed:
|
||||
self._build_index()
|
||||
if not self._prefetch:
|
||||
res = super().fetchall()
|
||||
res = super(DictCursorBase, self).fetchall()
|
||||
return res
|
||||
|
||||
def __iter__(self):
|
||||
try:
|
||||
if self._prefetch:
|
||||
res = super().__iter__()
|
||||
res = super(DictCursorBase, self).__iter__()
|
||||
first = next(res)
|
||||
if self._query_executed:
|
||||
self._build_index()
|
||||
if not self._prefetch:
|
||||
res = super().__iter__()
|
||||
res = super(DictCursorBase, self).__iter__()
|
||||
first = next(res)
|
||||
|
||||
yield first
|
||||
|
@ -126,7 +126,7 @@ class DictConnection(_connection):
|
|||
"""A connection that uses `DictCursor` automatically."""
|
||||
def cursor(self, *args, **kwargs):
|
||||
kwargs.setdefault('cursor_factory', self.cursor_factory or DictCursor)
|
||||
return super().cursor(*args, **kwargs)
|
||||
return super(DictConnection, self).cursor(*args, **kwargs)
|
||||
|
||||
|
||||
class DictCursor(DictCursorBase):
|
||||
|
@ -137,18 +137,18 @@ class DictCursor(DictCursorBase):
|
|||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['row_factory'] = DictRow
|
||||
super().__init__(*args, **kwargs)
|
||||
super(DictCursor, self).__init__(*args, **kwargs)
|
||||
self._prefetch = True
|
||||
|
||||
def execute(self, query, vars=None):
|
||||
self.index = OrderedDict()
|
||||
self._query_executed = True
|
||||
return super().execute(query, vars)
|
||||
return super(DictCursor, self).execute(query, vars)
|
||||
|
||||
def callproc(self, procname, vars=None):
|
||||
self.index = OrderedDict()
|
||||
self._query_executed = True
|
||||
return super().callproc(procname, vars)
|
||||
return super(DictCursor, self).callproc(procname, vars)
|
||||
|
||||
def _build_index(self):
|
||||
if self._query_executed and self.description:
|
||||
|
@ -169,22 +169,22 @@ class DictRow(list):
|
|||
def __getitem__(self, x):
|
||||
if not isinstance(x, (int, slice)):
|
||||
x = self._index[x]
|
||||
return super().__getitem__(x)
|
||||
return super(DictRow, self).__getitem__(x)
|
||||
|
||||
def __setitem__(self, x, v):
|
||||
if not isinstance(x, (int, slice)):
|
||||
x = self._index[x]
|
||||
super().__setitem__(x, v)
|
||||
super(DictRow, self).__setitem__(x, v)
|
||||
|
||||
def items(self):
|
||||
g = super().__getitem__
|
||||
g = super(DictRow, self).__getitem__
|
||||
return ((n, g(self._index[n])) for n in self._index)
|
||||
|
||||
def keys(self):
|
||||
return iter(self._index)
|
||||
|
||||
def values(self):
|
||||
g = super().__getitem__
|
||||
g = super(DictRow, self).__getitem__
|
||||
return (g(self._index[n]) for n in self._index)
|
||||
|
||||
def get(self, x, default=None):
|
||||
|
@ -201,7 +201,7 @@ class DictRow(list):
|
|||
|
||||
def __reduce__(self):
|
||||
# this is apparently useless, but it fixes #1073
|
||||
return super().__reduce__()
|
||||
return super(DictRow, self).__reduce__()
|
||||
|
||||
def __getstate__(self):
|
||||
return self[:], self._index.copy()
|
||||
|
@ -210,12 +210,27 @@ class DictRow(list):
|
|||
self[:] = data[0]
|
||||
self._index = data[1]
|
||||
|
||||
if PY2:
|
||||
iterkeys = keys
|
||||
itervalues = values
|
||||
iteritems = items
|
||||
has_key = __contains__
|
||||
|
||||
def keys(self):
|
||||
return list(self.iterkeys())
|
||||
|
||||
def values(self):
|
||||
return tuple(self.itervalues())
|
||||
|
||||
def items(self):
|
||||
return list(self.iteritems())
|
||||
|
||||
|
||||
class RealDictConnection(_connection):
|
||||
"""A connection that uses `RealDictCursor` automatically."""
|
||||
def cursor(self, *args, **kwargs):
|
||||
kwargs.setdefault('cursor_factory', self.cursor_factory or RealDictCursor)
|
||||
return super().cursor(*args, **kwargs)
|
||||
return super(RealDictConnection, self).cursor(*args, **kwargs)
|
||||
|
||||
|
||||
class RealDictCursor(DictCursorBase):
|
||||
|
@ -228,17 +243,17 @@ class RealDictCursor(DictCursorBase):
|
|||
"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['row_factory'] = RealDictRow
|
||||
super().__init__(*args, **kwargs)
|
||||
super(RealDictCursor, self).__init__(*args, **kwargs)
|
||||
|
||||
def execute(self, query, vars=None):
|
||||
self.column_mapping = []
|
||||
self._query_executed = True
|
||||
return super().execute(query, vars)
|
||||
return super(RealDictCursor, self).execute(query, vars)
|
||||
|
||||
def callproc(self, procname, vars=None):
|
||||
self.column_mapping = []
|
||||
self._query_executed = True
|
||||
return super().callproc(procname, vars)
|
||||
return super(RealDictCursor, self).callproc(procname, vars)
|
||||
|
||||
def _build_index(self):
|
||||
if self._query_executed and self.description:
|
||||
|
@ -256,7 +271,7 @@ class RealDictRow(OrderedDict):
|
|||
else:
|
||||
cursor = None
|
||||
|
||||
super().__init__(*args, **kwargs)
|
||||
super(RealDictRow, self).__init__(*args, **kwargs)
|
||||
|
||||
if cursor is not None:
|
||||
# Required for named cursors
|
||||
|
@ -272,20 +287,20 @@ class RealDictRow(OrderedDict):
|
|||
if RealDictRow in self:
|
||||
# We are in the row building phase
|
||||
mapping = self[RealDictRow]
|
||||
super().__setitem__(mapping[key], value)
|
||||
super(RealDictRow, self).__setitem__(mapping[key], value)
|
||||
if key == len(mapping) - 1:
|
||||
# Row building finished
|
||||
del self[RealDictRow]
|
||||
return
|
||||
|
||||
super().__setitem__(key, value)
|
||||
super(RealDictRow, self).__setitem__(key, value)
|
||||
|
||||
|
||||
class NamedTupleConnection(_connection):
|
||||
"""A connection that uses `NamedTupleCursor` automatically."""
|
||||
def cursor(self, *args, **kwargs):
|
||||
kwargs.setdefault('cursor_factory', self.cursor_factory or NamedTupleCursor)
|
||||
return super().cursor(*args, **kwargs)
|
||||
return super(NamedTupleConnection, self).cursor(*args, **kwargs)
|
||||
|
||||
|
||||
class NamedTupleCursor(_cursor):
|
||||
|
@ -309,18 +324,18 @@ class NamedTupleCursor(_cursor):
|
|||
|
||||
def execute(self, query, vars=None):
|
||||
self.Record = None
|
||||
return super().execute(query, vars)
|
||||
return super(NamedTupleCursor, self).execute(query, vars)
|
||||
|
||||
def executemany(self, query, vars):
|
||||
self.Record = None
|
||||
return super().executemany(query, vars)
|
||||
return super(NamedTupleCursor, self).executemany(query, vars)
|
||||
|
||||
def callproc(self, procname, vars=None):
|
||||
self.Record = None
|
||||
return super().callproc(procname, vars)
|
||||
return super(NamedTupleCursor, self).callproc(procname, vars)
|
||||
|
||||
def fetchone(self):
|
||||
t = super().fetchone()
|
||||
t = super(NamedTupleCursor, self).fetchone()
|
||||
if t is not None:
|
||||
nt = self.Record
|
||||
if nt is None:
|
||||
|
@ -328,14 +343,14 @@ class NamedTupleCursor(_cursor):
|
|||
return nt._make(t)
|
||||
|
||||
def fetchmany(self, size=None):
|
||||
ts = super().fetchmany(size)
|
||||
ts = super(NamedTupleCursor, self).fetchmany(size)
|
||||
nt = self.Record
|
||||
if nt is None:
|
||||
nt = self.Record = self._make_nt()
|
||||
return list(map(nt._make, ts))
|
||||
|
||||
def fetchall(self):
|
||||
ts = super().fetchall()
|
||||
ts = super(NamedTupleCursor, self).fetchall()
|
||||
nt = self.Record
|
||||
if nt is None:
|
||||
nt = self.Record = self._make_nt()
|
||||
|
@ -343,7 +358,7 @@ class NamedTupleCursor(_cursor):
|
|||
|
||||
def __iter__(self):
|
||||
try:
|
||||
it = super().__iter__()
|
||||
it = super(NamedTupleCursor, self).__iter__()
|
||||
t = next(it)
|
||||
|
||||
nt = self.Record
|
||||
|
@ -357,6 +372,10 @@ class NamedTupleCursor(_cursor):
|
|||
except StopIteration:
|
||||
return
|
||||
|
||||
# ascii except alnum and underscore
|
||||
_re_clean = _re.compile(
|
||||
'[' + _re.escape(' !"#$%&\'()*+,-./:;<=>?@[\\]^`{|}~') + ']')
|
||||
|
||||
def _make_nt(self):
|
||||
key = tuple(d[0] for d in self.description) if self.description else ()
|
||||
return self._cached_make_nt(key)
|
||||
|
@ -365,7 +384,7 @@ class NamedTupleCursor(_cursor):
|
|||
def _do_make_nt(cls, key):
|
||||
fields = []
|
||||
for s in key:
|
||||
s = _re_clean.sub('_', s)
|
||||
s = cls._re_clean.sub('_', s)
|
||||
# Python identifier cannot start with numbers, namedtuple fields
|
||||
# cannot start with underscore. So...
|
||||
if s[0] == '_' or '0' <= s[0] <= '9':
|
||||
|
@ -417,7 +436,7 @@ class LoggingConnection(_connection):
|
|||
def _logtofile(self, msg, curs):
|
||||
msg = self.filter(msg, curs)
|
||||
if msg:
|
||||
if isinstance(msg, bytes):
|
||||
if PY3 and isinstance(msg, bytes):
|
||||
msg = msg.decode(_ext.encodings[self.encoding], 'replace')
|
||||
self._logobj.write(msg + _os.linesep)
|
||||
|
||||
|
@ -434,7 +453,7 @@ class LoggingConnection(_connection):
|
|||
def cursor(self, *args, **kwargs):
|
||||
self._check()
|
||||
kwargs.setdefault('cursor_factory', self.cursor_factory or LoggingCursor)
|
||||
return super().cursor(*args, **kwargs)
|
||||
return super(LoggingConnection, self).cursor(*args, **kwargs)
|
||||
|
||||
|
||||
class LoggingCursor(_cursor):
|
||||
|
@ -442,13 +461,13 @@ class LoggingCursor(_cursor):
|
|||
|
||||
def execute(self, query, vars=None):
|
||||
try:
|
||||
return super().execute(query, vars)
|
||||
return super(LoggingCursor, self).execute(query, vars)
|
||||
finally:
|
||||
self.connection.log(self.query, self)
|
||||
|
||||
def callproc(self, procname, vars=None):
|
||||
try:
|
||||
return super().callproc(procname, vars)
|
||||
return super(LoggingCursor, self).callproc(procname, vars)
|
||||
finally:
|
||||
self.connection.log(self.query, self)
|
||||
|
||||
|
@ -471,9 +490,9 @@ class MinTimeLoggingConnection(LoggingConnection):
|
|||
def filter(self, msg, curs):
|
||||
t = (_time.time() - curs.timestamp) * 1000
|
||||
if t > self._mintime:
|
||||
if isinstance(msg, bytes):
|
||||
if PY3 and isinstance(msg, bytes):
|
||||
msg = msg.decode(_ext.encodings[self.encoding], 'replace')
|
||||
return f"{msg}{_os.linesep} (execution time: {t} ms)"
|
||||
return msg + _os.linesep + " (execution time: %d ms)" % t
|
||||
|
||||
def cursor(self, *args, **kwargs):
|
||||
kwargs.setdefault('cursor_factory',
|
||||
|
@ -497,14 +516,14 @@ class LogicalReplicationConnection(_replicationConnection):
|
|||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['replication_type'] = REPLICATION_LOGICAL
|
||||
super().__init__(*args, **kwargs)
|
||||
super(LogicalReplicationConnection, self).__init__(*args, **kwargs)
|
||||
|
||||
|
||||
class PhysicalReplicationConnection(_replicationConnection):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['replication_type'] = REPLICATION_PHYSICAL
|
||||
super().__init__(*args, **kwargs)
|
||||
super(PhysicalReplicationConnection, self).__init__(*args, **kwargs)
|
||||
|
||||
|
||||
class StopReplication(Exception):
|
||||
|
@ -525,7 +544,7 @@ class ReplicationCursor(_replicationCursor):
|
|||
def create_replication_slot(self, slot_name, slot_type=None, output_plugin=None):
|
||||
"""Create streaming replication slot."""
|
||||
|
||||
command = f"CREATE_REPLICATION_SLOT {quote_ident(slot_name, self)} "
|
||||
command = "CREATE_REPLICATION_SLOT %s " % quote_ident(slot_name, self)
|
||||
|
||||
if slot_type is None:
|
||||
slot_type = self.connection.replication_type
|
||||
|
@ -536,7 +555,7 @@ class ReplicationCursor(_replicationCursor):
|
|||
"output plugin name is required to create "
|
||||
"logical replication slot")
|
||||
|
||||
command += f"LOGICAL {quote_ident(output_plugin, self)}"
|
||||
command += "LOGICAL %s" % quote_ident(output_plugin, self)
|
||||
|
||||
elif slot_type == REPLICATION_PHYSICAL:
|
||||
if output_plugin is not None:
|
||||
|
@ -548,14 +567,14 @@ class ReplicationCursor(_replicationCursor):
|
|||
|
||||
else:
|
||||
raise psycopg2.ProgrammingError(
|
||||
f"unrecognized replication type: {repr(slot_type)}")
|
||||
"unrecognized replication type: %s" % repr(slot_type))
|
||||
|
||||
self.execute(command)
|
||||
|
||||
def drop_replication_slot(self, slot_name):
|
||||
"""Drop streaming replication slot."""
|
||||
|
||||
command = f"DROP_REPLICATION_SLOT {quote_ident(slot_name, self)}"
|
||||
command = "DROP_REPLICATION_SLOT %s" % quote_ident(slot_name, self)
|
||||
self.execute(command)
|
||||
|
||||
def start_replication(
|
||||
|
@ -570,7 +589,7 @@ class ReplicationCursor(_replicationCursor):
|
|||
|
||||
if slot_type == REPLICATION_LOGICAL:
|
||||
if slot_name:
|
||||
command += f"SLOT {quote_ident(slot_name, self)} "
|
||||
command += "SLOT %s " % quote_ident(slot_name, self)
|
||||
else:
|
||||
raise psycopg2.ProgrammingError(
|
||||
"slot name is required for logical replication")
|
||||
|
@ -579,18 +598,19 @@ class ReplicationCursor(_replicationCursor):
|
|||
|
||||
elif slot_type == REPLICATION_PHYSICAL:
|
||||
if slot_name:
|
||||
command += f"SLOT {quote_ident(slot_name, self)} "
|
||||
command += "SLOT %s " % quote_ident(slot_name, self)
|
||||
# don't add "PHYSICAL", before 9.4 it was just START_REPLICATION XXX/XXX
|
||||
|
||||
else:
|
||||
raise psycopg2.ProgrammingError(
|
||||
f"unrecognized replication type: {repr(slot_type)}")
|
||||
"unrecognized replication type: %s" % repr(slot_type))
|
||||
|
||||
if type(start_lsn) is str:
|
||||
lsn = start_lsn.split('/')
|
||||
lsn = f"{int(lsn[0], 16):X}/{int(lsn[1], 16):08X}"
|
||||
lsn = "%X/%08X" % (int(lsn[0], 16), int(lsn[1], 16))
|
||||
else:
|
||||
lsn = f"{start_lsn >> 32 & 4294967295:X}/{start_lsn & 4294967295:08X}"
|
||||
lsn = "%X/%08X" % ((start_lsn >> 32) & 0xFFFFFFFF,
|
||||
start_lsn & 0xFFFFFFFF)
|
||||
|
||||
command += lsn
|
||||
|
||||
|
@ -599,7 +619,7 @@ class ReplicationCursor(_replicationCursor):
|
|||
raise psycopg2.ProgrammingError(
|
||||
"cannot specify timeline for logical replication")
|
||||
|
||||
command += f" TIMELINE {timeline}"
|
||||
command += " TIMELINE %d" % timeline
|
||||
|
||||
if options:
|
||||
if slot_type == REPLICATION_PHYSICAL:
|
||||
|
@ -610,7 +630,7 @@ class ReplicationCursor(_replicationCursor):
|
|||
for k, v in options.items():
|
||||
if not command.endswith('('):
|
||||
command += ", "
|
||||
command += f"{quote_ident(k, self)} {_A(str(v))}"
|
||||
command += "%s %s" % (quote_ident(k, self), _A(str(v)))
|
||||
command += ")"
|
||||
|
||||
self.start_replication_expert(
|
||||
|
@ -623,7 +643,7 @@ class ReplicationCursor(_replicationCursor):
|
|||
|
||||
# a dbtype and adapter for Python UUID type
|
||||
|
||||
class UUID_adapter:
|
||||
class UUID_adapter(object):
|
||||
"""Adapt Python's uuid.UUID__ type to PostgreSQL's uuid__.
|
||||
|
||||
.. __: https://docs.python.org/library/uuid.html
|
||||
|
@ -638,10 +658,10 @@ class UUID_adapter:
|
|||
return self
|
||||
|
||||
def getquoted(self):
|
||||
return (f"'{self._uuid}'::uuid").encode('utf8')
|
||||
return ("'%s'::uuid" % self._uuid).encode('utf8')
|
||||
|
||||
def __str__(self):
|
||||
return f"'{self._uuid}'::uuid"
|
||||
return "'%s'::uuid" % self._uuid
|
||||
|
||||
|
||||
def register_uuid(oids=None, conn_or_curs=None):
|
||||
|
@ -678,7 +698,7 @@ def register_uuid(oids=None, conn_or_curs=None):
|
|||
|
||||
# a type, dbtype and adapter for PostgreSQL inet type
|
||||
|
||||
class Inet:
|
||||
class Inet(object):
|
||||
"""Wrap a string to allow for correct SQL-quoting of inet values.
|
||||
|
||||
Note that this adapter does NOT check the passed value to make
|
||||
|
@ -690,7 +710,7 @@ class Inet:
|
|||
self.addr = addr
|
||||
|
||||
def __repr__(self):
|
||||
return f"{self.__class__.__name__}({self.addr!r})"
|
||||
return "%s(%r)" % (self.__class__.__name__, self.addr)
|
||||
|
||||
def prepare(self, conn):
|
||||
self._conn = conn
|
||||
|
@ -763,7 +783,7 @@ def wait_select(conn):
|
|||
elif state == POLL_WRITE:
|
||||
select.select([], [conn.fileno()], [])
|
||||
else:
|
||||
raise conn.OperationalError(f"bad state from poll: {state}")
|
||||
raise conn.OperationalError("bad state from poll: %s" % state)
|
||||
except KeyboardInterrupt:
|
||||
conn.cancel()
|
||||
# the loop will be broken by a server error
|
||||
|
@ -785,7 +805,7 @@ def _solve_conn_curs(conn_or_curs):
|
|||
return conn, curs
|
||||
|
||||
|
||||
class HstoreAdapter:
|
||||
class HstoreAdapter(object):
|
||||
"""Adapt a Python dict to the hstore syntax."""
|
||||
def __init__(self, wrapped):
|
||||
self.wrapped = wrapped
|
||||
|
@ -865,7 +885,7 @@ class HstoreAdapter:
|
|||
for m in self._re_hstore.finditer(s):
|
||||
if m is None or m.start() != start:
|
||||
raise psycopg2.InterfaceError(
|
||||
f"error parsing hstore pair at char {start}")
|
||||
"error parsing hstore pair at char %d" % start)
|
||||
k = _bsdec.sub(r'\1', m.group(1))
|
||||
v = m.group(2)
|
||||
if v is not None:
|
||||
|
@ -876,7 +896,7 @@ class HstoreAdapter:
|
|||
|
||||
if start < len(s):
|
||||
raise psycopg2.InterfaceError(
|
||||
f"error parsing hstore: unparsed data after char {start}")
|
||||
"error parsing hstore: unparsed data after char %d" % start)
|
||||
|
||||
return rv
|
||||
|
||||
|
@ -904,11 +924,12 @@ class HstoreAdapter:
|
|||
rv0, rv1 = [], []
|
||||
|
||||
# get the oid for the hstore
|
||||
curs.execute(f"""SELECT t.oid, {typarray}
|
||||
curs.execute("""\
|
||||
SELECT t.oid, %s
|
||||
FROM pg_type t JOIN pg_namespace ns
|
||||
ON typnamespace = ns.oid
|
||||
WHERE typname = 'hstore';
|
||||
""")
|
||||
""" % typarray)
|
||||
for oids in curs:
|
||||
rv0.append(oids[0])
|
||||
rv1.append(oids[1])
|
||||
|
@ -972,7 +993,12 @@ def register_hstore(conn_or_curs, globally=False, unicode=False,
|
|||
array_oid = tuple([x for x in array_oid if x])
|
||||
|
||||
# create and register the typecaster
|
||||
HSTORE = _ext.new_type(oid, "HSTORE", HstoreAdapter.parse)
|
||||
if PY2 and unicode:
|
||||
cast = HstoreAdapter.parse_unicode
|
||||
else:
|
||||
cast = HstoreAdapter.parse
|
||||
|
||||
HSTORE = _ext.new_type(oid, "HSTORE", cast)
|
||||
_ext.register_type(HSTORE, not globally and conn_or_curs or None)
|
||||
_ext.register_adapter(dict, HstoreAdapter)
|
||||
|
||||
|
@ -981,7 +1007,7 @@ def register_hstore(conn_or_curs, globally=False, unicode=False,
|
|||
_ext.register_type(HSTOREARRAY, not globally and conn_or_curs or None)
|
||||
|
||||
|
||||
class CompositeCaster:
|
||||
class CompositeCaster(object):
|
||||
"""Helps conversion of a PostgreSQL composite type into a Python object.
|
||||
|
||||
The class is usually created by the `register_composite()` function.
|
||||
|
@ -1002,7 +1028,7 @@ class CompositeCaster:
|
|||
self.typecaster = _ext.new_type((oid,), name, self.parse)
|
||||
if array_oid:
|
||||
self.array_typecaster = _ext.new_array_type(
|
||||
(array_oid,), f"{name}ARRAY", self.typecaster)
|
||||
(array_oid,), "%sARRAY" % name, self.typecaster)
|
||||
else:
|
||||
self.array_typecaster = None
|
||||
|
||||
|
@ -1046,7 +1072,7 @@ class CompositeCaster:
|
|||
rv = []
|
||||
for m in self._re_tokenize.finditer(s):
|
||||
if m is None:
|
||||
raise psycopg2.InterfaceError(f"can't parse type: {s!r}")
|
||||
raise psycopg2.InterfaceError("can't parse type: %r" % s)
|
||||
if m.group(1) is not None:
|
||||
rv.append(None)
|
||||
elif m.group(2) is not None:
|
||||
|
@ -1057,7 +1083,6 @@ class CompositeCaster:
|
|||
return rv
|
||||
|
||||
def _create_type(self, name, attnames):
|
||||
name = _re_clean.sub('_', name)
|
||||
self.type = namedtuple(name, attnames)
|
||||
self._ctor = self.type._make
|
||||
|
||||
|
@ -1095,46 +1120,14 @@ ORDER BY attnum;
|
|||
|
||||
recs = curs.fetchall()
|
||||
|
||||
if not recs:
|
||||
# The above algorithm doesn't work for customized seach_path
|
||||
# (#1487) The implementation below works better, but, to guarantee
|
||||
# backwards compatibility, use it only if the original one failed.
|
||||
try:
|
||||
savepoint = False
|
||||
# Because we executed statements earlier, we are either INTRANS
|
||||
# or we are IDLE only if the transaction is autocommit, in
|
||||
# which case we don't need the savepoint anyway.
|
||||
if conn.status == _ext.STATUS_IN_TRANSACTION:
|
||||
curs.execute("SAVEPOINT register_type")
|
||||
savepoint = True
|
||||
|
||||
curs.execute("""\
|
||||
SELECT t.oid, %s, attname, atttypid, typname, nspname
|
||||
FROM pg_type t
|
||||
JOIN pg_namespace ns ON typnamespace = ns.oid
|
||||
JOIN pg_attribute a ON attrelid = typrelid
|
||||
WHERE t.oid = %%s::regtype
|
||||
AND attnum > 0 AND NOT attisdropped
|
||||
ORDER BY attnum;
|
||||
""" % typarray, (name, ))
|
||||
except psycopg2.ProgrammingError:
|
||||
pass
|
||||
else:
|
||||
recs = curs.fetchall()
|
||||
if recs:
|
||||
tname = recs[0][4]
|
||||
schema = recs[0][5]
|
||||
finally:
|
||||
if savepoint:
|
||||
curs.execute("ROLLBACK TO SAVEPOINT register_type")
|
||||
|
||||
# revert the status of the connection as before the command
|
||||
if conn_status != _ext.STATUS_IN_TRANSACTION and not conn.autocommit:
|
||||
if (conn_status != _ext.STATUS_IN_TRANSACTION
|
||||
and not conn.autocommit):
|
||||
conn.rollback()
|
||||
|
||||
if not recs:
|
||||
raise psycopg2.ProgrammingError(
|
||||
f"PostgreSQL type '{name}' not found")
|
||||
"PostgreSQL type '%s' not found" % name)
|
||||
|
||||
type_oid = recs[0][0]
|
||||
array_oid = recs[0][1]
|
||||
|
@ -1333,8 +1326,3 @@ def _split_sql(sql):
|
|||
raise ValueError("the query doesn't contain any '%s' placeholder")
|
||||
|
||||
return pre, post
|
||||
|
||||
|
||||
# ascii except alnum and underscore
|
||||
_re_clean = _re.compile(
|
||||
'[' + _re.escape(' !"#$%&\'()*+,-./:;<=>?@[\\]^`{|}~') + ']')
|
||||
|
|
|
@ -5,7 +5,7 @@ This module implements thread-safe (and not) connection pools.
|
|||
# psycopg/pool.py - pooling code for psycopg
|
||||
#
|
||||
# Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
# Copyright (C) 2020 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -33,7 +33,7 @@ class PoolError(psycopg2.Error):
|
|||
pass
|
||||
|
||||
|
||||
class AbstractConnectionPool:
|
||||
class AbstractConnectionPool(object):
|
||||
"""Generic key-based pooling code."""
|
||||
|
||||
def __init__(self, minconn, maxconn, *args, **kwargs):
|
||||
|
|
43
lib/sql.py
|
@ -4,7 +4,7 @@
|
|||
# psycopg/sql.py - SQL composition utility module
|
||||
#
|
||||
# Copyright (C) 2016-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
# Copyright (C) 2020 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -27,12 +27,13 @@
|
|||
import string
|
||||
|
||||
from psycopg2 import extensions as ext
|
||||
from psycopg2.compat import PY3, string_types
|
||||
|
||||
|
||||
_formatter = string.Formatter()
|
||||
|
||||
|
||||
class Composable:
|
||||
class Composable(object):
|
||||
"""
|
||||
Abstract base class for objects that can be used to compose an SQL string.
|
||||
|
||||
|
@ -50,7 +51,7 @@ class Composable:
|
|||
self._wrapped = wrapped
|
||||
|
||||
def __repr__(self):
|
||||
return f"{self.__class__.__name__}({self._wrapped!r})"
|
||||
return "%s(%r)" % (self.__class__.__name__, self._wrapped)
|
||||
|
||||
def as_string(self, context):
|
||||
"""
|
||||
|
@ -106,10 +107,10 @@ class Composed(Composable):
|
|||
for i in seq:
|
||||
if not isinstance(i, Composable):
|
||||
raise TypeError(
|
||||
f"Composed elements must be Composable, got {i!r} instead")
|
||||
"Composed elements must be Composable, got %r instead" % i)
|
||||
wrapped.append(i)
|
||||
|
||||
super().__init__(wrapped)
|
||||
super(Composed, self).__init__(wrapped)
|
||||
|
||||
@property
|
||||
def seq(self):
|
||||
|
@ -147,7 +148,7 @@ class Composed(Composable):
|
|||
"foo", "bar"
|
||||
|
||||
"""
|
||||
if isinstance(joiner, str):
|
||||
if isinstance(joiner, string_types):
|
||||
joiner = SQL(joiner)
|
||||
elif not isinstance(joiner, SQL):
|
||||
raise TypeError(
|
||||
|
@ -179,9 +180,9 @@ class SQL(Composable):
|
|||
select "foo", "bar" from "table"
|
||||
"""
|
||||
def __init__(self, string):
|
||||
if not isinstance(string, str):
|
||||
if not isinstance(string, string_types):
|
||||
raise TypeError("SQL values must be strings")
|
||||
super().__init__(string)
|
||||
super(SQL, self).__init__(string)
|
||||
|
||||
@property
|
||||
def string(self):
|
||||
|
@ -323,10 +324,10 @@ class Identifier(Composable):
|
|||
raise TypeError("Identifier cannot be empty")
|
||||
|
||||
for s in strings:
|
||||
if not isinstance(s, str):
|
||||
if not isinstance(s, string_types):
|
||||
raise TypeError("SQL identifier parts must be strings")
|
||||
|
||||
super().__init__(strings)
|
||||
super(Identifier, self).__init__(strings)
|
||||
|
||||
@property
|
||||
def strings(self):
|
||||
|
@ -344,7 +345,9 @@ class Identifier(Composable):
|
|||
"the Identifier wraps more than one than one string")
|
||||
|
||||
def __repr__(self):
|
||||
return f"{self.__class__.__name__}({', '.join(map(repr, self._wrapped))})"
|
||||
return "%s(%s)" % (
|
||||
self.__class__.__name__,
|
||||
', '.join(map(repr, self._wrapped)))
|
||||
|
||||
def as_string(self, context):
|
||||
return '.'.join(ext.quote_ident(s, context) for s in self._wrapped)
|
||||
|
@ -389,7 +392,7 @@ class Literal(Composable):
|
|||
a.prepare(conn)
|
||||
|
||||
rv = a.getquoted()
|
||||
if isinstance(rv, bytes):
|
||||
if PY3 and isinstance(rv, bytes):
|
||||
rv = rv.decode(ext.encodings[conn.encoding])
|
||||
|
||||
return rv
|
||||
|
@ -423,14 +426,14 @@ class Placeholder(Composable):
|
|||
"""
|
||||
|
||||
def __init__(self, name=None):
|
||||
if isinstance(name, str):
|
||||
if isinstance(name, string_types):
|
||||
if ')' in name:
|
||||
raise ValueError(f"invalid name: {name!r}")
|
||||
raise ValueError("invalid name: %r" % name)
|
||||
|
||||
elif name is not None:
|
||||
raise TypeError(f"expected string or None as name, got {name!r}")
|
||||
raise TypeError("expected string or None as name, got %r" % name)
|
||||
|
||||
super().__init__(name)
|
||||
super(Placeholder, self).__init__(name)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
|
@ -438,14 +441,12 @@ class Placeholder(Composable):
|
|||
return self._wrapped
|
||||
|
||||
def __repr__(self):
|
||||
if self._wrapped is None:
|
||||
return f"{self.__class__.__name__}()"
|
||||
else:
|
||||
return f"{self.__class__.__name__}({self._wrapped!r})"
|
||||
return "Placeholder(%r)" % (
|
||||
self._wrapped if self._wrapped is not None else '',)
|
||||
|
||||
def as_string(self, context):
|
||||
if self._wrapped is not None:
|
||||
return f"%({self._wrapped})s"
|
||||
return "%%(%s)s" % self._wrapped
|
||||
else:
|
||||
return "%s"
|
||||
|
||||
|
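
A short usage sketch of the composition objects above (assuming the "conn"
connection and "test" table from the documentation examples)::

    from psycopg2 import sql

    query = sql.SQL("SELECT {fields} FROM {table} WHERE num = {ph}").format(
        fields=sql.SQL(", ").join([sql.Identifier("num"), sql.Identifier("data")]),
        table=sql.Identifier("test"),
        ph=sql.Placeholder(),
    )
    with conn.cursor() as cur:
        cur.execute(query, (42,))
        print(cur.fetchall())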
|
49
lib/tz.py
|
@ -7,7 +7,7 @@ functions or used to set the .tzinfo_factory attribute in cursors.
|
|||
# psycopg/tz.py - tzinfo implementation
|
||||
#
|
||||
# Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
# Copyright (C) 2020 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -45,11 +45,6 @@ class FixedOffsetTimezone(datetime.tzinfo):
|
|||
offset and name that instance will be returned. This saves memory and
|
||||
improves comparability.
|
||||
|
||||
.. versionchanged:: 2.9
|
||||
|
||||
The constructor can take either a timedelta or a number of minutes of
|
||||
offset. Previously only minutes were supported.
|
||||
|
||||
.. __: https://docs.python.org/library/datetime.html
|
||||
"""
|
||||
_name = None
|
||||
|
@ -59,9 +54,7 @@ class FixedOffsetTimezone(datetime.tzinfo):
|
|||
|
||||
def __init__(self, offset=None, name=None):
|
||||
if offset is not None:
|
||||
if not isinstance(offset, datetime.timedelta):
|
||||
offset = datetime.timedelta(minutes=offset)
|
||||
self._offset = offset
|
||||
self._offset = datetime.timedelta(minutes=offset)
|
||||
if name is not None:
|
||||
self._name = name
|
||||
|
||||
|
@ -72,28 +65,18 @@ class FixedOffsetTimezone(datetime.tzinfo):
|
|||
try:
|
||||
return cls._cache[key]
|
||||
except KeyError:
|
||||
tz = super().__new__(cls, offset, name)
|
||||
tz = super(FixedOffsetTimezone, cls).__new__(cls, offset, name)
|
||||
cls._cache[key] = tz
|
||||
return tz
|
||||
|
||||
def __repr__(self):
|
||||
offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60
|
||||
return "psycopg2.tz.FixedOffsetTimezone(offset=%r, name=%r)" \
|
||||
% (self._offset, self._name)
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, FixedOffsetTimezone):
|
||||
return self._offset == other._offset
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
def __ne__(self, other):
|
||||
if isinstance(other, FixedOffsetTimezone):
|
||||
return self._offset != other._offset
|
||||
else:
|
||||
return NotImplemented
|
||||
% (offset_mins, self._name)
|
||||
|
||||
def __getinitargs__(self):
|
||||
return self._offset, self._name
|
||||
offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60
|
||||
return offset_mins, self._name
|
||||
|
||||
def utcoffset(self, dt):
|
||||
return self._offset
|
||||
|
@ -101,16 +84,14 @@ class FixedOffsetTimezone(datetime.tzinfo):
|
|||
def tzname(self, dt):
|
||||
if self._name is not None:
|
||||
return self._name
|
||||
|
||||
minutes, seconds = divmod(self._offset.total_seconds(), 60)
|
||||
hours, minutes = divmod(minutes, 60)
|
||||
rv = "%+03d" % hours
|
||||
if minutes or seconds:
|
||||
rv += ":%02d" % minutes
|
||||
if seconds:
|
||||
rv += ":%02d" % seconds
|
||||
|
||||
return rv
|
||||
else:
|
||||
seconds = self._offset.seconds + self._offset.days * 86400
|
||||
hours, seconds = divmod(seconds, 3600)
|
||||
minutes = seconds / 60
|
||||
if minutes:
|
||||
return "%+03d:%d" % (hours, minutes)
|
||||
else:
|
||||
return "%+03d" % hours
|
||||
|
||||
def dst(self, dt):
|
||||
return ZERO
|
||||
|
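
A hedged sketch of the FixedOffsetTimezone behaviour shown above (the repr
differs between the 2.8 and 2.9 lines, but the offset handling is the same)::

    import datetime
    from psycopg2.tz import FixedOffsetTimezone

    tz = FixedOffsetTimezone(offset=60, name=None)     # UTC+01:00
    dt = datetime.datetime(2010, 1, 1, 10, 30, 45, tzinfo=tz)
    print(dt.utcoffset())   # 1:00:00
    print(dt.tzname())      # "+01", derived from the offset when no name is given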
|
|
@ -1,7 +1,7 @@
|
|||
/* adapter_asis.c - adapt types as they are
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -45,12 +45,14 @@ asis_getquoted(asisObject *self, PyObject *args)
|
|||
}
|
||||
else {
|
||||
rv = PyObject_Str(self->wrapped);
|
||||
/* unicode to bytes */
|
||||
#if PY_3
|
||||
/* unicode to bytes in Py3 */
|
||||
if (rv) {
|
||||
PyObject *tmp = PyUnicode_AsUTF8String(rv);
|
||||
Py_DECREF(rv);
|
||||
rv = tmp;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
return rv;
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* adapter_asis.h - definition for the psycopg AsIs type wrapper
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* adapter_binary.c - Binary objects
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -76,6 +76,15 @@ binary_quote(binaryObject *self)
|
|||
buffer_len = view.len;
|
||||
}
|
||||
|
||||
#if PY_2
|
||||
if (!buffer && (Bytes_Check(self->wrapped) || PyBuffer_Check(self->wrapped))) {
|
||||
if (PyObject_AsReadBuffer(self->wrapped, (const void **)&buffer,
|
||||
&buffer_len) < 0) {
|
||||
goto exit;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
if (!buffer) {
|
||||
goto exit;
|
||||
}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* adapter_binary.h - definition for the Binary type
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* adapter_datetime.c - python date/time objects
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -423,8 +423,8 @@ psyco_TimeFromTicks(PyObject *self, PyObject *args)
|
|||
PyObject *
|
||||
psyco_TimestampFromTicks(PyObject *self, PyObject *args)
|
||||
{
|
||||
pydatetimeObject *wrapper = NULL;
|
||||
PyObject *dt_aware = NULL;
|
||||
PyObject *m = NULL;
|
||||
PyObject *tz = NULL;
|
||||
PyObject *res = NULL;
|
||||
struct tm tm;
|
||||
time_t t;
|
||||
|
@ -433,6 +433,10 @@ psyco_TimestampFromTicks(PyObject *self, PyObject *args)
|
|||
if (!PyArg_ParseTuple(args, "d", &ticks))
|
||||
return NULL;
|
||||
|
||||
/* get psycopg2.tz.LOCAL from pythonland */
|
||||
if (!(m = PyImport_ImportModule("psycopg2.tz"))) { goto exit; }
|
||||
if (!(tz = PyObject_GetAttrString(m, "LOCAL"))) { goto exit; }
|
||||
|
||||
t = (time_t)floor(ticks);
|
||||
ticks -= (double)t;
|
||||
if (!localtime_r(&t, &tm)) {
|
||||
|
@ -440,29 +444,14 @@ psyco_TimestampFromTicks(PyObject *self, PyObject *args)
|
|||
goto exit;
|
||||
}
|
||||
|
||||
/* Convert the tm to a wrapper containing a naive datetime.datetime */
|
||||
if (!(wrapper = (pydatetimeObject *)_psyco_Timestamp(
|
||||
tm.tm_year + 1900, tm.tm_mon + 1, tm.tm_mday,
|
||||
tm.tm_hour, tm.tm_min, (double)tm.tm_sec + ticks, NULL))) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
/* Localize the datetime and assign it back to the wrapper */
|
||||
if (!(dt_aware = PyObject_CallMethod(
|
||||
wrapper->wrapped, "astimezone", NULL))) {
|
||||
goto exit;
|
||||
}
|
||||
Py_CLEAR(wrapper->wrapped);
|
||||
wrapper->wrapped = dt_aware;
|
||||
dt_aware = NULL;
|
||||
|
||||
/* the wrapper is ready to be returned */
|
||||
res = (PyObject *)wrapper;
|
||||
wrapper = NULL;
|
||||
res = _psyco_Timestamp(
|
||||
tm.tm_year + 1900, tm.tm_mon + 1, tm.tm_mday,
|
||||
tm.tm_hour, tm.tm_min, (double)tm.tm_sec + ticks,
|
||||
tz);
|
||||
|
||||
exit:
|
||||
Py_XDECREF(dt_aware);
|
||||
Py_XDECREF(wrapper);
|
||||
Py_XDECREF(tz);
|
||||
Py_XDECREF(m);
|
||||
return res;
|
||||
}
|
||||
|
||||
|
|
|
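The psyco_TimestampFromTicks hunks show the behavioural difference between the branches: master builds a naive local datetime and then localizes it with astimezone(), while 2.8.6 attaches psycopg2.tz.LOCAL fetched from Python land. A rough Python equivalent of the master-side logic (a sketch, not the C implementation):

    import datetime
    import time

    def timestamp_from_ticks(ticks):
        # Build a naive datetime in local time, then make it aware;
        # astimezone() on a naive value assumes local time and attaches
        # the system UTC offset, much like the astimezone() call above.
        naive = datetime.datetime.fromtimestamp(ticks)
        return naive.astimezone()

    print(timestamp_from_ticks(time.time()))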
@ -1,7 +1,7 @@
|
|||
/* adapter_datetime.h - definition for the python date/time types
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* adapter_list.c - python list objects
|
||||
*
|
||||
* Copyright (C) 2004-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* adapter_list.h - definition for the python list types
|
||||
*
|
||||
* Copyright (C) 2004-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
302
psycopg/adapter_mxdatetime.c
Normal file
|
@ -0,0 +1,302 @@
|
|||
/* adapter_mxdatetime.c - mx date/time objects
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
* psycopg2 is free software: you can redistribute it and/or modify it
|
||||
* under the terms of the GNU Lesser General Public License as published
|
||||
* by the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* In addition, as a special exception, the copyright holders give
|
||||
* permission to link this program with the OpenSSL library (or with
|
||||
* modified versions of OpenSSL that use the same license as OpenSSL),
|
||||
* and distribute linked combinations including the two.
|
||||
*
|
||||
* You must obey the GNU Lesser General Public License in all respects for
|
||||
* all of the code used other than OpenSSL.
|
||||
*
|
||||
* psycopg2 is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
|
||||
* License for more details.
|
||||
*/
|
||||
|
||||
#define PSYCOPG_MODULE
|
||||
#include "psycopg/psycopg.h"
|
||||
|
||||
#include "psycopg/adapter_mxdatetime.h"
|
||||
#include "psycopg/microprotocols_proto.h"
|
||||
|
||||
#include <mxDateTime.h>
|
||||
#include <string.h>
|
||||
|
||||
|
||||
/* Return 0 on success, -1 on failure, but don't set an exception */
|
||||
|
||||
int
|
||||
psyco_adapter_mxdatetime_init(void)
|
||||
{
|
||||
if (mxDateTime_ImportModuleAndAPI()) {
|
||||
Dprintf("psyco_adapter_mxdatetime_init: mx.DateTime initialization failed");
|
||||
PyErr_Clear();
|
||||
return -1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
/* mxdatetime_str, mxdatetime_getquoted - return result of quoting */
|
||||
|
||||
static PyObject *
|
||||
mxdatetime_str(mxdatetimeObject *self)
|
||||
{
|
||||
mxDateTimeObject *dt;
|
||||
mxDateTimeDeltaObject *dtd;
|
||||
char buf[128] = { 0, };
|
||||
|
||||
switch (self->type) {
|
||||
|
||||
case PSYCO_MXDATETIME_DATE:
|
||||
dt = (mxDateTimeObject *)self->wrapped;
|
||||
if (dt->year >= 1)
|
||||
PyOS_snprintf(buf, sizeof(buf) - 1, "'%04ld-%02d-%02d'::date",
|
||||
dt->year, (int)dt->month, (int)dt->day);
|
||||
else
|
||||
PyOS_snprintf(buf, sizeof(buf) - 1, "'%04ld-%02d-%02d BC'::date",
|
||||
1 - dt->year, (int)dt->month, (int)dt->day);
|
||||
break;
|
||||
|
||||
case PSYCO_MXDATETIME_TIMESTAMP:
|
||||
dt = (mxDateTimeObject *)self->wrapped;
|
||||
if (dt->year >= 1)
|
||||
PyOS_snprintf(buf, sizeof(buf) - 1,
|
||||
"'%04ld-%02d-%02dT%02d:%02d:%09.6f'::timestamp",
|
||||
dt->year, (int)dt->month, (int)dt->day,
|
||||
(int)dt->hour, (int)dt->minute, dt->second);
|
||||
else
|
||||
PyOS_snprintf(buf, sizeof(buf) - 1,
|
||||
"'%04ld-%02d-%02dT%02d:%02d:%09.6f BC'::timestamp",
|
||||
1 - dt->year, (int)dt->month, (int)dt->day,
|
||||
(int)dt->hour, (int)dt->minute, dt->second);
|
||||
break;
|
||||
|
||||
case PSYCO_MXDATETIME_TIME:
|
||||
case PSYCO_MXDATETIME_INTERVAL:
|
||||
/* given the limitation of the mx.DateTime module that uses the same
|
||||
type for both time and delta values we need to do some black magic
|
||||
and make sure we're not using an adapt()ed interval as a simple
|
||||
time */
|
||||
dtd = (mxDateTimeDeltaObject *)self->wrapped;
|
||||
if (0 <= dtd->seconds && dtd->seconds < 24*3600) {
|
||||
PyOS_snprintf(buf, sizeof(buf) - 1, "'%02d:%02d:%09.6f'::time",
|
||||
(int)dtd->hour, (int)dtd->minute, dtd->second);
|
||||
} else {
|
||||
double ss = dtd->hour*3600.0 + dtd->minute*60.0 + dtd->second;
|
||||
|
||||
if (dtd->seconds >= 0)
|
||||
PyOS_snprintf(buf, sizeof(buf) - 1, "'%ld days %.6f seconds'::interval",
|
||||
dtd->day, ss);
|
||||
else
|
||||
PyOS_snprintf(buf, sizeof(buf) - 1, "'-%ld days -%.6f seconds'::interval",
|
||||
dtd->day, ss);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
return PyString_FromString(buf);
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
mxdatetime_getquoted(mxdatetimeObject *self, PyObject *args)
|
||||
{
|
||||
return mxdatetime_str(self);
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
mxdatetime_conform(mxdatetimeObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *res, *proto;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "O", &proto)) return NULL;
|
||||
|
||||
if (proto == (PyObject*)&isqlquoteType)
|
||||
res = (PyObject*)self;
|
||||
else
|
||||
res = Py_None;
|
||||
|
||||
Py_INCREF(res);
|
||||
return res;
|
||||
}
|
||||
|
||||
/** the MxDateTime object **/
|
||||
|
||||
/* object member list */
|
||||
|
||||
static struct PyMemberDef mxdatetimeObject_members[] = {
|
||||
{"adapted", T_OBJECT, offsetof(mxdatetimeObject, wrapped), READONLY},
|
||||
{"type", T_INT, offsetof(mxdatetimeObject, type), READONLY},
|
||||
{NULL}
|
||||
};
|
||||
|
||||
/* object method table */
|
||||
|
||||
static PyMethodDef mxdatetimeObject_methods[] = {
|
||||
{"getquoted", (PyCFunction)mxdatetime_getquoted, METH_NOARGS,
|
||||
"getquoted() -> wrapped object value as SQL date/time"},
|
||||
{"__conform__", (PyCFunction)mxdatetime_conform, METH_VARARGS, NULL},
|
||||
{NULL} /* Sentinel */
|
||||
};
|
||||
|
||||
/* initialization and finalization methods */
|
||||
|
||||
static int
|
||||
mxdatetime_setup(mxdatetimeObject *self, PyObject *obj, int type)
|
||||
{
|
||||
Dprintf("mxdatetime_setup: init mxdatetime object at %p, refcnt = "
|
||||
FORMAT_CODE_PY_SSIZE_T,
|
||||
self, Py_REFCNT(self)
|
||||
);
|
||||
|
||||
self->type = type;
|
||||
Py_INCREF(obj);
|
||||
self->wrapped = obj;
|
||||
|
||||
Dprintf("mxdatetime_setup: good mxdatetime object at %p, refcnt = "
|
||||
FORMAT_CODE_PY_SSIZE_T,
|
||||
self, Py_REFCNT(self)
|
||||
);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void
|
||||
mxdatetime_dealloc(PyObject* obj)
|
||||
{
|
||||
mxdatetimeObject *self = (mxdatetimeObject *)obj;
|
||||
|
||||
Py_CLEAR(self->wrapped);
|
||||
|
||||
Dprintf("mxdatetime_dealloc: deleted mxdatetime object at %p, refcnt = "
|
||||
FORMAT_CODE_PY_SSIZE_T,
|
||||
obj, Py_REFCNT(obj)
|
||||
);
|
||||
|
||||
Py_TYPE(obj)->tp_free(obj);
|
||||
}
|
||||
|
||||
static int
|
||||
mxdatetime_init(PyObject *obj, PyObject *args, PyObject *kwds)
|
||||
{
|
||||
PyObject *mx;
|
||||
int type = -1; /* raise an error if type was not passed! */
|
||||
|
||||
if (!PyArg_ParseTuple(args, "O|i", &mx, &type))
|
||||
return -1;
|
||||
|
||||
return mxdatetime_setup((mxdatetimeObject *)obj, mx, type);
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
mxdatetime_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
|
||||
{
|
||||
return type->tp_alloc(type, 0);
|
||||
}
|
||||
|
||||
|
||||
/* object type */
|
||||
|
||||
#define mxdatetimeType_doc \
|
||||
"MxDateTime(mx, type) -> new mx.DateTime wrapper object"
|
||||
|
||||
PyTypeObject mxdatetimeType = {
|
||||
PyVarObject_HEAD_INIT(NULL, 0)
|
||||
"psycopg2._psycopg.MxDateTime",
|
||||
sizeof(mxdatetimeObject), 0,
|
||||
mxdatetime_dealloc, /*tp_dealloc*/
|
||||
0, /*tp_print*/
|
||||
0, /*tp_getattr*/
|
||||
0, /*tp_setattr*/
|
||||
0, /*tp_compare*/
|
||||
0, /*tp_repr*/
|
||||
0, /*tp_as_number*/
|
||||
0, /*tp_as_sequence*/
|
||||
0, /*tp_as_mapping*/
|
||||
0, /*tp_hash */
|
||||
0, /*tp_call*/
|
||||
(reprfunc)mxdatetime_str, /*tp_str*/
|
||||
0, /*tp_getattro*/
|
||||
0, /*tp_setattro*/
|
||||
0, /*tp_as_buffer*/
|
||||
Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE, /*tp_flags*/
|
||||
mxdatetimeType_doc, /*tp_doc*/
|
||||
0, /*tp_traverse*/
|
||||
0, /*tp_clear*/
|
||||
0, /*tp_richcompare*/
|
||||
0, /*tp_weaklistoffset*/
|
||||
0, /*tp_iter*/
|
||||
0, /*tp_iternext*/
|
||||
mxdatetimeObject_methods, /*tp_methods*/
|
||||
mxdatetimeObject_members, /*tp_members*/
|
||||
0, /*tp_getset*/
|
||||
0, /*tp_base*/
|
||||
0, /*tp_dict*/
|
||||
0, /*tp_descr_get*/
|
||||
0, /*tp_descr_set*/
|
||||
0, /*tp_dictoffset*/
|
||||
mxdatetime_init, /*tp_init*/
|
||||
0, /*tp_alloc*/
|
||||
mxdatetime_new, /*tp_new*/
|
||||
};
|
||||
|
||||
|
||||
/** module-level functions **/
|
||||
|
||||
PyObject *
|
||||
psyco_DateFromMx(PyObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *mx;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "O!", mxDateTime.DateTime_Type, &mx))
|
||||
return NULL;
|
||||
|
||||
return PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
|
||||
PSYCO_MXDATETIME_DATE);
|
||||
}
|
||||
|
||||
PyObject *
|
||||
psyco_TimeFromMx(PyObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *mx;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "O!", mxDateTime.DateTimeDelta_Type, &mx))
|
||||
return NULL;
|
||||
|
||||
return PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
|
||||
PSYCO_MXDATETIME_TIME);
|
||||
}
|
||||
|
||||
PyObject *
|
||||
psyco_TimestampFromMx(PyObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *mx;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "O!", mxDateTime.DateTime_Type, &mx))
|
||||
return NULL;
|
||||
|
||||
return PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
|
||||
PSYCO_MXDATETIME_TIMESTAMP);
|
||||
}
|
||||
|
||||
PyObject *
|
||||
psyco_IntervalFromMx(PyObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *mx;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "O!", mxDateTime.DateTime_Type, &mx))
|
||||
return NULL;
|
||||
|
||||
return PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
|
||||
PSYCO_MXDATETIME_INTERVAL);
|
||||
}
|
70
psycopg/adapter_mxdatetime.h
Normal file
|
@ -0,0 +1,70 @@
|
|||
/* adapter_mxdatetime.h - definition for the mx date/time types
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
* psycopg2 is free software: you can redistribute it and/or modify it
|
||||
* under the terms of the GNU Lesser General Public License as published
|
||||
* by the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* In addition, as a special exception, the copyright holders give
|
||||
* permission to link this program with the OpenSSL library (or with
|
||||
* modified versions of OpenSSL that use the same license as OpenSSL),
|
||||
* and distribute linked combinations including the two.
|
||||
*
|
||||
* You must obey the GNU Lesser General Public License in all respects for
|
||||
* all of the code used other than OpenSSL.
|
||||
*
|
||||
* psycopg2 is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
|
||||
* License for more details.
|
||||
*/
|
||||
|
||||
#ifndef PSYCOPG_MXDATETIME_H
|
||||
#define PSYCOPG_MXDATETIME_H 1
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
extern HIDDEN PyTypeObject mxdatetimeType;
|
||||
|
||||
typedef struct {
|
||||
PyObject_HEAD
|
||||
|
||||
PyObject *wrapped;
|
||||
int type;
|
||||
#define PSYCO_MXDATETIME_TIME 0
|
||||
#define PSYCO_MXDATETIME_DATE 1
|
||||
#define PSYCO_MXDATETIME_TIMESTAMP 2
|
||||
#define PSYCO_MXDATETIME_INTERVAL 3
|
||||
|
||||
} mxdatetimeObject;
|
||||
|
||||
HIDDEN int psyco_adapter_mxdatetime_init(void);
|
||||
|
||||
HIDDEN PyObject *psyco_DateFromMx(PyObject *module, PyObject *args);
|
||||
#define psyco_DateFromMx_doc \
|
||||
"DateFromMx(mx) -> new date"
|
||||
|
||||
HIDDEN PyObject *psyco_TimeFromMx(PyObject *module, PyObject *args);
|
||||
#define psyco_TimeFromMx_doc \
|
||||
"TimeFromMx(mx) -> new time"
|
||||
|
||||
HIDDEN PyObject *psyco_TimestampFromMx(PyObject *module, PyObject *args);
|
||||
#define psyco_TimestampFromMx_doc \
|
||||
"TimestampFromMx(mx) -> new timestamp"
|
||||
|
||||
HIDDEN PyObject *psyco_IntervalFromMx(PyObject *module, PyObject *args);
|
||||
#define psyco_IntervalFromMx_doc \
|
||||
"IntervalFromMx(mx) -> new interval"
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif /* !defined(PSYCOPG_MXDATETIME_H) */
|
|
@ -1,7 +1,7 @@
|
|||
/* adapter_pboolean.c - psycopg boolean type wrapper implementation
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* adapter_pboolean.h - definition for the psycopg boolean type wrapper
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* adapter_pdecimal.c - psycopg Decimal type wrapper implementation
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -81,7 +81,8 @@ pdecimal_getquoted(pdecimalObject *self, PyObject *args)
|
|||
/* res may be unicode and may suffer for issue #57 */
|
||||
output:
|
||||
|
||||
/* unicode to bytes */
|
||||
#if PY_3
|
||||
/* unicode to bytes in Py3 */
|
||||
{
|
||||
PyObject *tmp = PyUnicode_AsUTF8String(res);
|
||||
Py_DECREF(res);
|
||||
|
@ -89,6 +90,7 @@ output:
|
|||
goto end;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
if ('-' == Bytes_AS_STRING(res)[0]) {
|
||||
/* Prepend a space in front of negative numbers (ticket #57) */
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* adapter_pdecimal.h - definition for the psycopg Decimal type wrapper
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* adapter_float.c - psycopg pfloat type wrapper implementation
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -54,7 +54,8 @@ pfloat_getquoted(pfloatObject *self, PyObject *args)
|
|||
goto exit;
|
||||
}
|
||||
|
||||
/* unicode to bytes */
|
||||
#if PY_3
|
||||
/* unicode to bytes in Py3 */
|
||||
{
|
||||
PyObject *tmp = PyUnicode_AsUTF8String(rv);
|
||||
Py_DECREF(rv);
|
||||
|
@ -62,6 +63,7 @@ pfloat_getquoted(pfloatObject *self, PyObject *args)
|
|||
goto exit;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
if ('-' == Bytes_AS_STRING(rv)[0]) {
|
||||
/* Prepend a space in front of negative numbers (ticket #57) */
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* adapter_pfloat.h - definition for the psycopg float type wrapper
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* adapter_int.c - psycopg pint type wrapper implementation
|
||||
*
|
||||
* Copyright (C) 2011-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -40,7 +40,11 @@ pint_getquoted(pintObject *self, PyObject *args)
|
|||
|
||||
/* Convert subclass to int to handle IntEnum and other subclasses
|
||||
* whose str() is not the number. */
|
||||
if (PyLong_CheckExact(self->wrapped)) {
|
||||
if (PyLong_CheckExact(self->wrapped)
|
||||
#if PY_2
|
||||
|| PyInt_CheckExact(self->wrapped)
|
||||
#endif
|
||||
) {
|
||||
res = PyObject_Str(self->wrapped);
|
||||
} else {
|
||||
PyObject *tmp;
|
||||
|
@ -56,7 +60,8 @@ pint_getquoted(pintObject *self, PyObject *args)
|
|||
goto exit;
|
||||
}
|
||||
|
||||
/* unicode to bytes */
|
||||
#if PY_3
|
||||
/* unicode to bytes in Py3 */
|
||||
{
|
||||
PyObject *tmp = PyUnicode_AsUTF8String(res);
|
||||
Py_DECREF(res);
|
||||
|
@ -64,6 +69,7 @@ pint_getquoted(pintObject *self, PyObject *args)
|
|||
goto exit;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
if ('-' == Bytes_AS_STRING(res)[0]) {
|
||||
/* Prepend a space in front of negative numbers (ticket #57) */
|
||||
|
|
|
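The pint_getquoted() hunk keeps the same idea on both branches: int subclasses whose str() is not the bare number (IntEnum being the usual case) are converted to a plain int before quoting; the 2.8.6 side only adds the Python 2 PyInt check. A small illustration of why the conversion matters (the subclass below is invented for the example):

    class Answer(int):
        # An int subclass whose str() is not the plain number, the same
        # situation as IntEnum mentioned in the comment above.
        def __str__(self):
            return f"Answer({int(self)})"

    value = Answer(42)
    print(str(value))       # Answer(42) -- unusable as a SQL literal
    print(str(int(value)))  # 42 -- what the adapter actually emits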
@ -1,7 +1,7 @@
|
|||
/* adapter_pint.h - definition for the psycopg int type wrapper
|
||||
*
|
||||
* Copyright (C) 2011-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* adapter_qstring.c - QuotedString objects
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* adapter_qstring.h - definition for the QuotedString type
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
*
|
||||
* Copyright (C) 2017 My Karlsson <mk@acc.umu.se>
|
||||
* Copyright (c) 2018, Joyent, Inc.
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
*
|
||||
* Copyright (C) 2017 My Karlsson <mk@acc.umu.se>
|
||||
* Copyright (c) 2018-2019, Joyent, Inc.
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* bytes_format.c - bytes-oriented version of PyString_Format
|
||||
*
|
||||
* Copyright (C) 2010-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* column.h - definition for a column in cursor.description type
|
||||
*
|
||||
* Copyright (C) 2018-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* column_type.c - python interface to cursor.description objects
|
||||
*
|
||||
* Copyright (C) 2018-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -97,36 +97,17 @@ column_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
|
|||
static int
|
||||
column_init(columnObject *self, PyObject *args, PyObject *kwargs)
|
||||
{
|
||||
PyObject *name = NULL;
|
||||
PyObject *type_code = NULL;
|
||||
PyObject *display_size = NULL;
|
||||
PyObject *internal_size = NULL;
|
||||
PyObject *precision = NULL;
|
||||
PyObject *scale = NULL;
|
||||
PyObject *null_ok = NULL;
|
||||
PyObject *table_oid = NULL;
|
||||
PyObject *table_column = NULL;
|
||||
|
||||
static char *kwlist[] = {
|
||||
"name", "type_code", "display_size", "internal_size",
|
||||
"precision", "scale", "null_ok", "table_oid", "table_column", NULL};
|
||||
|
||||
if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|OOOOOOOOO", kwlist,
|
||||
&name, &type_code, &display_size, &internal_size, &precision,
|
||||
&scale, &null_ok, &table_oid, &table_column)) {
|
||||
&self->name, &self->type_code, &self->display_size,
|
||||
&self->internal_size, &self->precision, &self->scale,
|
||||
&self->null_ok, &self->table_oid, &self->table_column)) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
Py_XINCREF(name); self->name = name;
|
||||
Py_XINCREF(type_code); self->type_code = type_code;
|
||||
Py_XINCREF(display_size); self->display_size = display_size;
|
||||
Py_XINCREF(internal_size); self->internal_size = internal_size;
|
||||
Py_XINCREF(precision); self->precision = precision;
|
||||
Py_XINCREF(scale); self->scale = scale;
|
||||
Py_XINCREF(null_ok); self->null_ok = null_ok;
|
||||
Py_XINCREF(table_oid); self->table_oid = table_oid;
|
||||
Py_XINCREF(table_column); self->table_column = table_column;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
|
|
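In the column_init() hunk, master parses the keyword arguments into local variables and takes explicit Py_XINCREF references before storing them on the struct, where 2.8.6 parsed directly into the slots; the accepted keyword names are the same on both sides. A hedged usage sketch of the Column type this initializer belongs to (assuming a psycopg2 build that exposes Column in psycopg2.extensions):

    from psycopg2.extensions import Column  # assumption: available in this build

    # Keyword names mirror the kwlist in column_init() above.
    col = Column(name="id", type_code=23, null_ok=False)
    print(col.name, col.type_code, col.null_ok)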
@ -1,7 +1,7 @@
|
|||
/* config.h - general config and Dprintf macro
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* connection.h - definition for the psycopg connection type
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -145,9 +145,6 @@ struct connectionObject {
|
|||
|
||||
/* the pid this connection was created into */
|
||||
pid_t procpid;
|
||||
|
||||
/* inside a with block */
|
||||
int entered;
|
||||
};
|
||||
|
||||
/* map isolation level values into a numeric const */
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* connection_int.c - code used by the connection object
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -33,7 +33,6 @@
|
|||
#include "psycopg/green.h"
|
||||
#include "psycopg/notify.h"
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
/* String indexes match the ISOLATION_LEVEL_* consts */
|
||||
|
@ -1344,11 +1343,6 @@ conn_set_session(connectionObject *self, int autocommit,
|
|||
}
|
||||
}
|
||||
|
||||
Py_BLOCK_THREADS;
|
||||
conn_notifies_process(self);
|
||||
conn_notice_process(self);
|
||||
Py_UNBLOCK_THREADS;
|
||||
|
||||
if (autocommit != SRV_STATE_UNCHANGED) {
|
||||
self->autocommit = autocommit;
|
||||
}
|
||||
|
@ -1413,11 +1407,6 @@ conn_set_client_encoding(connectionObject *self, const char *pgenc)
|
|||
goto endlock;
|
||||
}
|
||||
|
||||
Py_BLOCK_THREADS;
|
||||
conn_notifies_process(self);
|
||||
conn_notice_process(self);
|
||||
Py_UNBLOCK_THREADS;
|
||||
|
||||
endlock:
|
||||
pthread_mutex_unlock(&self->lock);
|
||||
Py_END_ALLOW_THREADS;
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* connection_type.c - python interface to connection objects
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -35,7 +35,6 @@
|
|||
#include "psycopg/green.h"
|
||||
#include "psycopg/xid.h"
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <ctype.h>
|
||||
|
||||
|
@ -407,22 +406,10 @@ psyco_conn_tpc_recover(connectionObject *self, PyObject *dummy)
|
|||
static PyObject *
|
||||
psyco_conn_enter(connectionObject *self, PyObject *dummy)
|
||||
{
|
||||
PyObject *rv = NULL;
|
||||
|
||||
EXC_IF_CONN_CLOSED(self);
|
||||
|
||||
if (self->entered) {
|
||||
PyErr_SetString(ProgrammingError,
|
||||
"the connection cannot be re-entered recursively");
|
||||
goto exit;
|
||||
}
|
||||
|
||||
self->entered = 1;
|
||||
Py_INCREF(self);
|
||||
rv = (PyObject *)self;
|
||||
|
||||
exit:
|
||||
return rv;
|
||||
return (PyObject *)self;
|
||||
}
|
||||
|
||||
|
||||
|
@ -440,9 +427,6 @@ psyco_conn_exit(connectionObject *self, PyObject *args)
|
|||
goto exit;
|
||||
}
|
||||
|
||||
/* even if there will be an error, consider ourselves out */
|
||||
self->entered = 0;
|
||||
|
||||
if (type == Py_None) {
|
||||
if (!(tmp = PyObject_CallMethod((PyObject *)self, "commit", NULL))) {
|
||||
goto exit;
|
||||
|
|
|
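The psyco_conn_enter/psyco_conn_exit hunks show master adding an `entered` flag so a connection used as a context manager cannot be entered twice, while 2.8.6 simply returned self. A hedged usage sketch (the DSN is a placeholder):

    import psycopg2

    conn = psycopg2.connect("dbname=test")  # placeholder DSN
    with conn:
        with conn:   # on master this raises ProgrammingError:
            pass     # "the connection cannot be re-entered recursively"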
@ -1,7 +1,7 @@
|
|||
/* connection.h - definition for the psycopg ConnectionInfo type
|
||||
*
|
||||
* Copyright (C) 2018-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* conninfo_type.c - present information about the libpq connection
|
||||
*
|
||||
* Copyright (C) 2018-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* cursor.h - definition for the psycopg cursor type
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* cursor_int.c - code used by the cursor object
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* cursor_type.c - python interface to cursor objects
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -768,7 +768,7 @@ curs_fetchone(cursorObject *self, PyObject *dummy)
|
|||
Dprintf("curs_fetchone: rowcount = %ld", self->rowcount);
|
||||
|
||||
if (self->row >= self->rowcount) {
|
||||
/* we exhausted available data: return None */
|
||||
/* we exausted available data: return None */
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
|
@ -1133,7 +1133,7 @@ exit:
|
|||
}
|
||||
}
|
||||
}
|
||||
PyMem_Free(scpnames);
|
||||
PyMem_Del(scpnames);
|
||||
Py_XDECREF(pname);
|
||||
Py_XDECREF(pnames);
|
||||
Py_XDECREF(operation);
|
||||
|
@ -1303,9 +1303,11 @@ exit:
|
|||
/* Return a newly allocated buffer containing the list of columns to be
|
||||
* copied. On error return NULL and set an exception.
|
||||
*/
|
||||
static char *_psyco_curs_copy_columns(cursorObject *self, PyObject *columns)
|
||||
static char *_psyco_curs_copy_columns(PyObject *columns)
|
||||
{
|
||||
PyObject *col, *coliter;
|
||||
Py_ssize_t collen;
|
||||
char *colname;
|
||||
char *columnlist = NULL;
|
||||
Py_ssize_t bufsize = 512;
|
||||
Py_ssize_t offset = 1;
|
||||
|
@ -1331,28 +1333,15 @@ static char *_psyco_curs_copy_columns(cursorObject *self, PyObject *columns)
|
|||
columnlist[0] = '(';
|
||||
|
||||
while ((col = PyIter_Next(coliter)) != NULL) {
|
||||
Py_ssize_t collen;
|
||||
char *colname;
|
||||
char *quoted_colname;
|
||||
|
||||
if (!(col = psyco_ensure_bytes(col))) {
|
||||
Py_DECREF(coliter);
|
||||
goto error;
|
||||
}
|
||||
Bytes_AsStringAndSize(col, &colname, &collen);
|
||||
if (!(quoted_colname = psyco_escape_identifier(
|
||||
self->conn, colname, collen))) {
|
||||
Py_DECREF(col);
|
||||
Py_DECREF(coliter);
|
||||
goto error;
|
||||
}
|
||||
collen = strlen(quoted_colname);
|
||||
|
||||
while (offset + collen > bufsize - 2) {
|
||||
char *tmp;
|
||||
bufsize *= 2;
|
||||
if (NULL == (tmp = PyMem_Realloc(columnlist, bufsize))) {
|
||||
PQfreemem(quoted_colname);
|
||||
Py_DECREF(col);
|
||||
Py_DECREF(coliter);
|
||||
PyErr_NoMemory();
|
||||
|
@ -1360,11 +1349,10 @@ static char *_psyco_curs_copy_columns(cursorObject *self, PyObject *columns)
|
|||
}
|
||||
columnlist = tmp;
|
||||
}
|
||||
strncpy(&columnlist[offset], quoted_colname, collen);
|
||||
strncpy(&columnlist[offset], colname, collen);
|
||||
offset += collen;
|
||||
columnlist[offset++] = ',';
|
||||
Py_DECREF(col);
|
||||
PQfreemem(quoted_colname);
|
||||
}
|
||||
Py_DECREF(coliter);
|
||||
|
||||
|
@ -1411,9 +1399,8 @@ curs_copy_from(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
char *columnlist = NULL;
|
||||
char *quoted_delimiter = NULL;
|
||||
char *quoted_null = NULL;
|
||||
char *quoted_table_name = NULL;
|
||||
const char *table_name;
|
||||
|
||||
const char *table_name;
|
||||
Py_ssize_t bufsize = DEFAULT_COPYBUFF;
|
||||
PyObject *file, *columns = NULL, *res = NULL;
|
||||
|
||||
|
@ -1434,9 +1421,8 @@ curs_copy_from(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
EXC_IF_GREEN(copy_from);
|
||||
EXC_IF_TPC_PREPARED(self->conn, copy_from);
|
||||
|
||||
if (!(columnlist = _psyco_curs_copy_columns(self, columns))) {
|
||||
if (NULL == (columnlist = _psyco_curs_copy_columns(columns)))
|
||||
goto exit;
|
||||
}
|
||||
|
||||
if (!(quoted_delimiter = psyco_escape_string(
|
||||
self->conn, sep, -1, NULL, NULL))) {
|
||||
|
@ -1448,12 +1434,7 @@ curs_copy_from(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
goto exit;
|
||||
}
|
||||
|
||||
if (!(quoted_table_name = psyco_escape_identifier(
|
||||
self->conn, table_name, -1))) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
query_size = strlen(command) + strlen(quoted_table_name) + strlen(columnlist)
|
||||
query_size = strlen(command) + strlen(table_name) + strlen(columnlist)
|
||||
+ strlen(quoted_delimiter) + strlen(quoted_null) + 1;
|
||||
if (!(query = PyMem_New(char, query_size))) {
|
||||
PyErr_NoMemory();
|
||||
|
@ -1461,7 +1442,7 @@ curs_copy_from(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
}
|
||||
|
||||
PyOS_snprintf(query, query_size, command,
|
||||
quoted_table_name, columnlist, quoted_delimiter, quoted_null);
|
||||
table_name, columnlist, quoted_delimiter, quoted_null);
|
||||
|
||||
Dprintf("curs_copy_from: query = %s", query);
|
||||
|
||||
|
@ -1488,9 +1469,6 @@ curs_copy_from(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
Py_CLEAR(self->copyfile);
|
||||
|
||||
exit:
|
||||
if (quoted_table_name) {
|
||||
PQfreemem(quoted_table_name);
|
||||
}
|
||||
PyMem_Free(columnlist);
|
||||
PyMem_Free(quoted_delimiter);
|
||||
PyMem_Free(quoted_null);
|
||||
|
@ -1521,7 +1499,6 @@ curs_copy_to(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
char *quoted_null = NULL;
|
||||
|
||||
const char *table_name;
|
||||
char *quoted_table_name = NULL;
|
||||
PyObject *file = NULL, *columns = NULL, *res = NULL;
|
||||
|
||||
if (!PyArg_ParseTupleAndKeywords(
|
||||
|
@ -1541,14 +1518,8 @@ curs_copy_to(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
EXC_IF_GREEN(copy_to);
|
||||
EXC_IF_TPC_PREPARED(self->conn, copy_to);
|
||||
|
||||
if (!(quoted_table_name = psyco_escape_identifier(
|
||||
self->conn, table_name, -1))) {
|
||||
if (NULL == (columnlist = _psyco_curs_copy_columns(columns)))
|
||||
goto exit;
|
||||
}
|
||||
|
||||
if (!(columnlist = _psyco_curs_copy_columns(self, columns))) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
if (!(quoted_delimiter = psyco_escape_string(
|
||||
self->conn, sep, -1, NULL, NULL))) {
|
||||
|
@ -1560,7 +1531,7 @@ curs_copy_to(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
goto exit;
|
||||
}
|
||||
|
||||
query_size = strlen(command) + strlen(quoted_table_name) + strlen(columnlist)
|
||||
query_size = strlen(command) + strlen(table_name) + strlen(columnlist)
|
||||
+ strlen(quoted_delimiter) + strlen(quoted_null) + 1;
|
||||
if (!(query = PyMem_New(char, query_size))) {
|
||||
PyErr_NoMemory();
|
||||
|
@ -1568,7 +1539,7 @@ curs_copy_to(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
}
|
||||
|
||||
PyOS_snprintf(query, query_size, command,
|
||||
quoted_table_name, columnlist, quoted_delimiter, quoted_null);
|
||||
table_name, columnlist, quoted_delimiter, quoted_null);
|
||||
|
||||
Dprintf("curs_copy_to: query = %s", query);
|
||||
|
||||
|
@ -1589,9 +1560,6 @@ curs_copy_to(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
Py_CLEAR(self->copyfile);
|
||||
|
||||
exit:
|
||||
if (quoted_table_name) {
|
||||
PQfreemem(quoted_table_name);
|
||||
}
|
||||
PyMem_Free(columnlist);
|
||||
PyMem_Free(quoted_delimiter);
|
||||
PyMem_Free(quoted_null);
|
||||
|
@ -1951,11 +1919,10 @@ cursor_setup(cursorObject *self, connectionObject *conn, const char *name)
|
|||
|
||||
/* default tzinfo factory */
|
||||
{
|
||||
/* The datetime api doesn't seem to have a constructor to make a
|
||||
* datetime.timezone, so use the Python interface. */
|
||||
PyObject *m = NULL;
|
||||
if ((m = PyImport_ImportModule("datetime"))) {
|
||||
self->tzinfo_factory = PyObject_GetAttrString(m, "timezone");
|
||||
if ((m = PyImport_ImportModule("psycopg2.tz"))) {
|
||||
self->tzinfo_factory = PyObject_GetAttrString(
|
||||
m, "FixedOffsetTimezone");
|
||||
Py_DECREF(m);
|
||||
}
|
||||
if (!self->tzinfo_factory) {
|
||||
|
|
|
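The curs_copy_from/curs_copy_to hunks show master passing the table name and each column through psyco_escape_identifier before splicing them into the COPY statement, where 2.8.6 interpolated the caller's strings verbatim; the last hunk shows the default tzinfo_factory, datetime.timezone on master and psycopg2.tz.FixedOffsetTimezone on 2.8.6. A hedged usage sketch of the copy path (the DSN and table are placeholders and must already exist):

    import io
    import psycopg2

    conn = psycopg2.connect("dbname=test")        # placeholder DSN
    data = io.StringIO("1\tfoo\n2\tbar\n")
    with conn, conn.cursor() as cur:
        # On master, "test_table", "id" and "label" are escaped as SQL
        # identifiers before being put into the COPY ... FROM statement.
        cur.copy_from(data, "test_table", columns=("id", "label"))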
@ -1,7 +1,7 @@
|
|||
/* diagnostics.c - definition for the psycopg Diagnostics type
|
||||
*
|
||||
* Copyright (C) 2013-2019 Matthew Woodcraft <matthew@woodcraft.me.uk>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* diagnostics.c - present information from libpq error responses
|
||||
*
|
||||
* Copyright (C) 2013-2019 Matthew Woodcraft <matthew@woodcraft.me.uk>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* error.h - definition for the psycopg base Error type
|
||||
*
|
||||
* Copyright (C) 2013-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* error_type.c - python interface to the Error objects
|
||||
*
|
||||
* Copyright (C) 2013-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -65,8 +65,6 @@ base_exception_from_sqlstate(const char *sqlstate)
|
|||
switch (sqlstate[0]) {
|
||||
case '0':
|
||||
switch (sqlstate[1]) {
|
||||
case '8': /* Class 08 - Connection Exception */
|
||||
return OperationalError;
|
||||
case 'A': /* Class 0A - Feature Not Supported */
|
||||
return NotSupportedError;
|
||||
}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* green.c - cooperation with coroutine libraries.
|
||||
*
|
||||
* Copyright (C) 2010-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* green.c - cooperation with coroutine libraries.
|
||||
*
|
||||
* Copyright (C) 2010-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
* streaming replication
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -38,7 +38,6 @@
|
|||
#include "psycopg/win32_support.h"
|
||||
#else
|
||||
#include <arpa/inet.h>
|
||||
#include <sys/time.h>
|
||||
#endif
|
||||
|
||||
/* support routines taken from pg_basebackup/streamutil.c */
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* libpq_support.h - definitions for libpq_support.c
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* lobject.h - definition for the psycopg lobject type
|
||||
*
|
||||
* Copyright (C) 2006-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* lobject_int.c - code used by the lobject object
|
||||
*
|
||||
* Copyright (C) 2006-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -85,7 +85,11 @@ _lobject_parse_mode(const char *mode)
|
|||
pos += 1;
|
||||
break;
|
||||
default:
|
||||
#if PY_2
|
||||
rv |= LOBJECT_BINARY;
|
||||
#else
|
||||
rv |= LOBJECT_TEXT;
|
||||
#endif
|
||||
break;
|
||||
}
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* lobject_type.c - python interface to lobject objects
|
||||
*
|
||||
* Copyright (C) 2006-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -64,7 +64,7 @@ psyco_lobj_close(lobjectObject *self, PyObject *args)
|
|||
/* write method - write data to the lobject */
|
||||
|
||||
#define psyco_lobj_write_doc \
|
||||
"write(str | bytes) -- Write a string or bytes to the large object."
|
||||
"write(str) -- Write a string to the large object."
|
||||
|
||||
static PyObject *
|
||||
psyco_lobj_write(lobjectObject *self, PyObject *args)
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* microprotocols.c - minimalist and non-validating protocols implementation
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -92,7 +92,11 @@ _get_superclass_adapter(PyObject *obj, PyObject *proto)
|
|||
Py_ssize_t i, ii;
|
||||
|
||||
type = Py_TYPE(obj);
|
||||
if (!(type->tp_mro)) {
|
||||
if (!(
|
||||
#if PY_2
|
||||
(Py_TPFLAGS_HAVE_CLASS & type->tp_flags) &&
|
||||
#endif
|
||||
type->tp_mro)) {
|
||||
/* has no mro */
|
||||
return Py_None;
|
||||
}
|
||||
|
|
|
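The _get_superclass_adapter() hunk only differs by the Python 2 class-flag check; on both branches adapt() falls back to an adapter registered for a base class by walking the type's MRO. A small sketch of that behaviour from the Python side (Base/Derived are invented for the example):

    from psycopg2.extensions import AsIs, adapt, register_adapter

    class Base:
        def __init__(self, value):
            self.value = value

    class Derived(Base):
        pass

    # Register an adapter for the base class only ...
    register_adapter(Base, lambda obj: AsIs(str(obj.value)))

    # ... adapt() still finds it for the subclass via the MRO lookup.
    print(adapt(Derived(42)).getquoted())   # b'42'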
@ -1,7 +1,7 @@
|
|||
/* microprotocols.c - definitions for minimalist and non-validating protocols
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* microprotocol_proto.c - psycopg protocols
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* microporotocols_proto.h - definition for psycopg's protocols
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* notify.h - definition for the psycopg Notify type
|
||||
*
|
||||
* Copyright (C) 2010-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* notify_type.c - python interface to Notify objects
|
||||
*
|
||||
* Copyright (C) 2010-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* pqpath.c - single path into libpq
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -47,7 +47,6 @@
|
|||
#include "psycopg/libpq_support.h"
|
||||
#include "libpq-fe.h"
|
||||
|
||||
#include <stdlib.h>
|
||||
#ifdef _WIN32
|
||||
/* select() */
|
||||
#include <winsock2.h>
|
||||
|
@ -348,19 +347,14 @@ pq_begin_locked(connectionObject *conn, PyThreadState **tstate)
|
|||
char buf[256]; /* buf size must be same as bufsize */
|
||||
int result;
|
||||
|
||||
Dprintf("pq_begin_locked: pgconn = %p, %d, status = %d",
|
||||
Dprintf("pq_begin_locked: pgconn = %p, autocommit = %d, status = %d",
|
||||
conn->pgconn, conn->autocommit, conn->status);
|
||||
|
||||
if (conn->status != CONN_STATUS_READY) {
|
||||
if (conn->autocommit || conn->status != CONN_STATUS_READY) {
|
||||
Dprintf("pq_begin_locked: transaction in progress");
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (conn->autocommit && !conn->entered) {
|
||||
Dprintf("pq_begin_locked: autocommit and no with block");
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (conn->isolevel == ISOLATION_LEVEL_DEFAULT
|
||||
&& conn->readonly == STATE_DEFAULT
|
||||
&& conn->deferrable == STATE_DEFAULT) {
|
||||
|
@ -399,10 +393,10 @@ pq_commit(connectionObject *conn)
|
|||
Py_BEGIN_ALLOW_THREADS;
|
||||
pthread_mutex_lock(&conn->lock);
|
||||
|
||||
Dprintf("pq_commit: pgconn = %p, status = %d",
|
||||
conn->pgconn, conn->status);
|
||||
Dprintf("pq_commit: pgconn = %p, autocommit = %d, status = %d",
|
||||
conn->pgconn, conn->autocommit, conn->status);
|
||||
|
||||
if (conn->status != CONN_STATUS_BEGIN) {
|
||||
if (conn->autocommit || conn->status != CONN_STATUS_BEGIN) {
|
||||
Dprintf("pq_commit: no transaction to commit");
|
||||
retvalue = 0;
|
||||
}
|
||||
|
@ -412,7 +406,6 @@ pq_commit(connectionObject *conn)
|
|||
}
|
||||
|
||||
Py_BLOCK_THREADS;
|
||||
conn_notifies_process(conn);
|
||||
conn_notice_process(conn);
|
||||
Py_UNBLOCK_THREADS;
|
||||
|
||||
|
@ -434,10 +427,10 @@ pq_abort_locked(connectionObject *conn, PyThreadState **tstate)
|
|||
{
|
||||
int retvalue = -1;
|
||||
|
||||
Dprintf("pq_abort_locked: pgconn = %p, status = %d",
|
||||
conn->pgconn, conn->status);
|
||||
Dprintf("pq_abort_locked: pgconn = %p, autocommit = %d, status = %d",
|
||||
conn->pgconn, conn->autocommit, conn->status);
|
||||
|
||||
if (conn->status != CONN_STATUS_BEGIN) {
|
||||
if (conn->autocommit || conn->status != CONN_STATUS_BEGIN) {
|
||||
Dprintf("pq_abort_locked: no transaction to abort");
|
||||
return 0;
|
||||
}
|
||||
|
@ -469,7 +462,6 @@ pq_abort(connectionObject *conn)
|
|||
retvalue = pq_abort_locked(conn, &_save);
|
||||
|
||||
Py_BLOCK_THREADS;
|
||||
conn_notifies_process(conn);
|
||||
conn_notice_process(conn);
|
||||
Py_UNBLOCK_THREADS;
|
||||
|
||||
|
@ -496,12 +488,12 @@ pq_reset_locked(connectionObject *conn, PyThreadState **tstate)
|
|||
{
|
||||
int retvalue = -1;
|
||||
|
||||
Dprintf("pq_reset_locked: pgconn = %p, status = %d",
|
||||
conn->pgconn, conn->status);
|
||||
Dprintf("pq_reset_locked: pgconn = %p, autocommit = %d, status = %d",
|
||||
conn->pgconn, conn->autocommit, conn->status);
|
||||
|
||||
conn->mark += 1;
|
||||
|
||||
if (conn->status == CONN_STATUS_BEGIN) {
|
||||
if (!conn->autocommit && conn->status == CONN_STATUS_BEGIN) {
|
||||
retvalue = pq_execute_command_locked(conn, "ABORT", tstate);
|
||||
if (retvalue != 0) return retvalue;
|
||||
}
|
||||
|
@ -540,7 +532,6 @@ pq_reset(connectionObject *conn)
|
|||
|
||||
Py_BLOCK_THREADS;
|
||||
conn_notice_process(conn);
|
||||
conn_notifies_process(conn);
|
||||
Py_UNBLOCK_THREADS;
|
||||
|
||||
pthread_mutex_unlock(&conn->lock);
|
||||
|
|
|
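The pqpath.c hunks change how autocommit interacts with transaction control: 2.8.6 short-circuits pq_begin/pq_commit/pq_abort/pq_reset whenever autocommit is on, while master only skips BEGIN when the connection is not inside a `with` block (the `entered` flag), so an autocommit connection used as a context manager still gets a real transaction. A hedged behavioural sketch (the DSN and table are placeholders):

    import psycopg2

    conn = psycopg2.connect("dbname=test")   # placeholder DSN
    conn.autocommit = True

    with conn:                               # master: BEGIN is still issued here
        with conn.cursor() as cur:
            cur.execute("INSERT INTO t (x) VALUES (1)")
    # leaving the block commits the transaction; outside a with block,
    # statements on this connection auto-commit as before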
@ -1,7 +1,7 @@
|
|||
/* pqpath.h - definitions for pqpath.c
|
||||
*
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
* Copyright (C) 2020 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
Some files were not shown because too many files have changed in this diff.