Compare commits


No commits in common. "master" and "2_7_BETA_2" have entirely different histories.

251 changed files with 72143 additions and 12478 deletions

.appveyor.yml

@ -0,0 +1,239 @@
version : 2.x.{build}
clone_folder: C:\Project
environment:
global:
# MSVC Express 2008's setenv.cmd fails if /E:ON and /V:ON are not
# enabled in the batch script interpreter
#
# WITH_COMPILER: "cmd /E:ON /V:ON /C .\\appveyor\\run_with_compiler.cmd"
CMD_IN_ENV: cmd /E:ON /V:ON /C .\appveyor\run_with_env.cmd
matrix:
# For Python versions available on Appveyor, see
# http://www.appveyor.com/docs/installed-software#python
# Py 2.7 = VS Ver. 9.0 (VS 2008)
# Py 3.3, 3.4 = VS Ver. 10.0 (VS 2010)
# Py 3.5, 3.6 = VS Ver. 14.0 (VS 2015)
- PYTHON: C:\Python27-x64
PYTHON_ARCH: 64
VS_VER: 9.0
- PYTHON: C:\Python27
PYTHON_ARCH: 32
VS_VER: 9.0
- PYTHON: C:\Python36-x64
PYTHON_ARCH: 64
VS_VER: 14.0
- PYTHON: C:\Python36
PYTHON_ARCH: 32
VS_VER: 14.0
- PYTHON: C:\Python35-x64
PYTHON_ARCH: 64
VS_VER: 14.0
- PYTHON: C:\Python35
PYTHON_ARCH: 32
VS_VER: 14.0
- PYTHON: C:\Python34-x64
DISTUTILS_USE_SDK: '1'
PYTHON_ARCH: 64
VS_VER: 10.0
- PYTHON: C:\Python34
PYTHON_ARCH: 32
VS_VER: 10.0
- PYTHON: C:\Python33-x64
DISTUTILS_USE_SDK: '1'
PYTHON_ARCH: 64
VS_VER: 10.0
- PYTHON: C:\Python33
PYTHON_ARCH: 32
VS_VER: 10.0
PSYCOPG2_TESTDB: psycopg2_test
PSYCOPG2_TESTDB_USER: postgres
PSYCOPG2_TESTDB_PASSWORD: Password12!
PSYCOPG2_TESTDB_HOST: localhost
PSYCOPG2_TESTDB_PORT: 5432
PGUSER: postgres
PGPASSWORD: Password12!
matrix:
fast_finish: false
services:
- postgresql95
cache:
# Rebuild cache if following file changes
- C:\Others -> scripts\appveyor.cache_rebuild
# Script called before repo cloning
init:
# Uncomment next line to get RDP access during the build.
#- ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
#
# Set Python to the path
- SET PATH=%PYTHON%;%PYTHON%\Scripts;%PATH%
# Verify Python version and architecture
- ECHO *******************************************************************
- ECHO Python Information
- ECHO *******************************************************************
- "%PYTHON%\\python --version"
- "%PYTHON%\\python -c \"import sys; print('64bit: ' + str(sys.maxsize > 2**32))\""
# Get & Install NASM
#- curl -L -o nasminst.exe http://www.nasm.us/pub/nasm/releasebuilds/2.12.02/win64/nasm-2.12.02-installer-x64.exe && start /wait nasminst.exe /S
#- SET PATH="C:\Program Files (x86)\nasm;%PATH%"
# Fix problem with VS2008 Express and 64bit builds
- ECHO Fixing VS2008 Express and 64bit builds
- COPY "C:\\Program Files (x86)\\Microsoft Visual Studio 9.0\\VC\\bin\\vcvars64.bat" "C:\\Program Files (x86)\\Microsoft Visual Studio 9.0\\VC\\bin\\amd64\\vcvarsamd64.bat"
# Fix problem with VS2010 Express 64bit missing vcvars64.bat
# Note: repository not cloned at this point, so need to fetch
# file another way
- ECHO Fixing VS2010 Express and 64bit builds
- curl -fsSL -o "C:\\Program Files (x86)\\Microsoft Visual Studio 10.0\\VC\\bin\\amd64\\vcvars64.bat" https://raw.githubusercontent.com/psycopg/psycopg2/master/scripts/vcvars64-vs2010.bat
# Setup the compiler based upon version and architecture
- ECHO Configuring Compiler
- IF "%PYTHON_ARCH%"=="32" (CALL "C:\\Program Files (x86)\\Microsoft Visual Studio %VS_VER%\\VC\\vcvarsall.bat" x86)
- IF "%PYTHON_ARCH%"=="64" (CALL "C:\\Program Files (x86)\\Microsoft Visual Studio %VS_VER%\\VC\\vcvarsall.bat" amd64)
# Change PostgreSQL config before service starts to allow > 1 prepared
# transactions for test cases
- ECHO max_prepared_transactions = 10 >> "C:\\Program Files\\PostgreSQL\\9.5\\data\\postgresql.conf"
# Repository gets cloned, Cache is restored
install:
# We start off CD'ed to cloned folder
- SET BASE_DIR=C:\Others\%PYTHON_ARCH%\%VS_VER%
- SET BUILD_DIR=%BASE_DIR%\Builds
- IF NOT EXIST %BUILD_DIR% MKDIR %BUILD_DIR%
- ECHO *******************************************************************
- ECHO Initialized variables specific for this build
- ECHO *******************************************************************
- ECHO %BASE_DIR%
- ECHO %BUILD_DIR%
- ECHO *******************************************************************
# Setup directories for building OpenSSL libraries
- ECHO *******************************************************************
- ECHO Preparing for building OpenSSL
- ECHO *******************************************************************
- SET OPENSSLTOP=%BASE_DIR%\openssl
- IF NOT EXIST %OPENSSLTOP%\include\openssl MKDIR %OPENSSLTOP%\include\openssl
- IF NOT EXIST %OPENSSLTOP%\lib MKDIR %OPENSSLTOP%\lib
# Setup OpenSSL Environment Variables based on processor architecture
- ps: >-
If ($env:PYTHON_ARCH -Match "32" ) {
$env:VCVARS_PLATFORM="x86"
$env:TARGET="VC-WIN32"
$env:DO="do_ms"
} Else {
$env:VCVARS_PLATFORM="amd64"
$env:TARGET="VC-WIN64A"
$env:DO="do_win64a"
$env:CPU="AMD64"
}
# Download OpenSSL source
- CD C:\Others
- IF NOT EXIST OpenSSL_1_0_2k.zip (
curl -fsSL -o OpenSSL_1_0_2k.zip https://github.com/openssl/openssl/archive/OpenSSL_1_0_2k.zip
)
# To use OpenSSL >= 1.1.0, both libpq and psycopg build environments have
# to support the new library names. Below are commands to build OpenSSL
# 1.1.0:
# - mkdir _build
# - cd _build
# - perl ..\Configure %TARGET% no-asm no-shared --prefix=%BASE_DIR%\openssl --openssldir=%BASE_DIR%\openssl
# - nmake build_libs install_dev
- IF NOT EXIST %OPENSSLTOP%\lib\ssleay32.lib (
CD %BUILD_DIR% &&
7z x C:\Others\OpenSSL_1_0_2k.zip &&
CD openssl-OpenSSL_1_0_2k &&
perl Configure %TARGET% no-asm no-shared no-zlib --prefix=%OPENSSLTOP% --openssldir=%OPENSSLTOP% &&
CALL ms\%DO% &&
nmake -f ms\nt.mak init headers lib &&
COPY inc32\openssl\*.h %OPENSSLTOP%\include\openssl &&
COPY out32\*.lib %OPENSSLTOP%\lib &&
CD %BASE_DIR% &&
RMDIR /S /Q %BUILD_DIR%\openssl-OpenSSL_1_0_2k
)
# Setup directories for building PostgreSQL libraries
- ECHO *******************************************************************
- ECHO Preparing for building PostgreSQL libraries
- ECHO *******************************************************************
- SET PGTOP=%BASE_DIR%\postgresql
- IF NOT EXIST %PGTOP%\include MKDIR %PGTOP%\include
- IF NOT EXIST %PGTOP%\lib MKDIR %PGTOP%\lib
# Download PostgreSQL source
- CD C:\Others
- IF NOT EXIST postgres-REL9_6_2.zip (
curl -fsSL -o postgres-REL9_6_2.zip https://github.com/postgres/postgres/archive/REL9_6_2.zip
)
# Setup build config file (config.pl)
# Build libpgport first
# Build libpq
# NOTE: Cannot set and use the same variable inside an IF
- SET PGBUILD=%BUILD_DIR%\postgres-REL9_6_2
- IF NOT EXIST %PGTOP%\lib\libpq.lib (
CD %BUILD_DIR% &&
7z x C:\Others\postgres-REL9_6_2.zip &&
CD postgres-REL9_6_2\src\tools\msvc &&
ECHO $config-^>{ldap} = 0; > config.pl &&
ECHO $config-^>{openssl} = "%OPENSSLTOP:\=\\%"; >> config.pl &&
ECHO.>> config.pl &&
ECHO 1;>> config.pl &&
build libpgport &&
XCOPY /E ..\..\include %PGTOP%\include &&
COPY %PGBUILD%\Release\libpgport\libpgport.lib %PGTOP%\lib &&
CD ..\..\interfaces\libpq &&
nmake -f win32.mak USE_OPENSSL=1 ENABLE_THREAD_SAFETY=1 SSL_INC=%OPENSSLTOP%\include SSL_LIB_PATH=%OPENSSLTOP%\lib config .\Release\libpq.lib &&
COPY *.h %PGTOP%\include &&
COPY Release\libpq.lib %PGTOP%\lib &&
CD %BASE_DIR% &&
RMDIR /S /Q %PGBUILD%
)
build: off
#before_build:
build_script:
# Add PostgreSQL binaries to the path
- PATH=C:\Program Files\PostgreSQL\9.5\bin\;%PATH%
- CD C:\Project
- "%PYTHON%\\python.exe setup.py build_ext --have-ssl -l libpgcommon -L %OPENSSLTOP%\\lib;%PGTOP%\\lib -I %OPENSSLTOP%\\include;%PGTOP%\\include"
- "%PYTHON%\\python.exe setup.py build"
- "%PYTHON%\\python.exe setup.py install"
#after_build:
before_test:
# Create and setup PostgreSQL database for the tests
- createdb %PSYCOPG2_TESTDB%
- psql -d %PSYCOPG2_TESTDB% -c "CREATE EXTENSION HSTORE;"
test_script:
- "%PYTHON%\\python.exe -c \"import psycopg2; print(psycopg2.__version__)\""
- "%PYTHON%\\python.exe -c \"from psycopg2 import tests; tests.unittest.main(defaultTest='tests.test_suite')\" --verbose"

.github/FUNDING.yml

@ -1,4 +0,0 @@
github:
- dvarrazzo
custom:
- "https://www.paypal.me/dvarrazzo"


@ -1,23 +0,0 @@
---
name: Problem installing psycopg2
about: Report a case in which psycopg2 failed to install on your platform
title: ''
labels: ''
assignees: ''
---
**This is a bug tracker**
If you have a question, such as "how do you do X with Python/PostgreSQL/psycopg2" please [write to the mailing list](https://lists.postgresql.org/manage/) or [open a question](https://github.com/psycopg/psycopg2/discussions) instead.
**Before opening this ticket, please confirm that:**
- [ ] I am running the latest version of pip, i.e. typing ``pip --version`` you get [this version](https://pypi.org/project/pip/).
- [ ] I have read the [installation documentation](https://www.psycopg.org/docs/install.html) and the [frequently asked questions](https://www.psycopg.org/docs/faq.html)
- [ ] If install failed, I typed `pg_config` on the command line and I obtained an output instead of an error.
**Please complete the following information:**
- OS:
- Psycopg version:
- Python version:
- PostgreSQL version:
- pip version


@ -1,27 +0,0 @@
---
name: Problem using psycopg2
about: Report a case in which psycopg2 is not working as expected
title: ''
labels: ''
assignees: ''
---
**This is a bug tracker**
If you have a question, such as "how do you do X with Python/PostgreSQL/psycopg2" please [write to the mailing list](https://lists.postgresql.org/manage/) or [open a question](https://github.com/psycopg/psycopg2/discussions) instead.
**Please complete the following information:**
- OS:
- Psycopg version:
- Python version:
- PostgreSQL version:
- pip version
**Describe the bug**
Please let us know:
1: what you did
2: what you expected to happen
3: what happened instead
If possible, provide a script reproducing the issue.


@ -1,6 +0,0 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"


@ -1,18 +0,0 @@
name: Build documentation
on:
push:
branches:
# This should match the DOC_BRANCH value in the psycopg-website Makefile
- master
jobs:
docs:
runs-on: ubuntu-latest
steps:
- name: Trigger docs build
uses: peter-evans/repository-dispatch@v3
with:
repository: psycopg/psycopg-website
event-type: psycopg2-commit
token: ${{ secrets.ACCESS_TOKEN }}


@ -1,266 +0,0 @@
---
name: Build packages
on:
- workflow_dispatch
env:
PIP_BREAK_SYSTEM_PACKAGES: "1"
LIBPQ_VERSION: "16.0"
OPENSSL_VERSION: "1.1.1w"
jobs:
sdist: # {{{
if: true
strategy:
fail-fast: false
matrix:
include:
- package_name: psycopg2
- package_name: psycopg2-binary
runs-on: ubuntu-latest
steps:
- name: Checkout repos
uses: actions/checkout@v4
- name: Build sdist
run: ./scripts/build/build_sdist.sh
env:
PACKAGE_NAME: ${{ matrix.package_name }}
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: sdist-${{ matrix.package_name }}
path: |
dist/*.tar.gz
env:
PSYCOPG2_TESTDB: postgres
PSYCOPG2_TESTDB_HOST: 172.17.0.1
PSYCOPG2_TESTDB_USER: postgres
PSYCOPG2_TESTDB_PASSWORD: password
PSYCOPG2_TEST_FAST: 1
services:
postgresql:
image: postgres:16
env:
POSTGRES_PASSWORD: password
ports:
- 5432:5432
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
# }}}
linux: # {{{
if: true
strategy:
fail-fast: false
matrix:
platform: [manylinux, musllinux]
arch: [x86_64, i686, aarch64, ppc64le]
pyver: [cp38, cp39, cp310, cp311, cp312, cp313]
runs-on: ubuntu-latest
steps:
- name: Checkout repos
uses: actions/checkout@v4
- name: Set up QEMU for multi-arch build
uses: docker/setup-qemu-action@v3
- name: Cache libpq build
uses: actions/cache@v4
with:
path: /tmp/libpq.build
key: libpq-${{ env.LIBPQ_VERSION }}-${{ matrix.platform }}-${{ matrix.arch }}
- name: Build wheels
uses: pypa/cibuildwheel@v2.23.3
env:
CIBW_MANYLINUX_X86_64_IMAGE: manylinux2014
CIBW_MANYLINUX_I686_IMAGE: manylinux2014
CIBW_MANYLINUX_AARCH64_IMAGE: manylinux2014
CIBW_MANYLINUX_PPC64LE_IMAGE: manylinux2014
CIBW_BUILD: ${{matrix.pyver}}-${{matrix.platform}}_${{matrix.arch}}
CIBW_ARCHS_LINUX: auto aarch64 ppc64le
CIBW_BEFORE_ALL_LINUX: ./scripts/build/wheel_linux_before_all.sh
CIBW_REPAIR_WHEEL_COMMAND: >-
./scripts/build/strip_wheel.sh {wheel}
&& auditwheel repair -w {dest_dir} {wheel}
CIBW_TEST_COMMAND: >-
export PYTHONPATH={project} &&
python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')"
CIBW_ENVIRONMENT_PASS_LINUX: LIBPQ_VERSION OPENSSL_VERSION
CIBW_ENVIRONMENT: >-
PACKAGE_NAME=psycopg2-binary
LIBPQ_BUILD_PREFIX=/host/tmp/libpq.build
PATH="$LIBPQ_BUILD_PREFIX/bin:$PATH"
LD_LIBRARY_PATH="$LIBPQ_BUILD_PREFIX/lib:$LIBPQ_BUILD_PREFIX/lib64"
PSYCOPG2_TESTDB=postgres
PSYCOPG2_TESTDB_HOST=172.17.0.1
PSYCOPG2_TESTDB_USER=postgres
PSYCOPG2_TESTDB_PASSWORD=password
PSYCOPG2_TEST_FAST=1
- uses: actions/upload-artifact@v4
with:
name: linux-${{matrix.pyver}}-${{matrix.platform}}_${{matrix.arch}}
path: ./wheelhouse/*.whl
services:
postgresql:
image: postgres:16
env:
POSTGRES_PASSWORD: password
ports:
- 5432:5432
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
# }}}
macos: # {{{
runs-on: macos-latest
if: true
strategy:
fail-fast: false
matrix:
# These archs require an Apple M1 runner: [arm64, universal2]
arch: [x86_64, arm64]
pyver: [cp39, cp310, cp311, cp312, cp313]
steps:
- name: Checkout repos
uses: actions/checkout@v4
- name: Cache libpq build
uses: actions/cache@v4
with:
path: /tmp/libpq.build
key: libpq-${{ env.LIBPQ_VERSION }}-macos-${{ matrix.arch }}
- name: Build wheels
uses: pypa/cibuildwheel@v2.23.3
env:
CIBW_BUILD: ${{matrix.pyver}}-macosx_${{matrix.arch}}
CIBW_ARCHS_MACOS: ${{matrix.arch}}
MACOSX_ARCHITECTURE: ${{matrix.arch}}
CIBW_BEFORE_ALL_MACOS: ./scripts/build/wheel_macos_before_all.sh
CIBW_TEST_COMMAND: >-
export PYTHONPATH={project} &&
python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')"
CIBW_ENVIRONMENT: >-
PG_VERSION=16
PACKAGE_NAME=psycopg2-binary
PSYCOPG2_TESTDB=postgres
PATH="/tmp/libpq.build/bin:$PATH"
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: macos-${{matrix.pyver}}-macos-${{matrix.arch}}
path: ./wheelhouse/*.whl
# }}}
windows: # {{{
runs-on: windows-latest
if: true
strategy:
fail-fast: false
matrix:
arch: [win_amd64]
pyver: [cp38, cp39, cp310, cp311, cp312, cp313]
package_name: [psycopg2, psycopg2-binary]
defaults:
run:
shell: bash
steps:
# there are some other libpq in PATH
- name: Drop spurious libpq in the path
run: rm -rf c:/tools/php C:/Strawberry/c/bin
- name: Checkout repo
uses: actions/checkout@v4
- name: Start PostgreSQL service for test
run: |
$PgSvc = Get-Service "postgresql*"
Set-Service $PgSvc.Name -StartupType manual
$PgSvc.Start()
shell: powershell
- name: Export GitHub Actions cache environment variables
uses: actions/github-script@v7
with:
script: |
const path = require('path')
core.exportVariable('ACTIONS_CACHE_URL', process.env.ACTIONS_CACHE_URL || '');
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
core.addPath(path.join(process.env.VCPKG_INSTALLATION_ROOT, 'installed/x64-windows-release/lib'));
core.addPath(path.join(process.env.VCPKG_INSTALLATION_ROOT, 'installed/x64-windows-release/bin'));
- name: Create the binary package source tree
run: >-
sed -i 's/^setup(name="psycopg2"/setup(name="${{matrix.package_name}}"/'
setup.py
if: ${{ matrix.package_name != 'psycopg2' }}
- name: Build wheels
uses: pypa/cibuildwheel@v2.23.3
env:
VCPKG_BINARY_SOURCES: "clear;x-gha,readwrite" # cache vcpkg
CIBW_BUILD: ${{matrix.pyver}}-${{matrix.arch}}
CIBW_ARCHS_WINDOWS: AMD64 x86
CIBW_BEFORE_BUILD_WINDOWS: '.\scripts\build\wheel_win32_before_build.bat'
CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: >-
delvewheel repair -w {dest_dir}
--no-mangle "libiconv-2.dll;libwinpthread-1.dll" {wheel}
CIBW_TEST_COMMAND: >-
set PYTHONPATH={project} &&
python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')"
# Note: no fast test because we don't run Windows tests
CIBW_ENVIRONMENT_WINDOWS: >-
PSYCOPG2_TESTDB=postgres
PSYCOPG2_TESTDB_USER=postgres
PSYCOPG2_TESTDB_HOST=localhost
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: windows-${{ matrix.package_name }}-${{matrix.pyver}}-${{matrix.arch}}
path: ./wheelhouse/*.whl
# }}}
merge: # {{{
runs-on: ubuntu-latest
needs:
- sdist
- linux
- macos
- windows
steps:
- name: Merge Artifacts
uses: actions/upload-artifact/merge@v4
with:
name: psycopg2-artifacts
delete-merged: true
# }}}


@ -1,79 +0,0 @@
name: Tests
env:
PIP_BREAK_SYSTEM_PACKAGES: "1"
on:
push:
pull_request:
jobs:
linux:
runs-on: ubuntu-latest
if: true
strategy:
fail-fast: false
matrix:
include:
- {python: "3.8", postgres: "12"}
- {python: "3.9", postgres: "13"}
- {python: "3.10", postgres: "14"}
- {python: "3.11", postgres: "15"}
- {python: "3.12", postgres: "16"}
- {python: "3.13", postgres: "17"}
# Opposite extremes of the supported Py/PG range, other architecture
- {python: "3.8", postgres: "17", architecture: "x86"}
- {python: "3.9", postgres: "16", architecture: "x86"}
- {python: "3.10", postgres: "15", architecture: "x86"}
- {python: "3.11", postgres: "14", architecture: "x86"}
- {python: "3.12", postgres: "13", architecture: "x86"}
- {python: "3.13", postgres: "12", architecture: "x86"}
env:
PSYCOPG2_TESTDB: postgres
PSYCOPG2_TESTDB_HOST: 127.0.0.1
PSYCOPG2_TESTDB_USER: postgres
PSYCOPG2_TESTDB_PASSWORD: password
services:
postgresql:
image: postgres:${{ matrix.postgres }}
env:
POSTGRES_PASSWORD: password
ports:
- 5432:5432
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- uses: actions/checkout@v4
# Can enable to test an unreleased libpq version.
- name: install libpq 16
if: false
run: |
set -x
rel=$(lsb_release -c -s)
echo "deb http://apt.postgresql.org/pub/repos/apt ${rel}-pgdg main 16" \
| sudo tee -a /etc/apt/sources.list.d/pgdg.list
sudo apt-get -qq update
pqver=$(apt-cache show libpq5 | grep ^Version: | head -1 \
| awk '{print $2}')
sudo apt-get -qq -y install "libpq-dev=${pqver}" "libpq5=${pqver}"
- name: Install tox
run: pip install "tox < 4"
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python }}
- name: Run tests
env:
MATRIX_PYTHON: ${{ matrix.python }}
run: tox -e ${MATRIX_PYTHON%-dev}
timeout-minutes: 5

.gitignore

@ -4,15 +4,13 @@ MANIFEST
*.pidb
*.pyc
*.sw[po]
*.egg-info/
dist/*
/build
build/*
doc/src/_build/*
doc/html/*
doc/psycopg2.txt
scripts/pypi_docs_upload.py
env
env?
.idea
.tox
.vscode/
/rel
/wheels
/packages
/wheelhouse

.travis.yml

@ -0,0 +1,24 @@
# Travis CI configuration file for psycopg2
dist: trusty
sudo: required
language: python
python:
- 2.7
- 3.6
- 2.6
- 3.5
- 3.4
- 3.3
- 3.2
install:
- python setup.py install
- sudo scripts/travis_prepare.sh
script:
- scripts/travis_test.sh
notifications:
email: false


@ -6,7 +6,7 @@ For the win32 port:
Jason Erickson <jerickso@indian.com>
Additional Help:
Peter Fein contributed a logging connection/cursor class that even if it
was not used directly heavily influenced the implementation currently in
psycopg2.extras.


@ -1,4 +1,4 @@
Installation instructions are included in the docs.
Please check the 'doc/src/install.rst' file or online at
<https://www.psycopg.org/docs/install.html>.
<http://initd.org/psycopg/docs/install.html>.

LICENSE

@ -1,5 +1,5 @@
psycopg2 and the LGPL
---------------------
=====================
psycopg2 is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published
@ -25,14 +25,19 @@ statement from all source files in the program, then also delete it here.
You should have received a copy of the GNU Lesser General Public License
along with psycopg2 (see the doc/ directory.)
If not, see <https://www.gnu.org/licenses/>.
If not, see <http://www.gnu.org/licenses/>.
Alternative licenses
--------------------
====================
The following BSD-like license applies (at your option) to the files following
the pattern ``psycopg/adapter*.{h,c}`` and ``psycopg/microprotocol*.{h,c}``:
If you prefer you can use the Zope Database Adapter ZPsycopgDA (i.e.,
every file inside the ZPsycopgDA directory) under the ZPL license as
published on the Zope web site, http://www.zope.org/Resources/ZPL.
Also, the following BSD-like license applies (at your option) to the
files following the pattern psycopg/adapter*.{h,c} and
psycopg/microprotocol*.{h,c}:
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
@ -42,8 +47,8 @@ the pattern ``psycopg/adapter*.{h,c}`` and ``psycopg/microprotocol*.{h,c}``:
claim that you wrote the original software. If you use this
software in a product, an acknowledgment in the product documentation
would be appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not
be misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.


@ -1,9 +1,11 @@
recursive-include psycopg *.c *.h *.manifest
recursive-include lib *.py
recursive-include tests *.py
recursive-include examples *.py somehackers.jpg whereareyou.jpg
include doc/README.rst doc/SUCCESS doc/COPYING.LESSER doc/pep-0249.txt
include doc/Makefile doc/requirements.txt
recursive-include doc/src *.rst *.py *.css Makefile
recursive-include scripts *.py *.sh
include scripts/maketypes.sh scripts/buildtypes.py
include AUTHORS README.rst INSTALL LICENSE NEWS
include MANIFEST.in setup.py setup.cfg Makefile


@ -29,7 +29,8 @@ SOURCE := $(SOURCE_C) $(SOURCE_PY) $(SOURCE_TESTS) $(SOURCE_DOC)
PACKAGE := $(BUILD_DIR)/psycopg2
PLATLIB := $(PACKAGE)/_psycopg.so
PURELIB := $(patsubst lib/%,$(PACKAGE)/%,$(SOURCE_PY))
PURELIB := $(patsubst lib/%,$(PACKAGE)/%,$(SOURCE_PY)) \
$(patsubst tests/%,$(PACKAGE)/tests/%,$(SOURCE_TESTS))
BUILD_OPT := --build-lib=$(BUILD_DIR)
BUILD_EXT_OPT := --build-lib=$(BUILD_DIR)
@ -42,7 +43,7 @@ endif
VERSION := $(shell grep PSYCOPG_VERSION setup.py | head -1 | sed -e "s/.*'\(.*\)'/\1/")
SDIST := dist/psycopg2-$(VERSION).tar.gz
.PHONY: check clean
.PHONY: env check clean
default: package
@ -50,10 +51,12 @@ all: package sdist
package: $(PLATLIB) $(PURELIB)
docs: docs-html
docs: docs-html docs-txt
docs-html: doc/html/genindex.html
docs-txt: doc/psycopg2.txt
# for PyPI documentation
docs-zip: doc/docs.zip
@ -63,7 +66,7 @@ env:
$(MAKE) -C doc $@
check:
PYTHONPATH=$(BUILD_DIR) $(PYTHON) -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')" --verbose
PYTHONPATH=$(BUILD_DIR):$(PYTHONPATH) $(PYTHON) -c "from psycopg2 import tests; tests.unittest.main(defaultTest='tests.test_suite')" --verbose
testdb:
@echo "* Creating $(TESTDB)"
@ -96,6 +99,9 @@ $(SDIST): $(SOURCE)
doc/html/genindex.html: $(PLATLIB) $(PURELIB) $(SOURCE_DOC)
$(MAKE) -C doc html
doc/psycopg2.txt: $(PLATLIB) $(PURELIB) $(SOURCE_DOC)
$(MAKE) -C doc text
doc/docs.zip: doc/html/genindex.html
(cd doc/html && zip -r ../docs.zip *)

NEWS

@ -1,398 +1,6 @@
Current release
---------------
What's new in psycopg 2.9.10
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Add support for Python 3.13.
- Receive notifications on commit (:ticket:`#1728`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
PostgreSQL 17.
- Drop support for Python 3.7.
What's new in psycopg 2.9.9
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Add support for Python 3.12.
- Drop support for Python 3.6.
What's new in psycopg 2.9.8
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Wheel package bundled with PostgreSQL 16 libpq in order to add support for
recent features, such as ``sslcertmode``.
What's new in psycopg 2.9.7
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Fix propagation of exceptions raised during module initialization
(:ticket:`#1598`).
- Fix building when pg_config returns an empty string (:ticket:`#1599`).
- Wheel package bundled with OpenSSL 1.1.1v.
What's new in psycopg 2.9.6
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Package manylinux 2014 for aarch64 and ppc64le platforms, in order to
include libpq 15 in the binary package (:ticket:`#1396`).
- Wheel package bundled with OpenSSL 1.1.1t.
What's new in psycopg 2.9.5
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Add support for Python 3.11.
- Add support for rowcount in MERGE statements in binary packages
(:ticket:`#1497`).
- Wheel package bundled with OpenSSL 1.1.1r and PostgreSQL 15 libpq.
What's new in psycopg 2.9.4
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Fix `~psycopg2.extras.register_composite()`,
`~psycopg2.extras.register_range()` with customized :sql:`search_path`
(:ticket:`#1487`).
- Handle correctly composite types with names or in schemas requiring escape.
- Find ``pg_service.conf`` file in the ``/etc/postgresql-common`` directory in
binary packages (:ticket:`#1365`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
PostgreSQL 15.
- Wheel package bundled with OpenSSL 1.1.1q and PostgreSQL 14.4 libpq.
What's new in psycopg 2.9.3
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Alpine (musl) wheels now available (:ticket:`#1392`).
- macOS arm64 (Apple M1) wheels now available (:ticket:`1482`).
What's new in psycopg 2.9.2
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Raise `ValueError` for dates >= Y10k (:ticket:`#1307`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
PostgreSQL 14.
- Add preliminary support for Python 3.11 (:tickets:`#1376, #1386`).
- Wheel package bundled with OpenSSL 1.1.1l and PostgreSQL 14.1 libpq
(:ticket:`#1388`).
What's new in psycopg 2.9.1
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Fix regression with named `~psycopg2.sql.Placeholder` (:ticket:`#1291`).
What's new in psycopg 2.9
-------------------------
- ``with connection`` starts a transaction on autocommit transactions too
(:ticket:`#941`).
- Timezones with fractional minutes are supported on Python 3.7 and following
(:ticket:`#1272`).
- Escape table and column names in `~cursor.copy_from()` and
`~cursor.copy_to()`.
- Connection exceptions with sqlstate ``08XXX`` reclassified as
`~psycopg2.OperationalError` (a subclass of the previously used
`~psycopg2.DatabaseError`) (:ticket:`#1148`).
- Include library dirs required from libpq to work around MacOS build problems
(:ticket:`#1200`).
Other changes:
- Dropped support for Python 2.7, 3.4, 3.5 (:tickets:`#1198, #1000, #1197`).
- Dropped support for mx.DateTime.
- Use `datetime.timezone` objects by default in datetime objects instead of
`~psycopg2.tz.FixedOffsetTimezone`.
- The `psycopg2.tz` module is deprecated and scheduled to be dropped in the
next major release.
- Provide :pep:`599` wheels packages (manylinux2014 tag) for i686 and x86_64
platforms.
- Provide :pep:`600` wheels packages (manylinux_2_24 tag) for aarch64 and
ppc64le platforms.
- Wheel package bundled with OpenSSL 1.1.1k and PostgreSQL 13.3 libpq.
- Build system for Linux/MacOS binary packages moved to GitHub Actions.
What's new in psycopg 2.8.7
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Accept empty params as `~psycopg2.connect()` (:ticket:`#1250`).
- Fix attributes refcount in `Column` initialisation (:ticket:`#1252`).
- Allow re-initialisation of static variables in the C module (:ticket:`#1267`).
What's new in psycopg 2.8.6
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Fixed memory leak changing connection encoding to the current one
(:ticket:`#1101`).
- Fixed search of mxDateTime headers in virtualenvs (:ticket:`#996`).
- Added missing values from errorcodes (:ticket:`#1133`).
- `cursor.query` reports the query of the last :sql:`COPY` operation too
(:ticket:`#1141`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
PostgreSQL 13.
- Added wheel packages for ARM architecture (:ticket:`#1125`).
- Wheel package bundled with OpenSSL 1.1.1g.
What's new in psycopg 2.8.5
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Fixed use of `!connection_factory` and `!cursor_factory` together
(:ticket:`#1019`).
- Added support for `~logging.LoggerAdapter` in
`~psycopg2.extras.LoggingConnection` (:ticket:`#1026`).
- `~psycopg2.extensions.Column` objects in `cursor.description` can be sliced
(:ticket:`#1034`).
- Added AIX support (:ticket:`#1061`).
- Fixed `~copy.copy()` of `~psycopg2.extras.DictCursor` rows (:ticket:`#1073`).
What's new in psycopg 2.8.4
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Fixed building with Python 3.8 (:ticket:`#854`).
- Don't swallow keyboard interrupts on connect when a password is specified
in the connection string (:ticket:`#898`).
- Don't advance replication cursor when the message wasn't confirmed
(:ticket:`#940`).
- Fixed inclusion of ``time.h`` on linux (:ticket:`#951`).
- Fixed int overflow for large values in `~psycopg2.extensions.Column.table_oid`
and `~psycopg2.extensions.Column.type_code` (:ticket:`#961`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
PostgreSQL 12.
- Wheel package bundled with OpenSSL 1.1.1d and PostgreSQL at least 11.4.
What's new in psycopg 2.8.3
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Added *interval_status* parameter to
`~psycopg2.extras.ReplicationCursor.start_replication()` method and other
facilities to send automatic replication keepalives at periodic intervals
(:ticket:`#913`).
- Fixed namedtuples caching introduced in 2.8 (:ticket:`#928`).
What's new in psycopg 2.8.2
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Fixed `~psycopg2.extras.RealDictCursor` when there are repeated columns
(:ticket:`#884`).
- Binary packages built with openssl 1.1.1b. Should fix concurrency problems
(:tickets:`#543, #836`).
What's new in psycopg 2.8.1
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Fixed `~psycopg2.extras.RealDictRow` modifiability (:ticket:`#886`).
- Fixed "there's no async cursor" error polling a connection with no cursor
(:ticket:`#887`).
What's new in psycopg 2.8
-------------------------
New features:
- Added `~psycopg2.errors` module. Every PostgreSQL error is converted into
a specific exception class (:ticket:`#682`).
- Added `~psycopg2.extensions.encrypt_password()` function (:ticket:`#576`).
- Added `~psycopg2.extensions.BYTES` adapter to manage databases with mixed
encodings on Python 3 (:ticket:`#835`).
- Added `~psycopg2.extensions.Column.table_oid` and
`~psycopg2.extensions.Column.table_column` attributes on `cursor.description`
items (:ticket:`#661`).
- Added `connection.info` object to retrieve various PostgreSQL connection
information (:ticket:`#726`).
- Added `~connection.get_native_connection()` to expose the raw ``PGconn``
structure to C extensions via Capsule (:ticket:`#782`).
- Added `~connection.pgconn_ptr` and `~cursor.pgresult_ptr` to expose raw
C structures to Python and interact with libpq via ctypes (:ticket:`#782`).
- `~psycopg2.sql.Identifier` can represent qualified names in SQL composition
(:ticket:`#732`).
- Added `!ReplicationCursor`.\ `~psycopg2.extras.ReplicationCursor.wal_end`
attribute (:ticket:`#800`).
- Added *fetch* parameter to `~psycopg2.extras.execute_values()` function
(:ticket:`#813`).
- `!str()` on `~psycopg2.extras.Range` produces a human-readable representation
(:ticket:`#773`).
- `~psycopg2.extras.DictCursor` and `~psycopg2.extras.RealDictCursor` rows
maintain columns order (:ticket:`#177`).
- Added `~psycopg2.extensions.Diagnostics.severity_nonlocalized` attribute on
the `~psycopg2.extensions.Diagnostics` object (:ticket:`#783`).
- More efficient `~psycopg2.extras.NamedTupleCursor` (:ticket:`#838`).
Bug fixes:
- Fixed connections occasionally broken by the unrelated use of the
multiprocessing module (:ticket:`#829`).
- Fixed async communication blocking if results are returned in different
chunks, e.g. with notices interspersed to the results (:ticket:`#856`).
- Fixed adaptation of numeric subclasses such as `~enum.IntEnum`
(:ticket:`#591`).
Other changes:
- Dropped support for Python 2.6, 3.2, 3.3.
- Dropped `psycopg1` module.
- Dropped deprecated `!register_tstz_w_secs()` (was previously a no-op).
- Dropped deprecated `!PersistentConnectionPool`. This pool class was mostly
designed to interact with Zope. Use `!ZPsycopgDA.pool` instead.
- Binary packages no longer installed by default. The 'psycopg2-binary'
package must be used explicitly.
- Dropped `!PSYCOPG_DISPLAY_SIZE` build parameter.
- Dropped support for mxDateTime as the default date and time adapter.
mxDatetime support continues to be available as an alternative to Python's
builtin datetime.
- No longer use 2to3 during installation for Python 2 & 3 compatibility. All
source files are now compatible with Python 2 & 3 as is.
- The `!psycopg2.test` package is no longer installed by ``python setup.py
install``.
- Wheel package bundled with OpenSSL 1.0.2r and PostgreSQL 11.2 libpq.
What's new in psycopg 2.7.7
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Cleanup of the cursor results assignment code, which might have solved
double free and inconsistencies in concurrent usage (:tickets:`#346, #384`).
- Wheel package bundled with OpenSSL 1.0.2q.
What's new in psycopg 2.7.6.1
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Fixed binary package broken on OS X 10.12 (:ticket:`#807`).
- Wheel package bundled with PostgreSQL 11.1 libpq.
What's new in psycopg 2.7.6
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Close named cursors if exist, even if `~cursor.execute()` wasn't called
(:ticket:`#746`).
- Fixed building on modern FreeBSD versions with Python 3.7 (:ticket:`#755`).
- Fixed hang trying to :sql:`COPY` via `~cursor.execute()` in asynchronous
connections (:ticket:`#781`).
- Fixed adaptation of arrays of empty arrays (:ticket:`#788`).
- Fixed segfault accessing the connection's `~connection.readonly` and
`~connection.deferrable` attributes repeatedly (:ticket:`#790`).
- `~psycopg2.extras.execute_values()` accepts `~psycopg2.sql.Composable`
objects (:ticket:`#794`).
- `~psycopg2.errorcodes` map updated to PostgreSQL 11.
- Wheel package bundled with PostgreSQL 10.5 libpq and OpenSSL 1.0.2p.
What's new in psycopg 2.7.5
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Allow non-ascii chars in namedtuple fields (regression introduced fixing
:ticket:`#211`).
- Fixed adaptation of arrays of arrays of nulls (:ticket:`#325`).
- Fixed building on Solaris 11 and derivatives such as SmartOS and illumos
(:ticket:`#677`).
- Maybe fixed building on MSYS2 (as reported in :ticket:`#658`).
- Allow string subclasses in connection and other places (:ticket:`#679`).
- Don't raise an exception closing an unused named cursor (:ticket:`#716`).
- Wheel package bundled with PostgreSQL 10.4 libpq and OpenSSL 1.0.2o.
What's new in psycopg 2.7.4
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Moving away from installing the wheel package by default.
Packages installed from wheel raise a warning on import. Added package
``psycopg2-binary`` to install from wheel instead (:ticket:`#543`).
- Convert fields names into valid Python identifiers in
`~psycopg2.extras.NamedTupleCursor` (:ticket:`#211`).
- Fixed Solaris 10 support (:ticket:`#532`).
- `cursor.mogrify()` can be called on closed cursors (:ticket:`#579`).
- Fixed setting session characteristics in corner cases on autocommit
connections (:ticket:`#580`).
- Fixed `~psycopg2.extras.MinTimeLoggingCursor` on Python 3 (:ticket:`#609`).
- Fixed parsing of array of points as floats (:ticket:`#613`).
- Fixed `~psycopg2.__libpq_version__` building with libpq >= 10.1
(:ticket:`#632`).
- Fixed `~cursor.rowcount` after `~cursor.executemany()` with :sql:`RETURNING`
statements (:ticket:`#633`).
- Fixed compatibility problem with pypy3 (:ticket:`#649`).
- Wheel packages bundled with PostgreSQL 10.1 libpq and OpenSSL 1.0.2n.
- Wheel packages for Python 2.6 no more available (support dropped from
wheel building infrastructure).
What's new in psycopg 2.7.3.2
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Wheel package bundled with PostgreSQL 10.0 libpq and OpenSSL 1.0.2l
(:tickets:`#601, #602`).
What's new in psycopg 2.7.3.1
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Dropped libresolv from wheel package to avoid incompatibility with
glibc 2.26 (wheels ticket #2).
What's new in psycopg 2.7.3
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Restored default :sql:`timestamptz[]` typecasting to Python `!datetime`.
Regression introduced in Psycopg 2.7.2 (:ticket:`#578`).
What's new in psycopg 2.7.2
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Fixed inconsistent state in externally closed connections
(:tickets:`#263, #311, #443`). Was fixed in 2.6.2 but not included in
2.7 by mistake.
- Fixed Python exceptions propagation in green callback (:ticket:`#410`).
- Don't display the password in `connection.dsn` when the connection
string is specified as an URI (:ticket:`#528`).
- Return objects with timezone parsing "infinity" :sql:`timestamptz`
(:ticket:`#536`).
- Dropped dependency on VC9 runtime on Windows binary packages
(:ticket:`#541`).
- Fixed segfault in `~connection.lobject()` when *mode*\=\ `!None`
(:ticket:`#544`).
- Fixed `~connection.lobject()` keyword argument *lobject_factory*
(:ticket:`#545`).
- Fixed `~psycopg2.extras.ReplicationCursor.consume_stream()`
*keepalive_interval* argument (:ticket:`#547`).
- Maybe fixed random import error on Python 3.6 in multiprocess
environment (:ticket:`#550`).
- Fixed random `!SystemError` upon receiving abort signal (:ticket:`#551`).
- Accept `~psycopg2.sql.Composable` objects in
`~psycopg2.extras.ReplicationCursor.start_replication_expert()`
(:ticket:`#554`).
- Parse intervals returned as microseconds from Redshift (:ticket:`#558`).
- Added `~psycopg2.extras.Json` `!prepare()` method to consider connection
params when adapting (:ticket:`#562`).
- `~psycopg2.errorcodes` map updated to PostgreSQL 10 beta 1.
What's new in psycopg 2.7.1
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Ignore `!None` arguments passed to `~psycopg2.connect()` and
`~psycopg2.extensions.make_dsn()` (:ticket:`#517`).
- OpenSSL upgraded from major version 0.9.8 to 1.0.2 in the Linux wheel
packages (:ticket:`#518`).
- Fixed build with libpq versions < 9.3 (:ticket:`#520`).
What's new in psycopg 2.7
-------------------------
@ -407,7 +15,7 @@ New features:
them together.
- Added `~psycopg2.__libpq_version__` and
`~psycopg2.extensions.libpq_version()` to inspect the version of the
``libpq`` library the module was bundled with
``libpq`` library the module was compiled/loaded with
(:tickets:`#35, #323`).
- The attributes `~connection.notices` and `~connection.notifies` can be
customized replacing them with any object exposing an `!append()` method
@ -433,17 +41,12 @@ New features:
(:ticket:`#503`).
- `~connection.isolation_level` is now writable and entirely separated from
`~connection.autocommit`; added `~connection.readonly`,
`~connection.deferrable` writable attributes.
`connection.deferrable` writable attributes.
Bug fixes:
- Throw an exception trying to pass ``NULL`` chars as parameters
(:ticket:`#420`).
- Fixed error caused by missing decoding `~psycopg2.extras.LoggingConnection`
(:ticket:`#483`).
- Fixed integer overflow in :sql:`interval` seconds (:ticket:`#512`).
- Make `~psycopg2.extras.Range` objects picklable (:ticket:`#462`).
- Fixed version parsing and building with PostgreSQL 10 (:ticket:`#489`).
Other changes:
@ -457,11 +60,17 @@ Other changes:
(:ticket:`#506`)
What's new in psycopg 2.6.3
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Throw an exception trying to pass ``NULL`` chars as parameters
(:ticket:`#420`).
- Make `~psycopg2.extras.Range` objects picklable (:ticket:`#462`).
What's new in psycopg 2.6.2
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Fixed inconsistent state in externally closed connections
(:tickets:`#263, #311, #443`).
- Report the server response status on errors (such as :ticket:`#281`).
- Raise `!NotSupportedError` on unhandled server response status
(:ticket:`#352`).
@ -471,8 +80,7 @@ What's new in psycopg 2.6.2
(:ticket:`#333`).
- Fixed `!PersistentConnectionPool` on Python 3 (:ticket:`#348`).
- Fixed segfault on `repr()` of an uninitialized connection (:ticket:`#361`).
- Allow adapting bytes using `~psycopg2.extensions.QuotedString` on Python 3
(:ticket:`#365`).
- Allow adapting bytes using QuotedString on Python 3 too (:ticket:`#365`).
- Added support for setuptools/wheel (:ticket:`#370`).
- Fix build on Windows with Python 3.5, VS 2015 (:ticket:`#380`).
- Fixed `!errorcodes.lookup` initialization thread-safety (:ticket:`#382`).
@ -621,7 +229,7 @@ Other changes:
- Dropped support for Python 2.4. Please use Psycopg 2.4.x if you need it.
- `~psycopg2.errorcodes` map updated to PostgreSQL 9.2.
- Dropped Zope adapter from source repository. ZPsycopgDA now has its own
project at <https://github.com/psycopg/ZPsycopgDA>.
project at <http://github.com/psycopg/ZPsycopgDA>.
What's new in psycopg 2.4.6
@ -1245,7 +853,7 @@ What's new in psycopg 2.0 beta 7
What's new in psycopg 2.0 beta 6
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
* Support for named cursors.
* Support for named cursors (see examples/fetch.py).
* Safer parsing of time intervals.
@ -1275,7 +883,7 @@ What's new in psycopg 2.0 beta 5
* All classes have been renamed to exist in the psycopg2._psycopg module,
to fix problems with automatic documentation generators like epydoc.
* NOTIFY is correctly trapped.
* NOTIFY is correctly trapped (see examples/notify.py for example code.)
What's new in psycopg 2.0 beta 4
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@ -1392,7 +1000,8 @@ What's new in psycopg 1.99.10
What's new in psycopg 1.99.9
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
* Added simple pooling code (psycopg.pool module).
* Added simple pooling code (psycopg.pool module); see the reworked
examples/threads.py for example code.
* Added DECIMAL typecaster to convert postgresql DECIMAL and NUMERIC
types (i.e, all types with an OID of NUMERICOID.) Note that the


@ -17,64 +17,49 @@ flexible objects adaptation system.
Psycopg 2 is both Unicode and Python 3 friendly.
.. Note::
The psycopg2 package is still widely used and actively maintained, but it
is not expected to receive new features.
`Psycopg 3`__ is the evolution of psycopg2 and is where `new features are
being developed`__: if you are starting a new project you should probably
start from 3!
.. __: https://pypi.org/project/psycopg/
.. __: https://www.psycopg.org/psycopg3/docs/index.html
Documentation
-------------
Documentation is included in the ``doc`` directory and is `available online`__.
Documentation is included in the 'doc' directory and is `available online`__.
.. __: https://www.psycopg.org/docs/
For any other resource (source code repository, bug tracker, mailing list)
please check the `project homepage`__.
.. __: https://psycopg.org/
.. __: http://initd.org/psycopg/docs/
Installation
------------
Building Psycopg requires a few prerequisites (a C compiler, some development
packages): please check the install_ and the faq_ documents in the ``doc`` dir
or online for the details.
If your ``pip`` version supports wheel_ packages it should be possible to
install a binary version of Psycopg including all the dependencies. Just run::
If prerequisites are met, you can install psycopg like any other Python
package, using ``pip`` to download it from PyPI_::
pip install psycopg2
$ pip install psycopg2
If you want to build Psycopg from source you will need some prerequisite (a C
compiler, Python and libpq development packages). If you have what you need
the standard::
or using ``setup.py`` if you have downloaded the source package locally::
python setup.py build
sudo python setup.py install
$ python setup.py build
$ sudo python setup.py install
should work no problem. In case you have any problem check the 'install' and
the 'faq' documents in the docs or online__.
You can also obtain a stand-alone package, not requiring a compiler or
external libraries, by installing the `psycopg2-binary`_ package from PyPI::
.. _wheel: http://pythonwheels.com/
.. __: http://initd.org/psycopg/docs/install.html#install-from-source
$ pip install psycopg2-binary
For any other resource (source code repository, bug tracker, mailing list)
please check the `project homepage`__.
The binary package is a practical choice for development and testing but in
production it is advised to use the package built from sources.
.. __: http://initd.org/psycopg/
.. _PyPI: https://pypi.org/project/psycopg2/
.. _psycopg2-binary: https://pypi.org/project/psycopg2-binary/
.. _install: https://www.psycopg.org/docs/install.html#install-from-source
.. _faq: https://www.psycopg.org/docs/faq.html#faq-compile
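Once installed, usage follows the DB API 2.0 interface. A minimal sketch (not part of this README; the connection parameters and the query are placeholders)::

    import psycopg2

    # Connect to an existing database; the DSN values are illustrative only.
    conn = psycopg2.connect(dbname="test", user="postgres",
                            password="secret", host="localhost")
    cur = conn.cursor()

    # Execute a query and fetch the result.
    cur.execute("SELECT version()")
    print(cur.fetchone()[0])

    cur.close()
    conn.close()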
:Build status: |gh-actions|
:Linux/OSX: |travis|
:Windows: |appveyor|
.. |gh-actions| image:: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml/badge.svg
:target: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml
:alt: Build status
.. |travis| image:: https://travis-ci.org/psycopg/psycopg2.svg?branch=master
:target: https://travis-ci.org/psycopg/psycopg2
:alt: Linux and OSX build status
.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/github/psycopg/psycopg2?svg=true
:target: https://ci.appveyor.com/project/psycopg/psycopg2
:alt: Windows build status

doc/.gitignore

@ -1,8 +0,0 @@
env
src/_build/*
html/*
psycopg2.txt
src/sqlstate_errors.rst
# Added by psycopg-website to customize published docs
src/_templates/layout.html


@ -1,7 +1,7 @@
GNU LESSER GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
@ -10,7 +10,7 @@
the terms and conditions of version 3 of the GNU General Public
License, supplemented by the additional permissions listed below.
0. Additional Definitions.
As used herein, "this License" refers to version 3 of the GNU Lesser
General Public License, and the "GNU GPL" refers to version 3 of the GNU
@ -111,7 +111,7 @@ the following:
a copy of the Library already present on the user's computer
system, and (b) will operate properly with a modified version
of the Library that is interface-compatible with the Linked
Version.
e) Provide Installation Information, but only if you would otherwise
be required to provide such information under section 6 of the


@ -1,6 +1,6 @@
.PHONY: env help clean html package doctest
.PHONY: env help clean html text doctest
docs: html
docs: html text
check: doctest
@ -8,32 +8,34 @@ check: doctest
# It is not clean by 'make clean'
PYTHON := python$(PYTHON_VERSION)
PYTHON_VERSION ?= $(shell $(PYTHON) -c 'import sys; print("%d.%d" % sys.version_info[:2])')
BUILD_DIR = $(shell pwd)/../build/lib.$(PYTHON_VERSION)
PYTHON_VERSION ?= $(shell $(PYTHON) -c 'import sys; print ("%d.%d" % sys.version_info[:2])')
SPHINXBUILD ?= $$(pwd)/env/bin/sphinx-build
SPHOPTS = SPHINXBUILD=$(SPHINXBUILD)
SPHOPTS=PYTHONPATH=$$(pwd)/../build/lib.$(PYTHON_VERSION)/ SPHINXBUILD=$$(pwd)/env/bin/sphinx-build
html: package src/sqlstate_errors.rst
html:
$(MAKE) PYTHON=$(PYTHON) -C .. package
$(MAKE) $(SPHOPTS) -C src $@
cp -r src/_build/html .
src/sqlstate_errors.rst: ../psycopg/sqlstate_errors.h $(BUILD_DIR)
./env/bin/python src/tools/make_sqlstate_docs.py $< > $@
$(BUILD_DIR):
text:
$(MAKE) PYTHON=$(PYTHON) -C .. package
$(MAKE) $(SPHOPTS) -C src $@
cd src && tools/stitch_text.py index.rst _build/text > ../psycopg2.txt
doctest:
$(MAKE) PYTHON=$(PYTHON) -C .. package
$(MAKE) $(SPHOPTS) -C src $@
upload:
# this command requires ssh configured to the proper target
tar czf - -C html . | ssh psycoweb tar xzvf - -C docs/current
# this command requires a .pypirc with the right privileges
python src/tools/pypi_docs_upload.py psycopg2 $$(pwd)/html
clean:
$(MAKE) $(SPHOPTS) -C src $@
rm -rf html src/sqlstate_errors.rst
rm -rf html psycopg2.txt
env: requirements.txt
$(PYTHON) -m venv env
virtualenv env
./env/bin/pip install -r requirements.txt
echo "$$(pwd)/../build/lib.$(PYTHON_VERSION)" \
> env/lib/python$(PYTHON_VERSION)/site-packages/psycopg.pth


@ -6,7 +6,7 @@ introspection, so you will need the same prerequisites_. The only extra
prerequisite is virtualenv_: the packages needed to build the docs will be
installed when building the env.
.. _prerequisites: https://www.psycopg.org/docs/install.html#install-from-source
.. _prerequisites: http://initd.org/psycopg/docs/install.html#install-from-source
.. _virtualenv: https://virtualenv.pypa.io/en/latest/
Build the env once with::
@ -17,4 +17,10 @@ Then you can build the documentation with::
make
You should find the rendered documentation in the ``html`` directory.
Or the single targets::
make html
make text
You should find the rendered documentation in the ``html`` dir and the text
file ``psycopg2.txt``.


@ -1,10 +1,10 @@
From: Jack Moffitt <jack@xiph.org>
To: Psycopg Mailing List <psycopg@lists.initd.org>
Subject: Re: [Psycopg] preparing for 1.0
Date: 22 Oct 2001 11:16:21 -0600
www.vorbis.com is serving from 5-10k pages per day with psycopg serving
data for most of that.
I plan to use it for several of our other sites, so that number will
increase.
@ -19,7 +19,7 @@ jack.
From: Yury Don <gercon@vpcit.ru>
To: Psycopg Mailing List <psycopg@lists.initd.org>
Subject: Re: [Psycopg] preparing for 1.0
Date: 23 Oct 2001 09:53:11 +0600
We use psycopg and psycopg zope adapter since fisrt public
release (it seems version 0.4). Now it works on 3 our sites and in intranet
@ -32,7 +32,7 @@ to solve the problem, even thouth my knowledge of c were poor.
BTW, segfault with dictfetchall on particular data set (see [Psycopg]
dictfetchXXX() problems) disappeared in 0.99.8pre2.
--
Best regards,
Yury Don
@ -42,7 +42,7 @@ To: Federico Di Gregorio <fog@debian.org>
Cc: Psycopg Mailing List <psycopg@lists.initd.org>
Subject: Re: [Psycopg] preparing for 1.0
Date: 23 Oct 2001 08:25:52 -0400
The US Govt Department of Labor's Office of Disability Employment
Policy's DisabilityDirect website is run on zope and zpsycopg.
@ -50,7 +50,7 @@ Policy's DisabilityDirect website is run on zope and zpsycopg.
From: Scott Leerssen <sleerssen@racemi.com>
To: Federico Di Gregorio <fog@debian.org>
Subject: Re: [Psycopg] preparing for 1.0
Date: 23 Oct 2001 09:56:10 -0400
Racemi's load management software infrastructure uses psycopg to handle
complex server allocation decisions, plus storage and access of
@ -66,10 +66,10 @@ From: Andre Schubert <andre.schubert@geyer.kabeljournal.de>
To: Federico Di Gregorio <fog@debian.org>
Cc: Psycopg Mailing List <psycopg@lists.initd.org>
Subject: Re: [Psycopg] preparing for 1.0
Date: 23 Oct 2001 11:46:07 +0200
i have changed the psycopg version to 0.99.8pre2 on all devel-machines
and all segfaults are gone. after my holiday i wil change to 0.99.8pre2
or 1.0 on our production-server.
this server contains several web-sites which are all connected to
postgres over ZPsycopgDA.
@ -81,7 +81,7 @@ From: Fred Wilson Horch <fhorch@ecoaccess.org>
To: <psycopg@lists.initd.org>
Subject: [Psycopg] Success story for psycopg
Date: 23 Oct 2001 10:59:17 -0400
Due to various quirks of PyGreSQL and PoPy, EcoAccess has been looking for
a reliable, fast and relatively bug-free Python-PostgreSQL interface for
our project.
@ -98,7 +98,7 @@ reports and feature requests, and we're looking forward to using psycopg
as the Python interface for additional database-backed web applications.
Keep up the good work!
--
Fred Wilson Horch mailto:fhorch@ecoaccess.org
Executive Director, EcoAccess http://ecoaccess.org/


@ -9,15 +9,15 @@ Replaces: 248
Release-Date: 07 Apr 1999
Introduction
This API has been defined to encourage similarity between the
Python modules that are used to access databases. By doing this,
we hope to achieve a consistency leading to more easily understood
modules, code that is generally more portable across databases,
and a broader reach of database connectivity from Python.
The interface specification consists of several sections:
* Module Interface
* Connection Objects
* Cursor Objects
@ -25,7 +25,7 @@ Introduction
* Type Objects and Constructors
* Implementation Hints
* Major Changes from 1.0 to 2.0
Comments and questions about this specification may be directed
to the SIG for Database Interfacing with Python
(db-sig@python.org).
@ -41,7 +41,7 @@ Introduction
basis for new interfaces.
Module Interface
Access to the database is made available through connection
objects. The module must provide the following constructor for
these:
@ -51,17 +51,17 @@ Module Interface
Constructor for creating a connection to the database.
Returns a Connection Object. It takes a number of
parameters which are database dependent. [1]
These module globals must be defined:
apilevel
String constant stating the supported DB API level.
Currently only the strings '1.0' and '2.0' are allowed.
If not given, a DB-API 1.0 level interface should be
assumed.
threadsafety
Integer constant stating the level of thread safety the
@ -81,33 +81,33 @@ Module Interface
or other external sources that are beyond your control.
paramstyle
String constant stating the type of parameter marker
formatting expected by the interface. Possible values are
[2]:
'qmark' Question mark style,
e.g. '...WHERE name=?'
'numeric' Numeric, positional style,
e.g. '...WHERE name=:1'
'named' Named style,
e.g. '...WHERE name=:name'
'format' ANSI C printf format codes,
e.g. '...WHERE name=%s'
'pyformat' Python extended format codes,
e.g. '...WHERE name=%(name)s'
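(For reference, psycopg2 reports ``apilevel = '2.0'``, ``threadsafety = 2`` and ``paramstyle = 'pyformat'``. Below is a brief, non-authoritative sketch of passing parameters in both the positional and named forms; the DSN and the ``users`` table are placeholders, not part of the specification.)::

    import psycopg2

    print(psycopg2.apilevel)      # '2.0'
    print(psycopg2.threadsafety)  # 2: threads may share the module and connections
    print(psycopg2.paramstyle)    # 'pyformat'

    conn = psycopg2.connect("dbname=test")  # placeholder DSN
    cur = conn.cursor()

    # Positional ('format') and named ('pyformat') markers are both accepted.
    cur.execute("SELECT * FROM users WHERE name = %s", ("alice",))
    cur.execute("SELECT * FROM users WHERE name = %(name)s", {"name": "alice"})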
The module should make all error information available through
these exceptions or subclasses thereof:
Warning
Exception raised for important warnings like data
truncations while inserting, etc. It must be a subclass of
the Python StandardError (defined in the module
exceptions).
Error
Exception that is the base class of all other error
exceptions. You can use this to catch all errors with one
@ -115,7 +115,7 @@ Module Interface
errors and thus should not use this class as base. It must
be a subclass of the Python StandardError (defined in the
module exceptions).
InterfaceError
Exception raised for errors that are related to the
@ -126,50 +126,50 @@ Module Interface
Exception raised for errors that are related to the
database. It must be a subclass of Error.
DataError
Exception raised for errors that are due to problems with
the processed data like division by zero, numeric value
out of range, etc. It must be a subclass of DatabaseError.
OperationalError
Exception raised for errors that are related to the
database's operation and not necessarily under the control
of the programmer, e.g. an unexpected disconnect occurs,
the data source name is not found, a transaction could not
be processed, a memory allocation error occurred during
processing, etc. It must be a subclass of DatabaseError.
IntegrityError
Exception raised when the relational integrity of the
database is affected, e.g. a foreign key check fails. It
must be a subclass of DatabaseError.
InternalError
Exception raised when the database encounters an internal
error, e.g. the cursor is not valid anymore, the
transaction is out of sync, etc. It must be a subclass of
DatabaseError.
ProgrammingError
Exception raised for programming errors, e.g. table not
found or already exists, syntax error in the SQL
statement, wrong number of parameters specified, etc. It
must be a subclass of DatabaseError.
NotSupportedError
Exception raised in case a method or database API was used
which is not supported by the database, e.g. requesting a
.rollback() on a connection that does not support
transaction or has transactions turned off. It must be a
subclass of DatabaseError.
This is the exception inheritance layout:
StandardError
@ -183,17 +183,17 @@ Module Interface
|__InternalError
|__ProgrammingError
|__NotSupportedError
Note: The values of these exceptions are not defined. They should
give the user a fairly good idea of what went wrong, though.
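(In psycopg2 all of these classes are exposed as attributes of the top-level module, and recent releases also provide finer-grained subclasses in ``psycopg2.errors``. A minimal sketch of catching them follows; the DSN and table name are invented for illustration.)::

    import psycopg2

    conn = psycopg2.connect("dbname=test")  # placeholder DSN
    cur = conn.cursor()
    try:
        cur.execute("SELECT * FROM no_such_table")  # made-up table name
    except psycopg2.ProgrammingError as exc:
        # e.g. an undefined table; pgcode carries the SQLSTATE error code
        print(exc.pgcode, exc)
        conn.rollback()
    except psycopg2.Error:
        # Error is the base class of all the other DB API exceptions
        conn.rollback()
        raise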
Connection Objects
Connection Objects should respond to the following methods:
.close()
Close the connection now (rather than whenever __del__ is
called). The connection will be unusable from this point
forward; an Error (or subclass) exception will be raised
@ -203,52 +203,52 @@ Connection Objects
committing the changes first will cause an implicit
rollback to be performed.
.commit()
Commit any pending transaction to the database. Note that
if the database supports an auto-commit feature, this must
be initially off. An interface method may be provided to
turn it back on.
Database modules that do not support transactions should
implement this method with void functionality.
.rollback()
This method is optional since not all databases provide
transaction support. [3]
In case a database does provide transactions this method
causes the database to roll back to the start of any
pending transaction. Closing a connection without
committing the changes first will cause an implicit
rollback to be performed.
.cursor()
Return a new Cursor Object using the connection. If the
database does not provide a direct cursor concept, the
module will have to emulate cursors using other means to
the extent needed by this specification. [4]
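A minimal sketch of the connection life cycle described above, again
using the standard library's sqlite3 module as a stand-in for any
compliant driver:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    cur = conn.cursor()                        # .cursor()
    cur.execute("CREATE TABLE t (x INTEGER)")
    cur.execute("INSERT INTO t VALUES (1)")
    conn.commit()      # make the pending transaction permanent
    conn.rollback()    # would discard uncommitted work (none left here)
    conn.close()       # the connection is unusable from now on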
Cursor Objects
These objects represent a database cursor, which is used to
manage the context of a fetch operation. Cursors created from
the same connection are not isolated, i.e., any changes
done to the database by a cursor are immediately visible by the
other cursors. Cursors created from different connections can
or can not be isolated, depending on how the transaction support
is implemented (see also the connection's rollback() and commit()
methods.)
Cursor Objects should respond to the following methods and
attributes:
.description
This read-only attribute is a sequence of 7-item
sequences. Each of these sequences contains information
describing one result column: (name, type_code,
@ -260,17 +260,17 @@ Cursor Objects
This attribute will be None for operations that
do not return rows or if the cursor has not had an
operation invoked via the executeXXX() method yet.
The type_code can be interpreted by comparing it to the
Type Objects specified in the section below.
.rowcount
This read-only attribute specifies the number of rows that
the last executeXXX() produced (for DQL statements like
'select') or affected (for DML statements like 'update' or
'insert').
The attribute is -1 in case no executeXXX() has been
performed on the cursor or the rowcount of the last
operation is not determinable by the interface. [7]
@ -278,96 +278,96 @@ Cursor Objects
Note: Future versions of the DB API specification could
redefine the latter case to have the object return None
instead of -1.
.callproc(procname[,parameters])
(This method is optional since not all databases provide
stored procedures. [3])
Call a stored database procedure with the given name. The
sequence of parameters must contain one entry for each
argument that the procedure expects. The result of the
call is returned as modified copy of the input
sequence. Input parameters are left untouched, output and
input/output parameters replaced with possibly new values.
The procedure may also provide a result set as
output. This must then be made available through the
standard fetchXXX() methods.
.close()
Close the cursor now (rather than whenever __del__ is
called). The cursor will be unusable from this point
forward; an Error (or subclass) exception will be raised
if any operation is attempted with the cursor.
.execute(operation[,parameters])
Prepare and execute a database operation (query or
command). Parameters may be provided as sequence or
mapping and will be bound to variables in the operation.
Variables are specified in a database-specific notation
(see the module's paramstyle attribute for details). [5]
A reference to the operation will be retained by the
cursor. If the same operation object is passed in again,
then the cursor can optimize its behavior. This is most
effective for algorithms where the same operation is used,
but different parameters are bound to it (many times).
For maximum efficiency when reusing an operation, it is
best to use the setinputsizes() method to specify the
parameter types and sizes ahead of time. It is legal for
a parameter to not match the predefined information; the
implementation should compensate, possibly with a loss of
efficiency.
The parameters may also be specified as list of tuples to
e.g. insert multiple rows in a single operation, but this
kind of usage is deprecated: executemany() should be used
instead.
Return values are not defined.
.executemany(operation,seq_of_parameters)
Prepare a database operation (query or command) and then
execute it against all parameter sequences or mappings
found in the sequence seq_of_parameters.
Modules are free to implement this method using multiple
calls to the execute() method or by using array operations
to have the database process the sequence as a whole in
one call.
Use of this method for an operation which produces one or
more result sets constitutes undefined behavior, and the
implementation is permitted (but not required) to raise
an exception when it detects that a result set has been
created by an invocation of the operation.
The same comments as for execute() also apply accordingly
to this method.
Return values are not defined.
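A short sketch of the difference between the two methods (sqlite3 used
as a placeholder module; the table is invented for the example):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    cur = conn.cursor()
    cur.execute("CREATE TABLE t (num INTEGER, data TEXT)")
    # execute() binds a single parameter sequence or mapping
    cur.execute("INSERT INTO t VALUES (?, ?)", (1, "one"))
    # executemany() repeats the operation for every item in the sequence
    cur.executemany("INSERT INTO t VALUES (?, ?)", [(2, "two"), (3, "three")])
    conn.commit()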
.fetchone()
Fetch the next row of a query result set, returning a
single sequence, or None when no more data is
available. [6]
An Error (or subclass) exception is raised if the previous
call to executeXXX() did not produce any result set or no
call was issued yet.
.fetchmany([size=cursor.arraysize])
Fetch the next set of rows of a query result, returning a
sequence of sequences (e.g. a list of tuples). An empty
sequence is returned when no more rows are available.
The number of rows to fetch per call is specified by the
parameter. If it is not given, the cursor's arraysize
determines the number of rows to be fetched. The method
@ -375,62 +375,62 @@ Cursor Objects
parameter. If this is not possible due to the specified
number of rows not being available, fewer rows may be
returned.
An Error (or subclass) exception is raised if the previous
call to executeXXX() did not produce any result set or no
call was issued yet.
Note there are performance considerations involved with
the size parameter. For optimal performance, it is
usually best to use the arraysize attribute. If the size
parameter is used, then it is best for it to retain the
same value from one fetchmany() call to the next.
.fetchall()
Fetch all (remaining) rows of a query result, returning
them as a sequence of sequences (e.g. a list of tuples).
Note that the cursor's arraysize attribute can affect the
performance of this operation.
An Error (or subclass) exception is raised if the previous
call to executeXXX() did not produce any result set or no
call was issued yet.
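The three fetch methods differ only in how many rows they return at a
time; a small sketch (sqlite3 as a placeholder module):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    cur = conn.cursor()
    cur.execute("CREATE TABLE t (x INTEGER)")
    cur.executemany("INSERT INTO t VALUES (?)", [(i,) for i in range(5)])
    cur.execute("SELECT x FROM t ORDER BY x")
    print(cur.fetchone())     # (0,)
    print(cur.fetchmany(2))   # [(1,), (2,)]
    print(cur.fetchall())     # [(3,), (4,)]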
.nextset()
(This method is optional since not all databases support
multiple result sets. [3])
This method will make the cursor skip to the next
available set, discarding any remaining rows from the
current set.
If there are no more sets, the method returns
None. Otherwise, it returns a true value and subsequent
calls to the fetch methods will return rows from the next
result set.
An Error (or subclass) exception is raised if the previous
call to executeXXX() did not produce any result set or no
call was issued yet.
.arraysize
This read/write attribute specifies the number of rows to
fetch at a time with fetchmany(). It defaults to 1 meaning
to fetch a single row at a time.
Implementations must observe this value with respect to
the fetchmany() method, but are free to interact with the
database a single row at a time. It may also be used in
the implementation of executemany().
.setinputsizes(sizes)
This can be used before a call to executeXXX() to
predefine memory areas for the operation's parameters.
sizes is specified as a sequence -- one item for each
input parameter. The item should be a Type Object that
corresponds to the input that will be used, or it should
@ -438,27 +438,27 @@ Cursor Objects
parameter. If the item is None, then no predefined memory
area will be reserved for that column (this is useful to
avoid predefined areas for large inputs).
This method would be used before the executeXXX() method
is invoked.
Implementations are free to have this method do nothing
and users are free to not use it.
.setoutputsize(size[,column])
Set a column buffer size for fetches of large columns
(e.g. LONGs, BLOBs, etc.). The column is specified as an
index into the result sequence. Not specifying the column
will set the default size for all large columns in the
cursor.
This method would be used before the executeXXX() method
is invoked.
Implementations are free to have this method do nothing
and users are free to not use it.
Type Objects and Constructors
@ -485,15 +485,15 @@ Type Objects and Constructors
Implementation Hints below for details).
The module exports the following constructors and singletons:
Date(year,month,day)
This function constructs an object holding a date value.
Time(hour,minute,second)
This function constructs an object holding a time value.
Timestamp(year,month,day,hour,minute,second)
This function constructs an object holding a time stamp
@ -507,12 +507,12 @@ Type Objects and Constructors
module for details).
TimeFromTicks(ticks)
This function constructs an object holding a time value
from the given ticks value (number of seconds since the
epoch; see the documentation of the standard Python time
module for details).
TimestampFromTicks(ticks)
This function constructs an object holding a time stamp
@ -521,10 +521,10 @@ Type Objects and Constructors
time module for details).
Binary(string)
This function constructs an object capable of holding a
binary (long) string value.
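For example (a sketch, assuming psycopg2 is installed; every compliant
module exposes the same constructors at module level):

    import psycopg2

    d = psycopg2.Date(2024, 1, 31)                    # date value
    t = psycopg2.Time(12, 30, 0)                      # time value
    ts = psycopg2.Timestamp(2024, 1, 31, 12, 30, 0)   # time stamp value
    blob = psycopg2.Binary(b"\x00\x01\x02")           # binary (long) string value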
STRING
@ -535,22 +535,22 @@ Type Objects and Constructors
This type object is used to describe (long) binary columns
in a database (e.g. LONG, RAW, BLOBs).
NUMBER
This type object is used to describe numeric columns in a
database.
DATETIME
This type object is used to describe date/time columns in
a database.
ROWID
This type object is used to describe the "Row ID" column
in a database.
SQL NULL values are represented by the Python None singleton on
input and output.
@ -563,7 +563,7 @@ Implementation Hints for Module Authors
* The preferred object types for the date/time objects are those
defined in the mxDateTime package. It provides all necessary
constructors and methods both at Python and C level.
* The preferred object type for Binary objects are the
buffer types available in standard Python starting with
version 1.5.2. Please see the Python documentation for
@ -577,7 +577,7 @@ Implementation Hints for Module Authors
processing. However, it should be noted that this does not
expose a C API like mxDateTime does which means that integration
with C based database modules is more difficult.
* Here is a sample implementation of the Unix ticks based
constructors for date/time delegating work to the generic
constructors:
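A sketch of such delegating constructors (not the verbatim sample, which
is elided here; Date, Time and Timestamp are assumed to be the generic
constructors described above):

    import time

    def DateFromTicks(ticks):
        return Date(*time.localtime(ticks)[:3])

    def TimeFromTicks(ticks):
        return Time(*time.localtime(ticks)[3:6])

    def TimestampFromTicks(ticks):
        return Timestamp(*time.localtime(ticks)[:6])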
@ -645,7 +645,7 @@ Implementation Hints for Module Authors
class NotSupportedError(DatabaseError):
pass
In C you can use the PyErr_NewException(fullname,
base, NULL) API to create the exception objects.
@ -760,7 +760,7 @@ Optional DB API Extensions
Warning Message: "DB-API extension connection.messages used"
Cursor Method .next()
Return the next row from the currently executing SQL statement
using the same semantics as .fetchone(). A StopIteration
exception is raised when the result set is exhausted for Python
@ -790,13 +790,13 @@ Optional DB API Extensions
Warning Message: "DB-API extension cursor.lastrowid used"
Optional Error Handling Extension
The core DB API specification only introduces a set of exceptions
which can be raised to report errors to the user. In some cases,
exceptions may be too disruptive for the flow of a program or even
render execution impossible.
For these cases and in order to simplify error handling when
dealing with databases, database module authors may choose to
@ -806,7 +806,7 @@ Optional Error Handling Extension
Cursor/Connection Attribute .errorhandler
Read/write attribute which references an error handler to call
in case an error condition is met.
The handler must be a Python callable taking the following
arguments: errorhandler(connection, cursor, errorclass,
@ -836,7 +836,7 @@ Frequently Asked Questions
specification. This section covers some of the issues people
sometimes have with the specification.
Question:
How can I construct a dictionary out of the tuples returned by
.fetchxxx():
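One common, module-independent sketch of an answer is to zip the column
names taken from cursor.description with each fetched row; the helper
name below is invented for the example:

    def rows_as_dicts(cursor):
        names = [d[0] for d in cursor.description]
        return [dict(zip(names, row)) for row in cursor.fetchall()]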
@ -855,7 +855,7 @@ Frequently Asked Questions
* Some databases don't support case-sensitive column names or
auto-convert them to all lowercase or all uppercase
characters.
* Columns in the result set which are generated by the query
(e.g. using SQL functions) don't map to table column names
and databases usually generate names for these columns in a
@ -872,9 +872,9 @@ Major Changes from Version 1.0 to Version 2.0
compared to the 1.0 version. Because some of these changes will
cause existing DB API 1.0 based scripts to break, the major
version number was adjusted to reflect this change.
These are the most important changes from 1.0 to 2.0:
* The need for a separate dbi module was dropped and the
functionality merged into the module interface itself.
@ -886,10 +886,10 @@ Major Changes from Version 1.0 to Version 2.0
* New constants (apilevel, threadlevel, paramstyle) and
methods (executemany, nextset) were added to provide better
database bindings.
* The semantics of .callproc() needed to call stored
procedures are now clearly defined.
* The definition of the .execute() return value changed.
Previously, the return value was based on the SQL statement
type (which was hard to implement right) -- it is undefined
@ -898,7 +898,7 @@ Major Changes from Version 1.0 to Version 2.0
values, but these are no longer mandated by the
specification and should be considered database interface
dependent.
* Class based exceptions were incorporated into the
specification. Module implementors are free to extend the
exception layout defined in this specification by
@ -916,10 +916,10 @@ Open Issues
questions that were left open in the 1.0 version, there are still
some remaining issues which should be addressed in future
versions:
* Define a useful return value for .nextset() for the case where
a new result set is available.
* Create a fixed point numeric type for use as loss-less
monetary and decimal interchange format.
@ -929,17 +929,17 @@ Footnotes
[1] As a guideline the connection constructor parameters should be
implemented as keyword parameters for more intuitive use and
follow this order of parameters:
dsn Data source name as string
user User name as string (optional)
password Password as string (optional)
host Hostname (optional)
database Database name (optional)
E.g. a connect could look like this:
connect(dsn='myhost:MYDB',user='guido',password='234$')
[2] Module implementors should prefer 'numeric', 'named' or
'pyformat' over the other formats because these offer more
clarity and flexibility.
@ -947,41 +947,41 @@ Footnotes
[3] If the database does not support the functionality required
by the method, the interface should throw an exception in
case the method is used.
The preferred approach is to not implement the method and
thus have Python generate an AttributeError in
case the method is requested. This allows the programmer to
check for database capabilities using the standard
hasattr() function.
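For instance, a caller could guard an optional method along these lines
(a sketch; conn is any DB API connection object):

    def safe_rollback(conn):
        # only call the optional method if the module implements it
        if hasattr(conn, "rollback"):
            conn.rollback()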
For some dynamically configured interfaces it may not be
appropriate to require dynamically making the method
available. These interfaces should then raise a
NotSupportedError to indicate the non-ability
to perform the roll back when the method is invoked.
[4] A database interface may choose to support named cursors by
allowing a string argument to the method. This feature is
not part of the specification, since it complicates
semantics of the .fetchXXX() methods.
[5] The module will use the __getitem__ method of the parameters
object to map either positions (integers) or names (strings)
to parameter values. This allows for both sequences and
mappings to be used as input.
The term "bound" refers to the process of binding an input
value to a database execution buffer. In practical terms,
this means that the input value is directly used as a value
in the operation. The client should not be required to
"escape" the value so that it can be used -- the value
should be equal to the actual database value.
[6] Note that the interface may implement row fetching using
arrays and other optimizations. It is not
guaranteed that a call to this method will only move the
associated cursor forward by one row.
[7] The rowcount attribute may be coded in a way that updates
its value dynamically. This can be useful for databases that
return usable rowcount values only after the first call to

View File

@ -1,68 +0,0 @@
How to make a psycopg2 release
==============================
- Edit ``setup.py`` and set a stable version release. Use PEP 440 to choose
version numbers, e.g.
- ``2.7``: a new major release, new features
- ``2.7.1``: a bugfix release
- ``2.7.1.1``: a release to fix packaging problems
- ``2.7.2.dev0``: version held during development, non-public test packages...
- ``2.8b1``: a beta for public tests
In the rest of this document we assume you have exported the version number
into an environment variable, e.g.::
$ export VERSION=2.8.4
- Push psycopg2 to master or to the maint branch. Make sure tests on `GitHub
Actions`__ pass.
.. __: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml
- Create a signed tag with the content of the relevant NEWS bit and push it.
E.g.::
# Tag name will be 2_8_4
$ git tag -a -s ${VERSION//\./_}
Psycopg 2.8.4 released
What's new in psycopg 2.8.4
---------------------------
New features:
- Fixed bug blah (:ticket:`#42`).
...
- Create the packages:
- On GitHub Actions run manually a `package build workflow`__.
.. __: https://github.com/psycopg/psycopg2/actions/workflows/packages.yml
- When the workflows have finished, download the packages from the job
artifacts.
- Only for stable packages: upload the signed packages on PyPI::
$ twine upload -s wheelhouse/psycopg2-${VERSION}/*
- Create a release and release notes in the psycopg website, announce to
psycopg and pgsql-announce mailing lists.
- Edit ``setup.py`` changing the version again (e.g. go to ``2.8.5.dev0``).
Releasing test packages
-----------------------
Test packages may be uploaded on the `PyPI testing site`__ using::
$ twine upload -s -r testpypi wheelhouse/psycopg2-${VERSION}/*
assuming `proper configuration`__ of ``~/.pypirc``.
.. __: https://test.pypi.org/project/psycopg2/
.. __: https://wiki.python.org/moin/TestPyPI

View File

@ -1,2 +0,0 @@
Sphinx
sphinx-better-theme

View File

@ -1,50 +1,3 @@
#
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
# pip-compile requirements.in
#
alabaster==0.7.13
# via sphinx
babel==2.12.1
# via sphinx
certifi>=2023.7.22
# via requests
charset-normalizer==3.1.0
# via requests
docutils==0.19
# via sphinx
idna==3.4
# via requests
imagesize==1.4.1
# via sphinx
jinja2==3.1.2
# via sphinx
markupsafe==2.1.2
# via jinja2
packaging==23.1
# via sphinx
pygments==2.15.0
# via sphinx
requests==2.31.0
# via sphinx
snowballstemmer==2.2.0
# via sphinx
sphinx==6.1.3
# via -r requirements.in
sphinx-better-theme==0.1.5
# via -r requirements.in
sphinxcontrib-applehelp==1.0.4
# via sphinx
sphinxcontrib-devhelp==1.0.2
# via sphinx
sphinxcontrib-htmlhelp==2.0.1
# via sphinx
sphinxcontrib-jsmath==1.0.1
# via sphinx
sphinxcontrib-qthelp==1.0.3
# via sphinx
sphinxcontrib-serializinghtml==1.1.5
# via sphinx
urllib3==1.26.17
# via requests
# Packages only needed to build the docs
Pygments>=1.5
Sphinx>=1.2,<=1.3

View File

@ -1,3 +1,5 @@
@import url("classic.css");
blockquote {
font-style: italic;
}
@ -35,102 +37,3 @@ dl.faq dt {
table.data-types div.line-block {
margin-bottom: 0;
}
/* better theme customisation */
body {
background-color: #216464;
}
header, .related, .document, footer {
background-color: white;
}
header h1 {
font-size: 150%;
margin-bottom: 0;
padding: 0.5rem 10px 0.5rem 10px;
}
h1, h2, h3 {
font-weight: normal;
}
.body h1, .body h2, .body h3 {
color: #074848;
}
h1 {
font-size: 200%;
}
h2 {
font-size: 160%;
}
h3 {
font-size: 140%;
}
footer#pagefooter {
margin-bottom: 1rem;
font-size: 85%;
color: #444;
}
#rellinks, #breadcrumbs {
padding-right: 10px;
padding-left: 10px;
}
.sphinxsidebar {
padding-left: 10px;
}
.bodywrapper {
padding-right: 10px;
}
div.body h1, div.body h2, div.body h3 {
background-color: #f2f2f2;
border-bottom: 1px solid #d0d0d0;
}
div.body p.rubric {
border-bottom: 1px solid #d0d0d0;
}
body .sphinxsidebar .search {
margin-top: 0;
}
html pre {
background-color: #efc;
border: 1px solid #ac9;
border-left: none;
border-right: none;
}
a, a:visited {
color: #0b6868;
}
th {
background-color: #ede;
}
code.xref, a code {
font-weight: bold;
}
code.descname {
font-weight: bold;
font-size: 120%;
}
@media (max-width: 820px) {
body {
background-color: white;
}
}

View File

@ -1,6 +0,0 @@
{# Add a title over the search box #}
{%- if pagename != "search" %}
<h3>Quick search</h3>
{%- include "!searchbox.html" %}
{%- endif %}

View File

@ -12,7 +12,7 @@ More advanced topics
conn.commit()
def wait(conn):
while True:
while 1:
state = conn.poll()
if state == psycopg2.extensions.POLL_OK:
break
@ -156,7 +156,7 @@ geometric type:
.. |point| replace:: :sql:`point`
.. _point: https://www.postgresql.org/docs/current/static/datatype-geometric.html#DATATYPE-GEOMETRIC
.. _point: http://www.postgresql.org/docs/current/static/datatype-geometric.html#DATATYPE-GEOMETRIC
The above function call results in the SQL command::
@ -188,7 +188,7 @@ representation into the previously defined `!Point` class:
... return Point(float(m.group(1)), float(m.group(2)))
... else:
... raise InterfaceError("bad point representation: %r" % value)
In order to create a mapping from a PostgreSQL type (either standard or
user-defined), its OID must be known. It can be retrieved either by the second
@ -226,7 +226,7 @@ read:
>>> cur.execute("SELECT '(10.2,20.3)'::point")
>>> point = cur.fetchone()[0]
>>> print(type(point), point.x, point.y)
>>> print type(point), point.x, point.y
<class 'Point'> 10.2 20.3
A typecaster created by `!new_type()` can be also used with
@ -259,9 +259,9 @@ documentation), you should keep the connection in `~connection.autocommit`
mode if you wish to receive or send notifications in a timely manner.
.. |LISTEN| replace:: :sql:`LISTEN`
.. _LISTEN: https://www.postgresql.org/docs/current/static/sql-listen.html
.. _LISTEN: http://www.postgresql.org/docs/current/static/sql-listen.html
.. |NOTIFY| replace:: :sql:`NOTIFY`
.. _NOTIFY: https://www.postgresql.org/docs/current/static/sql-notify.html
.. _NOTIFY: http://www.postgresql.org/docs/current/static/sql-notify.html
Notifications are received after every query execution. If the user is
interested in receiving notifications but not in performing any query, the
@ -284,20 +284,18 @@ something to read::
curs = conn.cursor()
curs.execute("LISTEN test;")
print("Waiting for notifications on channel 'test'")
while True:
print "Waiting for notifications on channel 'test'"
while 1:
if select.select([conn],[],[],5) == ([],[],[]):
print("Timeout")
print "Timeout"
else:
conn.poll()
while conn.notifies:
notify = conn.notifies.pop(0)
print("Got NOTIFY:", notify.pid, notify.channel, notify.payload)
print "Got NOTIFY:", notify.pid, notify.channel, notify.payload
Running the script and executing a command such as :sql:`NOTIFY test, 'hello'`
in a separate :program:`psql` shell, the output may look similar to:
.. code-block:: none
in a separate :program:`psql` shell, the output may look similar to::
Waiting for notifications on channel 'test'
Timeout
@ -328,7 +326,7 @@ received from a previous version server will have the
Asynchronous support
--------------------
.. versionadded:: 2.2
.. versionadded:: 2.2.0
Psycopg can issue asynchronous queries to a PostgreSQL database. An asynchronous
communication style is established passing the parameter *async*\=1 to the
@ -347,7 +345,7 @@ together with the Python :py:func:`~select.select` function in order to carry on
asynchronous operations with Psycopg::
def wait(conn):
while True:
while 1:
state = conn.poll()
if state == psycopg2.extensions.POLL_OK:
break
@ -375,7 +373,7 @@ completely non-blocking connection attempt: see the libpq documentation for
|PQconnectStart|_.
.. |PQconnectStart| replace:: `!PQconnectStart()`
.. _PQconnectStart: https://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-PQCONNECTSTARTPARAMS
.. _PQconnectStart: http://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-PQCONNECTSTARTPARAMS
The same loop should be also used to perform nonblocking queries: after
sending a query via `~cursor.execute()` or `~cursor.callproc()`, call
@ -468,7 +466,7 @@ example callback (using `!select()` to block) is provided as
`psycopg2.extras.wait_select()`: it boils down to something similar to::
def wait_select(conn):
while True:
while 1:
state = conn.poll()
if state == extensions.POLL_OK:
break
@ -484,14 +482,14 @@ psycopg2 scope, as the callback can be tied to the libraries' implementation
details. You can check the `psycogreen`_ project for further information and
resources about the topic.
.. _coroutine: https://en.wikipedia.org/wiki/Coroutine
.. _greenlet: https://pypi.org/project/greenlet/
.. _green threads: https://en.wikipedia.org/wiki/Green_threads
.. _Eventlet: https://eventlet.net/
.. _coroutine: http://en.wikipedia.org/wiki/Coroutine
.. _greenlet: http://pypi.python.org/pypi/greenlet
.. _green threads: http://en.wikipedia.org/wiki/Green_threads
.. _Eventlet: http://eventlet.net/
.. _gevent: http://www.gevent.org/
.. _SQLAlchemy: https://www.sqlalchemy.org/
.. _psycogreen: https://github.com/psycopg/psycogreen/
.. __: https://www.postgresql.org/docs/current/static/libpq-async.html
.. _SQLAlchemy: http://www.sqlalchemy.org/
.. _psycogreen: http://bitbucket.org/dvarrazzo/psycogreen/
.. __: http://www.postgresql.org/docs/current/static/libpq-async.html
.. warning::
@ -536,7 +534,7 @@ Server version 9.4 adds a new feature called *Logical Replication*.
- PostgreSQL `Streaming Replication Protocol`__
.. __: https://www.postgresql.org/docs/current/static/protocol-replication.html
.. __: http://www.postgresql.org/docs/current/static/protocol-replication.html
Logical replication Quick-Start
@ -552,7 +550,8 @@ value greater than zero in ``postgresql.conf`` (these changes require a server
restart). Create a database ``psycopg2_test``.
Then run the following code to quickly try the replication support out. This
is not production code -- it's only intended as a simple demo of logical
is not production code -- it has no error handling, it sends feedback too
often, etc. -- and it's only intended as a simple demo of logical
replication::
from __future__ import print_function

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
#
# Psycopg documentation build configuration file, created by
# sphinx-quickstart on Sun Feb 7 13:48:41 2010.
@ -10,9 +11,7 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
from better import better_theme_path
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
@ -23,16 +22,11 @@ sys.path.append(os.path.abspath('tools/lib'))
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.ifconfig',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
]
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.ifconfig',
'sphinx.ext.doctest', 'sphinx.ext.intersphinx' ]
# Specific extensions for Psycopg documentation.
extensions += ['dbapi_extension', 'sql_role', 'ticket_role']
extensions += [ 'dbapi_extension', 'sql_role', 'ticket_role' ]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@ -41,16 +35,14 @@ templates_path = ['_templates']
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8'
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Psycopg'
copyright = (
'2001-2021, Federico Di Gregorio, Daniele Varrazzo, The Psycopg Team'
)
project = u'Psycopg'
copyright = u'2001-2016, Federico Di Gregorio, Daniele Varrazzo'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@ -62,14 +54,16 @@ version = '2.0'
# The full version, including alpha/beta/rc tags.
try:
import psycopg2
except ImportError:
print("WARNING: couldn't import psycopg to read version.")
release = version
else:
release = psycopg2.__version__.split()[0]
version = '.'.join(release.split('.')[:2])
except ImportError:
print "WARNING: couldn't import psycopg to read version."
release = version
intersphinx_mapping = {'py': ('https://docs.python.org/3', None)}
intersphinx_mapping = {
'py': ('http://docs.python.org/2', None),
'py3': ('http://docs.python.org/3', None),
}
# Pattern to generate links to the bug tracker
ticket_url = 'https://github.com/psycopg/psycopg2/issues/%s'
@ -78,16 +72,16 @@ ticket_remap_offset = 230
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
#today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
# unused_docs = []
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
@ -97,25 +91,21 @@ exclude_trees = ['_build', 'html']
default_role = 'obj'
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# Using 'python' instead of the default gives warnings if parsing an example
# fails, instead of defaulting to none
highlight_language = 'python'
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
#modindex_common_prefix = []
# Include TODO items in the documentation
todo_include_todos = False
@ -123,10 +113,12 @@ todo_include_todos = False
rst_epilog = """
.. |DBAPI| replace:: DB API 2.0
.. _DBAPI: https://www.python.org/dev/peps/pep-0249/
.. _DBAPI: http://www.python.org/dev/peps/pep-0249/
.. _transaction isolation level:
https://www.postgresql.org/docs/current/static/transaction-iso.html
http://www.postgresql.org/docs/current/static/transaction-iso.html
.. _mx.DateTime: http://www.egenix.com/products/python/mxBase/mxDateTime/
.. |MVCC| replace:: :abbr:`MVCC (Multiversion concurrency control)`
"""
@ -135,41 +127,35 @@ rst_epilog = """
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'better'
html_theme = 'classic'
# The stylesheet to use with HTML output: this will include the original one
# adding a few classes.
# html_style = 'psycopg.css'
# Hide the sphinx footer
html_show_sphinx = False
html_style = 'psycopg.css'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'linktotheme': False,
'cssfiles': ['_static/psycopg.css'],
}
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [better_theme_path]
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = 'Home'
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
@ -178,41 +164,38 @@ html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# no need for the prev/next topic link using better theme: they are on top
html_sidebars = {
'**': ['localtoc.html', 'searchbox.html'],
}
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
#html_additional_pages = {}
# If false, no module index is generated.
# html_use_modindex = True
#html_use_modindex = True
# If false, no index is generated.
# html_use_index = True
#html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
#html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = ''
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'psycopgdoc'
@ -221,41 +204,35 @@ htmlhelp_basename = 'psycopgdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
# latex_paper_size = 'letter'
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
# latex_font_size = '10pt'
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
(
'index',
'psycopg.tex',
'Psycopg Documentation',
'Federico Di Gregorio',
'manual',
)
('index', 'psycopg.tex', u'Psycopg Documentation',
u'Federico Di Gregorio', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
# latex_preamble = ''
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
# latex_appendices = []
#latex_appendices = []
# If false, no module index is generated.
# latex_use_modindex = True
#latex_use_modindex = True
toc_object_entries = False
doctest_global_setup = """

View File

@ -21,30 +21,8 @@ The ``connection`` class
Connections are thread safe and can be shared among many threads. See
:ref:`thread-safety` for details.
Connections can be used as context managers. Note that a context wraps a
transaction: if the context exits with success the transaction is
committed, if it exits with an exception the transaction is rolled back.
Note that the connection is not closed by the context and it can be used
for several contexts.
.. code:: python
conn = psycopg2.connect(DSN)
with conn:
with conn.cursor() as curs:
curs.execute(SQL1)
with conn:
with conn.cursor() as curs:
curs.execute(SQL2)
# leaving contexts doesn't close the connection
conn.close()
.. method:: cursor(name=None, cursor_factory=None, scrollable=None, withhold=False)
Return a new `cursor` object using the connection.
If *name* is specified, the returned cursor will be a :ref:`server
@ -63,6 +41,11 @@ The ``connection`` class
previously only valid PostgreSQL identifiers were accepted as
cursor name.
.. warning::
It is unsafe to expose the *name* to an untrusted source, for
instance you shouldn't allow *name* to be read from an HTML form.
Consider it as part of the query, not as a query parameter.
The *cursor_factory* argument can be used to create non-standard
cursors. The class returned must be a subclass of
`psycopg2.extensions.cursor`. See :ref:`subclassing-cursor` for
@ -139,7 +122,7 @@ The ``connection`` class
with a `~connection.commit()`/`~connection.rollback()` before
closing.
.. _PgBouncer: http://www.pgbouncer.org/
.. _PgBouncer: http://pgbouncer.projects.postgresql.org/
.. index::
@ -220,7 +203,7 @@ The ``connection`` class
.. seealso:: the |PREPARE TRANSACTION|_ PostgreSQL command.
.. |PREPARE TRANSACTION| replace:: :sql:`PREPARE TRANSACTION`
.. _PREPARE TRANSACTION: https://www.postgresql.org/docs/current/static/sql-prepare-transaction.html
.. _PREPARE TRANSACTION: http://www.postgresql.org/docs/current/static/sql-prepare-transaction.html
.. index::
@ -246,7 +229,7 @@ The ``connection`` class
.. seealso:: the |COMMIT PREPARED|_ PostgreSQL command.
.. |COMMIT PREPARED| replace:: :sql:`COMMIT PREPARED`
.. _COMMIT PREPARED: https://www.postgresql.org/docs/current/static/sql-commit-prepared.html
.. _COMMIT PREPARED: http://www.postgresql.org/docs/current/static/sql-commit-prepared.html
.. index::
@ -268,7 +251,7 @@ The ``connection`` class
.. seealso:: the |ROLLBACK PREPARED|_ PostgreSQL command.
.. |ROLLBACK PREPARED| replace:: :sql:`ROLLBACK PREPARED`
.. _ROLLBACK PREPARED: https://www.postgresql.org/docs/current/static/sql-rollback-prepared.html
.. _ROLLBACK PREPARED: http://www.postgresql.org/docs/current/static/sql-rollback-prepared.html
.. index::
@ -289,17 +272,17 @@ The ``connection`` class
transactions initiated by a program using such driver should be
unpacked correctly.
.. __: https://jdbc.postgresql.org/
.. __: http://jdbc.postgresql.org/
Xids returned by `!tpc_recover()` also have extra attributes
`~psycopg2.extensions.Xid.prepared`, `~psycopg2.extensions.Xid.owner`,
`~psycopg2.extensions.Xid.database` populated with the values read
from the server.
.. seealso:: the |pg_prepared_xacts|_ system view.
.. |pg_prepared_xacts| replace:: `pg_prepared_xacts`
.. _pg_prepared_xacts: https://www.postgresql.org/docs/current/static/view-pg-prepared-xacts.html
.. _pg_prepared_xacts: http://www.postgresql.org/docs/current/static/view-pg-prepared-xacts.html
@ -331,7 +314,7 @@ The ``connection`` class
|PQcancel|_.
.. |PQcancel| replace:: `!PQcancel()`
.. _PQcancel: https://www.postgresql.org/docs/current/static/libpq-cancel.html#LIBPQ-PQCANCEL
.. _PQcancel: http://www.postgresql.org/docs/current/static/libpq-cancel.html#LIBPQ-PQCANCEL
.. versionadded:: 2.3
@ -347,10 +330,10 @@ The ``connection`` class
available for recover.
.. |RESET| replace:: :sql:`RESET`
.. _RESET: https://www.postgresql.org/docs/current/static/sql-reset.html
.. _RESET: http://www.postgresql.org/docs/current/static/sql-reset.html
.. |SET SESSION AUTHORIZATION| replace:: :sql:`SET SESSION AUTHORIZATION`
.. __: https://www.postgresql.org/docs/current/static/sql-set-session-authorization.html
.. __: http://www.postgresql.org/docs/current/static/sql-set-session-authorization.html
.. versionadded:: 2.0.12
@ -360,12 +343,6 @@ The ``connection`` class
Read-only string containing the connection string used by the
connection.
If a password was specified in the connection string it will be
obscured.
.. rubric:: Transaction control methods and attributes.
.. index::
pair: Transaction; Autocommit
@ -391,7 +368,7 @@ The ``connection`` class
`autocommit` attribute.
.. _isolation level:
https://www.postgresql.org/docs/current/static/transaction-iso.html
http://www.postgresql.org/docs/current/static/transaction-iso.html
Arguments set to `!None` (the default for all) will not be changed.
The parameters *isolation_level*, *readonly* and *deferrable* also
@ -401,11 +378,11 @@ The ``connection`` class
|default_transaction_read_only|__, |default_transaction_deferrable|__.
.. |default_transaction_isolation| replace:: :sql:`default_transaction_isolation`
.. __: https://www.postgresql.org/docs/current/static/runtime-config-client.html#GUC-DEFAULT-TRANSACTION-ISOLATION
.. __: http://www.postgresql.org/docs/current/static/runtime-config-client.html#GUC-DEFAULT-TRANSACTION-ISOLATION
.. |default_transaction_read_only| replace:: :sql:`default_transaction_read_only`
.. __: https://www.postgresql.org/docs/current/static/runtime-config-client.html#GUC-DEFAULT-TRANSACTION-READ-ONLY
.. __: http://www.postgresql.org/docs/current/static/runtime-config-client.html#GUC-DEFAULT-TRANSACTION-READ-ONLY
.. |default_transaction_deferrable| replace:: :sql:`default_transaction_deferrable`
.. __: https://www.postgresql.org/docs/current/static/runtime-config-client.html#GUC-DEFAULT-TRANSACTION-DEFERRABLE
.. __: http://www.postgresql.org/docs/current/static/runtime-config-client.html#GUC-DEFAULT-TRANSACTION-DEFERRABLE
The function must be invoked with no transaction in progress.
@ -413,7 +390,7 @@ The ``connection`` class
of the transaction parameters in the server.
.. |SET TRANSACTION| replace:: :sql:`SET TRANSACTION`
.. _SET TRANSACTION: https://www.postgresql.org/docs/current/static/sql-set-transaction.html
.. _SET TRANSACTION: http://www.postgresql.org/docs/current/static/sql-set-transaction.html
.. versionadded:: 2.4.2
@ -559,7 +536,7 @@ The ``connection`` class
is the encoding defined by the database. It should be one of the
`characters set supported by PostgreSQL`__
.. __: https://www.postgresql.org/docs/current/static/multibyte.html
.. __: http://www.postgresql.org/docs/current/static/multibyte.html
.. index::
@ -571,7 +548,7 @@ The ``connection`` class
the session.
.. doctest::
:options: +NORMALIZE_WHITESPACE
:options: NORMALIZE_WHITESPACE
>>> cur.execute("CREATE TABLE foo (id serial PRIMARY KEY);")
>>> pprint(conn.notices)
@ -593,7 +570,7 @@ The ``connection`` class
configuration parameters`__ such as ``log_statement``,
``client_min_messages``, ``log_min_duration_statement`` etc.
.. __: https://www.postgresql.org/docs/current/static/runtime-config-logging.html
.. __: http://www.postgresql.org/docs/current/static/runtime-config-logging.html
.. attribute:: notifies
@ -625,14 +602,115 @@ The ``connection`` class
.. index::
pair: Connection; Info
pair: Backend; PID
.. attribute:: info
.. method:: get_backend_pid()
A `~psycopg2.extensions.ConnectionInfo` object exposing information
about the native libpq connection.
Returns the process ID (PID) of the backend server process handling
this connection.
.. versionadded:: 2.8
Note that the PID belongs to a process executing on the database
server host, not the local host!
.. seealso:: libpq docs for `PQbackendPID()`__ for details.
.. __: http://www.postgresql.org/docs/current/static/libpq-status.html#LIBPQ-PQBACKENDPID
.. versionadded:: 2.0.8
.. index::
pair: Server; Parameters
.. method:: get_parameter_status(parameter)
Look up a current parameter setting of the server.
Potential values for ``parameter`` are: ``server_version``,
``server_encoding``, ``client_encoding``, ``is_superuser``,
``session_authorization``, ``DateStyle``, ``TimeZone``,
``integer_datetimes``, and ``standard_conforming_strings``.
If the server did not report the requested parameter, return `!None`.
.. seealso:: libpq docs for `PQparameterStatus()`__ for details.
.. __: http://www.postgresql.org/docs/current/static/libpq-status.html#LIBPQ-PQPARAMETERSTATUS
.. versionadded:: 2.0.12
.. index::
pair: Connection; Parameters
.. method:: get_dsn_parameters()
Get the effective dsn parameters for the connection as a dictionary.
The *password* parameter is removed from the result.
Example::
>>> conn.get_dsn_parameters()
{'dbname': 'test', 'user': 'postgres', 'port': '5432', 'sslmode': 'prefer'}
Requires libpq >= 9.3.
.. seealso:: libpq docs for `PQconninfo()`__ for details.
.. __: http://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-PQCONNINFO
.. versionadded:: 2.7
.. index::
pair: Transaction; Status
.. method:: get_transaction_status()
Return the current session transaction status as an integer. Symbolic
constants for the values are defined in the module
`psycopg2.extensions`: see :ref:`transaction-status-constants`
for the available values.
.. seealso:: libpq docs for `PQtransactionStatus()`__ for details.
.. __: http://www.postgresql.org/docs/current/static/libpq-status.html#LIBPQ-PQTRANSACTIONSTATUS
.. index::
pair: Protocol; Version
.. attribute:: protocol_version
A read-only integer representing frontend/backend protocol being used.
Currently Psycopg supports only protocol 3, which allows connection
to a PostgreSQL server from version 7.4 onwards. Psycopg versions earlier than
2.3 support both protocols 2 and 3.
.. seealso:: libpq docs for `PQprotocolVersion()`__ for details.
.. __: http://www.postgresql.org/docs/current/static/libpq-status.html#LIBPQ-PQPROTOCOLVERSION
.. versionadded:: 2.0.12
.. index::
pair: Server; Version
.. attribute:: server_version
A read-only integer representing the backend version.
The number is formed by converting the major, minor, and revision
numbers into two-decimal-digit numbers and appending them together.
For example, version 8.1.5 will be returned as ``80105``.
.. seealso:: libpq docs for `PQserverVersion()`__ for details.
.. __: http://www.postgresql.org/docs/current/static/libpq-status.html#LIBPQ-PQSERVERVERSION
.. versionadded:: 2.0.12
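For instance, a quick way to split the integer back into its components
(a sketch, assuming ``conn`` is an open connection)::

    ver = conn.server_version            # e.g. 80105 for PostgreSQL 8.1.5
    major, minor, rev = ver // 10000, ver // 100 % 100, ver % 100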
.. index::
@ -641,11 +719,11 @@ The ``connection`` class
.. attribute:: status
A read-only integer representing the status of the connection.
Symbolic constants for the values are defined in the module
`psycopg2.extensions`: see :ref:`connection-status-constants`
for the available values.
The status is undefined for `closed` connections.
.. method:: lobject([oid [, mode [, new_oid [, new_file [, lobject_factory]]]]])
@ -661,13 +739,13 @@ The ``connection`` class
:param new_oid: Create a new object using the specified OID. The
function raises `~psycopg2.OperationalError` if the OID is already
in use. Default is 0, meaning assign a new one automatically.
:param new_file: The name of a file to be imported in the database
(using the |lo_import|_ function)
:param lobject_factory: Subclass of
`~psycopg2.extensions.lobject` to be instantiated.
.. |lo_import| replace:: `!lo_import()`
.. _lo_import: https://www.postgresql.org/docs/current/static/lo-interfaces.html#LO-IMPORT
.. _lo_import: http://www.postgresql.org/docs/current/static/lo-interfaces.html#LO-IMPORT
Available values for *mode* are:
@ -692,9 +770,9 @@ The ``connection`` class
support.
.. rubric:: Methods related to asynchronous support
.. rubric:: Methods related to asynchronous support.
.. versionadded:: 2.2
.. versionadded:: 2.2.0
.. seealso:: :ref:`async-support` and :ref:`green-support`.
@ -738,178 +816,6 @@ The ``connection`` class
Return `!True` if the connection is executing an asynchronous operation.
.. rubric:: Interoperation with other C API modules
.. attribute:: pgconn_ptr
Return the internal `!PGconn*` as integer. Useful to pass the libpq
raw connection structure to C functions, e.g. via `ctypes`::
>>> import ctypes
>>> import ctypes.util
>>> libpq = ctypes.pydll.LoadLibrary(ctypes.util.find_library('pq'))
>>> libpq.PQserverVersion.argtypes = [ctypes.c_void_p]
>>> libpq.PQserverVersion.restype = ctypes.c_int
>>> libpq.PQserverVersion(conn.pgconn_ptr)
90611
.. versionadded:: 2.8
.. method:: get_native_connection()
Return the internal `!PGconn*` wrapped in a PyCapsule object. This is
only useful for passing the `libpq` raw connection associated to this
connection object to other C-level modules that may have a use for it.
.. seealso:: Python C API `Capsules`__ docs.
.. __: https://docs.python.org/3.1/c-api/capsule.html
.. versionadded:: 2.8
.. rubric:: informative methods of the native connection
.. note::
These methods are better accessed using the `~connection.info`
attributes and may be dropped in future versions.
.. index::
pair: Transaction; Status
.. method:: get_transaction_status()
Also available as `~connection.info`\ `!.`\
`~psycopg2.extensions.ConnectionInfo.transaction_status`.
Return the current session transaction status as an integer. Symbolic
constants for the values are defined in the module
`psycopg2.extensions`: see :ref:`transaction-status-constants`
for the available values.
.. seealso:: libpq docs for `PQtransactionStatus()`__ for details.
.. __: https://www.postgresql.org/docs/current/static/libpq-status.html#LIBPQ-PQTRANSACTIONSTATUS
.. index::
pair: Protocol; Version
.. attribute:: protocol_version
Also available as `~connection.info`\ `!.`\
`~psycopg2.extensions.ConnectionInfo.protocol_version`.
A read-only integer representing frontend/backend protocol being used.
Currently Psycopg supports only protocol 3, which allows connection
to a PostgreSQL server from version 7.4 onwards. Psycopg versions earlier than
2.3 support both protocols 2 and 3.
.. seealso:: libpq docs for `PQprotocolVersion()`__ for details.
.. __: https://www.postgresql.org/docs/current/static/libpq-status.html#LIBPQ-PQPROTOCOLVERSION
.. versionadded:: 2.0.12
.. index::
pair: Server; Version
.. attribute:: server_version
Also available as `~connection.info`\ `!.`\
`~psycopg2.extensions.ConnectionInfo.server_version`.
A read-only integer representing the backend version.
The number is formed by converting the major, minor, and revision
numbers into two-decimal-digit numbers and appending them together.
For example, version 8.1.5 will be returned as ``80105``.
.. seealso:: libpq docs for `PQserverVersion()`__ for details.
.. __: https://www.postgresql.org/docs/current/static/libpq-status.html#LIBPQ-PQSERVERVERSION
.. versionadded:: 2.0.12
.. index::
pair: Backend; PID
.. method:: get_backend_pid()
Also available as `~connection.info`\ `!.`\
`~psycopg2.extensions.ConnectionInfo.backend_pid`.
Returns the process ID (PID) of the backend server process *you
connected to*. Note that if you use a connection pool service such as
PgBouncer_ this value will not be updated if your connection is
switched to a different backend.
Note that the PID belongs to a process executing on the database
server host, not the local host!
.. seealso:: libpq docs for `PQbackendPID()`__ for details.
.. __: https://www.postgresql.org/docs/current/static/libpq-status.html#LIBPQ-PQBACKENDPID
.. versionadded:: 2.0.8
.. index::
pair: Server; Parameters
.. method:: get_parameter_status(parameter)
Also available as `~connection.info`\ `!.`\
`~psycopg2.extensions.ConnectionInfo.parameter_status()`.
Look up a current parameter setting of the server.
Potential values for ``parameter`` are: ``server_version``,
``server_encoding``, ``client_encoding``, ``is_superuser``,
``session_authorization``, ``DateStyle``, ``TimeZone``,
``integer_datetimes``, and ``standard_conforming_strings``.
If the server did not report the requested parameter, return `!None`.
.. seealso:: libpq docs for `PQparameterStatus()`__ for details.
.. __: https://www.postgresql.org/docs/current/static/libpq-status.html#LIBPQ-PQPARAMETERSTATUS
.. versionadded:: 2.0.12
.. index::
pair: Connection; Parameters
.. method:: get_dsn_parameters()
Also available as `~connection.info`\ `!.`\
`~psycopg2.extensions.ConnectionInfo.dsn_parameters`.
Get the effective dsn parameters for the connection as a dictionary.
The *password* parameter is removed from the result.
Example::
>>> conn.get_dsn_parameters()
{'dbname': 'test', 'user': 'postgres', 'port': '5432', 'sslmode': 'prefer'}
Requires libpq >= 9.3.
.. seealso:: libpq docs for `PQconninfo()`__ for details.
.. __: https://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-PQCONNINFO
.. versionadded:: 2.7
.. testcode::
:hide:

View File

@ -34,64 +34,51 @@ The ``cursor`` class
many cursors from the same connection and should use each cursor from
a single thread. See :ref:`thread-safety` for details.
Cursors can be used as context managers: leaving the context will close
the cursor.
.. attribute:: description
.. code:: python
This read-only attribute is a sequence of 7-item sequences.
with conn.cursor() as curs:
curs.execute(SQL)
Each of these sequences is a named tuple (a regular tuple if
:func:`collections.namedtuple` is not available) containing information
describing one result column:
# the cursor is now closed
0. `!name`: the name of the column returned.
1. `!type_code`: the PostgreSQL OID of the column. You can use the
|pg_type|_ system table to get more information about the type.
This is the value used by Psycopg to decide what Python type use
to represent the value. See also
:ref:`type-casting-from-sql-to-python`.
2. `!display_size`: the actual length of the column in bytes.
Obtaining this value is computationally intensive, so it is
always `!None` unless the :envvar:`PSYCOPG_DISPLAY_SIZE` parameter
is set at compile time. See also PQgetlength_.
3. `!internal_size`: the size in bytes of the column associated to
this column on the server. Set to a negative value for
variable-size types. See also PQfsize_.
4. `!precision`: total number of significant digits in columns of
type |NUMERIC|_. `!None` for other types.
5. `!scale`: count of decimal digits in the fractional part in
columns of type |NUMERIC|. `!None` for other types.
6. `!null_ok`: always `!None` as not easy to retrieve from the libpq.
.. attribute:: description
Read-only attribute describing the result of a query. It is a
sequence of `~psycopg2.extensions.Column` instances, each one
describing one result column in order. The attribute is `!None` for
operations that do not return rows or if the cursor has not had an
operation invoked via the |execute*|_ methods yet.
For compatibility with the DB-API, every object can be unpacked as a
7-item sequence: the attributes returned this way are the following.
For further details and other attributes available check the
`~psycopg2.extensions.Column` documentation.
0. `~psycopg2.extensions.Column.name`: the name of the column returned.
1. `~psycopg2.extensions.Column.type_code`: the PostgreSQL OID of the
column.
2. `~psycopg2.extensions.Column.display_size`: the actual length of
the column in bytes.
3. `~psycopg2.extensions.Column.internal_size`: the size in bytes of
the column associated to this column on the server.
4. `~psycopg2.extensions.Column.precision`: total number of
significant digits in columns of type |NUMERIC|. `!None`
for other types.
5. `~psycopg2.extensions.Column.scale`: count of decimal digits in
the fractional part in columns of type |NUMERIC|. `!None`
for other types.
6. `~psycopg2.extensions.Column.null_ok`: always `!None` as not easy
to retrieve from the libpq.
This attribute will be `!None` for operations that do not return rows
or if the cursor has not had an operation invoked via the
|execute*|_ methods yet.
.. |pg_type| replace:: :sql:`pg_type`
.. _pg_type: http://www.postgresql.org/docs/current/static/catalog-pg-type.html
.. _PQgetlength: http://www.postgresql.org/docs/current/static/libpq-exec.html#LIBPQ-PQGETLENGTH
.. _PQfsize: http://www.postgresql.org/docs/current/static/libpq-exec.html#LIBPQ-PQFSIZE
.. _NUMERIC: http://www.postgresql.org/docs/current/static/datatype-numeric.html#DATATYPE-NUMERIC-DECIMAL
.. |NUMERIC| replace:: :sql:`NUMERIC`
.. versionchanged:: 2.4
if possible, columns descriptions are named tuple instead of
regular tuples.
.. versionchanged:: 2.8
columns descriptions are instances of `!Column`, exposing extra
attributes.
.. |NUMERIC| replace:: :sql:`NUMERIC`
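For instance, a sketch of reading the column names after a query
(assuming ``cur`` has just executed a ``SELECT``)::

    cur.execute("SELECT 1 AS id, 'foo' AS data")
    names = [col[0] for col in cur.description]   # ['id', 'data']
    # each item also unpacks as the 7-item DB-API sequence
    name, type_code, display_size, internal_size, precision, scale, null_ok = \
        cur.description[0]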
.. method:: close()
Close the cursor now (rather than whenever `del` is executed).
The cursor will be unusable from this point forward; an
`~psycopg2.InterfaceError` will be raised if any operation is
@ -101,7 +88,7 @@ The ``cursor`` class
the method is automatically called at the end of the ``with``
block.
.. attribute:: closed
Read-only boolean attribute: specifies if the cursor is closed
@ -124,7 +111,7 @@ The ``cursor`` class
.. attribute:: name
Read-only attribute containing the name of the cursor if it was
created as named cursor by `connection.cursor()`, or `!None` if
it is a client side cursor. See :ref:`server-side-cursors`.
.. extension::
@ -142,7 +129,7 @@ The ``cursor`` class
backward scroll (see the |declare-notes|__).
.. |declare-notes| replace:: :sql:`DECLARE` notes
.. __: https://www.postgresql.org/docs/current/static/sql-declare.html#SQL-DECLARE-NOTES
.. __: http://www.postgresql.org/docs/current/static/sql-declare.html#SQL-DECLARE-NOTES
.. note::
@ -208,14 +195,6 @@ The ``cursor`` class
Parameters are bound to the query using the same rules described in
the `~cursor.execute()` method.
.. code:: python
>>> nums = ((1,), (5,), (10,))
>>> cur.executemany("INSERT INTO test (num) VALUES (%s)", nums)
>>> tuples = ((123, "foo"), (42, "bar"), (23, "baz"))
>>> cur.executemany("INSERT INTO test (num, data) VALUES (%s, %s)", tuples)
.. warning::
In its current implementation this method is not faster than
executing `~cursor.execute()` in a loop. For better performance
@ -240,16 +219,6 @@ The ``cursor`` class
.. versionchanged:: 2.7
added support for named arguments.
.. note::
`!callproc()` can only be used with PostgreSQL functions__, not
with the procedures__ introduced in PostgreSQL 11, which require
the :sql:`CALL` statement to run. Please use a normal
`execute()` to run them.
.. __: https://www.postgresql.org/docs/current/sql-createfunction.html
.. __: https://www.postgresql.org/docs/current/sql-createprocedure.html
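As a hedged illustration (`!lower` is just a convenient built-in function;
any function callable as :sql:`SELECT * FROM func(args)` behaves the same way):

.. code-block:: python

    cur.callproc('lower', ('SOME TEXT',))   # runs: SELECT * FROM lower('SOME TEXT')
    result = cur.fetchone()                 # ('some text',)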
.. method:: mogrify(operation [, parameters])
Return a query string after arguments binding. The string returned is
@ -266,7 +235,7 @@ The ``cursor`` class
The `mogrify()` method is a Psycopg extension to the |DBAPI|.
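For example (the exact quoting shown is indicative; on Python 3 the return
value is a :py:class:`bytes` string)::

    >>> cur.mogrify("INSERT INTO test (num, data) VALUES (%s, %s)", (42, 'bar'))
    b"INSERT INTO test (num, data) VALUES (42, 'bar')"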
.. method:: setinputsizes(sizes)
This method is exposed in compliance with the |DBAPI|. It currently
does nothing but it is safe to call it.
@ -292,7 +261,7 @@ The ``cursor`` class
>>> cur.execute("SELECT * FROM test;")
>>> for record in cur:
... print(record)
... print record
...
(1, 100, "abc'def")
(2, None, 'dada')
@ -312,17 +281,17 @@ The ``cursor`` class
>>> cur.execute("SELECT * FROM test WHERE id = %s", (3,))
>>> cur.fetchone()
(3, 42, 'bar')
A `~psycopg2.ProgrammingError` is raised if the previous call
to |execute*|_ did not produce any result set or no call was issued
yet.
.. method:: fetchmany([size=cursor.arraysize])
Fetch the next set of rows of a query result, returning a list of
tuples. An empty list is returned when no more rows are available.
The number of rows to fetch per call is specified by the parameter.
If it is not given, the cursor's `~cursor.arraysize` determines
the number of rows to be fetched. The method should try to fetch as
@ -340,7 +309,7 @@ The ``cursor`` class
A `~psycopg2.ProgrammingError` is raised if the previous call to
|execute*|_ did not produce any result set or no call was issued yet.
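A quick sketch, assuming the three-row ``test`` table used in the examples
above::

    >>> cur.execute("SELECT * FROM test")
    >>> cur.fetchmany(2)
    [(1, 100, "abc'def"), (2, None, 'dada')]
    >>> cur.fetchmany(2)
    [(3, 42, 'bar')]
    >>> cur.fetchmany(2)
    []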
Note there are performance considerations involved with the size
parameter. For optimal performance, it is usually best to use the
`~cursor.arraysize` attribute. If the size parameter is used,
@ -375,7 +344,7 @@ The ``cursor`` class
`~psycopg2.ProgrammingError` is raised and the cursor position is
not changed.
.. note::
According to the |DBAPI|_, the exception raised for a cursor out
of bound should have been `!IndexError`. The best option is
@ -395,7 +364,7 @@ The ``cursor`` class
.. attribute:: arraysize
This read/write attribute specifies the number of rows to fetch at a
time with `~cursor.fetchmany()`. It defaults to 1 meaning to fetch
a single row at a time.
@ -409,20 +378,20 @@ The ``cursor`` class
default is 2000.
.. versionadded:: 2.4
.. extension::
The `itersize` attribute is a Psycopg extension to the |DBAPI|.
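A possible usage sketch (the table name and the ``process()`` helper are
hypothetical)::

    cur = conn.cursor('fetch_big_result')      # named (server-side) cursor
    cur.itersize = 5000                        # rows fetched per network round trip
    cur.execute("SELECT * FROM a_big_table")
    for row in cur:
        process(row)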
.. attribute:: rowcount
This read-only attribute specifies the number of rows that the last
|execute*|_ produced (for :abbr:`DQL (Data Query Language)` statements
like :sql:`SELECT`) or affected (for
:abbr:`DML (Data Manipulation Language)` statements like :sql:`UPDATE`
or :sql:`INSERT`).
The attribute is -1 in case no |execute*| has been performed on
the cursor or the row count of the last operation if it can't be
determined by the interface.
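For example, with the sample ``test`` table used above (the exact count
obviously depends on the data)::

    >>> cur.execute("UPDATE test SET num = num + 1 WHERE num IS NOT NULL")
    >>> cur.rowcount
    2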
@ -431,7 +400,7 @@ The ``cursor`` class
The |DBAPI|_ interface reserves to redefine the latter case to
have the object return `!None` instead of -1 in future versions
of the specification.
.. attribute:: rownumber
@ -461,10 +430,10 @@ The ``cursor`` class
more flexibility.
.. |CREATE-TABLE| replace:: :sql:`CREATE TABLE`
.. __: https://www.postgresql.org/docs/current/static/sql-createtable.html
.. __: http://www.postgresql.org/docs/current/static/sql-createtable.html
.. |INSERT-RETURNING| replace:: :sql:`INSERT ... RETURNING`
.. __: https://www.postgresql.org/docs/current/static/sql-insert.html
.. __: http://www.postgresql.org/docs/current/static/sql-insert.html
.. attribute:: query
@ -488,7 +457,7 @@ The ``cursor`` class
command:
>>> cur.execute("INSERT INTO test (num, data) VALUES (%s, %s)", (42, 'bar'))
>>> cur.statusmessage
'INSERT 0 1'
.. extension::
@ -516,20 +485,18 @@ The ``cursor`` class
The time zone factory used to handle data types such as
:sql:`TIMESTAMP WITH TIME ZONE`. It should be a `~datetime.tzinfo`
object. Default is `datetime.timezone`.
.. versionchanged:: 2.9
previously the default factory was `psycopg2.tz.FixedOffsetTimezone`.
object. A few implementations are available in the `psycopg2.tz`
module.
.. method:: nextset()
This method is not supported (PostgreSQL does not have multiple data
sets) and will raise a `~psycopg2.NotSupportedError` exception.
.. method:: setoutputsize(size [, column])
This method is exposed in compliance with the |DBAPI|. It currently
does nothing but it is safe to call it.
@ -570,6 +537,13 @@ The ``cursor`` class
>>> cur.fetchall()
[(6, 42, 'foo'), (7, 74, 'bar')]
.. note:: the name of the table is not quoted: if the table name
contains uppercase letters or special characters it must be quoted
with double quotes::
cur.copy_from(f, '"TABLE"')
.. versionchanged:: 2.0.6
added the *columns* parameter.
@ -578,11 +552,6 @@ The ``cursor`` class
are encoded in the connection `~connection.encoding` when sent to
the backend.
.. versionchanged:: 2.9
the table and fields names are now quoted. If you need to specify
a schema-qualified table please use `copy_expert()`.
.. method:: copy_to(file, table, sep='\\t', null='\\\\N', columns=None)
Write the content of the table named *table* *to* the file-like
@ -604,6 +573,12 @@ The ``cursor`` class
2|\N|dada
...
.. note:: the name of the table is not quoted: if the table name
contains uppercase letters or special characters it must be quoted
with double quotes::
cur.copy_to(f, '"TABLE"')
.. versionchanged:: 2.0.6
added the *columns* parameter.
@ -612,10 +587,6 @@ The ``cursor`` class
are decoded in the connection `~connection.encoding` when read
from the backend.
.. versionchanged:: 2.9
the table and fields names are now quoted. If you need to specify
a schema-qualified table please use `copy_expert()`.
.. method:: copy_expert(sql, file, size=8192)
@ -630,10 +601,7 @@ The ``cursor`` class
The *sql* statement should be in the form :samp:`COPY {table} TO
STDOUT` to export :samp:`{table}` to the *file* object passed as
argument or :samp:`COPY {table} FROM STDIN` to import the content of
the *file* object into :samp:`{table}`. If you need to compose a
:sql:`COPY` statement dynamically (because table, fields, or query
parameters are in Python variables) you may use the objects provided
by the `psycopg2.sql` module.
the *file* object into :samp:`{table}`.
*file* must be a readable file-like object (as required by
`~cursor.copy_from()`) for *sql* statement :sql:`COPY ... FROM STDIN`
@ -649,7 +617,7 @@ The ``cursor`` class
...
.. |COPY| replace:: :sql:`COPY`
.. __: https://www.postgresql.org/docs/current/static/sql-copy.html
.. __: http://www.postgresql.org/docs/current/static/sql-copy.html
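A minimal sketch, assuming a ``test`` table and local CSV files (the file
names are arbitrary)::

    # export the table to a CSV file
    with open('test_out.csv', 'w') as f:
        cur.copy_expert("COPY test TO STDOUT WITH CSV HEADER", f)

    # import it back from a CSV file
    with open('test_in.csv', 'r') as f:
        cur.copy_expert("COPY test FROM STDIN WITH CSV HEADER", f)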
.. versionadded:: 2.0.6
@ -658,24 +626,6 @@ The ``cursor`` class
using Unicode data instead of bytes.
.. rubric:: Interoperation with other C API modules
.. attribute:: pgresult_ptr
Return the cursor's internal `!PGresult*` as integer. Useful to pass
the libpq raw result structure to C functions, e.g. via `ctypes`::
>>> import ctypes
>>> libpq = ctypes.pydll.LoadLibrary(ctypes.util.find_library('pq'))
>>> libpq.PQcmdStatus.argtypes = [ctypes.c_void_p]
>>> libpq.PQcmdStatus.restype = ctypes.c_char_p
>>> curs.execute("select 'x'")
>>> libpq.PQcmdStatus(curs.pgresult_ptr)
b'SELECT 1'
.. versionadded:: 2.8
.. testcode::
:hide:


@ -39,7 +39,7 @@ From PostgreSQL documentation:
.. seealso:: `PostgreSQL Error Codes table`__
.. __: https://www.postgresql.org/docs/current/static/errcodes-appendix.html#ERRCODES-TABLE
.. __: http://www.postgresql.org/docs/current/static/errcodes-appendix.html#ERRCODES-TABLE
An example of the available constants defined in the module:
@ -50,7 +50,7 @@ An example of the available constants defined in the module:
'42P01'
Constants representing all the error values defined by PostgreSQL versions
between 8.1 and 15 are included in the module.
between 8.1 and 9.4 are included in the module.
.. autofunction:: lookup(code)
@ -59,7 +59,7 @@ between 8.1 and 15 are included in the module.
>>> try:
... cur.execute("SELECT ouch FROM aargh;")
... except Exception as e:
... except Exception, e:
... pass
...
>>> errorcodes.lookup(e.pgcode[:2])


@ -1,89 +0,0 @@
`psycopg2.errors` -- Exception classes mapping PostgreSQL errors
================================================================
.. sectionauthor:: Daniele Varrazzo <daniele.varrazzo@gmail.com>
.. index::
single: Error; Class
.. module:: psycopg2.errors
.. versionadded:: 2.8
.. versionchanged:: 2.8.4 added errors introduced in PostgreSQL 12
.. versionchanged:: 2.8.6 added errors introduced in PostgreSQL 13
.. versionchanged:: 2.9.2 added errors introduced in PostgreSQL 14
.. versionchanged:: 2.9.4 added errors introduced in PostgreSQL 15
.. versionchanged:: 2.9.10 added errors introduced in PostgreSQL 17
This module exposes the classes psycopg raises upon receiving an error from
the database with a :sql:`SQLSTATE` value attached (available in the
`~psycopg2.Error.pgcode` attribute). The content of the module is generated
from the PostgreSQL source code and includes classes for every error defined
by PostgreSQL in versions between 9.1 and 15.
Every class in the module is named after what is referred to as "condition name" `in
the documentation`__, converted to CamelCase: e.g. the error 22012,
``division_by_zero`` is exposed by this module as the class `!DivisionByZero`.
.. __: https://www.postgresql.org/docs/current/static/errcodes-appendix.html#ERRCODES-TABLE
Every exception class is a subclass of one of the :ref:`standard DB-API
exceptions <dbapi-exceptions>` and exposes the `~psycopg2.Error` interface.
Each class's superclass is what used to be raised by psycopg in versions before
the introduction of this module, so everything should be compatible with
previously written code catching one of the DB-API classes: if your code used to
catch `!IntegrityError` to detect a duplicate entry, it will keep on working
even if a more specialised subclass such as `UniqueViolation` is raised.
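For instance, a sketch of catching a duplicate key error with either the
generic or the specialised class (``mytable`` and the handlers are
hypothetical; note that the more specific class must be listed first):

.. code-block:: python

    try:
        cur.execute("INSERT INTO mytable (id) VALUES (%s)", (42,))
    except psycopg2.errors.UniqueViolation:
        handle_duplicate()               # hypothetical handler
    except psycopg2.IntegrityError:
        handle_other_constraint_error()  # hypothetical handler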
The new classes allow a more idiomatic way to check and process a specific
error among the many the database may return. For instance, in order to check
that a table is locked, the following code could have been used previously:
.. code-block:: python
try:
cur.execute("LOCK TABLE mytable IN ACCESS EXCLUSIVE MODE NOWAIT")
except psycopg2.OperationalError as e:
if e.pgcode == psycopg2.errorcodes.LOCK_NOT_AVAILABLE:
locked = True
else:
raise
While this method is still available, the specialised class allows for a more
idiomatic error handler:
.. code-block:: python
try:
cur.execute("LOCK TABLE mytable IN ACCESS EXCLUSIVE MODE NOWAIT")
except psycopg2.errors.LockNotAvailable:
locked = True
.. autofunction:: lookup
.. code-block:: python
try:
cur.execute("LOCK TABLE mytable IN ACCESS EXCLUSIVE MODE NOWAIT")
except psycopg2.errors.lookup("55P03"):
locked = True
SQLSTATE exception classes
--------------------------
The following table contains the list of all the SQLSTATE classes exposed by
the module.
Note that, for completeness, the module also exposes all the
:ref:`DB-API-defined exceptions <dbapi-exceptions>` and :ref:`a few
psycopg-specific ones <extension-exceptions>` exposed by the `!extensions`
module, which are not listed here.
.. include:: sqlstate_errors.rst


@ -94,14 +94,14 @@ introspection etc.
The method uses the efficient |lo_export|_ libpq function.
.. |lo_export| replace:: `!lo_export()`
.. _lo_export: https://www.postgresql.org/docs/current/static/lo-interfaces.html#LO-EXPORT
.. _lo_export: http://www.postgresql.org/docs/current/static/lo-interfaces.html#LO-EXPORT
.. method:: seek(offset, whence=0)
Set the lobject current position.
.. versionchanged:: 2.6
.. versionchanged:: 2.6.0
added support for *offset* > 2GB.
@ -109,9 +109,9 @@ introspection etc.
Return the lobject current position.
.. versionadded:: 2.2
.. versionadded:: 2.2.0
.. versionchanged:: 2.6
.. versionchanged:: 2.6.0
added support for return value > 2GB.
@ -125,11 +125,11 @@ introspection etc.
libpq function.
.. |lo_truncate| replace:: `!lo_truncate()`
.. _lo_truncate: https://www.postgresql.org/docs/current/static/lo-interfaces.html#LO-TRUNCATE
.. _lo_truncate: http://www.postgresql.org/docs/current/static/lo-interfaces.html#LO-TRUNCATE
.. versionadded:: 2.2
.. versionadded:: 2.2.0
.. versionchanged:: 2.6
.. versionchanged:: 2.6.0
added support for *len* > 2GB.
.. warning::
@ -154,130 +154,6 @@ introspection etc.
Close the object and remove it from the database.
.. autoclass:: ConnectionInfo(connection)
.. versionadded:: 2.8
.. autoattribute:: dbname
.. autoattribute:: user
.. autoattribute:: password
.. autoattribute:: host
.. autoattribute:: port
.. autoattribute:: options
.. autoattribute:: dsn_parameters
Example::
>>> conn.info.dsn_parameters
{'dbname': 'test', 'user': 'postgres', 'port': '5432', 'sslmode': 'prefer'}
Requires libpq >= 9.3.
.. autoattribute:: status
.. autoattribute:: transaction_status
.. automethod:: parameter_status(name)
.. autoattribute:: protocol_version
Currently Psycopg supports only protocol 3, which allows connection
to PostgreSQL servers from version 7.4. Psycopg versions before
2.3 support both protocols 2 and 3.
.. autoattribute:: server_version
The number is formed by converting the major, minor, and revision
numbers into two-decimal-digit numbers and appending them together.
After PostgreSQL 10 the minor version was dropped, so the second group
of digits is always ``00``. For example, version 9.3.5 will be
returned as ``90305``, version 10.2 as ``100002``.
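For example, on a hypothetical 9.3.5 server::

    >>> conn.info.server_version
    90305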
.. autoattribute:: error_message
.. autoattribute:: socket
.. autoattribute:: backend_pid
.. autoattribute:: needs_password
.. autoattribute:: used_password
.. autoattribute:: ssl_in_use
.. automethod:: ssl_attribute(name)
.. autoattribute:: ssl_attribute_names
.. class:: Column(\*args, \*\*kwargs)
Description of one result column, exposed as items of the
`cursor.description` sequence.
.. versionadded:: 2.8
in previous versions the `!description` attribute was a sequence of
simple tuples or namedtuples.
.. attribute:: name
The name of the column returned.
.. attribute:: type_code
The PostgreSQL OID of the column. You can use the |pg_type|_ system
table to get more information about the type. This is the value used
by Psycopg to decide what Python type to use to represent the value. See
also :ref:`type-casting-from-sql-to-python`.
.. attribute:: display_size
Supposed to be the actual length of the column in bytes. Obtaining
this value is computationally intensive, so it is always `!None`.
.. versionchanged:: 2.8
It was previously possible to obtain this value using a compiler
flag at build time.
.. attribute:: internal_size
The size in bytes of the column associated with this column on the
server. Set to a negative value for variable-size types. See also
PQfsize_.
.. attribute:: precision
Total number of significant digits in columns of type |NUMERIC|_.
`!None` for other types.
.. attribute:: scale
Count of decimal digits in the fractional part in columns of type
|NUMERIC|. `!None` for other types.
.. attribute:: null_ok
Always `!None` as not easy to retrieve from the libpq.
.. attribute:: table_oid
The oid of the table from which the column was fetched (matching
:sql:`pg_class.oid`). `!None` if the column is not a simple reference
to a table column. See also PQftable_.
.. versionadded:: 2.8
.. attribute:: table_column
The number of the column (within its table) making up the result
(matching :sql:`pg_attribute.attnum`, so it will start from 1).
`!None` if the column is not a simple reference to a table column. See
also PQftablecol_.
.. versionadded:: 2.8
.. |pg_type| replace:: :sql:`pg_type`
.. _pg_type: https://www.postgresql.org/docs/current/static/catalog-pg-type.html
.. _PQgetlength: https://www.postgresql.org/docs/current/static/libpq-exec.html#LIBPQ-PQGETLENGTH
.. _PQfsize: https://www.postgresql.org/docs/current/static/libpq-exec.html#LIBPQ-PQFSIZE
.. _PQftable: https://www.postgresql.org/docs/current/static/libpq-exec.html#LIBPQ-PQFTABLE
.. _PQftablecol: https://www.postgresql.org/docs/current/static/libpq-exec.html#LIBPQ-PQFTABLECOL
.. _NUMERIC: https://www.postgresql.org/docs/current/static/datatype-numeric.html#DATATYPE-NUMERIC-DECIMAL
.. |NUMERIC| replace:: :sql:`NUMERIC`
.. autoclass:: Notify(pid, channel, payload='')
:members: pid, channel, payload
@ -310,7 +186,6 @@ introspection etc.
message_primary
schema_name
severity
severity_nonlocalized
source_file
source_function
source_line
@ -323,9 +198,6 @@ introspection etc.
not all the fields are available for all the errors and for all the
server versions.
.. versionadded:: 2.8
The `!severity_nonlocalized` attribute.
.. _sql-adaptation-objects:
@ -413,9 +285,9 @@ deal with Python objects adaptation:
.. method:: getquoted()
Return the string enclosed in single quotes. Any single quote appearing
in the string is escaped by doubling it according to SQL string
constants syntax. Backslashes are escaped too.
>>> QuotedString(r"O'Reilly").getquoted()
"'O''Reilly'"
@ -453,6 +325,13 @@ deal with Python objects adaptation:
Specialized adapters for Python datetime objects.
.. class:: DateFromMx
TimeFromMx
TimestampFromMx
IntervalFromMx
Specialized adapters for `mx.DateTime`_ objects.
.. data:: adapters
Dictionary of the currently registered object adapters. Use
@ -544,27 +423,20 @@ details.
Used by Psycopg when adapting or casting unicode strings. See
:ref:`unicode-handling`.
.. __: https://www.postgresql.org/docs/current/static/multibyte.html
.. __: https://docs.python.org/library/codecs.html#standard-encodings
.. __: http://www.postgresql.org/docs/current/static/multibyte.html
.. __: http://docs.python.org/library/codecs.html#standard-encodings
.. index::
single: Exceptions; Additional
.. _extension-exceptions:
Additional exceptions
---------------------
The module exports a few exceptions in addition to the :ref:`standard ones
<dbapi-exceptions>` defined by the |DBAPI|_.
.. note::
From psycopg 2.8 these error classes are also exposed by the
`psycopg2.errors` module.
.. exception:: QueryCanceledError
(subclasses `~psycopg2.OperationalError`)
@ -594,7 +466,7 @@ Coroutines support functions
These functions are used to set and retrieve the callback function for
:ref:`cooperation with coroutine libraries <green-support>`.
.. versionadded:: 2.2
.. versionadded:: 2.2.0
.. autofunction:: set_wait_callback(f)
@ -618,7 +490,7 @@ Other functions
.. seealso:: libpq docs for `PQlibVersion()`__.
.. __: https://www.postgresql.org/docs/current/static/libpq-misc.html#LIBPQ-PQLIBVERSION
.. __: http://www.postgresql.org/docs/current/static/libpq-misc.html#LIBPQ-PQLIBVERSION
.. function:: make_dsn(dsn=None, \*\*kwargs)
@ -652,7 +524,7 @@ Other functions
`connection URIs`__ are only supported from libpq 9.2). Raise
`~psycopg2.ProgrammingError` if the *dsn* is not valid.
.. __: https://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-CONNSTRING
.. __: http://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-CONNSTRING
Example::
@ -666,7 +538,7 @@ Other functions
.. seealso:: libpq docs for `PQconninfoParse()`__.
.. __: https://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-PQCONNINFOPARSE
.. __: http://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-PQCONNINFOPARSE
.. function:: quote_ident(str, scope)
@ -676,43 +548,13 @@ Other functions
The *scope* must be a `connection` or a `cursor`, the underlying
connection encoding is used for any necessary character conversion.
Requires libpq >= 9.0.
.. versionadded:: 2.7
.. seealso:: libpq docs for `PQescapeIdentifier()`__
.. __: https://www.postgresql.org/docs/current/static/libpq-exec.html#LIBPQ-PQESCAPEIDENTIFIER
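A small sketch (the identifiers are arbitrary; the function always wraps the
result in double quotes, doubling any embedded quote)::

    >>> from psycopg2.extensions import quote_ident
    >>> quote_ident('select', conn)
    '"select"'
    >>> quote_ident('foo"bar', conn)
    '"foo""bar"'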
.. method:: encrypt_password(password, user, scope=None, algorithm=None)
Return the encrypted form of a PostgreSQL password.
:param password: the cleartext password to encrypt
:param user: the name of the user to use the password for
:param scope: the scope to encrypt the password into; if *algorithm* is
``md5`` it can be `!None`
:type scope: `connection` or `cursor`
:param algorithm: the password encryption algorithm to use
The *algorithm* ``md5`` is always supported. Other algorithms are only
supported if the client libpq version is at least 10 and may require a
compatible server version: check the `PostgreSQL encryption
documentation`__ to know the algorithms supported by your server.
.. __: https://www.postgresql.org/docs/current/static/encryption-options.html
Using `!None` as *algorithm* will result in querying the server to know the
current server password encryption setting, which is a blocking operation:
query the server separately and specify a value for *algorithm* if you
want to maintain a non-blocking behaviour.
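A hedged sketch (the user name and password are arbitrary)::

    from psycopg2.extensions import encrypt_password

    # md5 needs no connection round trip
    hashed = encrypt_password("s3cr3t", "appuser", algorithm="md5")

    # scram-sha-256 requires libpq >= 10 and a compatible server
    hashed = encrypt_password("s3cr3t", "appuser", scope=conn,
                              algorithm="scram-sha-256")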
.. versionadded:: 2.8
.. seealso:: PostgreSQL docs for the `password_encryption`__ setting, libpq `PQencryptPasswordConn()`__, `PQencryptPassword()`__ functions.
.. __: https://www.postgresql.org/docs/current/static/runtime-config-connection.html#GUC-PASSWORD-ENCRYPTION
.. __: https://www.postgresql.org/docs/current/static/libpq-misc.html#LIBPQ-PQENCRYPTPASSWORDCONN
.. __: https://www.postgresql.org/docs/current/static/libpq-misc.html#LIBPQ-PQENCRYPTPASSWORD
.. __: http://www.postgresql.org/docs/current/static/libpq-exec.html#LIBPQ-PQESCAPEIDENTIFIER
@ -751,8 +593,8 @@ methods. The level can be set to one of the following constants:
.. data:: ISOLATION_LEVEL_READ_COMMITTED
This is usually the default PostgreSQL value, but a different default may
be set in the database configuration.
A new transaction is started at the first `~cursor.execute()` command on a
cursor and at each new `!execute()` after a `~connection.commit()` or a
@ -765,7 +607,7 @@ methods. The level can be set to one of the following constants:
.. seealso:: `Read Committed Isolation Level`__ in PostgreSQL
documentation.
.. __: https://www.postgresql.org/docs/current/static/transaction-iso.html#XACT-READ-COMMITTED
.. __: http://www.postgresql.org/docs/current/static/transaction-iso.html#XACT-READ-COMMITTED
.. data:: ISOLATION_LEVEL_REPEATABLE_READ
@ -789,7 +631,7 @@ methods. The level can be set to one of the following constants:
.. seealso:: `Repeatable Read Isolation Level`__ in PostgreSQL
documentation.
.. __: https://www.postgresql.org/docs/current/static/transaction-iso.html#XACT-REPEATABLE-READ
.. __: http://www.postgresql.org/docs/current/static/transaction-iso.html#XACT-REPEATABLE-READ
.. data:: ISOLATION_LEVEL_SERIALIZABLE
@ -808,7 +650,7 @@ methods. The level can be set to one of the following constants:
.. seealso:: `Serializable Isolation Level`__ in PostgreSQL documentation.
.. __: https://www.postgresql.org/docs/current/static/transaction-iso.html#XACT-SERIALIZABLE
.. __: http://www.postgresql.org/docs/current/static/transaction-iso.html#XACT-SERIALIZABLE
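As an illustration, the constants above would typically be used like this
(either call works; *conn* is an existing connection)::

    from psycopg2 import extensions

    conn.set_session(isolation_level=extensions.ISOLATION_LEVEL_SERIALIZABLE)
    # or, with the older API:
    conn.set_isolation_level(extensions.ISOLATION_LEVEL_SERIALIZABLE)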
.. data:: ISOLATION_LEVEL_DEFAULT
@ -831,7 +673,7 @@ Transaction status constants
----------------------------
These values represent the possible status of a transaction: the current value
can be read using the `connection.info.transaction_status` property.
can be read using the `connection.get_transaction_status()` method.
.. data:: TRANSACTION_STATUS_IDLE
@ -902,7 +744,7 @@ internal usage and Python code should not rely on them.
Poll constants
--------------
.. versionadded:: 2.2
.. versionadded:: 2.2.0
These values can be returned by `connection.poll()` during asynchronous
connection and communication. They match the values in the libpq enum
@ -951,7 +793,6 @@ Python objects. All the typecasters are automatically registered, except
from the database. See :ref:`unicode-handling` for details.
.. data:: BOOLEAN
BYTES
DATE
DECIMAL
FLOAT
@ -968,7 +809,6 @@ from the database. See :ref:`unicode-handling` for details.
.. data:: BINARYARRAY
BOOLEANARRAY
BYTESARRAY
DATEARRAY
DATETIMEARRAY
DECIMALARRAY
@ -985,26 +825,31 @@ from the database. See :ref:`unicode-handling` for details.
.. data:: PYDATE
PYDATETIME
PYDATETIMETZ
PYINTERVAL
PYTIME
PYDATEARRAY
PYDATETIMEARRAY
PYDATETIMETZARRAY
PYINTERVALARRAY
PYTIMEARRAY
Typecasters to convert time-related data types to Python `!datetime`
objects.
.. versionchanged:: 2.2
.. data:: MXDATE
MXDATETIME
MXINTERVAL
MXTIME
MXDATEARRAY
MXDATETIMEARRAY
MXINTERVALARRAY
MXTIMEARRAY
Typecasters to convert time-related data types to `mx.DateTime`_ objects.
Only available if Psycopg was compiled with `!mx` support.
.. versionchanged:: 2.2.0
previously the `DECIMAL` typecaster and the specific time-related
typecasters (`!PY*` and `!MX*`) were not exposed by the `extensions`
module. In older versions they can be imported from the implementation
module `!psycopg2._psycopg`.
.. versionadded:: 2.7.2
the `!*DATETIMETZ*` objects.
.. versionadded:: 2.8
the `!BYTES` and `BYTESARRAY` objects.


@ -41,8 +41,8 @@ If you want to use a `!connection` subclass you can pass it as the
Dictionary-like cursor
^^^^^^^^^^^^^^^^^^^^^^
The dict cursors allow access to the attributes of retrieved records
using an interface similar to the Python dictionaries instead of the tuples.
>>> dict_cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
>>> dict_cur.execute("INSERT INTO test (num, data) VALUES(%s, %s)",
@ -99,6 +99,20 @@ Real dictionary cursor
.. versionadded:: 2.3
These objects require :py:func:`collections.namedtuple` to be found, so it is
available out-of-the-box only from Python 2.6. Anyway, the namedtuple
implementation is compatible with previous Python versions, so all you
have to do is to `download it`__ and make it available where we
expect it to be... ::
from somewhere import namedtuple
import collections
collections.namedtuple = namedtuple
from psycopg2.extras import NamedTupleConnection
# ...
.. __: http://code.activestate.com/recipes/500261-named-tuples/
.. autoclass:: NamedTupleCursor
.. autoclass:: NamedTupleConnection
@ -122,11 +136,6 @@ Logging cursor
.. autoclass:: LoggingCursor
.. note::
Queries that are executed with `cursor.executemany()` are not logged.
.. autoclass:: MinTimeLoggingConnection
:members: initialize,filter
@ -136,8 +145,8 @@ Logging cursor
.. _replication-objects:
Replication support objects
---------------------------
Replication connection and cursor classes
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
See :ref:`replication-support` for an introduction to the topic.
@ -270,7 +279,7 @@ The individual messages in the replication stream are represented by
Replication slots are a feature of PostgreSQL server starting with
version 9.4.
.. method:: start_replication(slot_name=None, slot_type=None, start_lsn=0, timeline=0, options=None, decode=False, status_interval=10)
.. method:: start_replication(slot_name=None, slot_type=None, start_lsn=0, timeline=0, options=None, decode=False)
Start replication on the connection.
@ -288,7 +297,6 @@ The individual messages in the replication stream are represented by
slot (not allowed with physical replication)
:param decode: a flag indicating that unicode conversion should be
performed on messages received from the server
:param status_interval: time between feedback packets sent to the server
If a *slot_name* is specified, the slot must exist on the server and
its type must match the replication type used.
@ -329,15 +337,7 @@ The individual messages in the replication stream are represented by
*This parameter should not be set with physical replication or with
logical replication plugins that produce binary output.*
The replication stream should periodically send feedback to the database
to prevent disconnection due to timeout. Feedback is automatically sent when
`read_message()` is called or during the run of `consume_stream()`.
To specify the feedback interval use the *status_interval* parameter.
The value of this parameter must be set to at least 1 second, but
it can have a fractional part.
This function constructs a |START_REPLICATION|_ command and calls
This function constructs a ``START_REPLICATION`` command and calls
`start_replication_expert()` internally.
After starting the replication, to actually consume the incoming
@ -345,28 +345,13 @@ The individual messages in the replication stream are represented by
`read_message()` in case of :ref:`asynchronous connection
<async-support>`.
.. versionchanged:: 2.8.3
added the *status_interval* parameter.
.. |START_REPLICATION| replace:: :sql:`START_REPLICATION`
.. _START_REPLICATION: https://www.postgresql.org/docs/current/static/protocol-replication.html
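As a hedged sketch (the slot name and DSN are arbitrary; ``test_decoding``
is the contrib output plugin shipped with PostgreSQL)::

    from psycopg2.extras import LogicalReplicationConnection

    conn = psycopg2.connect(dsn, connection_factory=LogicalReplicationConnection)
    cur = conn.cursor()
    cur.create_replication_slot('myslot', output_plugin='test_decoding')
    cur.start_replication(slot_name='myslot', decode=True)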
.. method:: start_replication_expert(command, decode=False, status_interval=10)
.. method:: start_replication_expert(command, decode=False)
Start replication on the connection using provided
|START_REPLICATION|_ command.
``START_REPLICATION`` command. See `start_replication()` for
description of *decode* parameter.
:param command: The full replication command. It can be a string or a
`~psycopg2.sql.Composable` instance for dynamic generation.
:param decode: a flag indicating that unicode conversion should be
performed on messages received from the server.
:param status_interval: time between feedback packets sent to the server
.. versionchanged:: 2.8.3
added the *status_interval* parameter.
.. method:: consume_stream(consume, keepalive_interval=None)
.. method:: consume_stream(consume, keepalive_interval=10)
:param consume: a callable object with signature :samp:`consume({msg})`
:param keepalive_interval: interval (in seconds) to send keepalive
@ -389,15 +374,14 @@ The individual messages in the replication stream are represented by
`ReplicationMessage` class. See `read_message()` for details about
message decoding.
This method also sends feedback messages to the server every
*keepalive_interval* (in seconds). The value of this parameter must
This method also sends keepalive messages to the server in case there
were no new data from the server for the duration of
*keepalive_interval* (in seconds). The value of this parameter must
be set to at least 1 second, but it can have a fractional part.
If the *keepalive_interval* is not specified, the value of
*status_interval* specified in the `start_replication()` or
`start_replication_expert()` will be used.
The client must confirm every processed message by calling
`send_feedback()` method on the corresponding replication cursor. A
After processing certain amount of messages the client should send a
confirmation message to the server. This should be done by calling
`send_feedback()` method on the corresponding replication cursor. A
reference to the cursor is provided in the `ReplicationMessage` as an
attribute.
@ -406,11 +390,13 @@ The individual messages in the replication stream are represented by
class LogicalStreamConsumer(object):
# ...
...
def __call__(self, msg):
self.process_message(msg.payload)
msg.cursor.send_feedback(flush_lsn=msg.data_start)
if self.should_send_feedback(msg):
msg.cursor.send_feedback(flush_lsn=msg.data_start)
consumer = LogicalStreamConsumer()
cur.consume_stream(consumer)
@ -423,10 +409,12 @@ The individual messages in the replication stream are represented by
retains all the WAL segments that might be needed to stream the
changes via all of the currently open replication slots.
.. versionchanged:: 2.8.3
changed the default value of the *keepalive_interval* parameter to `!None`.
On the other hand, it is not recommended to send confirmation
after *every* processed message, since that will put an
unnecessary load on the network and the server. A possible strategy
is to confirm after every COMMIT message.
.. method:: send_feedback(write_lsn=0, flush_lsn=0, apply_lsn=0, reply=False, force=False)
.. method:: send_feedback(write_lsn=0, flush_lsn=0, apply_lsn=0, reply=False)
:param write_lsn: a LSN position up to which the client has written the data locally
:param flush_lsn: a LSN position up to which the client has processed the
@ -436,21 +424,13 @@ The individual messages in the replication stream are represented by
has applied the changes (physical replication
master-slave protocol only)
:param reply: request the server to send back a keepalive message immediately
:param force: force sending a feedback message regardless of status_interval timeout
Use this method to report to the server that all messages up to a
certain LSN position have been processed on the client and may be
discarded on the server.
If the *reply* or *force* parameters are not set, this method will
just update internal structures without sending the feedback message
to the server. The library sends the feedback message automatically
when the *status_interval* timeout is reached. For this to work, you must
call `send_feedback()` on the same Cursor that you called `start_replication()`
on (the one in `message.cursor`) or your feedback will be lost.
.. versionchanged:: 2.8.3
added the *force* parameter.
This method can also be called with all default parameters' values to
just send a keepalive message to the server.
Low-level replication cursor methods for :ref:`asynchronous connection
<async-support>` operation.
@ -484,9 +464,9 @@ The individual messages in the replication stream are represented by
corresponding connection to block the process until there is more data
from the server.
Last, but not least, this method sends feedback messages when the
*status_interval* timeout is reached or when a keepalive message with
a reply request arrives from the server.
The server can send keepalive messages to the client periodically.
Such messages are silently consumed by this method and are never
reported to the caller.
.. method:: fileno()
@ -502,40 +482,26 @@ The individual messages in the replication stream are represented by
communication with the server (a data or keepalive message in either
direction).
.. attribute:: feedback_timestamp
A `~datetime` object representing the timestamp at the moment when
the last feedback message was sent to the server.
.. versionadded:: 2.8.3
.. attribute:: wal_end
LSN position of the current end of WAL on the server at the
moment of last data or keepalive message received from the
server.
.. versionadded:: 2.8
An actual example of asynchronous operation might look like this::
from select import select
from datetime import datetime
def consume(msg):
# ...
msg.cursor.send_feedback(flush_lsn=msg.data_start)
...
status_interval = 10.0
keepalive_interval = 10.0
while True:
msg = cur.read_message()
if msg:
consume(msg)
else:
now = datetime.now()
timeout = status_interval - (now - cur.feedback_timestamp).total_seconds()
timeout = keepalive_interval - (now - cur.io_timestamp).total_seconds()
try:
sel = select([cur], [], [], max(0, timeout))
if not any(sel):
cur.send_feedback() # timed out, send keepalive message
except InterruptedError:
pass # recalculate timeout and continue
@ -566,22 +532,25 @@ JSON_ adaptation
added |jsonb| support. In previous versions |jsonb| values are returned
as strings. See :ref:`the FAQ <faq-jsonb-adapt>` for a workaround.
Psycopg can adapt Python objects to and from the PostgreSQL |jsons|_
types. With PostgreSQL 9.2 and following versions adaptation is
Psycopg can adapt Python objects to and from the PostgreSQL |pgjson|_ and
|jsonb| types. With PostgreSQL 9.2 and following versions adaptation is
available out-of-the-box. To use JSON data with previous database versions
(either with the `9.1 json extension`__, but even if you want to convert text
fields to JSON) you can use the `register_json()` function.
.. __: http://people.planetpostgresql.org/andrew/index.php?/archives/255-JSON-for-PG-9.2-...-and-now-for-9.1!.html
The Python :py:mod:`json` module is used by default to convert Python objects
to JSON and to parse data from the database.
The Python library used by default to convert Python objects to JSON and to
parse data from the database depends on the language version: with Python 2.6
and following the :py:mod:`json` module from the standard library is used;
with previous versions the `simplejson`_ module is used if available. Note
that the last `!simplejson` version supporting Python 2.4 is the 2.0.9.
.. _JSON: https://www.json.org/
.. |json| replace:: :sql:`json`
.. _JSON: http://www.json.org/
.. |pgjson| replace:: :sql:`json`
.. |jsonb| replace:: :sql:`jsonb`
.. |jsons| replace:: |json| and |jsonb|
.. _jsons: https://www.postgresql.org/docs/current/static/datatype-json.html
.. _pgjson: http://www.postgresql.org/docs/current/static/datatype-json.html
.. _simplejson: http://pypi.python.org/pypi/simplejson/
In order to pass a Python object to the database as query argument you can use
the `Json` adapter::
@ -589,7 +558,7 @@ the `Json` adapter::
curs.execute("insert into mytable (jsondata) values (%s)",
[Json({'a': 100})])
Reading from the database, |json| and |jsonb| values will be automatically
Reading from the database, |pgjson| and |jsonb| values will be automatically
converted to Python objects.
.. note::
@ -637,7 +606,7 @@ or you can subclass it overriding the `~Json.dumps()` method::
Customizing the conversion from PostgreSQL to Python can be done passing a
custom `!loads()` function to `register_json()`. For the builtin data types
(|json| from PostgreSQL 9.2, |jsonb| from PostgreSQL 9.4) use
(|pgjson| from PostgreSQL 9.2, |jsonb| from PostgreSQL 9.4) use
`register_default_json()` and `register_default_jsonb()`. For example, if you
want to convert the float values from :sql:`json` into
:py:class:`~decimal.Decimal` you can use::
@ -645,13 +614,6 @@ want to convert the float values from :sql:`json` into
loads = lambda x: json.loads(x, parse_float=Decimal)
psycopg2.extras.register_json(conn, loads=loads)
Or, if you want to use an alternative JSON module implementation, such as the
faster UltraJSON_, you can use::
psycopg2.extras.register_default_json(loads=ujson.loads, globally=True)
psycopg2.extras.register_default_jsonb(loads=ujson.loads, globally=True)
.. _UltraJSON: https://pypi.org/project/ujson/
.. autoclass:: Json
@ -707,7 +669,7 @@ can be enabled using the `register_hstore()` function.
.. |hstore| replace:: :sql:`hstore`
.. _hstore: https://www.postgresql.org/docs/current/static/hstore.html
.. _hstore: http://www.postgresql.org/docs/current/static/hstore.html
@ -729,7 +691,7 @@ after a table row type) into a Python named tuple, or into a regular tuple if
:py:func:`collections.namedtuple` is not found.
.. |CREATE TYPE| replace:: :sql:`CREATE TYPE`
.. _CREATE TYPE: https://www.postgresql.org/docs/current/static/sql-createtype.html
.. _CREATE TYPE: http://www.postgresql.org/docs/current/static/sql-createtype.html
.. doctest::
@ -843,7 +805,7 @@ PostgreSQL |range|_ types. Builtin |range| types are supported out-of-the-box;
user-defined |range| types can be adapted using `register_range()`.
.. |range| replace:: :sql:`range`
.. _range: https://www.postgresql.org/docs/current/static/rangetypes.html
.. _range: http://www.postgresql.org/docs/current/static/rangetypes.html
.. autoclass:: Range
@ -852,7 +814,7 @@ user-defined |range| types can be adapted using `register_range()`.
features: it doesn't perform normalization and doesn't implement all the
operators__ supported by the database.
.. __: https://www.postgresql.org/docs/current/static/functions-range.html#RANGE-OPERATORS-TABLE
.. __: http://www.postgresql.org/docs/current/static/functions-range.html#RANGE-OPERATORS-TABLE
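A small sketch using the builtin `!NumericRange` subclass (the values are
arbitrary; bounds default to ``'[)'``)::

    >>> from psycopg2.extras import NumericRange
    >>> r = NumericRange(10, 20)
    >>> 15 in r
    True
    >>> r.lower, r.upper, r.isempty
    (10, 20, False)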
`!Range` objects are immutable, hashable, and support the ``in`` operator
(checking if an element is within the range). They can be tested for
@ -989,7 +951,7 @@ converted into lists of strings.
.. autofunction:: register_inet
.. deprecated:: 2.7
this function will not receive further development and may disappear in
future versions.
.. doctest::
@ -1020,7 +982,7 @@ Fast execution helpers
The current implementation of `~cursor.executemany()` is (using an extremely
charitable understatement) not particularly performing. These functions can
be used to speed up the repeated execution of a statement against a set of
parameters. By reducing the number of server roundtrips the performance can be
`orders of magnitude better`__ than using `!executemany()`.
@ -1029,14 +991,6 @@ parameters. By reducing the number of server roundtrips the performance can be
.. autofunction:: execute_batch
.. code:: python
>>> nums = ((1,), (5,), (10,))
>>> execute_batch(cur, "INSERT INTO test (num) VALUES (%s)", nums)
>>> tuples = ((123, "foo"), (42, "bar"), (23, "baz"))
>>> execute_batch(cur, "INSERT INTO test (num, data) VALUES (%s, %s)", tuples)
.. versionadded:: 2.7
.. note::
@ -1074,10 +1028,22 @@ parameters. By reducing the number of server roundtrips the performance can be
.. autofunction:: execute_values
.. versionadded:: 2.7
.. versionchanged:: 2.8
added the *fetch* parameter.
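A minimal sketch, reusing the ``test`` table from the examples above::

    from psycopg2.extras import execute_values

    execute_values(cur,
        "INSERT INTO test (num, data) VALUES %s",
        [(100, "abc"), (200, "def"), (300, "ghi")])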
.. index::
single: Time zones; Fractional
Fractional time zones
---------------------
.. autofunction:: register_tstz_w_secs
.. versionadded:: 2.0.9
.. versionchanged:: 2.2.2
function is no-op: see :ref:`tz-handling`.
.. index::
pair: Example; Coroutine;


@ -7,30 +7,6 @@ Here are a few gotchas you may encounter using `psycopg2`. Feel free to
suggest new entries!
Meta
----
.. _faq-question:
.. cssclass:: faq
How do I ask a question?
- Have you first checked if your question is answered already in the
documentation?
- If your question is about installing psycopg, have you checked the
:ref:`install FAQ <faq-compile>` and the :ref:`install docs
<installation>`?
- Have you googled for your error message?
- If you haven't found an answer yet, please write to the `Mailing List`_.
- If you haven't found a bug, DO NOT write to the bug tracker to ask
questions. You will only get piro grumpy.
.. _mailing list: https://www.postgresql.org/list/psycopg/
.. _faq-transactions:
Problems with transactions handling
@ -64,7 +40,7 @@ I receive the error *current transaction is aborted, commands ignored until end
PostgreSQL supports nested transactions using the |SAVEPOINT|_ command).
.. |SAVEPOINT| replace:: :sql:`SAVEPOINT`
.. _SAVEPOINT: https://www.postgresql.org/docs/current/static/sql-savepoint.html
.. _SAVEPOINT: http://www.postgresql.org/docs/current/static/sql-savepoint.html
.. _faq-transaction-aborted-multiprocess:
@ -132,19 +108,6 @@ My database is Unicode, but I receive all the strings as UTF-8 `!str`. Can I rec
See :ref:`unicode-handling` for the gory details.
.. _faq-bytes:
.. cssclass:: faq
My database is in mixed encoding. My program was working on Python 2 but Python 3 fails decoding the strings. How do I avoid decoding?
From psycopg 2.8 you can use the following adapters to always return bytes
from strings::
psycopg2.extensions.register_type(psycopg2.extensions.BYTES)
psycopg2.extensions.register_type(psycopg2.extensions.BYTESARRAY)
See :ref:`unicode-handling` for an example.
.. _faq-float:
.. cssclass:: faq
@ -180,7 +143,7 @@ Psycopg automatically converts PostgreSQL :sql:`json` data into Python objects.
Psycopg converts :sql:`json` values into Python objects but :sql:`jsonb` values are returned as strings. Can :sql:`jsonb` be converted automatically?
Automatic conversion of :sql:`jsonb` values is supported from Psycopg
release 2.5.4. For previous versions you can register the :sql:`json`
typecaster on the :sql:`jsonb` oids (which are known and not supposed to
change in future PostgreSQL versions)::
psycopg2.extras.register_json(oid=3802, array_oid=3807, globally=True)
@ -221,8 +184,8 @@ Transferring binary data from PostgreSQL 9.0 doesn't work.
session before reading binary data;
- upgrade the libpq library on the client to at least 9.0.
.. __: https://www.postgresql.org/docs/current/static/datatype-binary.html
.. __: https://www.postgresql.org/docs/current/static/runtime-config-client.html#GUC-BYTEA-OUTPUT
.. __: http://www.postgresql.org/docs/current/static/datatype-binary.html
.. __: http://www.postgresql.org/docs/current/static/runtime-config-client.html#GUC-BYTEA-OUTPUT
.. _faq-array:
@ -271,7 +234,7 @@ When should I save and re-use a connection as opposed to creating a new one as n
What are the advantages or disadvantages of using named cursors?
The only disadvantage is that they use up resources on the server and
that there is a little overhead because at least two queries (one to
create the cursor and one to fetch the initial result set) are issued to
the backend. The advantage is that data is fetched one chunk at a time:
using small `~cursor.fetchmany()` values it is possible to use very
@ -292,7 +255,7 @@ How do I interrupt a long-running query in an interactive shell?
can handle a :kbd:`Ctrl-C` correctly. For previous versions, you can use
`this implementation`__.
.. __: https://www.psycopg.org/articles/2014/07/20/cancelling-postgresql-statements-python/
.. __: http://initd.org/psycopg/articles/2014/07/20/cancelling-postgresql-statements-python/
.. code-block:: pycon
@ -311,24 +274,15 @@ How do I interrupt a long-running query in an interactive shell?
.. _faq-compile:
Problems compiling and installing psycopg2
------------------------------------------
.. _faq-wheels:
.. cssclass:: faq
Psycopg 2.8 fails to install, Psycopg 2.7 was working fine.
With Psycopg 2.7 you were installing binary packages, but they have proven
unreliable so now you have to install them explicitly using the
``psycopg2-binary`` package. See :ref:`binary-packages` for all the
details.
Problems compiling and deploying psycopg2
-----------------------------------------
.. _faq-python-h:
.. cssclass:: faq
I can't compile `!psycopg2`: the compiler says *error: Python.h: No such file or directory*. What am I missing?
You need to install a Python development package: it is usually called
``python-dev`` or ``python3-dev`` according to your Python version.
``python-dev``.
.. _faq-libpq-fe-h:
@ -352,9 +306,7 @@ I can't compile `!psycopg2`: the compiler says *error: libpq-fe.h: No such file
API support (*i.e.* the libpq used at compile time was at least 9.3) but
at runtime an older libpq dynamic library is found.
You can use:
.. code-block:: shell
You can use::
$ ldd /path/to/packages/psycopg2/_psycopg.so | grep libpq
@ -364,7 +316,7 @@ I can't compile `!psycopg2`: the compiler says *error: libpq-fe.h: No such file
:program:`pg_config` at install time and the libpq at runtime.
.. |lo_truncate| replace:: `!lo_truncate()`
.. _lo_truncate: https://www.postgresql.org/docs/current/static/lo-interfaces.html#LO-TRUNCATE
.. _lo_truncate: http://www.postgresql.org/docs/current/static/lo-interfaces.html#LO-TRUNCATE
.. _faq-import-mod_wsgi:
@ -378,5 +330,6 @@ Psycopg raises *ImportError: cannot import name tz* on import in mod_wsgi / ASP,
use the WSGIPythonEggs__ directive.
.. _egg: http://peak.telecommunity.com/DevCenter/PythonEggs
.. __: https://stackoverflow.com/questions/2192323/what-is-the-python-egg-cache-python-egg-cache
.. __: https://modwsgi.readthedocs.io/en/develop/configuration-directives/WSGIPythonEggs.html
.. __: http://stackoverflow.com/questions/2192323/what-is-the-python-egg-cache-python-egg-cache
.. __: http://code.google.com/p/modwsgi/wiki/ConfigurationDirectives#WSGIPythonEggs


@ -23,10 +23,10 @@ extended and customized thanks to a flexible :ref:`objects adaptation system
Psycopg 2 is both Unicode and Python 3 friendly.
.. _Psycopg: https://psycopg.org/
.. _PostgreSQL: https://www.postgresql.org/
.. _Python: https://www.python.org/
.. _libpq: https://www.postgresql.org/docs/current/static/libpq.html
.. _Psycopg: http://initd.org/psycopg/
.. _PostgreSQL: http://www.postgresql.org/
.. _Python: http://www.python.org/
.. _libpq: http://www.postgresql.org/docs/current/static/libpq.html
.. rubric:: Contents
@ -42,14 +42,12 @@ Psycopg 2 is both Unicode and Python 3 friendly.
advanced
extensions
extras
errors
sql
tz
pool
errorcodes
faq
news
license
.. ifconfig:: builder != 'text'
@ -57,7 +55,6 @@ Psycopg 2 is both Unicode and Python 3 friendly.
.. rubric:: Indices and tables
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
@ -68,3 +65,4 @@ Psycopg 2 is both Unicode and Python 3 friendly.
**To Do items in the documentation**
.. todolist::


@ -1,6 +1,4 @@
.. _installation:
Installation
Introduction
============
.. sectionauthor:: Daniele Varrazzo <daniele.varrazzo@gmail.com>
@ -8,140 +6,68 @@ Installation
Psycopg is a PostgreSQL_ adapter for the Python_ programming language. It is a
wrapper for the libpq_, the official PostgreSQL client library.
.. _PostgreSQL: https://www.postgresql.org/
.. _Python: https://www.python.org/
.. index::
single: Install; from PyPI
single: Install; wheel
single: Wheel
.. _binary-packages:
Quick Install
-------------
For most operating systems, the quickest way to install Psycopg is using the
wheel_ package available on PyPI_:
.. code-block:: console
$ pip install psycopg2-binary
This will install a pre-compiled binary version of the module which does not
require the build or runtime prerequisites described below. Make sure to use
an up-to-date version of :program:`pip` (you can upgrade it using something
like ``pip install -U pip``).
You may then import the ``psycopg2`` package, as usual:
.. code-block:: python
import psycopg2
# Connect to your postgres DB
conn = psycopg2.connect("dbname=test user=postgres")
# Open a cursor to perform database operations
cur = conn.cursor()
# Execute a query
cur.execute("SELECT * FROM my_data")
# Retrieve query results
records = cur.fetchall()
.. _PyPI: https://pypi.org/project/psycopg2-binary/
.. _wheel: https://pythonwheels.com/
psycopg vs psycopg-binary
^^^^^^^^^^^^^^^^^^^^^^^^^
The ``psycopg2-binary`` package is meant for beginners to start playing
with Python and PostgreSQL without the need to meet the build
requirements.
If you are the maintainer of a published package depending on `!psycopg2`
you shouldn't use ``psycopg2-binary`` as a module dependency. **For
production use you are advised to use the source distribution.**
The binary packages come with their own versions of a few C libraries,
among which ``libpq`` and ``libssl``, which will be used regardless of other
libraries available on the client: upgrading the system libraries will not
upgrade the libraries used by `!psycopg2`. Please build `!psycopg2` from
source if you want to maintain binary upgradeability.
.. warning::
The `!psycopg2` wheel package comes packaged, among the others, with its
own ``libssl`` binary. This may create conflicts with other extension
modules binding with ``libssl`` as well, for instance with the Python
`ssl` module: in some cases, under concurrency, the interaction between
the two libraries may result in a segfault. In case of doubts you are
advised to use a package built from source.
.. index::
single: Install; disable wheel
single: Wheel; disable
.. _disable-wheel:
Change in binary packages between Psycopg 2.7 and 2.8
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
In version 2.7.x, :command:`pip install psycopg2` would have tried to install
automatically the binary package of Psycopg. Because of concurrency problems
binary packages have displayed, ``psycopg2-binary`` has become a separate
package, and from 2.8 it has become the only way to install the binary
package.
If you are using Psycopg 2.7 and you want to disable the use of wheel binary
packages, relying on the system libraries available on your client, you
can use the :command:`pip` |--no-binary option|__, e.g.:
.. code-block:: console
$ pip install --no-binary :all: psycopg2
.. |--no-binary option| replace:: ``--no-binary`` option
.. __: https://pip.pypa.io/en/stable/reference/pip_install/#install-no-binary
which can be specified in your :file:`requirements.txt` files too, e.g. use:
.. code-block:: none
psycopg2>=2.7,<2.8 --no-binary psycopg2
to use the last bugfix release of the `!psycopg2` 2.7 package, specifying to
always compile it from source. Of course in this case you will have to meet
the :ref:`build prerequisites <build-prerequisites>`.
.. index::
single: Prerequisites
Prerequisites
-------------
The `psycopg2` package is the current mature implementation of the adapter: it
is a C extension and as such it is only compatible with CPython_. If you want
to use Psycopg on a different Python implementation (PyPy, Jython, IronPython)
there is an experimental `porting of Psycopg for Ctypes`__, but it is not as
mature as the C implementation yet.
The current `!psycopg2` implementation supports:
..
NOTE: keep consistent with setup.py and the /features/ page.
- Python versions from 3.8 to 3.13
- PostgreSQL server versions from 7.4 to 17
- Python 2 versions from 2.6 to 2.7
- Python 3 versions from 3.2 to 3.6
- PostgreSQL server versions from 7.4 to 9.6
- PostgreSQL client library version from 9.1
.. note::
.. _PostgreSQL: http://www.postgresql.org/
.. _Python: http://www.python.org/
.. _libpq: http://www.postgresql.org/docs/current/static/libpq.html
.. _CPython: http://en.wikipedia.org/wiki/CPython
.. _Ctypes: http://docs.python.org/library/ctypes.html
.. __: https://github.com/mvantellingen/psycopg2-ctypes
Not all the psycopg2 versions support all the supported Python versions.
Please see the :ref:`release notes <news>` to verify when the support for
a new Python version was added and when the support for an old Python
version was removed.
.. index::
single: Install; from PyPI
Binary install from PyPI
------------------------
`!psycopg2` is `available on PyPI`__ in the form of wheel_ packages for the
most common platforms (Linux, OSX, Windows): this should allow you to
install a binary version of the module, including all the dependencies, simply
using::
pip install psycopg2
Make sure to use an up-to-date version of :program:`pip` (you can upgrade it
using something like ``pip install -U pip``)
.. __: PyPI_
.. _PyPI: https://pypi.python.org/pypi/psycopg2/
.. _wheel: http://pythonwheels.com/
.. index::
single: Install; from source
.. _install-from-source:
Install from source
-------------------
.. _source-package:
You can download a copy of Psycopg source files from the `Psycopg download
page`__ or from PyPI_.
.. __: http://initd.org/psycopg/download/
.. _build-prerequisites:
@ -149,20 +75,17 @@ The current `!psycopg2` implementation supports:
Build prerequisites
^^^^^^^^^^^^^^^^^^^
The build prerequisites are to be met in order to install Psycopg from source
code, from a source distribution package, GitHub_ or from PyPI.
These notes illustrate how to compile Psycopg on Linux. If you want to compile
Psycopg on other platforms you may have to adjust some details accordingly.
.. _GitHub: https://github.com/psycopg/psycopg2
Psycopg is a C wrapper around the libpq_ PostgreSQL client library. To install
it from sources you will need:
Psycopg is a C wrapper to the libpq PostgreSQL client library. To install it
from sources you will need:
- A C compiler.
- The Python header files. They are usually installed in a package such as
**python-dev** or **python3-dev**. A message such as *error: Python.h: No
such file or directory* is an indication that the Python headers are
missing.
**python-dev**. A message such as *error: Python.h: No such file or
directory* is an indication that the Python headers are missing.
- The libpq header files. They are usually installed in a package such as
**libpq-dev**. If you get an *error: libpq-fe.h: No such file or directory*
@ -174,28 +97,13 @@ it from sources you will need:
running ``pg_config --version``: if it returns an error or an unexpected
version number then locate the directory containing the :program:`pg_config`
shipped with the right libpq version (usually
``/usr/lib/postgresql/X.Y/bin/``) and add it to the :envvar:`PATH`:
.. code-block:: console
``/usr/lib/postgresql/X.Y/bin/``) and add it to the :envvar:`PATH`::
$ export PATH=/usr/lib/postgresql/X.Y/bin/:$PATH
You only need :program:`pg_config` to compile `!psycopg2`, not for its
regular usage.
Once everything is in place it's just a matter of running the standard:
.. code-block:: console
$ pip install psycopg2
or, from the directory containing the source code:
.. code-block:: console
$ python setup.py build
$ python setup.py install
Runtime requirements
^^^^^^^^^^^^^^^^^^^^
@ -205,7 +113,7 @@ self-contained wheel package, it will need the libpq_ library at runtime
(usually distributed in a ``libpq.so`` or ``libpq.dll`` file). `!psycopg2`
relies on the host OS to find the library: if the library is installed in a
standard location there is usually no problem; if the library is in a
non-standard location you will have to tell Psycopg how to find it,
non-standard location you will have to tell somehow Psycopg how to find it,
which is OS-dependent (for instance setting a suitable
:envvar:`LD_LIBRARY_PATH` on Linux).
@ -226,25 +134,25 @@ which is OS-dependent (for instance setting a suitable
to connect to.
.. index::
single: setup.py
single: setup.cfg
Non-standard builds
-------------------
^^^^^^^^^^^^^^^^^^^
If you have less standard requirements such as:
- creating a :ref:`debug build <debug-build>`,
- using :program:`pg_config` not in the :envvar:`PATH`,
- supporting ``mx.DateTime``,
then take a look at the ``setup.cfg`` file.
Some of the options available in ``setup.cfg`` are also available as command
line arguments of the ``build_ext`` sub-command. For instance you can specify
an alternate :program:`pg_config` location using:
.. code-block:: console
an alternate :program:`pg_config` location using::
$ python setup.py build_ext --pg-config /path/to/pg_config build
@ -265,17 +173,14 @@ In case of problems, Psycopg can be configured to emit detailed debug
messages, which can be very useful for diagnostics and to report a bug. In
order to create a debug package:
- `Download`__ and unpack the Psycopg *source package* (the ``.tar.gz``
package).
- `Download`__ and unpack the Psycopg source package.
- Edit the ``setup.cfg`` file adding the ``PSYCOPG_DEBUG`` flag to the
``define`` option.
- :ref:`Compile and install <build-prerequisites>` the package.
- :ref:`Compile and install <source-package>` the package.
- Set the :envvar:`PSYCOPG_DEBUG` environment variable:
.. code-block:: console
- Set the :envvar:`PSYCOPG_DEBUG` environment variable::
$ export PSYCOPG_DEBUG=1
@ -283,32 +188,9 @@ order to create a debug package:
one you just compiled and not e.g. the system one): you will have a copious
stream of information printed on stderr.
.. __: https://pypi.org/project/psycopg2/#files
.. __: http://initd.org/psycopg/download/
Non-standard Python Implementation
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The `psycopg2` package is the current mature implementation of the adapter: it
is a C extension and as such it is only compatible with CPython_. If you want
to use Psycopg on a different Python implementation (PyPy, Jython, IronPython)
there are a couple of alternatives:
- a `Ctypes port`__, but it is not as mature as the C implementation yet
and it is not as feature-complete;
- a `CFFI port`__ which is currently more widely used and reported to be more
efficient on PyPy, but please be careful with its version numbers because they
are not aligned to the official psycopg2 ones and some features may differ.
.. _PostgreSQL: https://www.postgresql.org/
.. _Python: https://www.python.org/
.. _libpq: https://www.postgresql.org/docs/current/static/libpq.html
.. _CPython: https://en.wikipedia.org/wiki/CPython
.. _Ctypes: https://docs.python.org/library/ctypes.html
.. __: https://github.com/mvantellingen/psycopg2-ctypes
.. __: https://github.com/chtd/psycopg2cffi
.. index::
single: tests
@ -319,11 +201,9 @@ Running the test suite
----------------------
Once `!psycopg2` is installed you can run the test suite to verify it is
working correctly. From the source directory, you can run:
working correctly. You can run::
.. code-block:: console
$ python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')" --verbose
python -c "from psycopg2 import tests; tests.unittest.main(defaultTest='tests.test_suite')" --verbose
The tests run against a database called ``psycopg2_test`` on a UNIX socket and
the standard port. You can configure a different database to run the test by
@ -337,6 +217,7 @@ setting the environment variables:
The database should already exist before running the tests.
.. _other-problems:
If you still have problems
@ -351,14 +232,10 @@ Try the following. *In order:*
- Google for `!psycopg2` *your error message*. Especially useful the week
after the release of a new OS X version.
- Write to the `Mailing List`_.
- If you think that you have discovered a bug, test failure or missing feature
please raise a ticket in the `bug tracker`_.
- Write to the `Mailing List`__.
- Complain on your blog or on Twitter that `!psycopg2` is the worst package
ever and about the quality time you have wasted figuring out the correct
:envvar:`ARCHFLAGS`. Especially useful from the Starbucks near you.
.. _mailing list: https://www.postgresql.org/list/psycopg/
.. _bug tracker: https://github.com/psycopg/psycopg2/issues
.. __: http://mail.postgresql.org/mj/mj_wwwusr/domain=postgresql.org?func=lists-long-full&extra=psycopg

View File

@ -1,7 +0,0 @@
.. index::
single: License
License
=======
.. include:: ../../LICENSE

View File

@ -28,7 +28,7 @@ The module interface respects the standard defined in the |DBAPI|_.
or using a set of keyword arguments::
conn = psycopg2.connect(dbname="test", user="postgres", password="secret")
conn = psycopg2.connect(dbname"test", user="postgres", password="secret")
or using a mix of both: if the same parameter name is specified in both
sources, the *kwargs* value will have precedence over the *dsn* value.
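As a minimal sketch (the connection parameters here are only placeholders),
mixing the two styles could look like::

    # the dsn provides the database name, the keyword arguments the credentials;
    # if the same parameter appeared in both, the keyword argument would win
    conn = psycopg2.connect("dbname=test", user="postgres", password="secret")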
@ -50,11 +50,11 @@ The module interface respects the standard defined in the |DBAPI|_.
using `environment variables`__.
.. __:
.. _connstring: https://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-CONNSTRING
.. _connstring: http://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-CONNSTRING
.. __:
.. _connparams: https://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-PARAMKEYWORDS
.. _connparams: http://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-PARAMKEYWORDS
.. __:
.. _connenvvars: https://www.postgresql.org/docs/current/static/libpq-envars.html
.. _connenvvars: http://www.postgresql.org/docs/current/static/libpq-envars.html
Using the *connection_factory* parameter a different class or
connections factory can be specified. It should be a callable object
@ -117,10 +117,9 @@ The module interface respects the standard defined in the |DBAPI|_.
Integer constant reporting the version of the ``libpq`` library this
``psycopg2`` module was compiled with (in the same format of
`~psycopg2.extensions.ConnectionInfo.server_version`). If this value is
greater or equal than ``90100`` then you may query the version of the
actually loaded library using the `~psycopg2.extensions.libpq_version()`
function.
`~connection.server_version`). If this value is greater or equal than
``90100`` then you may query the version of the actually loaded library
using the `~psycopg2.extensions.libpq_version()` function.
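For instance, a quick check of both values (a minimal sketch) could be::

    import psycopg2
    import psycopg2.extensions

    print(psycopg2.__libpq_version__)             # libpq used at compile time
    if psycopg2.__libpq_version__ >= 90100:
        # libpq actually loaded at runtime
        print(psycopg2.extensions.libpq_version())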
.. index::
@ -137,15 +136,14 @@ available through the following exceptions:
.. exception:: Warning
Exception raised for important warnings like data truncations while
inserting, etc. It is a subclass of the Python `StandardError`
(`Exception` on Python 3).
inserting, etc. It is a subclass of the Python `~exceptions.StandardError`.
.. exception:: Error
Exception that is the base class of all other error exceptions. You can
use this to catch all errors with one single `!except` statement. Warnings
are not considered errors and thus not use this class as base. It
is a subclass of the Python `StandardError` (`Exception` on Python 3).
is a subclass of the Python `!StandardError`.
.. attribute:: pgerror
@ -168,7 +166,7 @@ available through the following exceptions:
>>> e.pgcode
'42P01'
>>> print(e.pgerror)
>>> print e.pgerror
ERROR: relation "barf" does not exist
LINE 1: SELECT * FROM barf
^
@ -184,7 +182,7 @@ available through the following exceptions:
>>> try:
... cur.execute("SELECT * FROM barf")
... except psycopg2.Error as e:
... except psycopg2.Error, e:
... pass
>>> e.diag.severity
@ -252,14 +250,13 @@ available through the following exceptions:
.. extension::
Psycopg actually raises a different exception for each :sql:`SQLSTATE`
error returned by the database: the classes are available in the
`psycopg2.errors` module. Every exception class is a subclass of one of
the exception classes defined here though, so they don't need to be
trapped specifically: trapping `!Error` or `!DatabaseError` is usually
what needed to write a generic error handler; trapping a specific error
such as `!NotNullViolation` can be useful to write specific exception
handlers.
Psycopg may raise a few other, more specialized, exceptions: currently
`~psycopg2.extensions.QueryCanceledError` and
`~psycopg2.extensions.TransactionRollbackError` are defined. These
exceptions are not exposed by the main `!psycopg2` module but are
made available by the `~psycopg2.extensions` module. All the
additional exceptions are subclasses of standard |DBAPI| exceptions, so
trapping them specifically is not required.
This is the exception inheritance layout:
@ -273,6 +270,8 @@ This is the exception inheritance layout:
\|__ `DatabaseError`
\|__ `DataError`
\|__ `OperationalError`
\| \|__ `psycopg2.extensions.QueryCanceledError`
\| \|__ `psycopg2.extensions.TransactionRollbackError`
\|__ `IntegrityError`
\|__ `InternalError`
\|__ `ProgrammingError`
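For example, assuming ``conn`` and ``cur`` are an open connection and cursor and
``people`` is a hypothetical table with a :sql:`NOT NULL` column, a sketch of a
specific and a generic error handler could be::

    import psycopg2
    from psycopg2 import errors

    try:
        cur.execute("INSERT INTO people (name) VALUES (NULL)")
    except errors.NotNullViolation:
        conn.rollback()          # handle this specific constraint violation
    except psycopg2.DatabaseError:
        conn.rollback()          # generic handler for any other database error
        raise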

View File

@ -1,9 +1,3 @@
.. index::
single: Release notes
single: News
.. _news:
Release notes
=============

View File

@ -24,18 +24,13 @@ directly in the client application.
.. method:: getconn(key=None)
Get a free connection from the pool.
The *key* parameter is optional: if used, the connection will be
associated to the key and calling `!getconn()` with the same key again
will return the same connection.
Get a free connection and assign it to *key* if not `!None`.
.. method:: putconn(conn, key=None, close=False)
Put away a connection.
If *close* is `!True`, discard the connection from the pool.
*key* should be used consistently with `getconn()`.
.. method:: closeall
@ -58,3 +53,12 @@ be used.
.. autoclass:: ThreadedConnectionPool
.. note:: This pool class can be safely used in multi-threaded applications.
.. autoclass:: PersistentConnectionPool
.. note::
This pool class is mostly designed to interact with Zope and probably
not useful in generic applications.
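As a usage sketch (the DSN here is only a placeholder), borrowing and returning
a connection could look like::

    import psycopg2.pool

    pool = psycopg2.pool.ThreadedConnectionPool(1, 5, "dbname=test")

    conn = pool.getconn()
    try:
        cur = conn.cursor()
        cur.execute("SELECT 1")
        conn.commit()
    finally:
        pool.putconn(conn)       # return the connection to the pool

    pool.closeall()              # when the application shuts down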

View File

@ -33,7 +33,7 @@ name should be escaped using `~psycopg2.extensions.quote_ident()`::
# This works, but it is not optimal
table_name = 'my_table'
cur.execute(
"insert into %s values (%%s, %%s)" % ext.quote_ident(table_name, cur),
"insert into %s values (%%s, %%s)" % ext.quote_ident(table_name),
[10, 20])
This is now safe, but it is somewhat ad hoc. In case, for some reason, it is
@ -56,54 +56,6 @@ from the query parameters::
[10, 20])
Module usage
------------
Usually you should express the template of your query as an `SQL` instance
with `{}`\-style placeholders and use `~SQL.format()` to merge the variable
parts into them, all of which must be `Composable` subclasses. You can still
have `%s`\ -style placeholders in your query and pass values to
`~cursor.execute()`: such value placeholders will be untouched by
`!format()`::
query = sql.SQL("select {field} from {table} where {pkey} = %s").format(
field=sql.Identifier('my_name'),
table=sql.Identifier('some_table'),
pkey=sql.Identifier('id'))
The resulting object is meant to be passed directly to cursor methods such as
`~cursor.execute()`, `~cursor.executemany()`, `~cursor.copy_expert()`, but can
also be used to compose a query as a Python string, using the
`~Composable.as_string()` method::
cur.execute(query, (42,))
If part of your query is a variable sequence of arguments, such as a
comma-separated list of field names, you can use the `SQL.join()` method to
pass them to the query::
query = sql.SQL("select {fields} from {table}").format(
fields=sql.SQL(',').join([
sql.Identifier('field1'),
sql.Identifier('field2'),
sql.Identifier('field3'),
]),
table=sql.Identifier('some_table'))
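If the structure of the statement varies together with the values, `Placeholder`
objects can stand in for the parameters. A minimal sketch, assuming ``cur`` is an
open cursor and ``some_table`` a hypothetical table, could be::

    from psycopg2 import sql

    fields = ['id', 'name']
    query = sql.SQL("insert into {table} ({cols}) values ({vals})").format(
        table=sql.Identifier('some_table'),
        cols=sql.SQL(', ').join(map(sql.Identifier, fields)),
        vals=sql.SQL(', ').join(sql.Placeholder() for _ in fields))

    cur.execute(query, (10, 'foo'))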
`!sql` objects
--------------
The `!sql` objects are in the following inheritance hierarchy:
| `Composable`: the base class exposing the common interface
| ``|__`` `SQL`: a literal snippet of an SQL query
| ``|__`` `Identifier`: a PostgreSQL identifier or dot-separated sequence of identifiers
| ``|__`` `Literal`: a value hardcoded into a query
| ``|__`` `Placeholder`: a `%s`\ -style placeholder whose value will be added later e.g. by `~cursor.execute()`
| ``|__`` `Composed`: a sequence of `!Composable` instances.
.. autoclass:: Composable
.. automethod:: as_string
@ -120,26 +72,16 @@ The `!sql` objects are in the following inheritance hierarchy:
.. autoclass:: Identifier
.. versionchanged:: 2.8
added support for multiple strings.
.. autoattribute:: strings
.. versionadded:: 2.8
previous versions only had a `!string` attribute. The attribute
still exists but is deprecated and will only work if the
`!Identifier` wraps a single string.
.. autoattribute:: string
.. autoclass:: Literal
.. autoattribute:: wrapped
.. autoclass:: Placeholder
.. autoattribute:: name
.. autoclass:: Composed
.. autoattribute:: seq

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
"""
extension
~~~~~~~~~
@ -11,7 +12,7 @@
from docutils import nodes
from sphinx.locale import _
from docutils.parsers.rst import Directive
from sphinx.util.compat import Directive, make_admonition
class extension_node(nodes.Admonition, nodes.Element): pass
@ -28,11 +29,12 @@ class Extension(Directive):
option_spec = {}
def run(self):
node = extension_node('\n'.join(self.content))
node += nodes.title(_('DB API extension'), _('DB API extension'))
self.state.nested_parse(self.content, self.content_offset, node)
node['classes'].append('dbapi-extension')
return [node]
nodes = make_admonition(extension_node,
self.name, [_('DB API extension')], self.options,
self.content, self.lineno, self.content_offset,
self.block_text, self.state, self.state_machine)
nodes[0]['classes'].append('dbapi-extension')
return nodes
def visit_extension_node(self, node):
@ -48,3 +50,4 @@ def setup(app):
text=(visit_extension_node, depart_extension_node))
app.add_directive('extension', Extension)

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
"""
sql role
~~~~~~~~
@ -11,9 +12,10 @@ from docutils import nodes, utils
from docutils.parsers.rst import roles
def sql_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
text = utils.unescape(text)
text = utils.unescape(text)
options['classes'] = ['sql']
return [nodes.literal(rawtext, text, **options)], []
def setup(app):
roles.register_local_role('sql', sql_role)

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
"""
ticket role
~~~~~~~~~~~
@ -55,3 +56,4 @@ def setup(app):
app.add_config_value('ticket_remap_offset', None, 'env')
app.add_role('ticket', ticket_role)
app.add_role('tickets', ticket_role)

View File

@ -1,57 +0,0 @@
#!/usr/bin/env python
"""Create the docs table of the sqlstate errors.
"""
import re
import sys
from collections import namedtuple
from psycopg2._psycopg import sqlstate_errors
def main():
sqlclasses = {}
clsfile = sys.argv[1]
with open(clsfile) as f:
for l in f:
m = re.match(r'/\* Class (..) - (.+) \*/', l)
if m is not None:
sqlclasses[m.group(1)] = m.group(2)
Line = namedtuple('Line', 'colstate colexc colbase sqlstate')
lines = [Line('SQLSTATE', 'Exception', 'Base exception', None)]
for k in sorted(sqlstate_errors):
exc = sqlstate_errors[k]
lines.append(Line(
f"``{k}``", f"`!{exc.__name__}`",
f"`!{get_base_exception(exc).__name__}`", k))
widths = [max(len(l[c]) for l in lines) for c in range(3)]
h = Line(*(['=' * w for w in widths] + [None]))
lines.insert(0, h)
lines.insert(2, h)
lines.append(h)
h1 = '-' * (sum(widths) + len(widths) - 1)
sqlclass = None
for l in lines:
cls = l.sqlstate[:2] if l.sqlstate else None
if cls and cls != sqlclass:
print(f"**Class {cls}**: {sqlclasses[cls]}")
print(h1)
sqlclass = cls
print("%-*s %-*s %-*s" % (
widths[0], l.colstate, widths[1], l.colexc, widths[2], l.colbase))
def get_base_exception(exc):
for cls in exc.__mro__:
if cls.__module__ == 'psycopg2':
return cls
if __name__ == '__main__':
sys.exit(main())

166
doc/src/tools/pypi_docs_upload.py Executable file
View File

@ -0,0 +1,166 @@
# -*- coding: utf-8 -*-
"""
Standalone script to upload a project docs on PyPI
Hacked together from the following distutils extension, available from
https://bitbucket.org/jezdez/sphinx-pypi-upload/overview (ver. 0.2.1)
sphinx_pypi_upload
~~~~~~~~~~~~~~~~~~
setuptools command for uploading Sphinx documentation to PyPI
:author: Jannis Leidel
:contact: jannis@leidel.info
:copyright: Copyright 2009, Jannis Leidel.
:license: BSD, see LICENSE for details.
"""
import os
import sys
import socket
import zipfile
import httplib
import base64
import urlparse
import tempfile
import cStringIO as StringIO
from ConfigParser import ConfigParser
from distutils import log
from distutils.command.upload import upload
from distutils.errors import DistutilsOptionError
class UploadDoc(object):
"""Distutils command to upload Sphinx documentation."""
def __init__(self, name, upload_dir, repository=None):
self.name = name
self.upload_dir = upload_dir
p = ConfigParser()
p.read(os.path.expanduser('~/.pypirc'))
self.username = p.get('pypi', 'username')
self.password = p.get('pypi', 'password')
self.show_response = False
self.repository = repository or upload.DEFAULT_REPOSITORY
def create_zipfile(self):
# name = self.distribution.metadata.get_name()
name = self.name
tmp_dir = tempfile.mkdtemp()
tmp_file = os.path.join(tmp_dir, "%s.zip" % name)
zip_file = zipfile.ZipFile(tmp_file, "w")
for root, dirs, files in os.walk(self.upload_dir):
if not files:
raise DistutilsOptionError, \
"no files found in upload directory '%s'" % self.upload_dir
for name in files:
full = os.path.join(root, name)
relative = root[len(self.upload_dir):].lstrip(os.path.sep)
dest = os.path.join(relative, name)
zip_file.write(full, dest)
zip_file.close()
return tmp_file
def upload_file(self, filename):
content = open(filename,'rb').read()
# meta = self.distribution.metadata
data = {
':action': 'doc_upload',
'name': self.name, # meta.get_name(),
'content': (os.path.basename(filename),content),
}
# set up the authentication
auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()
# Build up the MIME payload for the POST data
boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = '\n--' + boundary
end_boundary = sep_boundary + '--'
body = StringIO.StringIO()
for key, value in data.items():
# handle multiple entries for the same name
if type(value) != type([]):
value = [value]
for value in value:
if type(value) is tuple:
fn = ';filename="%s"' % value[0]
value = value[1]
else:
fn = ""
value = str(value)
body.write(sep_boundary)
body.write('\nContent-Disposition: form-data; name="%s"'%key)
body.write(fn)
body.write("\n\n")
body.write(value)
if value and value[-1] == '\r':
body.write('\n') # write an extra newline (lurve Macs)
body.write(end_boundary)
body.write("\n")
body = body.getvalue()
self.announce("Submitting documentation to %s" % (self.repository), log.INFO)
# build the Request
# We can't use urllib2 since we need to send the Basic
# auth right with the first request
schema, netloc, url, params, query, fragments = \
urlparse.urlparse(self.repository)
assert not params and not query and not fragments
if schema == 'http':
http = httplib.HTTPConnection(netloc)
elif schema == 'https':
http = httplib.HTTPSConnection(netloc)
else:
raise AssertionError, "unsupported schema "+schema
data = ''
loglevel = log.INFO
try:
http.connect()
http.putrequest("POST", url)
http.putheader('Content-type',
'multipart/form-data; boundary=%s'%boundary)
http.putheader('Content-length', str(len(body)))
http.putheader('Authorization', auth)
http.endheaders()
http.send(body)
except socket.error, e:
self.announce(str(e), log.ERROR)
return
response = http.getresponse()
if response.status == 200:
self.announce('Server response (%s): %s' % (response.status, response.reason),
log.INFO)
elif response.status == 301:
location = response.getheader('Location')
if location is None:
location = 'http://packages.python.org/%s/' % self.name # meta.get_name()
self.announce('Upload successful. Visit %s' % location,
log.INFO)
else:
self.announce('Upload failed (%s): %s' % (response.status, response.reason),
log.ERROR)
if self.show_response:
print '-'*75, response.read(), '-'*75
def run(self):
zip_file = self.create_zipfile()
self.upload_file(zip_file)
os.remove(zip_file)
def announce(self, msg, *args, **kwargs):
print msg
if __name__ == '__main__':
if len(sys.argv) != 3:
print >>sys.stderr, "usage: %s PROJECT UPLOAD_DIR" % sys.argv[0]
sys.exit(2)
project, upload_dir = sys.argv[1:]
up = UploadDoc(project, upload_dir=upload_dir)
up.run()

63
doc/src/tools/stitch_text.py Executable file
View File

@ -0,0 +1,63 @@
#! /usr/bin/env python
"""A script to stitch together the generated text files in the correct order.
"""
import os
import sys
def main():
if len(sys.argv) != 3:
sys.stderr.write("usage: %s index.rst text-dir\n")
return 2
_, index, txt_dir = sys.argv
for fb in iter_file_base(index):
emit(fb, txt_dir)
return 0
def iter_file_base(fn):
f = open(fn)
if sys.version_info[0] >= 3:
have_line = iter(f).__next__
else:
have_line = iter(f).next
while not have_line().startswith('.. toctree'):
pass
while have_line().strip().startswith(':'):
pass
yield os.path.splitext(os.path.basename(fn))[0]
n = 0
while True:
line = have_line()
if line.isspace():
continue
if line.startswith(".."):
break
n += 1
yield line.strip()
f.close()
if n < 5:
# maybe format changed?
raise Exception("Not enough files found. Format change in index.rst?")
def emit(basename, txt_dir):
f = open(os.path.join(txt_dir, basename + ".txt"))
for line in f:
line = line.replace("``", "'")
sys.stdout.write(line)
f.close()
# some space between sections
sys.stdout.write("\n\n")
if __name__ == '__main__':
sys.exit(main())

View File

@ -5,15 +5,12 @@
.. module:: psycopg2.tz
.. deprecated:: 2.9
The module will be dropped in psycopg 2.10. Use `datetime.timezone`
instead.
This module holds two different tzinfo implementations that can be used as the
`tzinfo` argument to `~datetime.datetime` constructors, directly passed to
Psycopg functions or used to set the `cursor.tzinfo_factory` attribute in
cursors.
cursors.
.. autoclass:: psycopg2.tz.FixedOffsetTimezone
.. autoclass:: psycopg2.tz.LocalTimezone

View File

@ -48,7 +48,7 @@ The main entry points of Psycopg are:
- The class `connection` encapsulates a database session. It allows you to:
- create new `cursor` instances using the `~connection.cursor()` method to
- create new `cursor`\s using the `~connection.cursor()` method to
execute database commands and queries,
- terminate transactions using the methods `~connection.commit()` or
@ -73,97 +73,70 @@ The main entry points of Psycopg are:
Passing parameters to SQL queries
---------------------------------
Psycopg converts Python variables to SQL values using their types: the Python
type determines the function used to convert the object into a string
representation suitable for PostgreSQL. Many standard Python types are
already `adapted to the correct SQL representation`__.
Psycopg casts Python variables to SQL literals by type. Many standard Python types
are already `adapted to the correct SQL representation`__.
.. __: python-types-adaptation_
Passing parameters to an SQL statement happens in functions such as
`cursor.execute()` by using ``%s`` placeholders in the SQL statement, and
passing a sequence of values as the second argument of the function. For
example the Python function call::
Example: the Python function call::
>>> cur.execute("""
... INSERT INTO some_table (an_int, a_date, a_string)
... VALUES (%s, %s, %s);
... """,
>>> cur.execute(
... """INSERT INTO some_table (an_int, a_date, a_string)
... VALUES (%s, %s, %s);""",
... (10, datetime.date(2005, 11, 18), "O'Reilly"))
is converted into a SQL command similar to:
.. code-block:: sql
is converted into the SQL command::
INSERT INTO some_table (an_int, a_date, a_string)
VALUES (10, '2005-11-18', 'O''Reilly');
VALUES (10, '2005-11-18', 'O''Reilly');
Named arguments are supported too using :samp:`%({name})s` placeholders in the
query and specifying the values into a mapping. Using named arguments allows
to specify the values in any order and to repeat the same value in several
places in the query::
Named arguments are supported too using :samp:`%({name})s` placeholders.
Using named arguments the values can be passed to the query in any order and
many placeholders can use the same values::
>>> cur.execute("""
... INSERT INTO some_table (an_int, a_date, another_date, a_string)
... VALUES (%(int)s, %(date)s, %(date)s, %(str)s);
... """,
>>> cur.execute(
... """INSERT INTO some_table (an_int, a_date, another_date, a_string)
... VALUES (%(int)s, %(date)s, %(date)s, %(str)s);""",
... {'int': 10, 'str': "O'Reilly", 'date': datetime.date(2005, 11, 18)})
Using characters ``%``, ``(``, ``)`` in the argument names is not supported.
When parameters are used, in order to include a literal ``%`` in the query you
can use the ``%%`` string::
>>> cur.execute("SELECT (%s % 2) = 0 AS even", (10,)) # WRONG
>>> cur.execute("SELECT (%s %% 2) = 0 AS even", (10,)) # correct
can use the ``%%`` string. Using characters ``%``, ``(``, ``)`` in the
argument names is not supported.
While the mechanism resembles regular Python strings manipulation, there are a
few subtle differences you should care about when passing parameters to a
query.
query:
- The Python string operator ``%`` *must not be used*: the `~cursor.execute()`
- The Python string operator ``%`` is not used: the `~cursor.execute()`
method accepts a tuple or dictionary of values as second parameter.
|sql-warn|__:
|sql-warn|__.
.. |sql-warn| replace:: **Never** use ``%`` or ``+`` to merge values
into queries
.. __: sql-injection_
>>> cur.execute("INSERT INTO numbers VALUES (%s, %s)" % (10, 20)) # WRONG
>>> cur.execute("INSERT INTO numbers VALUES (%s, %s)", (10, 20)) # correct
- The variables placeholder must *always be a* ``%s``, even if a different
placeholder (such as a ``%d`` for integers or ``%f`` for floats) may look
more appropriate::
>>> cur.execute("INSERT INTO numbers VALUES (%d)", (42,)) # WRONG
>>> cur.execute("INSERT INTO numbers VALUES (%s)", (42,)) # correct
- For positional variables binding, *the second argument must always be a
sequence*, even if it contains a single variable (remember that Python
requires a comma to create a single element tuple)::
sequence*, even if it contains a single variable. And remember that Python
requires a comma to create a single element tuple::
>>> cur.execute("INSERT INTO foo VALUES (%s)", "bar") # WRONG
>>> cur.execute("INSERT INTO foo VALUES (%s)", ("bar")) # WRONG
>>> cur.execute("INSERT INTO foo VALUES (%s)", ("bar",)) # correct
>>> cur.execute("INSERT INTO foo VALUES (%s)", ["bar"]) # correct
- The placeholder *must not be quoted*. Psycopg will add quotes where needed::
>>> cur.execute("INSERT INTO numbers VALUES ('%s')", (10,)) # WRONG
>>> cur.execute("INSERT INTO numbers VALUES (%s)", (10,)) # correct
- The variables placeholder *must always be a* ``%s``, even if a different
placeholder (such as a ``%d`` for integers or ``%f`` for floats) may look
more appropriate::
>>> cur.execute("INSERT INTO numbers VALUES (%d)", (10,)) # WRONG
>>> cur.execute("INSERT INTO numbers VALUES (%s)", (10,)) # correct
- Only query values should be bound via this method: it shouldn't be used to
merge table or field names to the query (Psycopg will try quoting the table
name as a string value, generating invalid SQL). If you need to generate
dynamically SQL queries (for instance choosing dynamically a table name)
you can use the facilities provided by the `psycopg2.sql` module::
merge table or field names to the query. If you need to generate dynamically
an SQL query (for instance choosing dynamically a table name) you can use
the facilities provided by the `psycopg2.sql` module.
>>> cur.execute("INSERT INTO %s VALUES (%s)", ('numbers', 10)) # WRONG
>>> cur.execute( # correct
... SQL("INSERT INTO {} VALUES (%s)").format(Identifier('numbers')),
... (10,))
.. index:: Security, SQL injection
@ -198,8 +171,8 @@ called `SQL injection`_ and is known to be one of the most widespread forms of
attack to database servers. Before continuing, please print `this page`__ as a
memo and hang it onto your desk.
.. _SQL injection: https://en.wikipedia.org/wiki/SQL_injection
.. __: https://xkcd.com/327/
.. _SQL injection: http://en.wikipedia.org/wiki/SQL_injection
.. __: http://xkcd.com/327/
Psycopg can `automatically convert Python objects to and from SQL
literals`__: using this feature your code will be more robust and
@ -221,28 +194,7 @@ argument of the `~cursor.execute()` method::
>>> cur.execute(SQL, data) # Note: no % operator
Values containing backslashes and LIKE
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Unlike in Python, the backslash (`\\`) is not used as an escape
character *except* in patterns used with `LIKE` and `ILIKE`, where it
is needed to escape the `%` and `_` characters.
This can lead to confusing situations::
>>> path = r'C:\Users\Bobby.Tables'
>>> cur.execute('INSERT INTO mytable(path) VALUES (%s)', (path,))
>>> cur.execute('SELECT * FROM mytable WHERE path LIKE %s', (path,))
>>> cur.fetchall()
[]
The solution is to specify an `ESCAPE` character of `''` (empty string)
in your `LIKE` query::
>>> cur.execute("SELECT * FROM mytable WHERE path LIKE %s ESCAPE ''", (path,))
.. index::
single: Adaptation
pair: Objects; Adaptation
@ -372,7 +324,7 @@ converted into `!Decimal`.
This of course may imply a loss of precision.
.. seealso:: `PostgreSQL numeric types
<https://www.postgresql.org/docs/current/static/datatype-numeric.html>`__
<http://www.postgresql.org/docs/current/static/datatype-numeric.html>`__
.. index::
@ -407,30 +359,30 @@ defined on the database connection (the `PostgreSQL encoding`__, available in
`connection.encoding`, is translated into a `Python encoding`__ using the
`~psycopg2.extensions.encodings` mapping)::
>>> print(u, type(u))
>>> print u, type(u)
àèìòù€ <type 'unicode'>
>>> cur.execute("INSERT INTO test (num, data) VALUES (%s,%s);", (74, u))
.. __: https://www.postgresql.org/docs/current/static/multibyte.html
.. __: https://docs.python.org/library/codecs.html
.. __: http://www.postgresql.org/docs/current/static/multibyte.html
.. __: http://docs.python.org/library/codecs.html#standard-encodings
When reading data from the database, in Python 2 the strings returned are
usually 8 bit `!str` objects encoded in the database client encoding::
>>> print(conn.encoding)
>>> print conn.encoding
UTF8
>>> cur.execute("SELECT data FROM test WHERE num = 74")
>>> x = cur.fetchone()[0]
>>> print(x, type(x), repr(x))
>>> print x, type(x), repr(x)
àèìòù€ <type 'str'> '\xc3\xa0\xc3\xa8\xc3\xac\xc3\xb2\xc3\xb9\xe2\x82\xac'
>>> conn.set_client_encoding('LATIN9')
>>> cur.execute("SELECT data FROM test WHERE num = 74")
>>> x = cur.fetchone()[0]
>>> print(type(x), repr(x))
>>> print type(x), repr(x)
<type 'str'> '\xe0\xe8\xec\xf2\xf9\xa4'
In Python 3 instead the strings are automatically *decoded* in the connection
@ -442,7 +394,7 @@ In Python 2 you must register a :ref:`typecaster
>>> cur.execute("SELECT data FROM test WHERE num = 74")
>>> x = cur.fetchone()[0]
>>> print(x, type(x), repr(x))
>>> print x, type(x), repr(x)
àèìòù€ <type 'unicode'> u'\xe0\xe8\xec\xf2\xf9\u20ac'
In the above example, the `~psycopg2.extensions.UNICODE` typecaster is
@ -457,29 +409,13 @@ the connection or globally: see the function
Unicode, you can register the related typecasters globally as soon as
Psycopg is imported::
import psycopg2
import psycopg2.extensions
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
and forget about this story.
.. note::
In some cases, on Python 3, you may want to receive `!bytes` instead of
`!str`, without undergoing any decoding. This is especially the case if
the data in the database is in mixed encoding. The
`~psycopg2.extensions.BYTES` caster is what you need::
import psycopg2.extensions
psycopg2.extensions.register_type(psycopg2.extensions.BYTES, conn)
psycopg2.extensions.register_type(psycopg2.extensions.BYTESARRAY, conn)
cur = conn.cursor()
cur.execute("select %s::text", (u"€",))
cur.fetchone()[0]
b'\xe2\x82\xac'
.. versionadded: 2.8
.. index::
single: Buffer; Adaptation
@ -494,15 +430,17 @@ the connection or globally: see the function
Binary adaptation
^^^^^^^^^^^^^^^^^
Python types representing binary objects are converted into PostgreSQL binary
string syntax, suitable for :sql:`bytea` fields. Such types are `buffer`
(only available in Python 2), `memoryview`, `bytearray`, and `bytes` (only in
Python 3: the name is available in Python 2 but it's only an alias for the
type `!str`). Any object implementing the `Revised Buffer Protocol`__ should
be usable as binary type. Received data is returned as `!buffer` (in Python 2)
Python types representing binary objects are converted into
PostgreSQL binary string syntax, suitable for :sql:`bytea` fields. Such
types are `buffer` (only available in Python 2), `memoryview` (available
from Python 2.7), `bytearray` (available from Python 2.6) and `bytes`
(only from Python 3: the name is available from Python 2.6 but it's only an
alias for the type `!str`). Any object implementing the `Revised Buffer
Protocol`__ should be usable as binary type where the protocol is supported
(i.e. from Python 2.6). Received data is returned as `!buffer` (in Python 2)
or `!memoryview` (in Python 3).
.. __: https://www.python.org/dev/peps/pep-3118/
.. __: http://www.python.org/dev/peps/pep-3118/
.. versionchanged:: 2.4
only strings were supported before.
@ -531,8 +469,8 @@ or `!memoryview` (in Python 3).
server configuration file or in the client session (using a query such as
``SET bytea_output TO escape;``) before receiving binary data.
.. __: https://www.postgresql.org/docs/current/static/datatype-binary.html
.. __: https://www.postgresql.org/docs/current/static/runtime-config-client.html#GUC-BYTEA-OUTPUT
.. __: http://www.postgresql.org/docs/current/static/datatype-binary.html
.. __: http://www.postgresql.org/docs/current/static/runtime-config-client.html#GUC-BYTEA-OUTPUT
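A minimal round trip, assuming a hypothetical ``blobs`` table and a placeholder
DSN, could look like::

    import psycopg2

    conn = psycopg2.connect("dbname=test")
    cur = conn.cursor()
    cur.execute("CREATE TABLE blobs (data bytea)")

    payload = b"\x00\xff some binary data"
    cur.execute("INSERT INTO blobs (data) VALUES (%s)",
                (psycopg2.Binary(payload),))

    cur.execute("SELECT data FROM blobs")
    value = cur.fetchone()[0]    # a memoryview on Python 3, a buffer on Python 2
    print(bytes(value) == payload)
    conn.commit()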
.. index::
@ -540,6 +478,7 @@ or `!memoryview` (in Python 3).
single: Date objects; Adaptation
single: Time objects; Adaptation
single: Interval objects; Adaptation
single: mx.DateTime; Adaptation
.. _adapt-date:
@ -549,7 +488,8 @@ Date/Time objects adaptation
Python builtin `~datetime.datetime`, `~datetime.date`,
`~datetime.time`, `~datetime.timedelta` are converted into PostgreSQL's
:sql:`timestamp[tz]`, :sql:`date`, :sql:`time[tz]`, :sql:`interval` data types.
Time zones are supported too.
Time zones are supported too. The Egenix `mx.DateTime`_ objects are adapted
the same way::
>>> dt = datetime.datetime.now()
>>> dt
@ -562,7 +502,7 @@ Time zones are supported too.
"SELECT '38 days 6027.425337 seconds';"
.. seealso:: `PostgreSQL date/time types
<https://www.postgresql.org/docs/current/static/datatype-datetime.html>`__
<http://www.postgresql.org/docs/current/static/datatype-datetime.html>`__
.. index::
@ -574,39 +514,30 @@ Time zones handling
'''''''''''''''''''
The PostgreSQL type :sql:`timestamp with time zone` (a.k.a.
:sql:`timestamptz`) is converted into Python `~datetime.datetime` objects.
:sql:`timestamptz`) is converted into Python `~datetime.datetime` objects with
a `~datetime.datetime.tzinfo` attribute set to a
`~psycopg2.tz.FixedOffsetTimezone` instance.
>>> cur.execute("SET TIME ZONE 'Europe/Rome'") # UTC + 1 hour
>>> cur.execute("SELECT '2010-01-01 10:30:45'::timestamptz")
>>> cur.fetchone()[0]
datetime.datetime(2010, 1, 1, 10, 30, 45,
tzinfo=datetime.timezone(datetime.timedelta(seconds=3600)))
>>> cur.execute("SET TIME ZONE 'Europe/Rome';") # UTC + 1 hour
>>> cur.execute("SELECT '2010-01-01 10:30:45'::timestamptz;")
>>> cur.fetchone()[0].tzinfo
psycopg2.tz.FixedOffsetTimezone(offset=60, name=None)
.. note::
Note that only time zones with an integer number of minutes are supported:
this is a limitation of the Python `datetime` module. A few historical time
zones had seconds in the UTC offset: these time zones will have the offset
rounded to the nearest minute, with an error of up to 30 seconds.
Before Python 3.7, the `datetime` module only supported timezones with an
integer number of minutes. A few historical time zones had seconds in the
UTC offset: these time zones will have the offset rounded to the nearest
minute, with an error of up to 30 seconds, on Python versions before 3.7.
>>> cur.execute("SET TIME ZONE 'Asia/Calcutta'") # offset was +5:21:10
>>> cur.execute("SELECT '1900-01-01 10:30:45'::timestamptz")
>>> cur.fetchone()[0].tzinfo
# On Python 3.6: 5h, 21m
datetime.timezone(datetime.timedelta(0, 19260))
# On Python 3.7 and following: 5h, 21m, 10s
datetime.timezone(datetime.timedelta(seconds=19270))
>>> cur.execute("SET TIME ZONE 'Asia/Calcutta';") # offset was +5:53:20
>>> cur.execute("SELECT '1930-01-01 10:30:45'::timestamptz;")
>>> cur.fetchone()[0].tzinfo
psycopg2.tz.FixedOffsetTimezone(offset=353, name=None)
.. versionchanged:: 2.2.2
timezones with seconds are supported (with rounding). Previously such
timezones raised an error.
timezones raised an error. In order to deal with them in previous
versions use `psycopg2.extras.register_tstz_w_secs()`.
.. versionchanged:: 2.9
timezones with seconds are supported without rounding.
.. versionchanged:: 2.9
use `datetime.timezone` as default tzinfo object instead of
`~psycopg2.tz.FixedOffsetTimezone`.
.. index::
double: Date objects; Infinite
@ -642,29 +573,6 @@ Of course it will not be possible to write the value of `date.max` in the
database anymore: :sql:`infinity` will be stored instead.
.. _time-handling:
Time handling
'''''''''''''
The PostgreSQL :sql:`time` and Python `~datetime.time` types are not
fully bidirectional.
Within PostgreSQL, the :sql:`time` type's maximum value of ``24:00:00`` is
treated as 24 hours later than the minimum value of ``00:00:00``.
>>> cur.execute("SELECT '24:00:00'::time - '00:00:00'::time")
>>> cur.fetchone()[0]
datetime.timedelta(days=1)
However, Python's `!time` only supports times until ``23:59:59``.
Retrieving a value of ``24:00:00`` results in a `!time` of ``00:00:00``.
>>> cur.execute("SELECT '24:00:00'::time, '00:00:00'::time")
>>> cur.fetchone()
(datetime.time(0, 0), datetime.time(0, 0))
.. _adapt-list:
Lists adaptation
@ -690,7 +598,7 @@ Python lists are converted into PostgreSQL :sql:`ARRAY`\ s::
Furthermore :sql:`ANY` can also work with empty lists, whereas :sql:`IN ()`
is a SQL syntax error.
.. __: https://www.postgresql.org/docs/current/static/functions-subquery.html#FUNCTIONS-SUBQUERY-ANY-SOME
.. __: http://www.postgresql.org/docs/current/static/functions-subquery.html#FUNCTIONS-SUBQUERY-ANY-SOME
.. note::
@ -758,25 +666,18 @@ until a call to the `~connection.rollback()` method.
The connection is responsible for terminating its transaction, calling either
the `~connection.commit()` or `~connection.rollback()` method. Committed
changes are immediately made persistent in the database. If the connection
is closed (using the `~connection.close()` method) or destroyed (using `!del`
or by letting it fall out of scope) while a transaction is in progress, the
server will discard the transaction. However doing so is not advisable:
middleware such as PgBouncer_ may see the connection closed uncleanly and
dispose of it.
.. _PgBouncer: http://www.pgbouncer.org/
changes are immediately made persistent into the database. Closing the
connection using the `~connection.close()` method or destroying the
connection object (using `!del` or letting it fall out of scope)
will result in an implicit rollback.
It is possible to set the connection in *autocommit* mode: this way all the
commands executed will be immediately committed and no rollback is possible. A
few commands (e.g. :sql:`CREATE DATABASE`, :sql:`VACUUM`, :sql:`CALL` on
`stored procedures`__ using transaction control...) require to be run
few commands (e.g. :sql:`CREATE DATABASE`, :sql:`VACUUM`...) require to be run
outside any transaction: in order to be able to run these commands from
Psycopg, the connection must be in autocommit mode: you can use the
`~connection.autocommit` property.
.. __: https://www.postgresql.org/docs/current/xproc.html
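A minimal sketch (with a placeholder DSN) of switching to autocommit for such a
command could be::

    conn = psycopg2.connect("dbname=test")
    conn.autocommit = True      # every statement is committed as soon as it runs

    cur = conn.cursor()
    cur.execute("VACUUM")       # would fail inside a regular transaction block

    conn.autocommit = False     # back to the default transactional behaviour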
.. warning::
By default even a simple :sql:`SELECT` will start a transaction: in
@ -795,8 +696,6 @@ the details.
.. index::
single: with statement
.. _with:
``with`` statement
^^^^^^^^^^^^^^^^^^
@ -814,7 +713,9 @@ is rolled back.
When a cursor exits the ``with`` block it is closed, releasing any resource
eventually associated with it. The state of the transaction is not affected.
A connection can be used in more than one ``with`` statement
Note that, unlike file objects or other resources, exiting the connection's
``with`` block *doesn't close the connection* but only the transaction
associated with it: a connection can be used in more than a ``with`` statement
and each ``with`` block is effectively wrapped in a separate transaction::
conn = psycopg2.connect(DSN)
@ -829,21 +730,6 @@ and each ``with`` block is effectively wrapped in a separate transaction::
conn.close()
.. warning::
Unlike file objects or other resources, exiting the connection's
``with`` block **doesn't close the connection**, but only the transaction
associated to it. If you want to make sure the connection is closed after
a certain point, you should still use a try-catch block::
conn = psycopg2.connect(DSN)
try:
# connection usage
finally:
conn.close()
.. versionchanged:: 2.9
``with connection`` starts a transaction also on autocommit connections.
.. index::
@ -860,7 +746,7 @@ Server side cursors
When a database query is executed, the Psycopg `cursor` usually fetches
all the records returned by the backend, transferring them to the client
process. If the query returns a huge amount of data, a proportionally large
process. If the query returned an huge amount of data, a proportionally large
amount of memory will be allocated by the client.
If the dataset is too large to be practically handled on the client side, it is
@ -890,8 +776,8 @@ you may decrease this value if you are dealing with huge records.
Named cursors are usually created :sql:`WITHOUT HOLD`, meaning they live only
as long as the current transaction. Trying to fetch from a named cursor after
a `~connection.commit()` or to create a named cursor when the connection
is in `~connection.autocommit` mode will result in an exception.
a `~connection.commit()` or to create a named cursor when the `connection`
transaction isolation level is set to `AUTOCOMMIT` will result in an exception.
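For instance, a minimal sketch of consuming a large result set through a named
cursor (with a placeholder DSN and a hypothetical ``huge_table``) could be::

    conn = psycopg2.connect("dbname=test")
    cur = conn.cursor(name="fetch_big_result")   # a named (server-side) cursor
    cur.execute("SELECT * FROM huge_table")
    for record in cur:      # records are fetched from the backend in chunks
        pass                # process each record here
    cur.close()
    conn.commit()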
It is possible to create a :sql:`WITH HOLD` cursor by specifying a `!True`
value for the `withhold` parameter to `~connection.cursor()` or by setting the
`~cursor.withhold` attribute to `!True` before calling `~cursor.execute()` on
@ -906,9 +792,7 @@ lifetime extends well after `~connection.commit()`, calling
It is also possible to use a named cursor to consume a cursor created
in some other way than using the |DECLARE| executed by
`~cursor.execute()`. For example, you may have a PL/pgSQL function
returning a cursor:
.. code-block:: postgres
returning a cursor::
CREATE FUNCTION reffunc(refcursor) RETURNS refcursor AS $$
BEGIN
@ -936,7 +820,7 @@ lifetime extends well after `~connection.commit()`, calling
.. |DECLARE| replace:: :sql:`DECLARE`
.. _DECLARE: https://www.postgresql.org/docs/current/static/sql-declare.html
.. _DECLARE: http://www.postgresql.org/docs/current/static/sql-declare.html
@ -966,7 +850,7 @@ forked processes`__, so when using a module such as `multiprocessing` or a
forking web deploy method such as FastCGI make sure to create the connections
*after* the fork.
.. __: https://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-CONNECT
.. __: http://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-CONNECT
Connections shouldn't be shared either by different green threads: see
:ref:`green-support` for further details.
@ -986,7 +870,7 @@ PostgreSQL |COPY|__ command to move data from files to tables and back.
Currently no adaptation is provided between Python and PostgreSQL types on
|COPY|: the file can be any Python file-like object but its format must be in
the format accepted by `PostgreSQL COPY command`__ (data format, escaped
the format accepted by `PostgreSQL COPY command`__ (data fromat, escaped
characters, etc).
.. __: COPY_
@ -1010,7 +894,7 @@ Please refer to the documentation of the single methods for details and
examples.
.. |COPY| replace:: :sql:`COPY`
.. __: https://www.postgresql.org/docs/current/static/sql-copy.html
.. __: http://www.postgresql.org/docs/current/static/sql-copy.html
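Assuming ``conn`` and ``cur`` are an open connection and cursor and
``test_copy`` is a hypothetical table with ``id`` and ``name`` columns, a
minimal Python 3 sketch could be::

    import io
    import sys

    # data already in the default COPY text format: tab separator, \N for NULL
    buf = io.StringIO("1\tfirst\n2\t\\N\n")
    cur.copy_from(buf, "test_copy", columns=("id", "name"))

    cur.copy_to(sys.stdout, "test_copy")     # dump the table back out as text
    conn.commit()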
@ -1027,7 +911,7 @@ access to user data that is stored in a special large-object structure. They
are useful with data values too large to be manipulated conveniently as a
whole.
.. __: https://www.postgresql.org/docs/current/static/largeobjects.html
.. __: http://www.postgresql.org/docs/current/static/largeobjects.html
Psycopg allows access to the large object using the
`~psycopg2.extensions.lobject` class. Objects are generated using the
@ -1038,12 +922,12 @@ Psycopg large object support efficient import/export with file system files
using the |lo_import|_ and |lo_export|_ libpq functions.
.. |lo_import| replace:: `!lo_import()`
.. _lo_import: https://www.postgresql.org/docs/current/static/lo-interfaces.html#LO-IMPORT
.. _lo_import: http://www.postgresql.org/docs/current/static/lo-interfaces.html#LO-IMPORT
.. |lo_export| replace:: `!lo_export()`
.. _lo_export: https://www.postgresql.org/docs/current/static/lo-interfaces.html#LO-EXPORT
.. _lo_export: http://www.postgresql.org/docs/current/static/lo-interfaces.html#LO-EXPORT
.. versionchanged:: 2.6
added support for large objects greater than 2GB. Note that the support is
added support for large objects greated than 2GB. Note that the support is
enabled only if all the following conditions are verified:
- the Python build is 64 bits;
@ -1052,8 +936,8 @@ using the |lo_import|_ and |lo_export|_ libpq functions.
(`~connection.server_version` must be >= ``90300``).
If Psycopg was built with 64 bits large objects support (i.e. the first
two conditions above are verified), the `psycopg2.__version__` constant
will contain the ``lo64`` flag. If any of the condition is not met
two contidions above are verified), the `psycopg2.__version__` constant
will contain the ``lo64`` flag. If any of the contition is not met
several `!lobject` methods will fail if the arguments exceed 2GB.
@ -1104,5 +988,6 @@ transactions produced by a Java program.
For further details see the documentation for the above methods.
.. __: https://publications.opengroup.org/c193
.. __: https://jdbc.postgresql.org/
.. __: http://www.opengroup.org/bookstore/catalog/c193.htm
.. __: http://jdbc.postgresql.org/
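A bare-bones sketch of the protocol, with a placeholder DSN and a hypothetical
``test_table``, could be::

    conn = psycopg2.connect("dbname=test")
    xid = conn.xid(42, "my-transaction", "my-branch")

    conn.tpc_begin(xid)
    cur = conn.cursor()
    cur.execute("INSERT INTO test_table VALUES (1)")
    conn.tpc_prepare()     # first phase: the transaction is made durable

    # second phase: commit (or conn.tpc_rollback() to abort) the prepared xid
    conn.tpc_commit()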

89
examples/binary.py Normal file
View File

@ -0,0 +1,89 @@
# binary.py - working with binary data
#
# Copyright (C) 2001-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
DSN = 'dbname=test'
## don't modify anything below this line (except for experimenting)
import sys
import psycopg2
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding
curs = conn.cursor()
try:
curs.execute("CREATE TABLE test_binary (id int4, name text, img bytea)")
except:
conn.rollback()
curs.execute("DROP TABLE test_binary")
curs.execute("CREATE TABLE test_binary (id int4, name text, img bytea)")
conn.commit()
# first we try two inserts, one with an explicit Binary call and the other
# using a buffer on a file object.
data1 = {'id':1, 'name':'somehackers.jpg',
'img':psycopg2.Binary(open('somehackers.jpg').read())}
data2 = {'id':2, 'name':'whereareyou.jpg',
'img':buffer(open('whereareyou.jpg').read())}
curs.execute("""INSERT INTO test_binary
VALUES (%(id)s, %(name)s, %(img)s)""", data1)
curs.execute("""INSERT INTO test_binary
VALUES (%(id)s, %(name)s, %(img)s)""", data2)
# now we try to extract the images as simple text strings
print "Extracting the images as strings..."
curs.execute("SELECT * FROM test_binary")
for row in curs.fetchall():
name, ext = row[1].split('.')
new_name = name + '_S.' + ext
print " writing %s to %s ..." % (name+'.'+ext, new_name),
open(new_name, 'wb').write(row[2])
print "done"
print " python type of image data is", type(row[2])
# extract exactly the same data but using a binary cursor
print "Extracting the images using a binary cursor:"
curs.execute("""DECLARE zot CURSOR FOR
SELECT img, name FROM test_binary FOR READ ONLY""")
curs.execute("""FETCH ALL FROM zot""")
for row in curs.fetchall():
name, ext = row[1].split('.')
new_name = name + '_B.' + ext
print " writing %s to %s ..." % (name+'.'+ext, new_name),
open(new_name, 'wb').write(row[0])
print "done"
print " python type of image data is", type(row[0])
# this rollback is required because we can't drop a table with a binary cursor
# declared and still open
conn.rollback()
curs.execute("DROP TABLE test_binary")
conn.commit()
print "\nNow try to load the new images, to check it worked!"

177
examples/copy_from.py Normal file
View File

@ -0,0 +1,177 @@
# copy_from.py -- example about copy_from
#
# Copyright (C) 2002 Tom Jenkins <tjenkins@devis.com>
# Copyright (C) 2005 Federico Di Gregorio <fog@initd.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
DSN = 'dbname=test'
## don't modify anything below this line (except for experimenting)
import sys
import os
import StringIO
import psycopg2
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding
curs = conn.cursor()
try:
curs.execute("CREATE TABLE test_copy (fld1 text, fld2 text, fld3 int4)")
except:
conn.rollback()
curs.execute("DROP TABLE test_copy")
curs.execute("CREATE TABLE test_copy (fld1 text, fld2 text, fld3 int4)")
conn.commit()
# copy_from with default arguments, from open file
io = open('copy_from.txt', 'wr')
data = ['Tom\tJenkins\t37\n',
'Madonna\t\\N\t45\n',
'Federico\tDi Gregorio\t\\N\n']
io.writelines(data)
io.close()
io = open('copy_from.txt', 'r')
curs.copy_from(io, 'test_copy')
print "1) Copy %d records from file object " % len(data) + \
"using defaults (sep: \\t and null = \\N)"
io.close()
curs.execute("SELECT * FROM test_copy")
rows = curs.fetchall()
print " Select returned %d rows" % len(rows)
for r in rows:
print " %s %s\t%s" % (r[0], r[1], r[2])
curs.execute("delete from test_copy")
conn.commit()
# copy_from using custom separator, from open file
io = open('copy_from.txt', 'wr')
data = ['Tom:Jenkins:37\n',
'Madonna:\N:45\n',
'Federico:Di Gregorio:\N\n']
io.writelines(data)
io.close()
io = open('copy_from.txt', 'r')
curs.copy_from(io, 'test_copy', ':')
print "2) Copy %d records from file object using sep = :" % len(data)
io.close()
curs.execute("SELECT * FROM test_copy")
rows = curs.fetchall()
print " Select returned %d rows" % len(rows)
for r in rows:
print " %s %s\t%s" % (r[0], r[1], r[2])
curs.execute("delete from test_copy")
conn.commit()
# copy_from using custom null identifier, from open file
io = open('copy_from.txt', 'wr')
data = ['Tom\tJenkins\t37\n',
'Madonna\tNULL\t45\n',
'Federico\tDi Gregorio\tNULL\n']
io.writelines(data)
io.close()
io = open('copy_from.txt', 'r')
curs.copy_from(io, 'test_copy', null='NULL')
print "3) Copy %d records from file object using null = NULL" % len(data)
io.close()
curs.execute("SELECT * FROM test_copy")
rows = curs.fetchall()
print " Select using cursor returned %d rows" % len(rows)
for r in rows:
print " %s %s\t%s" % (r[0], r[1], r[2])
curs.execute("delete from test_copy")
conn.commit()
# copy_from using custom separator and null identifier
io = open('copy_from.txt', 'wr')
data = ['Tom:Jenkins:37\n', 'Madonna:NULL:45\n', 'Federico:Di Gregorio:NULL\n']
io.writelines(data)
io.close()
io = open('copy_from.txt', 'r')
curs.copy_from(io, 'test_copy', ':', 'NULL')
print "4) Copy %d records from file object " % len(data) + \
"using sep = : and null = NULL"
io.close()
curs.execute("SELECT * FROM test_copy")
rows = curs.fetchall()
print " Select using cursor returned %d rows" % len(rows)
for r in rows:
print " %s %s\t%s" % (r[0], r[1], r[2])
curs.execute("delete from test_copy")
conn.commit()
# anything can be used as a file if it has .read() and .readline() methods
data = StringIO.StringIO()
data.write('\n'.join(['Tom\tJenkins\t37',
'Madonna\t\N\t45',
'Federico\tDi Gregorio\t\N']))
data.seek(0)
curs.copy_from(data, 'test_copy')
print "5) Copy 3 records from StringIO object using defaults"
curs.execute("SELECT * FROM test_copy")
rows = curs.fetchall()
print " Select using cursor returned %d rows" % len(rows)
for r in rows:
print " %s %s\t%s" % (r[0], r[1], r[2])
curs.execute("delete from test_copy")
conn.commit()
# simple error test
print "6) About to raise an error"
data = StringIO.StringIO()
data.write('\n'.join(['Tom\tJenkins\t37',
'Madonna\t\N\t45',
'Federico\tDi Gregorio\taaa']))
data.seek(0)
try:
curs.copy_from(data, 'test_copy')
except StandardError, err:
conn.rollback()
print " Caught error (as expected):\n", err
conn.rollback()
curs.execute("DROP TABLE test_copy")
os.unlink('copy_from.txt')
conn.commit()

103
examples/copy_to.py Normal file
View File

@ -0,0 +1,103 @@
# copy_to.py -- example about copy_to
#
# Copyright (C) 2002 Tom Jenkins <tjenkins@devis.com>
# Copyright (C) 2005 Federico Di Gregorio <fog@initd.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
DSN = 'dbname=test'
## don't modify anything below this line (except for experimenting)
import sys
import os
import StringIO
import psycopg2
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding
curs = conn.cursor()
try:
curs.execute("CREATE TABLE test_copy (fld1 text, fld2 text, fld3 int4)")
except:
conn.rollback()
curs.execute("DROP TABLE test_copy")
curs.execute("CREATE TABLE test_copy (fld1 text, fld2 text, fld3 int4)")
conn.commit()
# demonstrate copy_to functionality
data = [('Tom', 'Jenkins', '37'),
('Madonna', None, '45'),
('Federico', 'Di Gregorio', None)]
query = "INSERT INTO test_copy VALUES (%s, %s, %s)"
curs.executemany(query, data)
conn.commit()
# copy_to using defaults
io = open('copy_to.txt', 'w')
curs.copy_to(io, 'test_copy')
print "1) Copy %d records into file object using defaults: " % len (data) + \
"sep = \\t and null = \\N"
io.close()
rows = open('copy_to.txt', 'r').readlines()
print " File has %d rows:" % len(rows)
for r in rows:
print " ", r,
# copy_to using custom separator
io = open('copy_to.txt', 'w')
curs.copy_to(io, 'test_copy', ':')
print "2) Copy %d records into file object using sep = :" % len(data)
io.close()
rows = open('copy_to.txt', 'r').readlines()
print " File has %d rows:" % len(rows)
for r in rows:
print " ", r,
# copy_to using custom null identifier
io = open('copy_to.txt', 'w')
curs.copy_to(io, 'test_copy', null='NULL')
print "3) Copy %d records into file object using null = NULL" % len(data)
io.close()
rows = open('copy_to.txt', 'r').readlines()
print " File has %d rows:" % len(rows)
for r in rows:
print " ", r,
# copy_to using custom separator and null identifier
io = open('copy_to.txt', 'w')
curs.copy_to(io, 'test_copy', ':', 'NULL')
print "4) Copy %d records into file object using sep = : and null ) NULL" % \
len(data)
io.close()
rows = open('copy_to.txt', 'r').readlines()
print " File has %d rows:" % len(rows)
for r in rows:
print " ", r,
curs.execute("DROP TABLE test_copy")
os.unlink('copy_to.txt')
conn.commit()

63
examples/cursor.py Normal file
View File

@ -0,0 +1,63 @@
# cursor.py - how to subclass the cursor type
#
# Copyright (C) 2004-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
DSN = 'dbname=test'
## don't modify anything below this line (except for experimenting)
import sys
import psycopg2
import psycopg2.extensions
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding
class NoDataError(psycopg2.ProgrammingError):
"""Exception that will be raised by our cursor."""
pass
class Cursor(psycopg2.extensions.cursor):
"""A custom cursor."""
def fetchone(self):
"""Like fetchone but raise an exception if no data is available.
Note that to have .fetchmany() and .fetchall() raise the same
exception we'll have to override them too; even if internally psycopg
uses the same function to fetch rows, the code path from Python is
different.
"""
d = psycopg2.extensions.cursor.fetchone(self)
if d is None:
raise NoDataError("no more data")
return d
curs = conn.cursor(cursor_factory=Cursor)
curs.execute("SELECT 1 AS foo")
print "Result of fetchone():", curs.fetchone()
# now let's raise the exception
try:
curs.fetchone()
except NoDataError, err:
print "Exception caught:", err
conn.rollback()

144
examples/dialtone.py Normal file
View File

@ -0,0 +1,144 @@
"""
This example/recipe has been contributed by Valentino Volonghi (dialtone)
Mapping arbitrary objects to a PostgreSQL database with psycopg2
- Problem
You need to store arbitrary objects in a PostgreSQL database without being
intrusive to your classes (you don't want inheritance from an 'Item' or
'Persistent' object).
- Solution
"""
from datetime import datetime
import psycopg2
from psycopg2.extensions import adapt, register_adapter
try:
sorted()
except:
def sorted(seq):
seq.sort()
return seq
# Here is the adapter for every object that we may ever need to
# insert in the database. It receives the original object and does
# its job on that instance
class ObjectMapper(object):
def __init__(self, orig, curs=None):
self.orig = orig
self.tmp = {}
self.items, self.fields = self._gatherState()
def _gatherState(self):
adaptee_name = self.orig.__class__.__name__
fields = sorted([(field, getattr(self.orig, field))
for field in persistent_fields[adaptee_name]])
items = []
for item, value in fields:
items.append(item)
return items, fields
def getTableName(self):
return self.orig.__class__.__name__
def getMappedValues(self):
tmp = []
for i in self.items:
tmp.append("%%(%s)s"%i)
return ", ".join(tmp)
def getValuesDict(self):
return dict(self.fields)
def getFields(self):
return self.items
def generateInsert(self):
qry = "INSERT INTO"
qry += " " + self.getTableName() + " ("
qry += ", ".join(self.getFields()) + ") VALUES ("
qry += self.getMappedValues() + ")"
return qry, self.getValuesDict()
# Here are the objects
class Album(object):
id = 0
def __init__(self):
self.creation_time = datetime.now()
self.album_id = self.id
Album.id = Album.id + 1
self.binary_data = buffer('12312312312121')
class Order(object):
id = 0
def __init__(self):
self.items = ['rice','chocolate']
self.price = 34
self.order_id = self.id
Order.id = Order.id + 1
register_adapter(Album, ObjectMapper)
register_adapter(Order, ObjectMapper)
# Describe what is needed to save on each object
# This is actually just configuration; you can use XML with a parser if you
# like to have plenty of wasted CPU cycles ;P.
persistent_fields = {'Album': ['album_id', 'creation_time', 'binary_data'],
'Order': ['order_id', 'items', 'price']
}
print adapt(Album()).generateInsert()
print adapt(Album()).generateInsert()
print adapt(Album()).generateInsert()
print adapt(Order()).generateInsert()
print adapt(Order()).generateInsert()
print adapt(Order()).generateInsert()
"""
- Discussion
Psycopg 2 has a great new feature: adaptation. The big thing about
adaptation is that it enables the programmer to glue most of the existing
code together without much difficulty.
This recipe tries to focus attention on a way to generate SQL queries to
insert completely new objects inside a database. As you can see objects do
not know anything about the code that is handling them. We specify all the
fields that we need for each object through the persistent_fields dict.
The most important lines of this recipe are:
register_adapter(Album, ObjectMapper)
register_adapter(Order, ObjectMapper)
In these lines we notify the system that when we call adapt() with an Album
instance as an argument we want it to instantiate ObjectMapper, passing the
Album instance as the argument (self.orig in the ObjectMapper class).
The output is something like this (for each call to generateInsert):
('INSERT INTO Album (album_id, binary_data, creation_time) VALUES
(%(album_id)s, %(binary_data)s, %(creation_time)s)',
{'binary_data': <read-only buffer for 0x402de070, ...>,
'creation_time': datetime.datetime(2004, 9, 10, 20, 48, 29, 633728),
'album_id': 1}
)
This is a tuple of {SQL_QUERY, FILLING_DICT}, and all the quoting/converting
stuff (from Python's datetime to postgres' timestamp and from Python's buffer
to postgres' blob) is handled with the same adaptation process under the hood
by psycopg2.
Lastly, notice that ObjectMapper works for both Album and Order
instances without any glitches at all, and both classes could easily have
come from closed-source libraries or C-coded ones (which are not easily
modified), whereas a common pattern in today's ORMs or OODBs is to provide
a basic 'Persistent' object that already knows how to store itself in the
database.
"""

65
examples/dict.py Normal file
View File

@ -0,0 +1,65 @@
# dict.py - using DictCursor/DictRow
#
# Copyright (C) 2005-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
DSN = 'dbname=test'
## don't modify anything below this line (except for experimenting)
import sys
import psycopg2
import psycopg2.extras
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding
curs = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
curs.execute("SELECT 1 AS foo, 'cip' AS bar, date(now()) as zot")
print "Cursor's row factory is", curs.row_factory
data = curs.fetchone()
print "The type of the data row is", type(data)
print "Some data accessed both as tuple and dict:"
print " ", data['foo'], data['bar'], data['zot']
print " ", data[0], data[1], data[2]
# execute another query and demonstrate we can still access the row
curs.execute("SELECT 2 AS foo")
print "The type of the data row is", type(data)
print "Some more data accessed both as tuple and dict:"
print " ", data['foo'], data['bar'], data['zot']
print " ", data[0], data[1], data[2]
curs = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
curs.execute("SELECT 1 AS foo, 'cip' AS bar, date(now()) as zot")
print "Cursor's row factory is", curs.row_factory
data = curs.fetchone()
print "The type of the data row is", type(data)
print "Some data accessed both as tuple and dict:"
print " ", data['foo'], data['bar'], data['zot']
print " ", "No access using indices: this is a specialized cursor."
# execute another query and demonstrate we can still access the row
curs.execute("SELECT 2 AS foo")
print "The type of the data row is", type(data)
print "Some more data accessed both as tuple and dict:"
print " ", data['foo'], data['bar'], data['zot']
print " ", "No access using indices: this is a specialized cursor."

99
examples/dt.py Normal file
View File

@ -0,0 +1,99 @@
# dt.py - example of using date and time types
#
# Copyright (C) 2001-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
DSN = 'dbname=test'
## don't modify anything below this line (except for experimenting)
import sys
import psycopg2
import mx.DateTime
import datetime
from psycopg2.extensions import adapt
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
curs = conn.cursor()
try:
curs.execute("""CREATE TABLE test_dt (
k int4, d date, t time, dt timestamp, z interval)""")
except:
conn.rollback()
curs.execute("DROP TABLE test_dt")
curs.execute("""CREATE TABLE test_dt (
k int4, d date, t time, dt timestamp, z interval)""")
conn.commit()
# build and insert some data using mx.DateTime
mx1 = (
1,
mx.DateTime.Date(2004, 10, 19),
mx.DateTime.Time(0, 11, 17.015),
mx.DateTime.Timestamp(2004, 10, 19, 0, 11, 17.5),
mx.DateTime.DateTimeDelta(13, 15, 17, 59.9))
from psycopg2.extensions import adapt
import psycopg2.extras
print adapt(mx1)
print "Inserting mx.DateTime values..."
curs.execute("INSERT INTO test_dt VALUES (%s, %s, %s, %s, %s)", mx1)
# build and insert some values using the datetime adapters
dt1 = (
2,
datetime.date(2004, 10, 19),
datetime.time(0, 11, 17, 15000),
datetime.datetime(2004, 10, 19, 0, 11, 17, 500000),
datetime.timedelta(13, 15*3600+17*60+59, 900000))
print "Inserting Python datetime values..."
curs.execute("INSERT INTO test_dt VALUES (%s, %s, %s, %s, %s)", dt1)
# now extract the row from database and print them
print "Extracting values inserted with mx.DateTime wrappers:"
curs.execute("SELECT d, t, dt, z FROM test_dt WHERE k = 1")
for n, x in zip(mx1[1:], curs.fetchone()):
try:
# this will work only if psycopg has been compiled with datetime
# as the default typecaster for date/time values
s = repr(n) + "\n -> " + str(adapt(n)) + \
"\n -> " + repr(x) + "\n -> " + x.isoformat()
except:
s = repr(n) + "\n -> " + str(adapt(n)) + \
"\n -> " + repr(x) + "\n -> " + str(x)
print s
print
print "Extracting values inserted with Python datetime wrappers:"
curs.execute("SELECT d, t, dt, z FROM test_dt WHERE k = 2")
for n, x in zip(dt1[1:], curs.fetchone()):
try:
# this will work only if psycopg has been compiled with datetime
# as the default typecaster for date/time values
s = repr(n) + "\n -> " + repr(x) + "\n -> " + x.isoformat()
except:
s = repr(n) + "\n -> " + repr(x) + "\n -> " + str(x)
print s
print
curs.execute("DROP TABLE test_dt")
conn.commit()

105
examples/encoding.py Normal file
View File

@ -0,0 +1,105 @@
# encoding.py - show how to change the client encoding (and test it works)
# -*- encoding: utf8 -*-
#
# Copyright (C) 2004-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
DSN = 'dbname=test'
## don't modify anything below this line (except for experimenting)
import sys
import psycopg2
import psycopg2.extensions
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Initial encoding for this connection is", conn.encoding
print "\n** This example is supposed to be run in a UNICODE terminal! **\n"
print "Available encodings:"
encs = psycopg2.extensions.encodings.items()
encs.sort()
for a, b in encs:
print " ", a, "<->", b
print "Using STRING typecaster"
print "Setting backend encoding to LATIN1 and executing queries:"
conn.set_client_encoding('LATIN1')
curs = conn.cursor()
curs.execute("SELECT %s::TEXT AS foo", ('àèìòù',))
x = curs.fetchone()[0]
print " ->", unicode(x, 'latin-1').encode('utf-8'), type(x)
curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù',))
x = curs.fetchone()[0]
print " ->", unicode(x, 'latin-1').encode('utf-8'), type(x)
print "Setting backend encoding to UTF8 and executing queries:"
conn.set_client_encoding('UNICODE')
curs = conn.cursor()
curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù'.encode('utf-8'),))
x = curs.fetchone()[0]
print " ->", x, type(x)
curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù',))
x = curs.fetchone()[0]
print " ->", x, type(x)
print "Using UNICODE typecaster"
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
print "Setting backend encoding to LATIN1 and executing queries:"
conn.set_client_encoding('LATIN1')
curs = conn.cursor()
curs.execute("SELECT %s::TEXT AS foo", ('àèìòù',))
x = curs.fetchone()[0]
print " ->", x.encode('utf-8'), ":", type(x)
curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù',))
x = curs.fetchone()[0]
print " ->", x.encode('utf-8'), ":", type(x)
print "Setting backend encoding to UTF8 and executing queries:"
conn.set_client_encoding('UNICODE')
curs = conn.cursor()
curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù'.encode('utf-8'),))
x = curs.fetchone()[0]
print " ->", x.encode('utf-8'), ":", type(x)
curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù',))
x = curs.fetchone()[0]
print " ->", x.encode('utf-8'), ":", type(x)
print "Executing full UNICODE queries"
print "Setting backend encoding to LATIN1 and executing queries:"
conn.set_client_encoding('LATIN1')
curs = conn.cursor()
curs.execute(u"SELECT %s::TEXT AS foo", ('àèìòù',))
x = curs.fetchone()[0]
print " ->", x.encode('utf-8'), ":", type(x)
curs.execute(u"SELECT %s::TEXT AS foo", (u'àèìòù',))
x = curs.fetchone()[0]
print " ->", x.encode('utf-8'), ":", type(x)
print "Setting backend encoding to UTF8 and executing queries:"
conn.set_client_encoding('UNICODE')
curs = conn.cursor()
curs.execute(u"SELECT %s::TEXT AS foo", (u'àèìòù'.encode('utf-8'),))
x = curs.fetchone()[0]
print " ->", x.encode('utf-8'), ":", type(x)
curs.execute(u"SELECT %s::TEXT AS foo", (u'àèìòù',))
x = curs.fetchone()[0]
print " ->", x.encode('utf-8'), ":", type(x)

80
examples/fetch.py Normal file
View File

@ -0,0 +1,80 @@
# fetch.py -- example about declaring cursors
#
# Copyright (C) 2001-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
DSN = 'dbname=test'
## don't modify anything below this line (except for experimenting)
import sys
import psycopg2
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding
curs = conn.cursor()
try:
curs.execute("CREATE TABLE test_fetch (val int4)")
except:
conn.rollback()
curs.execute("DROP TABLE test_fetch")
curs.execute("CREATE TABLE test_fetch (val int4)")
conn.commit()
# we use this function to format the output
def flatten(l):
"""Flattens list of tuples l."""
return map(lambda x: x[0], l)
# insert 20 rows in the table
for i in range(20):
curs.execute("INSERT INTO test_fetch VALUES(%s)", (i,))
conn.commit()
# does some nice tricks with the transaction and postgres cursors
# (remember to always commit or rollback before a DECLARE)
#
# we don't need to DECLARE ourselves, psycopg now supports named
# cursors (but we leave the code here, comments, as an example of
# what psycopg is doing under the hood)
#
#curs.execute("DECLARE crs CURSOR FOR SELECT * FROM test_fetch")
#curs.execute("FETCH 10 FROM crs")
#print "First 10 rows:", flatten(curs.fetchall())
#curs.execute("MOVE -5 FROM crs")
#print "Moved back cursor by 5 rows (to row 5.)"
#curs.execute("FETCH 10 FROM crs")
#print "Another 10 rows:", flatten(curs.fetchall())
#curs.execute("FETCH 10 FROM crs")
#print "The remaining rows:", flatten(curs.fetchall())
ncurs = conn.cursor("crs")
ncurs.execute("SELECT * FROM test_fetch")
print "First 10 rows:", flatten(ncurs.fetchmany(10))
ncurs.scroll(-5)
print "Moved back cursor by 5 rows (to row 5.)"
print "Another 10 rows:", flatten(ncurs.fetchmany(10))
print "Another one:", list(ncurs.fetchone())
print "The remaining rows:", flatten(ncurs.fetchall())
conn.rollback()
curs.execute("DROP TABLE test_fetch")
conn.commit()

59
examples/lastrowid.py Normal file
View File

@ -0,0 +1,59 @@
# lastrowid.py - example of using .lastrowid attribute
#
# Copyright (C) 2001-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
DSN = 'dbname=test'
## don't modify anything below this line (except for experimenting)
import sys, psycopg2
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
curs = conn.cursor()
try:
curs.execute("CREATE TABLE test_oid (name text, surname text)")
except:
conn.rollback()
curs.execute("DROP TABLE test_oid")
curs.execute("CREATE TABLE test_oid (name text, surname text)")
conn.commit()
data = ({'name':'Federico', 'surname':'Di Gregorio'},
{'name':'Pierluigi', 'surname':'Di Nunzio'})
curs.execute("""INSERT INTO test_oid
VALUES (%(name)s, %(surname)s)""", data[0])
foid = curs.lastrowid
print "Oid for %(name)s %(surname)s" % data[0], "is", foid
curs.execute("""INSERT INTO test_oid
VALUES (%(name)s, %(surname)s)""", data[1])
moid = curs.lastrowid
print "Oid for %(name)s %(surname)s" % data[1], "is", moid
curs.execute("SELECT * FROM test_oid WHERE oid = %s", (foid,))
print "Oid", foid, "selected %s %s" % curs.fetchone()
curs.execute("SELECT * FROM test_oid WHERE oid = %s", (moid,))
print "Oid", moid, "selected %s %s" % curs.fetchone()
curs.execute("DROP TABLE test_oid")
conn.commit()

91
examples/lobject.py Normal file
View File

@ -0,0 +1,91 @@
# lobject.py - lobject example
#
# Copyright (C) 2001-2006 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
DSN = 'dbname=test'
## don't modify anything below this line (except for experimenting)
import sys
import psycopg2
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding
# this will create a large object with a new random oid; we'll
# use it to make some basic tests about read/write and seek.
lobj = conn.lobject()
loid = lobj.oid
print "Created a new large object with oid", loid
print "Manually importing some binary data into the object:"
data = open("somehackers.jpg").read()
len = lobj.write(data)
print " imported", len, "bytes of data"
conn.commit()
print "Trying to (re)open large object with oid", loid
lobj = conn.lobject(loid)
print "Manually exporting the data from the lobject:"
data1 = lobj.read()
len = lobj.tell()
lobj.seek(0, 0)
data2 = lobj.read()
if data1 != data2:
print "ERROR: read after seek returned different data"
open("somehackers_lobject1.jpg", 'wb').write(data1)
print " written", len, "bytes of data to somehackers_lobject1.jpg"
lobj.unlink()
print "Large object with oid", loid, "removed"
conn.commit()
# now we try to use the import and export functions to do the same
lobj = conn.lobject(0, 'n', 0, "somehackers.jpg")
loid = lobj.oid
print "Imported a new large object with oid", loid
conn.commit()
print "Trying to (re)open large object with oid", loid
lobj = conn.lobject(loid, 'n')
print "Using export() to export the data from the large object:"
lobj.export("somehackers_lobject2.jpg")
print " exported large object to somehackers_lobject2.jpg"
lobj.unlink()
print "Large object with oid", loid, "removed"
conn.commit()
# this will create a very large object with a new random oid.
lobj = conn.lobject()
loid = lobj.oid
print "Created a new large object with oid", loid
print "Manually importing a lot of data into the object:"
data = "data" * 1000000
len = lobj.write(data)
print " imported", len, "bytes of data"
conn.rollback()
print "\nNow try to load the new images, to check it worked!"

47
examples/mogrify.py Normal file
View File

@ -0,0 +1,47 @@
# mogrify.py - test all possible simple type mogrifications
# -*- encoding: latin1 -*-
#
# Copyright (C) 2004-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
DSN = 'dbname=test'
## don't modify anything below this line (except for experimenting)
import sys, psycopg2
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding
curs = conn.cursor()
curs.execute("SELECT %(foo)s AS foo", {'foo':'bar'})
curs.execute("SELECT %(foo)s AS foo", {'foo':None})
curs.execute("SELECT %(foo)s AS foo", {'foo':True})
curs.execute("SELECT %(foo)s AS foo", {'foo':42})
curs.execute("SELECT %(foo)s AS foo", {'foo':u'yatt<EFBFBD>!'})
curs.execute("SELECT %(foo)s AS foo", {'foo':u'bar'})
print curs.mogrify("SELECT %(foo)s AS foo", {'foo':'bar'})
print curs.mogrify("SELECT %(foo)s AS foo", {'foo':None})
print curs.mogrify("SELECT %(foo)s AS foo", {'foo':True})
print curs.mogrify("SELECT %(foo)s AS foo", {'foo':42})
print curs.mogrify("SELECT %(foo)s AS foo", {'foo':u'yatt<EFBFBD>!'})
print curs.mogrify("SELECT %(foo)s AS foo", {'foo':u'bar'})
conn.rollback()

126
examples/myfirstrecipe.py Normal file
View File

@ -0,0 +1,126 @@
"""
Using a tuple as a bound variable in "SELECT ... IN (...)" clauses
in PostgreSQL using psycopg2
Some time ago someone asked on the psycopg mailing list how to have a
bound variable expand to the right SQL for an SELECT IN clause:
SELECT * FROM atable WHERE afield IN (value1, value2, value3)
with the values to be used in the IN clause to be passed to the cursor
.execute() method in a tuple as a bound variable, i.e.:
in_values = ("value1", "value2", "value3")
curs.execute("SELECT ... IN %s", (in_values,))
psycopg 1 does support typecasting from Python to PostgreSQL (and back)
only for simple types, and this problem has no elegant solution (short of
writing a wrapper class returning the pre-quoted text in an __str__
method).
But psycopg2 offers a simple and elegant solution by partially
implementing the Object Adaptation from PEP 246. psycopg2 moves
the type-casting logic into external adapters and a somewhat
broken adapt() function.
While the original adapt() takes 3 arguments, psycopg2's takes only
1: the bound variable to be adapted. The result is an object supporting
a not-yet well defined protocol that we can call ISQLQuote:
class ISQLQuote:
def getquoted(self):
"Returns a quoted string representing the bound variable."
def getbinary(self):
"Returns a binary quoted string representing the bound variable."
def getbuffer(self):
"Returns the wrapped object itself."
__str__ = getquoted
Then one of the functions (usually .getquoted()) is called by psycopg2 at
the right time to obtain the right, sql-quoted representation for the
corresponding bound variable.
The nice part is that the default, built-in adapters, derived from the
psycopg 1 typecasting code, can be overridden by the programmer simply by
replacing them in the psycopg.extensions.adapters dictionary.
Then the solution to the original problem is now obvious: write an
adapter that adapts tuple objects into the right SQL string, by calling
adapt() recursively on each element.
psycopg2 development can be tracked on the psycopg mailing list:
http://lists.initd.org/mailman/listinfo/psycopg
"""
# Copyright (C) 2001-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
import psycopg2
import psycopg2.extensions
from psycopg2.extensions import adapt as psycoadapt
from psycopg2.extensions import register_adapter
class AsIs(object):
"""An adapter that just return the object 'as is'.
psycopg 1.99.9 has some optimizations that make it impossible to call
adapt() without adding some basic adapters externally. This limitation
will be lifted in a future release.
"""
def __init__(self, obj):
self.__obj = obj
def getquoted(self):
return self.__obj
class SQL_IN(object):
"""Adapt a tuple to an SQL quotable object."""
def __init__(self, seq):
self._seq = seq
def prepare(self, conn):
pass
def getquoted(self):
# this is the important line: note how every object in the
# list is adapted and then how getquoted() is called on it
qobjs = [str(psycoadapt(o).getquoted()) for o in self._seq]
return '(' + ', '.join(qobjs) + ')'
__str__ = getquoted
# add our new adapter class to psycopg list of adapters
register_adapter(tuple, SQL_IN)
register_adapter(float, AsIs)
register_adapter(int, AsIs)
# usually we would call:
#
# conn = psycopg.connect("...")
# curs = conn.cursor()
# curs.execute("SELECT ...", (("this", "is", "the", "tuple"),))
#
# but we have no connection to a database right now, so we just check
# the SQL_IN class by calling psycopg's adapt() directly:
if __name__ == '__main__':
print "Note how the string will be SQL-quoted, but the number will not:"
print psycoadapt(("this is an 'sql quoted' str\\ing", 1, 2.0))
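A minimal sketch of using SQL_IN against a live connection, assuming a reachable test database and an existing table named atable with a column afield (both names are illustrative, taken from the problem statement above); it also assumes the register_adapter() calls above have run:
import psycopg2

conn = psycopg2.connect('dbname=test')            # hypothetical DSN
curs = conn.cursor()
# the tuple bound variable is expanded by SQL_IN into (value1, value2, value3)
curs.execute("SELECT * FROM atable WHERE afield IN %s",
             (("value1", "value2", "value3"),))
print curs.fetchall()
conn.close()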

45
examples/notify.py Normal file
View File

@ -0,0 +1,45 @@
# notify.py - example of getting notifies
#
# Copyright (C) 2001-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
DSN = 'dbname=test'
## don't modify anything below this line (except for experimenting)
import sys
import select
import psycopg2
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding
conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
curs = conn.cursor()
curs.execute("listen test")
print "Waiting for 'NOTIFY test'"
while 1:
if select.select([conn],[],[],5)==([],[],[]):
print "Timeout"
else:
conn.poll()
while conn.notifies:
print "Got NOTIFY:", conn.notifies.pop()

54
examples/simple.py Normal file
View File

@ -0,0 +1,54 @@
# simple.py - very simple example of plain DBAPI-2.0 usage
#
# currently used as test-me-stress-me script for psycopg 2.0
#
# Copyright (C) 2001-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
DSN = 'dbname=test'
## don't modify anything below this line (except for experimenting)
class SimpleQuoter(object):
def sqlquote(x=None):
return "'bar'"
import sys
import psycopg2
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding
curs = conn.cursor()
curs.execute("SELECT 1 AS foo")
print curs.fetchone()
curs.execute("SELECT 1 AS foo")
print curs.fetchmany()
curs.execute("SELECT 1 AS foo")
print curs.fetchall()
conn.rollback()
sys.exit(0)
curs.execute("SELECT 1 AS foo", async=1)
curs.execute("SELECT %(foo)s AS foo", {'foo':'bar'})
curs.execute("SELECT %(foo)s AS foo", {'foo':None})
curs.execute("SELECT %(foo)f AS foo", {'foo':42})
curs.execute("SELECT %(foo)s AS foo", {'foo':SimpleQuoter()})

BIN
examples/somehackers.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 22 KiB

161
examples/threads.py Normal file
View File

@ -0,0 +1,161 @@
# threads.py -- example of multiple threads using psycopg
# -*- encoding: latin1 -*-
#
# Copyright (C) 2001-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
DSN = 'dbname=test'
## some others parameters
INSERT_THREADS = ('A', 'B', 'C')
SELECT_THREADS = ('1', '2')
ROWS = 1000
COMMIT_STEP = 20
SELECT_SIZE = 10000
SELECT_STEP = 500
SELECT_DIV = 250
# the available modes are:
# 0 - one connection for all inserts and one for all select threads
# 1 - connections generated using the connection pool
MODE = 1
## don't modify anything below this line (except for experimenting)
import sys, psycopg2, threading
from psycopg2.pool import ThreadedConnectionPool
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
if len(sys.argv) > 1:
DSN = sys.argv[1]
if len(sys.argv) > 2:
MODE = int(sys.argv[2])
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
curs = conn.cursor()
try:
curs.execute("""CREATE TABLE test_threads (
name text, value1 int4, value2 float)""")
except:
conn.rollback()
curs.execute("DROP TABLE test_threads")
curs.execute("""CREATE TABLE test_threads (
name text, value1 int4, value2 float)""")
conn.commit()
## this function inserts a big number of rows and creates and destroys
## a large number of cursors
def insert_func(conn_or_pool, rows):
name = threading.currentThread().getName()
if MODE == 0:
conn = conn_or_pool
else:
conn = conn_or_pool.getconn()
for i in range(rows):
if divmod(i, COMMIT_STEP)[1] == 0:
conn.commit()
if MODE == 1:
conn_or_pool.putconn(conn)
s = name + ": COMMIT STEP " + str(i)
print s
if MODE == 1:
conn = conn_or_pool.getconn()
c = conn.cursor()
try:
c.execute("INSERT INTO test_threads VALUES (%s, %s, %s)",
(str(i), i, float(i)))
except psycopg2.ProgrammingError, err:
print name, ": an error occurred; skipping this insert"
print err
conn.commit()
## a nice select function that prints the current number of rows in the
## database (and transfer them, putting some pressure on the network)
def select_func(conn_or_pool, z):
name = threading.currentThread().getName()
if MODE == 0:
conn = conn_or_pool
conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
for i in range(SELECT_SIZE):
if divmod(i, SELECT_STEP)[1] == 0:
try:
if MODE == 1:
conn = conn_or_pool.getconn()
conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
c = conn.cursor()
c.execute("SELECT * FROM test_threads WHERE value2 < %s",
(int(i/z),))
l = c.fetchall()
if MODE == 1:
conn_or_pool.putconn(conn)
s = name + ": number of rows fetched: " + str(len(l))
print s
except psycopg2.ProgrammingError, err:
print name, ": an error occurred; skipping this select"
print err
## create the connection pool or the connections
if MODE == 0:
conn_insert = psycopg2.connect(DSN)
conn_select = psycopg2.connect(DSN)
else:
m = len(INSERT_THREADS) + len(SELECT_THREADS)
n = m/2
conn_insert = conn_select = ThreadedConnectionPool(n, m, DSN)
## create the threads
threads = []
print "Creating INSERT threads:"
for name in INSERT_THREADS:
t = threading.Thread(None, insert_func, 'Thread-'+name,
(conn_insert, ROWS))
t.setDaemon(0)
threads.append(t)
print "Creating SELECT threads:"
for name in SELECT_THREADS:
t = threading.Thread(None, select_func, 'Thread-'+name,
(conn_select, SELECT_DIV))
t.setDaemon(0)
threads.append(t)
## really start the threads now
for t in threads:
t.start()
# and wait for them to finish
for t in threads:
t.join()
print t.getName(), "exited OK"
conn.commit()
curs.execute("SELECT count(name) FROM test_threads")
print "Inserted", curs.fetchone()[0], "rows."
curs.execute("DROP TABLE test_threads")
conn.commit()

67
examples/typecast.py Normal file
View File

@ -0,0 +1,67 @@
# typecast.py - example of per-cursor and per-connection typecasters.
#
# Copyright (C) 2001-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
DSN = 'dbname=test'
## don't modify anything below this line (except for experimenting)
class SimpleQuoter(object):
def sqlquote(x=None):
return "'bar'"
import sys
import psycopg2
import psycopg2.extensions
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding
curs = conn.cursor()
curs.execute("SELECT 'text'::text AS foo")
textoid = curs.description[0][1]
print "Oid for the text datatype is", textoid
def castA(s, curs):
if s is not None: return "(A) " + s
TYPEA = psycopg2.extensions.new_type((textoid,), "TYPEA", castA)
def castB(s, curs):
if s is not None: return "(B) " + s
TYPEB = psycopg2.extensions.new_type((textoid,), "TYPEB", castB)
curs = conn.cursor()
curs.execute("SELECT 'some text.'::text AS foo")
print "Some text from plain connection:", curs.fetchone()[0]
psycopg2.extensions.register_type(TYPEA, conn)
curs = conn.cursor()
curs.execute("SELECT 'some text.'::text AS foo")
print "Some text from connection with typecaster:", curs.fetchone()[0]
curs = conn.cursor()
psycopg2.extensions.register_type(TYPEB, curs)
curs.execute("SELECT 'some text.'::text AS foo")
print "Some text from cursor with typecaster:", curs.fetchone()[0]
curs = conn.cursor()
curs.execute("SELECT 'some text.'::text AS foo")
print "Some text from connection with typecaster again:", curs.fetchone()[0]

69
examples/tz.py Normal file
View File

@ -0,0 +1,69 @@
# tz.py - example of datetime objects with time zones
# -*- encoding: utf8 -*-
#
# Copyright (C) 2004-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
DSN = 'dbname=test'
## don't modify anything below this line (except for experimenting)
import sys
import psycopg2
import datetime
from psycopg2.tz import ZERO, LOCAL, FixedOffsetTimezone
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
curs = conn.cursor()
try:
curs.execute("CREATE TABLE test_tz (t timestamp with time zone)")
except:
conn.rollback()
curs.execute("DROP TABLE test_tz")
curs.execute("CREATE TABLE test_tz (t timestamp with time zone)")
conn.commit()
d = datetime.datetime(1971, 10, 19, 22, 30, 0, tzinfo=LOCAL)
curs.execute("INSERT INTO test_tz VALUES (%s)", (d,))
print "Inserted timestamp with timezone:", d
print "Time zone:", d.tzinfo.tzname(d), "offset:", d.tzinfo.utcoffset(d)
tz = FixedOffsetTimezone(-5*60, "EST")
d = datetime.datetime(1971, 10, 19, 22, 30, 0, tzinfo=tz)
curs.execute("INSERT INTO test_tz VALUES (%s)", (d,))
print "Inserted timestamp with timezone:", d
print "Time zone:", d.tzinfo.tzname(d), "offset:", d.tzinfo.utcoffset(d)
curs.execute("SELECT * FROM test_tz")
d = curs.fetchone()[0]
curs.execute("INSERT INTO test_tz VALUES (%s)", (d,))
print "Inserted SELECTed timestamp:", d
print "Time zone:", d.tzinfo.tzname(d), "offset:", d.tzinfo.utcoffset(d)
curs.execute("SELECT * FROM test_tz")
for d in curs:
u = d[0].utcoffset() or ZERO
print "UTC time: ", d[0] - u
print "Local time:", d[0]
print "Time zone:", d[0].tzinfo.tzname(d[0]), d[0].tzinfo.utcoffset(d[0])
curs.execute("DROP TABLE test_tz")
conn.commit()

126
examples/usercast.py Normal file
View File

@ -0,0 +1,126 @@
# usercast.py -- example of user defined typecasters
# -*- encoding: latin-1 -*-
#
# Copyright (C) 2001-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
DSN = 'dbname=test'
## don't modify anything below this line (except for experimenting)
import sys
import psycopg2
import psycopg2.extensions
import whrandom
# importing psycopg.extras will give us a nice tuple adapter: this is wrong
# because the adapter is meant to be used in SQL IN clauses while we use
# tuples to represent points, but it works, and the example is about Rect, not
# "Point"
import psycopg2.extras
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Initial encoding for this connection is", conn.encoding
curs = conn.cursor()
try:
curs.execute("CREATE TABLE test_cast (p1 point, p2 point, b box)")
except:
conn.rollback()
curs.execute("DROP TABLE test_cast")
curs.execute("CREATE TABLE test_cast (p1 point, p2 point, b box)")
conn.commit()
# this is the callable object we use as a typecast (the typecast is
# usually a function, but we use a class, just to demonstrate the
# flexibility of the psycopg casting system)
class Rect(object):
"""Very simple rectangle.
Note that we use this type as a data holder, as an adapter of itself for
the ISQLQuote protocol used by psycopg's adapt() (see __conform__ below)
and eventually as a type-caster for the data extracted from the database
(that's why __init__ takes the curs argument.)
"""
def __init__(self, s=None, curs=None):
"""Init the rectangle from the optional string s."""
self.x = self.y = self.width = self.height = 0.0
if s: self.from_string(s)
def __conform__(self, proto):
"""This is a terrible hack, just ignore proto and return self."""
if proto == psycopg2.extensions.ISQLQuote:
return self
def from_points(self, x0, y0, x1, y1):
"""Init the rectangle from points."""
if x0 > x1: (x0, x1) = (x1, x0)
if y0 > y1: (y0, y1) = (y1, y0)
self.x = x0
self.y = y0
self.width = x1 - x0
self.height = y1 - y0
def from_string(self, s):
"""Init the rectangle from a string."""
seq = eval(s)
self.from_points(seq[0][0], seq[0][1], seq[1][0], seq[1][1])
def getquoted(self):
"""Format self as a string usable by the db to represent a box."""
s = "'((%d,%d),(%d,%d))'" % (
self.x, self.y, self.x + self.width, self.y + self.height)
return s
def show(self):
"""Format a description of the box."""
s = "X: %d\tY: %d\tWidth: %d\tHeight: %d" % (
self.x, self.y, self.width, self.height)
return s
# here we select from the empty table, just to grab the description
curs.execute("SELECT b FROM test_cast WHERE 0=1")
boxoid = curs.description[0][1]
print "Oid for the box datatype is", boxoid
# and build the user cast object
BOX = psycopg2.extensions.new_type((boxoid,), "BOX", Rect)
psycopg2.extensions.register_type(BOX)
# now insert 100 random data (2 points and a box in each row)
for i in range(100):
p1 = (whrandom.randint(0,100), whrandom.randint(0,100))
p2 = (whrandom.randint(0,100), whrandom.randint(0,100))
b = Rect()
b.from_points(whrandom.randint(0,100), whrandom.randint(0,100),
whrandom.randint(0,100), whrandom.randint(0,100))
curs.execute("INSERT INTO test_cast VALUES ('%(p1)s', '%(p2)s', %(box)s)",
{'box':b, 'p1':p1, 'p2':p2})
print "Added 100 boxed to the database"
# select and print all boxes with at least one point inside
curs.execute("SELECT b FROM test_cast WHERE p1 @ b OR p2 @ b")
boxes = curs.fetchall()
print "Found %d boxes with at least a point inside:" % len(boxes)
for box in boxes:
print " ", box[0].show()
curs.execute("DROP TABLE test_cast")
conn.commit()

BIN
examples/whereareyou.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 34 KiB

View File

@ -6,10 +6,10 @@ provide new-style classes for connection and cursor objects and other sweet
candies. Like the original, psycopg 2 was written with the aim of being very
small and fast, and stable as a rock.
Homepage: https://psycopg.org/
Homepage: http://initd.org/projects/psycopg2
.. _PostgreSQL: https://www.postgresql.org/
.. _Python: https://www.python.org/
.. _PostgreSQL: http://www.postgresql.org/
.. _Python: http://www.python.org/
:Groups:
* `Connections creation`: connect
@ -18,8 +18,7 @@ Homepage: https://psycopg.org/
"""
# psycopg/__init__.py - initialization of the psycopg module
#
# Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
# Copyright (C) 2020-2021 The Psycopg Team
# Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@ -44,7 +43,7 @@ Homepage: https://psycopg.org/
# Note: the first internal import should be _psycopg, otherwise the real cause
# of a failed loading of the C module may get hidden, see
# https://archives.postgresql.org/psycopg/2011-02/msg00044.php
# http://archives.postgresql.org/psycopg/2011-02/msg00044.php
# Import the DBAPI-2.0 stuff into top-level module.
@ -61,20 +60,26 @@ from psycopg2._psycopg import ( # noqa
__version__, __libpq_version__,
)
from psycopg2 import tz # noqa
# Register default adapters.
from psycopg2 import extensions as _ext
import psycopg2.extensions as _ext
_ext.register_adapter(tuple, _ext.SQL_IN)
_ext.register_adapter(type(None), _ext.NoneAdapter)
# Register the Decimal adapter here instead of in the C layer.
# This way a new class is registered for each sub-interpreter.
# See ticket #52
from decimal import Decimal # noqa
from psycopg2._psycopg import Decimal as Adapter # noqa
_ext.register_adapter(Decimal, Adapter)
del Decimal, Adapter
try:
from decimal import Decimal
except ImportError:
pass
else:
from psycopg2._psycopg import Decimal as Adapter
_ext.register_adapter(Decimal, Adapter)
del Decimal, Adapter
def connect(dsn=None, connection_factory=None, cursor_factory=None, **kwargs):
@ -118,6 +123,9 @@ def connect(dsn=None, connection_factory=None, cursor_factory=None, **kwargs):
if 'async_' in kwargs:
kwasync['async_'] = kwargs.pop('async_')
if dsn is None and not kwargs:
raise TypeError('missing dsn and no parameters')
dsn = _ext.make_dsn(dsn, **kwargs)
conn = _connect(dsn, connection_factory=connection_factory, **kwasync)
if cursor_factory is not None:

View File

@ -3,8 +3,7 @@
# psycopg/_ipaddress.py - Ipaddres-based network types adaptation
#
# Copyright (C) 2016-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
# Copyright (C) 2020-2021 The Psycopg Team
# Copyright (C) 2016 Daniele Varrazzo <daniele.varrazzo@gmail.com>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@ -77,13 +76,13 @@ def cast_interface(s, cur=None):
if s is None:
return None
# Py2 version force the use of unicode. meh.
return ipaddress.ip_interface(str(s))
return ipaddress.ip_interface(unicode(s))
def cast_network(s, cur=None):
if s is None:
return None
return ipaddress.ip_network(str(s))
return ipaddress.ip_network(unicode(s))
def adapt_ipaddress(obj):

View File

@ -7,8 +7,7 @@ extensions importing register_json from extras.
# psycopg/_json.py - Implementation of the JSON adaptation objects
#
# Copyright (C) 2012-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
# Copyright (C) 2020-2021 The Psycopg Team
# Copyright (C) 2012 Daniele Varrazzo <daniele.varrazzo@gmail.com>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@ -28,12 +27,22 @@ extensions importing register_json from extras.
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
import json
import sys
from psycopg2._psycopg import ISQLQuote, QuotedString
from psycopg2._psycopg import new_type, new_array_type, register_type
# import the best json implementation available
if sys.version_info[:2] >= (2, 6):
import json
else:
try:
import simplejson as json
except ImportError:
json = None
# oids from PostgreSQL 9.2
JSON_OID = 114
JSONARRAY_OID = 199
@ -43,20 +52,27 @@ JSONB_OID = 3802
JSONBARRAY_OID = 3807
class Json:
class Json(object):
"""
An `~psycopg2.extensions.ISQLQuote` wrapper to adapt a Python object to
:sql:`json` data type.
`!Json` can be used to wrap any object supported by the provided *dumps*
function. If none is provided, the standard :py:func:`json.dumps()` is
used.
function. If none is provided, the standard :py:func:`json.dumps()` is
used (`!simplejson` for Python < 2.6;
`~psycopg2.extensions.ISQLQuote.getquoted()` will raise `!ImportError` if
the module is not available).
"""
def __init__(self, adapted, dumps=None):
self.adapted = adapted
self._conn = None
self._dumps = dumps or json.dumps
if dumps is not None:
self._dumps = dumps
elif json is not None:
self._dumps = json.dumps
else:
self._dumps = None
def __conform__(self, proto):
if proto is ISQLQuote:
@ -69,21 +85,25 @@ class Json:
provided in the constructor. You can override this method to create a
customized JSON wrapper.
"""
return self._dumps(obj)
def prepare(self, conn):
self._conn = conn
dumps = self._dumps
if dumps is not None:
return dumps(obj)
else:
raise ImportError(
"json module not available: "
"you should provide a dumps function")
def getquoted(self):
s = self.dumps(self.adapted)
qs = QuotedString(s)
if self._conn is not None:
qs.prepare(self._conn)
return qs.getquoted()
return QuotedString(s).getquoted()
def __str__(self):
# getquoted is binary
return self.getquoted().decode('ascii', 'replace')
if sys.version_info < (3,):
def __str__(self):
return self.getquoted()
else:
def __str__(self):
# getquoted is binary in Py3
return self.getquoted().decode('ascii', 'replace')
def register_json(conn_or_curs=None, globally=False, loads=None,
@ -154,7 +174,10 @@ def register_default_jsonb(conn_or_curs=None, globally=False, loads=None):
def _create_json_typecasters(oid, array_oid, loads=None, name='JSON'):
"""Create typecasters for json data type."""
if loads is None:
loads = json.loads
if json is None:
raise ImportError("no json module available")
else:
loads = json.loads
def typecast_json(s, cur):
if s is None:
@ -163,7 +186,7 @@ def _create_json_typecasters(oid, array_oid, loads=None, name='JSON'):
JSON = new_type((oid, ), name, typecast_json)
if array_oid is not None:
JSONARRAY = new_array_type((array_oid, ), f"{name}ARRAY", JSON)
JSONARRAY = new_array_type((array_oid, ), "%sARRAY" % name, JSON)
else:
JSONARRAY = None
@ -181,7 +204,7 @@ def _get_json_oids(conn_or_curs, name='json'):
conn_status = conn.status
# column typarray not available before PG 8.3
typarray = conn.info.server_version >= 80300 and "typarray" or "NULL"
typarray = conn.server_version >= 80300 and "typarray" or "NULL"
# get the oid for the hstore
curs.execute(
@ -190,10 +213,10 @@ def _get_json_oids(conn_or_curs, name='json'):
r = curs.fetchone()
# revert the status of the connection as before the command
if conn_status != STATUS_IN_TRANSACTION and not conn.autocommit:
if (conn_status != STATUS_IN_TRANSACTION and not conn.autocommit):
conn.rollback()
if not r:
raise conn.ProgrammingError(f"{name} data type not found")
raise conn.ProgrammingError("%s data type not found" % name)
return r

View File

@ -4,8 +4,7 @@
# psycopg/_range.py - Implementation of the Range type and adaptation
#
# Copyright (C) 2012-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
# Copyright (C) 2020-2021 The Psycopg Team
# Copyright (C) 2012 Daniele Varrazzo <daniele.varrazzo@gmail.com>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@ -32,7 +31,7 @@ from psycopg2.extensions import ISQLQuote, adapt, register_adapter
from psycopg2.extensions import new_type, new_array_type, register_type
class Range:
class Range(object):
"""Python representation for a PostgreSQL |range|_ type.
:param lower: lower bound for the range. `!None` means unbound
@ -47,7 +46,7 @@ class Range:
def __init__(self, lower=None, upper=None, bounds='[)', empty=False):
if not empty:
if bounds not in ('[)', '(]', '()', '[]'):
raise ValueError(f"bound flags not valid: {bounds!r}")
raise ValueError("bound flags not valid: %r" % bounds)
self._lower = lower
self._upper = upper
@ -57,24 +56,11 @@ class Range:
def __repr__(self):
if self._bounds is None:
return f"{self.__class__.__name__}(empty=True)"
return "%s(empty=True)" % self.__class__.__name__
else:
return "{}({!r}, {!r}, {!r})".format(self.__class__.__name__,
return "%s(%r, %r, %r)" % (self.__class__.__name__,
self._lower, self._upper, self._bounds)
def __str__(self):
if self._bounds is None:
return 'empty'
items = [
self._bounds[0],
str(self._lower),
', ',
str(self._upper),
self._bounds[1]
]
return ''.join(items)
@property
def lower(self):
"""The lower bound of the range. `!None` if empty or unbound."""
@ -140,7 +126,7 @@ class Range:
return True
def __bool__(self):
def __nonzero__(self):
return self._bounds is not None
def __eq__(self, other):
@ -195,8 +181,11 @@ class Range:
return self.__gt__(other)
def __getstate__(self):
return {slot: getattr(self, slot)
for slot in self.__slots__ if hasattr(self, slot)}
return dict(
(slot, getattr(self, slot))
for slot in self.__slots__
if hasattr(self, slot)
)
def __setstate__(self, state):
for slot, value in state.items():
@ -234,7 +223,7 @@ def register_range(pgrange, pyrange, conn_or_curs, globally=False):
return caster
class RangeAdapter:
class RangeAdapter(object):
"""`ISQLQuote` adapter for `Range` subclasses.
This is an abstract class: concrete classes must set a `name` class
@ -282,7 +271,7 @@ class RangeAdapter:
+ b", '" + r._bounds.encode('utf8') + b"')"
class RangeCaster:
class RangeCaster(object):
"""Helper class to convert between `Range` and PostgreSQL range types.
Objects of this class are usually created by `register_range()`. Manual
@ -310,7 +299,7 @@ class RangeCaster:
# an implementation detail and is not documented. It is currently used
# for the numeric ranges.
self.adapter = None
if isinstance(pgrange, str):
if isinstance(pgrange, basestring):
self.adapter = type(pgrange, (RangeAdapter,), {})
self.adapter.name = pgrange
else:
@ -327,7 +316,7 @@ class RangeCaster:
self.range = None
try:
if isinstance(pyrange, str):
if isinstance(pyrange, basestring):
self.range = type(pyrange, (Range,), {})
if issubclass(pyrange, Range) and pyrange is not Range:
self.range = pyrange
@ -348,9 +337,9 @@ class RangeCaster:
from psycopg2.extras import _solve_conn_curs
conn, curs = _solve_conn_curs(conn_or_curs)
if conn.info.server_version < 90200:
if conn.server_version < 90200:
raise ProgrammingError("range types not available in version %s"
% conn.info.server_version)
% conn.server_version)
# Store the transaction status of the connection to revert it after use
conn_status = conn.status
@ -363,54 +352,33 @@ class RangeCaster:
schema = 'public'
# get the type oid and attributes
curs.execute("""\
select rngtypid, rngsubtype, typarray
try:
curs.execute("""\
select rngtypid, rngsubtype,
(select typarray from pg_type where oid = rngtypid)
from pg_range r
join pg_type t on t.oid = rngtypid
join pg_namespace ns on ns.oid = typnamespace
where typname = %s and ns.nspname = %s;
""", (tname, schema))
rec = curs.fetchone()
if not rec:
# The above algorithm doesn't work for customized search_path
# (#1487) The implementation below works better, but, to guarantee
# backwards compatibility, use it only if the original one failed.
try:
savepoint = False
# Because we executed statements earlier, we are either INTRANS
# or we are IDLE only if the transaction is autocommit, in
# which case we don't need the savepoint anyway.
if conn.status == STATUS_IN_TRANSACTION:
curs.execute("SAVEPOINT register_type")
savepoint = True
except ProgrammingError:
if not conn.autocommit:
conn.rollback()
raise
else:
rec = curs.fetchone()
curs.execute("""\
SELECT rngtypid, rngsubtype, typarray, typname, nspname
from pg_range r
join pg_type t on t.oid = rngtypid
join pg_namespace ns on ns.oid = typnamespace
WHERE t.oid = %s::regtype
""", (name, ))
except ProgrammingError:
pass
else:
rec = curs.fetchone()
if rec:
tname, schema = rec[3:]
finally:
if savepoint:
curs.execute("ROLLBACK TO SAVEPOINT register_type")
# revert the status of the connection as before the command
if conn_status != STATUS_IN_TRANSACTION and not conn.autocommit:
conn.rollback()
# revert the status of the connection as before the command
if (conn_status != STATUS_IN_TRANSACTION
and not conn.autocommit):
conn.rollback()
if not rec:
raise ProgrammingError(
f"PostgreSQL range '{name}' not found")
"PostgreSQL type '%s' not found" % name)
type, subtype, array = rec[:3]
type, subtype, array = rec
return RangeCaster(name, pyrange,
oid=type, subtype_oid=subtype, array_oid=array)
@ -440,7 +408,7 @@ WHERE t.oid = %s::regtype
m = self._re_range.match(s)
if m is None:
raise InterfaceError(f"failed to parse range: '{s}'")
raise InterfaceError("failed to parse range: '%s'" % s)
lower = m.group(3)
if lower is None:
@ -520,12 +488,13 @@ class NumberRangeAdapter(RangeAdapter):
else:
upper = ''
return (f"'{r._bounds[0]}{lower},{upper}{r._bounds[1]}'").encode('ascii')
return ("'%s%s,%s%s'" % (
r._bounds[0], lower, upper, r._bounds[1])).encode('ascii')
# TODO: probably won't work with infs, nans and other tricky cases.
register_adapter(NumericRange, NumberRangeAdapter)
# Register globally typecasters and adapters for builtin range types.
# note: the adapter is registered more than once, but this is harmless.
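
A minimal usage sketch of the range support above, assuming a reachable database at the placeholder DSN "dbname=test" (adjust to your environment). NumericRange and register_range are re-exported by psycopg2.extras; the built-in numrange caster is registered at import time by the code shown here.

import psycopg2
from psycopg2.extras import NumericRange

conn = psycopg2.connect("dbname=test")  # placeholder DSN
cur = conn.cursor()
# NumberRangeAdapter adapts the NumericRange on the way in; the registered
# typecaster converts the numrange coming back into a NumericRange again.
cur.execute("SELECT %s::numrange", (NumericRange(1, 10, '[)'),))
r = cur.fetchone()[0]
print(r.lower, r.upper, r.lower_inc, r.upper_inc, r.isempty)
conn.close()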

View File

@ -1,11 +1,10 @@
"""Error codes for PostgreSQL
"""Error codes for PostgresSQL
This module contains symbolic names for all PostgreSQL error codes.
"""
# psycopg2/errorcodes.py - PostgreSQL error codes
#
# Copyright (C) 2006-2019 Johan Dahlin <jdahlin@async.com.br>
# Copyright (C) 2020-2021 The Psycopg Team
# Copyright (C) 2006-2010 Johan Dahlin <jdahlin@async.com.br>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@ -27,7 +26,7 @@ This module contains symbolic names for all PostgreSQL error codes.
#
# Based on:
#
# https://www.postgresql.org/docs/current/static/errcodes-appendix.html
# http://www.postgresql.org/docs/current/static/errcodes-appendix.html
#
@ -41,10 +40,9 @@ def lookup(code, _cache={}):
# Generate the lookup map at first usage.
tmp = {}
for k, v in globals().items():
for k, v in globals().iteritems():
if isinstance(v, str) and len(v) in (2, 5):
# Strip trailing underscore used to disambiguate duplicate values
tmp[v] = k.rstrip("_")
tmp[v] = k
assert tmp
@ -95,7 +93,6 @@ CLASS_PROGRAM_LIMIT_EXCEEDED = '54'
CLASS_OBJECT_NOT_IN_PREREQUISITE_STATE = '55'
CLASS_OPERATOR_INTERVENTION = '57'
CLASS_SYSTEM_ERROR = '58'
CLASS_SNAPSHOT_FAILURE = '72'
CLASS_CONFIGURATION_FILE_ERROR = 'F0'
CLASS_FOREIGN_DATA_WRAPPER_ERROR = 'HV'
CLASS_PL_PGSQL_ERROR = 'P0'
@ -107,7 +104,7 @@ SUCCESSFUL_COMPLETION = '00000'
# Class 01 - Warning
WARNING = '01000'
NULL_VALUE_ELIMINATED_IN_SET_FUNCTION = '01003'
STRING_DATA_RIGHT_TRUNCATION_ = '01004'
STRING_DATA_RIGHT_TRUNCATION = '01004'
PRIVILEGE_NOT_REVOKED = '01006'
PRIVILEGE_NOT_GRANTED = '01007'
IMPLICIT_ZERO_BIT_PADDING = '01008'
@ -165,7 +162,7 @@ DATA_EXCEPTION = '22000'
STRING_DATA_RIGHT_TRUNCATION = '22001'
NULL_VALUE_NO_INDICATOR_PARAMETER = '22002'
NUMERIC_VALUE_OUT_OF_RANGE = '22003'
NULL_VALUE_NOT_ALLOWED_ = '22004'
NULL_VALUE_NOT_ALLOWED = '22004'
ERROR_IN_ASSIGNMENT = '22005'
INVALID_DATETIME_FORMAT = '22007'
DATETIME_FIELD_OVERFLOW = '22008'
@ -175,7 +172,6 @@ INVALID_USE_OF_ESCAPE_CHARACTER = '2200C'
INVALID_ESCAPE_OCTET = '2200D'
ZERO_LENGTH_CHARACTER_STRING = '2200F'
MOST_SPECIFIC_TYPE_MISMATCH = '2200G'
SEQUENCE_GENERATOR_LIMIT_EXCEEDED = '2200H'
NOT_AN_XML_DOCUMENT = '2200L'
INVALID_XML_DOCUMENT = '2200M'
INVALID_XML_CONTENT = '2200N'
@ -184,7 +180,6 @@ INVALID_XML_PROCESSING_INSTRUCTION = '2200T'
INVALID_INDICATOR_PARAMETER_VALUE = '22010'
SUBSTRING_ERROR = '22011'
DIVISION_BY_ZERO = '22012'
INVALID_PRECEDING_OR_FOLLOWING_SIZE = '22013'
INVALID_ARGUMENT_FOR_NTILE_FUNCTION = '22014'
INTERVAL_FIELD_OVERFLOW = '22015'
INVALID_ARGUMENT_FOR_NTH_VALUE_FUNCTION = '22016'
@ -207,23 +202,6 @@ TRIM_ERROR = '22027'
ARRAY_SUBSCRIPT_ERROR = '2202E'
INVALID_TABLESAMPLE_REPEAT = '2202G'
INVALID_TABLESAMPLE_ARGUMENT = '2202H'
DUPLICATE_JSON_OBJECT_KEY_VALUE = '22030'
INVALID_ARGUMENT_FOR_SQL_JSON_DATETIME_FUNCTION = '22031'
INVALID_JSON_TEXT = '22032'
INVALID_SQL_JSON_SUBSCRIPT = '22033'
MORE_THAN_ONE_SQL_JSON_ITEM = '22034'
NO_SQL_JSON_ITEM = '22035'
NON_NUMERIC_SQL_JSON_ITEM = '22036'
NON_UNIQUE_KEYS_IN_A_JSON_OBJECT = '22037'
SINGLETON_SQL_JSON_ITEM_REQUIRED = '22038'
SQL_JSON_ARRAY_NOT_FOUND = '22039'
SQL_JSON_MEMBER_NOT_FOUND = '2203A'
SQL_JSON_NUMBER_NOT_FOUND = '2203B'
SQL_JSON_OBJECT_NOT_FOUND = '2203C'
TOO_MANY_JSON_ARRAY_ELEMENTS = '2203D'
TOO_MANY_JSON_OBJECT_MEMBERS = '2203E'
SQL_JSON_SCALAR_REQUIRED = '2203F'
SQL_JSON_ITEM_CANNOT_BE_CAST_TO_TARGET_TYPE = '2203G'
FLOATING_POINT_EXCEPTION = '22P01'
INVALID_TEXT_REPRESENTATION = '22P02'
INVALID_BINARY_REPRESENTATION = '22P03'
@ -255,8 +233,6 @@ SCHEMA_AND_DATA_STATEMENT_MIXING_NOT_SUPPORTED = '25007'
HELD_CURSOR_REQUIRES_SAME_ISOLATION_LEVEL = '25008'
NO_ACTIVE_SQL_TRANSACTION = '25P01'
IN_FAILED_SQL_TRANSACTION = '25P02'
IDLE_IN_TRANSACTION_SESSION_TIMEOUT = '25P03'
TRANSACTION_TIMEOUT = '25P04'
# Class 26 - Invalid SQL Statement Name
INVALID_SQL_STATEMENT_NAME = '26000'
@ -277,9 +253,9 @@ INVALID_TRANSACTION_TERMINATION = '2D000'
# Class 2F - SQL Routine Exception
SQL_ROUTINE_EXCEPTION = '2F000'
MODIFYING_SQL_DATA_NOT_PERMITTED_ = '2F002'
PROHIBITED_SQL_STATEMENT_ATTEMPTED_ = '2F003'
READING_SQL_DATA_NOT_PERMITTED_ = '2F004'
MODIFYING_SQL_DATA_NOT_PERMITTED = '2F002'
PROHIBITED_SQL_STATEMENT_ATTEMPTED = '2F003'
READING_SQL_DATA_NOT_PERMITTED = '2F004'
FUNCTION_EXECUTED_NO_RETURN_STATEMENT = '2F005'
# Class 34 - Invalid Cursor Name
@ -338,7 +314,6 @@ WRONG_OBJECT_TYPE = '42809'
INVALID_FOREIGN_KEY = '42830'
CANNOT_COERCE = '42846'
UNDEFINED_FUNCTION = '42883'
GENERATED_ALWAYS = '428C9'
RESERVED_NAME = '42939'
UNDEFINED_TABLE = '42P01'
UNDEFINED_PARAMETER = '42P02'
@ -384,7 +359,6 @@ OBJECT_NOT_IN_PREREQUISITE_STATE = '55000'
OBJECT_IN_USE = '55006'
CANT_CHANGE_RUNTIME_PARAM = '55P02'
LOCK_NOT_AVAILABLE = '55P03'
UNSAFE_NEW_ENUM_VALUE_USAGE = '55P04'
# Class 57 - Operator Intervention
OPERATOR_INTERVENTION = '57000'
@ -393,7 +367,6 @@ ADMIN_SHUTDOWN = '57P01'
CRASH_SHUTDOWN = '57P02'
CANNOT_CONNECT_NOW = '57P03'
DATABASE_DROPPED = '57P04'
IDLE_SESSION_TIMEOUT = '57P05'
# Class 58 - System Error (errors external to PostgreSQL itself)
SYSTEM_ERROR = '58000'
@ -401,9 +374,6 @@ IO_ERROR = '58030'
UNDEFINED_FILE = '58P01'
DUPLICATE_FILE = '58P02'
# Class 72 - Snapshot Failure
SNAPSHOT_TOO_OLD = '72000'
# Class F0 - Configuration File Error
CONFIG_FILE_ERROR = 'F0000'
LOCK_FILE_EXISTS = 'F0001'
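
A quick sketch of the lookup() helper defined at the top of this module; no database is needed. It maps an SQLSTATE string back to its symbolic name, the inverse of the constants listed above.

from psycopg2 import errorcodes

print(errorcodes.UNIQUE_VIOLATION)    # '23505'
print(errorcodes.lookup('23505'))     # 'UNIQUE_VIOLATION'

# Typical use in an error handler:
# except psycopg2.Error as exc:
#     print(errorcodes.lookup(exc.pgcode))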

View File

@ -1,38 +0,0 @@
"""Error classes for PostgreSQL error codes
"""
# psycopg/errors.py - SQLSTATE and DB-API exceptions
#
# Copyright (C) 2018-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# In addition, as a special exception, the copyright holders give
# permission to link this program with the OpenSSL library (or with
# modified versions of OpenSSL that use the same license as OpenSSL),
# and distribute linked combinations including the two.
#
# You must obey the GNU Lesser General Public License in all respects for
# all of the code used other than OpenSSL.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
#
# NOTE: the exceptions are injected into this module by the C extention.
#
def lookup(code):
"""Lookup an error code and return its exception class.
Raise `!KeyError` if the code is not found.
"""
from psycopg2._psycopg import sqlstate_errors # avoid circular import
return sqlstate_errors[code]
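
A minimal sketch of the errors module, which exists only on the master side of this comparison. lookup() returns the exception class injected by the C extension for a given SQLSTATE; the classes also subclass the DB-API exceptions.

from psycopg2 import errors
import psycopg2

UniqueViolation = errors.lookup('23505')
print(UniqueViolation.__name__)                              # UniqueViolation
print(issubclass(UniqueViolation, psycopg2.IntegrityError))  # True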

View File

@ -8,12 +8,11 @@ This module holds all the extensions to the DBAPI-2.0 provided by psycopg.
- `adapt()` -- exposes the PEP-246_ compatible adapting mechanism used
by psycopg to adapt Python types to PostgreSQL ones
.. _PEP-246: https://www.python.org/dev/peps/pep-0246/
.. _PEP-246: http://www.python.org/peps/pep-0246.html
"""
# psycopg/extensions.py - DBAPI-2.0 extensions specific to psycopg
#
# Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
# Copyright (C) 2020-2021 The Psycopg Team
# Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@ -36,24 +35,35 @@ This module holds all the extensions to the DBAPI-2.0 provided by psycopg.
import re as _re
from psycopg2._psycopg import ( # noqa
BINARYARRAY, BOOLEAN, BOOLEANARRAY, BYTES, BYTESARRAY, DATE, DATEARRAY,
DATETIMEARRAY, DECIMAL, DECIMALARRAY, FLOAT, FLOATARRAY, INTEGER,
INTEGERARRAY, INTERVAL, INTERVALARRAY, LONGINTEGER, LONGINTEGERARRAY,
ROWIDARRAY, STRINGARRAY, TIME, TIMEARRAY, UNICODE, UNICODEARRAY,
BINARYARRAY, BOOLEAN, BOOLEANARRAY, DATE, DATEARRAY, DATETIMEARRAY,
DECIMAL, DECIMALARRAY, FLOAT, FLOATARRAY, INTEGER, INTEGERARRAY,
INTERVAL, INTERVALARRAY, LONGINTEGER, LONGINTEGERARRAY, ROWIDARRAY,
STRINGARRAY, TIME, TIMEARRAY, UNICODE, UNICODEARRAY,
AsIs, Binary, Boolean, Float, Int, QuotedString, )
from psycopg2._psycopg import ( # noqa
PYDATE, PYDATETIME, PYDATETIMETZ, PYINTERVAL, PYTIME, PYDATEARRAY,
PYDATETIMEARRAY, PYDATETIMETZARRAY, PYINTERVALARRAY, PYTIMEARRAY,
DateFromPy, TimeFromPy, TimestampFromPy, IntervalFromPy, )
try:
from psycopg2._psycopg import ( # noqa
MXDATE, MXDATETIME, MXINTERVAL, MXTIME,
MXDATEARRAY, MXDATETIMEARRAY, MXINTERVALARRAY, MXTIMEARRAY,
DateFromMx, TimeFromMx, TimestampFromMx, IntervalFromMx, )
except ImportError:
pass
try:
from psycopg2._psycopg import ( # noqa
PYDATE, PYDATETIME, PYINTERVAL, PYTIME,
PYDATEARRAY, PYDATETIMEARRAY, PYINTERVALARRAY, PYTIMEARRAY,
DateFromPy, TimeFromPy, TimestampFromPy, IntervalFromPy, )
except ImportError:
pass
from psycopg2._psycopg import ( # noqa
adapt, adapters, encodings, connection, cursor,
lobject, Xid, libpq_version, parse_dsn, quote_ident,
string_types, binary_types, new_type, new_array_type, register_type,
ISQLQuote, Notify, Diagnostics, Column, ConnectionInfo,
ISQLQuote, Notify, Diagnostics, Column,
QueryCanceledError, TransactionRollbackError,
set_wait_callback, get_wait_callback, encrypt_password, )
set_wait_callback, get_wait_callback, )
"""Isolation level values."""
@ -98,7 +108,7 @@ def register_adapter(typ, callable):
# The SQL_IN class is the official adapter for tuples starting from 2.0.6.
class SQL_IN:
class SQL_IN(object):
"""Adapt any iterable to an SQL quotable object."""
def __init__(self, seq):
self._seq = seq
@ -122,7 +132,7 @@ class SQL_IN:
return str(self.getquoted())
class NoneAdapter:
class NoneAdapter(object):
"""Adapt None to NULL.
This adapter is not used normally as a fast path in mogrify uses NULL,
@ -152,16 +162,13 @@ def make_dsn(dsn=None, **kwargs):
"you can't specify both 'database' and 'dbname' arguments")
kwargs['dbname'] = kwargs.pop('database')
# Drop the None arguments
kwargs = {k: v for (k, v) in kwargs.items() if v is not None}
if dsn is not None:
tmp = parse_dsn(dsn)
tmp.update(kwargs)
kwargs = tmp
dsn = " ".join(["{}={}".format(k, _param_escape(str(v)))
for (k, v) in kwargs.items()])
dsn = " ".join(["%s=%s" % (k, _param_escape(str(v)))
for (k, v) in kwargs.iteritems()])
# verify that the returned dsn is valid
parse_dsn(dsn)
@ -206,7 +213,7 @@ del Range
# When the encoding is set its name is cleaned up from - and _ and turned
# uppercase, so an encoding not respecting these rules wouldn't be found in the
# encodings keys and would raise an exception with the unicode typecaster
for k, v in list(encodings.items()):
for k, v in encodings.items():
k = k.replace('_', '').replace('-', '').upper()
encodings[k] = v
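
A small sketch of make_dsn() from the module above; no database is needed. It merges a DSN string with keyword overrides, escapes values through _param_escape(), and validates the result with parse_dsn().

from psycopg2.extensions import make_dsn, parse_dsn

dsn = make_dsn("dbname=test", user="postgres", application_name="demo app")
print(dsn)             # keyword order may vary; the value with a space is quoted
print(parse_dsn(dsn))  # {'dbname': 'test', 'user': 'postgres', 'application_name': 'demo app'}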

View File

@ -5,8 +5,7 @@ and classes until a better place in the distribution is found.
"""
# psycopg/extras.py - miscellaneous extra goodies for psycopg
#
# Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
# Copyright (C) 2020-2021 The Psycopg Team
# Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@ -27,18 +26,20 @@ and classes until a better place in the distribution is found.
# License for more details.
import os as _os
import sys as _sys
import time as _time
import re as _re
from collections import namedtuple, OrderedDict
import logging as _logging
try:
import logging as _logging
except:
_logging = None
import psycopg2
from psycopg2 import extensions as _ext
from .extensions import cursor as _cursor
from .extensions import connection as _connection
from .extensions import adapt as _A, quote_ident
from functools import lru_cache
from psycopg2.extensions import cursor as _cursor
from psycopg2.extensions import connection as _connection
from psycopg2.extensions import adapt as _A, quote_ident
from psycopg2._psycopg import ( # noqa
REPLICATION_PHYSICAL, REPLICATION_LOGICAL,
@ -72,52 +73,52 @@ class DictCursorBase(_cursor):
else:
raise NotImplementedError(
"DictCursorBase can't be instantiated without a row factory.")
super().__init__(*args, **kwargs)
self._query_executed = False
self._prefetch = False
super(DictCursorBase, self).__init__(*args, **kwargs)
self._query_executed = 0
self._prefetch = 0
self.row_factory = row_factory
def fetchone(self):
if self._prefetch:
res = super().fetchone()
res = super(DictCursorBase, self).fetchone()
if self._query_executed:
self._build_index()
if not self._prefetch:
res = super().fetchone()
res = super(DictCursorBase, self).fetchone()
return res
def fetchmany(self, size=None):
if self._prefetch:
res = super().fetchmany(size)
res = super(DictCursorBase, self).fetchmany(size)
if self._query_executed:
self._build_index()
if not self._prefetch:
res = super().fetchmany(size)
res = super(DictCursorBase, self).fetchmany(size)
return res
def fetchall(self):
if self._prefetch:
res = super().fetchall()
res = super(DictCursorBase, self).fetchall()
if self._query_executed:
self._build_index()
if not self._prefetch:
res = super().fetchall()
res = super(DictCursorBase, self).fetchall()
return res
def __iter__(self):
try:
if self._prefetch:
res = super().__iter__()
first = next(res)
res = super(DictCursorBase, self).__iter__()
first = res.next()
if self._query_executed:
self._build_index()
if not self._prefetch:
res = super().__iter__()
first = next(res)
res = super(DictCursorBase, self).__iter__()
first = res.next()
yield first
while True:
yield next(res)
while 1:
yield res.next()
except StopIteration:
return
@ -125,36 +126,33 @@ class DictCursorBase(_cursor):
class DictConnection(_connection):
"""A connection that uses `DictCursor` automatically."""
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', self.cursor_factory or DictCursor)
return super().cursor(*args, **kwargs)
kwargs.setdefault('cursor_factory', DictCursor)
return super(DictConnection, self).cursor(*args, **kwargs)
class DictCursor(DictCursorBase):
"""A cursor that keeps a list of column name -> index mappings__.
.. __: https://docs.python.org/glossary.html#term-mapping
"""
"""A cursor that keeps a list of column name -> index mappings."""
def __init__(self, *args, **kwargs):
kwargs['row_factory'] = DictRow
super().__init__(*args, **kwargs)
self._prefetch = True
super(DictCursor, self).__init__(*args, **kwargs)
self._prefetch = 1
def execute(self, query, vars=None):
self.index = OrderedDict()
self._query_executed = True
return super().execute(query, vars)
self.index = {}
self._query_executed = 1
return super(DictCursor, self).execute(query, vars)
def callproc(self, procname, vars=None):
self.index = OrderedDict()
self._query_executed = True
return super().callproc(procname, vars)
self.index = {}
self._query_executed = 1
return super(DictCursor, self).callproc(procname, vars)
def _build_index(self):
if self._query_executed and self.description:
if self._query_executed == 1 and self.description:
for i in range(len(self.description)):
self.index[self.description[i][0]] = i
self._query_executed = False
self._query_executed = 0
class DictRow(list):
@ -169,40 +167,47 @@ class DictRow(list):
def __getitem__(self, x):
if not isinstance(x, (int, slice)):
x = self._index[x]
return super().__getitem__(x)
return list.__getitem__(self, x)
def __setitem__(self, x, v):
if not isinstance(x, (int, slice)):
x = self._index[x]
super().__setitem__(x, v)
list.__setitem__(self, x, v)
def items(self):
g = super().__getitem__
return ((n, g(self._index[n])) for n in self._index)
return list(self.iteritems())
def keys(self):
return iter(self._index)
return self._index.keys()
def values(self):
g = super().__getitem__
return (g(self._index[n]) for n in self._index)
return tuple(self[:])
def has_key(self, x):
return x in self._index
def get(self, x, default=None):
try:
return self[x]
except Exception:
except:
return default
def iteritems(self):
for n, v in self._index.iteritems():
yield n, list.__getitem__(self, v)
def iterkeys(self):
return self._index.iterkeys()
def itervalues(self):
return list.__iter__(self)
def copy(self):
return OrderedDict(self.items())
return dict(self.iteritems())
def __contains__(self, x):
return x in self._index
def __reduce__(self):
# this is apparently useless, but it fixes #1073
return super().__reduce__()
def __getstate__(self):
return self[:], self._index.copy()
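
A minimal sketch of how these rows are obtained in practice, assuming a reachable database at the placeholder DSN. DictRow instances are produced by the DictCursor factory and can be addressed both by column name and by position.

import psycopg2
import psycopg2.extras

conn = psycopg2.connect("dbname=test")  # placeholder DSN
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
cur.execute("SELECT 1 AS id, 'foo' AS name")
row = cur.fetchone()
print(row['name'], row[0])   # same row, by name and by index
conn.close()
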
@ -210,12 +215,19 @@ class DictRow(list):
self[:] = data[0]
self._index = data[1]
# drop the crusty Py2 methods
if _sys.version_info[0] > 2:
items = iteritems # noqa
keys = iterkeys # noqa
values = itervalues # noqa
del iteritems, iterkeys, itervalues, has_key
class RealDictConnection(_connection):
"""A connection that uses `RealDictCursor` automatically."""
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', self.cursor_factory or RealDictCursor)
return super().cursor(*args, **kwargs)
kwargs.setdefault('cursor_factory', RealDictCursor)
return super(RealDictConnection, self).cursor(*args, **kwargs)
class RealDictCursor(DictCursorBase):
@ -228,64 +240,57 @@ class RealDictCursor(DictCursorBase):
"""
def __init__(self, *args, **kwargs):
kwargs['row_factory'] = RealDictRow
super().__init__(*args, **kwargs)
super(RealDictCursor, self).__init__(*args, **kwargs)
self._prefetch = 0
def execute(self, query, vars=None):
self.column_mapping = []
self._query_executed = True
return super().execute(query, vars)
self._query_executed = 1
return super(RealDictCursor, self).execute(query, vars)
def callproc(self, procname, vars=None):
self.column_mapping = []
self._query_executed = True
return super().callproc(procname, vars)
self._query_executed = 1
return super(RealDictCursor, self).callproc(procname, vars)
def _build_index(self):
if self._query_executed and self.description:
self.column_mapping = [d[0] for d in self.description]
self._query_executed = False
if self._query_executed == 1 and self.description:
for i in range(len(self.description)):
self.column_mapping.append(self.description[i][0])
self._query_executed = 0
class RealDictRow(OrderedDict):
class RealDictRow(dict):
"""A `!dict` subclass representing a data record."""
def __init__(self, *args, **kwargs):
if args and isinstance(args[0], _cursor):
cursor = args[0]
args = args[1:]
else:
cursor = None
__slots__ = ('_column_mapping')
super().__init__(*args, **kwargs)
def __init__(self, cursor):
dict.__init__(self)
# Required for named cursors
if cursor.description and not cursor.column_mapping:
cursor._build_index()
if cursor is not None:
# Required for named cursors
if cursor.description and not cursor.column_mapping:
cursor._build_index()
self._column_mapping = cursor.column_mapping
# Store the cols mapping in the dict itself until the row is fully
# populated, so we don't need to add attributes to the class
# (hence keeping its maintenance, special pickle support, etc.)
self[RealDictRow] = cursor.column_mapping
def __setitem__(self, name, value):
if type(name) == int:
name = self._column_mapping[name]
return dict.__setitem__(self, name, value)
def __setitem__(self, key, value):
if RealDictRow in self:
# We are in the row building phase
mapping = self[RealDictRow]
super().__setitem__(mapping[key], value)
if key == len(mapping) - 1:
# Row building finished
del self[RealDictRow]
return
def __getstate__(self):
return (self.copy(), self._column_mapping[:])
super().__setitem__(key, value)
def __setstate__(self, data):
self.update(data[0])
self._column_mapping = data[1]
class NamedTupleConnection(_connection):
"""A connection that uses `NamedTupleCursor` automatically."""
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', self.cursor_factory or NamedTupleCursor)
return super().cursor(*args, **kwargs)
kwargs.setdefault('cursor_factory', NamedTupleCursor)
return super(NamedTupleConnection, self).cursor(*args, **kwargs)
class NamedTupleCursor(_cursor):
@ -305,22 +310,21 @@ class NamedTupleCursor(_cursor):
"abc'def"
"""
Record = None
MAX_CACHE = 1024
def execute(self, query, vars=None):
self.Record = None
return super().execute(query, vars)
return super(NamedTupleCursor, self).execute(query, vars)
def executemany(self, query, vars):
self.Record = None
return super().executemany(query, vars)
return super(NamedTupleCursor, self).executemany(query, vars)
def callproc(self, procname, vars=None):
self.Record = None
return super().callproc(procname, vars)
return super(NamedTupleCursor, self).callproc(procname, vars)
def fetchone(self):
t = super().fetchone()
t = super(NamedTupleCursor, self).fetchone()
if t is not None:
nt = self.Record
if nt is None:
@ -328,23 +332,23 @@ class NamedTupleCursor(_cursor):
return nt._make(t)
def fetchmany(self, size=None):
ts = super().fetchmany(size)
ts = super(NamedTupleCursor, self).fetchmany(size)
nt = self.Record
if nt is None:
nt = self.Record = self._make_nt()
return list(map(nt._make, ts))
return map(nt._make, ts)
def fetchall(self):
ts = super().fetchall()
ts = super(NamedTupleCursor, self).fetchall()
nt = self.Record
if nt is None:
nt = self.Record = self._make_nt()
return list(map(nt._make, ts))
return map(nt._make, ts)
def __iter__(self):
try:
it = super().__iter__()
t = next(it)
it = super(NamedTupleCursor, self).__iter__()
t = it.next()
nt = self.Record
if nt is None:
@ -352,55 +356,35 @@ class NamedTupleCursor(_cursor):
yield nt._make(t)
while True:
yield nt._make(next(it))
while 1:
yield nt._make(it.next())
except StopIteration:
return
def _make_nt(self):
key = tuple(d[0] for d in self.description) if self.description else ()
return self._cached_make_nt(key)
@classmethod
def _do_make_nt(cls, key):
fields = []
for s in key:
s = _re_clean.sub('_', s)
# Python identifier cannot start with numbers, namedtuple fields
# cannot start with underscore. So...
if s[0] == '_' or '0' <= s[0] <= '9':
s = 'f' + s
fields.append(s)
nt = namedtuple("Record", fields)
return nt
@lru_cache(512)
def _cached_make_nt(cls, key):
return cls._do_make_nt(key)
# Exposed for testability, and if someone wants to monkeypatch to tweak
# the cache size.
NamedTupleCursor._cached_make_nt = classmethod(_cached_make_nt)
try:
from collections import namedtuple
except ImportError, _exc:
def _make_nt(self):
raise self._exc
else:
def _make_nt(self, namedtuple=namedtuple):
return namedtuple("Record", [d[0] for d in self.description or ()])
class LoggingConnection(_connection):
"""A connection that logs all queries to a file or logger__ object.
.. __: https://docs.python.org/library/logging.html
.. __: http://docs.python.org/library/logging.html
"""
def initialize(self, logobj):
"""Initialize the connection to log to `!logobj`.
The `!logobj` parameter can be an open file object or a Logger/LoggerAdapter
The `!logobj` parameter can be an open file object or a Logger
instance from the standard logging module.
"""
self._logobj = logobj
if _logging and isinstance(
logobj, (_logging.Logger, _logging.LoggerAdapter)):
if _logging and isinstance(logobj, _logging.Logger):
self.log = self._logtologger
else:
self.log = self._logtofile
@ -417,7 +401,7 @@ class LoggingConnection(_connection):
def _logtofile(self, msg, curs):
msg = self.filter(msg, curs)
if msg:
if isinstance(msg, bytes):
if _sys.version_info[0] >= 3 and isinstance(msg, bytes):
msg = msg.decode(_ext.encodings[self.encoding], 'replace')
self._logobj.write(msg + _os.linesep)
@ -433,8 +417,8 @@ class LoggingConnection(_connection):
def cursor(self, *args, **kwargs):
self._check()
kwargs.setdefault('cursor_factory', self.cursor_factory or LoggingCursor)
return super().cursor(*args, **kwargs)
kwargs.setdefault('cursor_factory', LoggingCursor)
return super(LoggingConnection, self).cursor(*args, **kwargs)
class LoggingCursor(_cursor):
@ -442,13 +426,13 @@ class LoggingCursor(_cursor):
def execute(self, query, vars=None):
try:
return super().execute(query, vars)
return super(LoggingCursor, self).execute(query, vars)
finally:
self.connection.log(self.query, self)
def callproc(self, procname, vars=None):
try:
return super().callproc(procname, vars)
return super(LoggingCursor, self).callproc(procname, vars)
finally:
self.connection.log(self.query, self)
@ -471,13 +455,10 @@ class MinTimeLoggingConnection(LoggingConnection):
def filter(self, msg, curs):
t = (_time.time() - curs.timestamp) * 1000
if t > self._mintime:
if isinstance(msg, bytes):
msg = msg.decode(_ext.encodings[self.encoding], 'replace')
return f"{msg}{_os.linesep} (execution time: {t} ms)"
return msg + _os.linesep + " (execution time: %d ms)" % t
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory',
self.cursor_factory or MinTimeLoggingCursor)
kwargs.setdefault('cursor_factory', MinTimeLoggingCursor)
return LoggingConnection.cursor(self, *args, **kwargs)
@ -497,14 +478,14 @@ class LogicalReplicationConnection(_replicationConnection):
def __init__(self, *args, **kwargs):
kwargs['replication_type'] = REPLICATION_LOGICAL
super().__init__(*args, **kwargs)
super(LogicalReplicationConnection, self).__init__(*args, **kwargs)
class PhysicalReplicationConnection(_replicationConnection):
def __init__(self, *args, **kwargs):
kwargs['replication_type'] = REPLICATION_PHYSICAL
super().__init__(*args, **kwargs)
super(PhysicalReplicationConnection, self).__init__(*args, **kwargs)
class StopReplication(Exception):
@ -525,7 +506,7 @@ class ReplicationCursor(_replicationCursor):
def create_replication_slot(self, slot_name, slot_type=None, output_plugin=None):
"""Create streaming replication slot."""
command = f"CREATE_REPLICATION_SLOT {quote_ident(slot_name, self)} "
command = "CREATE_REPLICATION_SLOT %s " % quote_ident(slot_name, self)
if slot_type is None:
slot_type = self.connection.replication_type
@ -536,7 +517,7 @@ class ReplicationCursor(_replicationCursor):
"output plugin name is required to create "
"logical replication slot")
command += f"LOGICAL {quote_ident(output_plugin, self)}"
command += "LOGICAL %s" % quote_ident(output_plugin, self)
elif slot_type == REPLICATION_PHYSICAL:
if output_plugin is not None:
@ -548,19 +529,18 @@ class ReplicationCursor(_replicationCursor):
else:
raise psycopg2.ProgrammingError(
f"unrecognized replication type: {repr(slot_type)}")
"unrecognized replication type: %s" % repr(slot_type))
self.execute(command)
def drop_replication_slot(self, slot_name):
"""Drop streaming replication slot."""
command = f"DROP_REPLICATION_SLOT {quote_ident(slot_name, self)}"
command = "DROP_REPLICATION_SLOT %s" % quote_ident(slot_name, self)
self.execute(command)
def start_replication(
self, slot_name=None, slot_type=None, start_lsn=0,
timeline=0, options=None, decode=False, status_interval=10):
def start_replication(self, slot_name=None, slot_type=None, start_lsn=0,
timeline=0, options=None, decode=False):
"""Start replication stream."""
command = "START_REPLICATION "
@ -570,7 +550,7 @@ class ReplicationCursor(_replicationCursor):
if slot_type == REPLICATION_LOGICAL:
if slot_name:
command += f"SLOT {quote_ident(slot_name, self)} "
command += "SLOT %s " % quote_ident(slot_name, self)
else:
raise psycopg2.ProgrammingError(
"slot name is required for logical replication")
@ -579,18 +559,19 @@ class ReplicationCursor(_replicationCursor):
elif slot_type == REPLICATION_PHYSICAL:
if slot_name:
command += f"SLOT {quote_ident(slot_name, self)} "
command += "SLOT %s " % quote_ident(slot_name, self)
# don't add "PHYSICAL", before 9.4 it was just START_REPLICATION XXX/XXX
else:
raise psycopg2.ProgrammingError(
f"unrecognized replication type: {repr(slot_type)}")
"unrecognized replication type: %s" % repr(slot_type))
if type(start_lsn) is str:
lsn = start_lsn.split('/')
lsn = f"{int(lsn[0], 16):X}/{int(lsn[1], 16):08X}"
lsn = "%X/%08X" % (int(lsn[0], 16), int(lsn[1], 16))
else:
lsn = f"{start_lsn >> 32 & 4294967295:X}/{start_lsn & 4294967295:08X}"
lsn = "%X/%08X" % ((start_lsn >> 32) & 0xFFFFFFFF,
start_lsn & 0xFFFFFFFF)
command += lsn
@ -599,7 +580,7 @@ class ReplicationCursor(_replicationCursor):
raise psycopg2.ProgrammingError(
"cannot specify timeline for logical replication")
command += f" TIMELINE {timeline}"
command += " TIMELINE %d" % timeline
if options:
if slot_type == REPLICATION_PHYSICAL:
@ -607,14 +588,13 @@ class ReplicationCursor(_replicationCursor):
"cannot specify output plugin options for physical replication")
command += " ("
for k, v in options.items():
for k, v in options.iteritems():
if not command.endswith('('):
command += ", "
command += f"{quote_ident(k, self)} {_A(str(v))}"
command += "%s %s" % (quote_ident(k, self), _A(str(v)))
command += ")"
self.start_replication_expert(
command, decode=decode, status_interval=status_interval)
self.start_replication_expert(command, decode=decode)
# allows replication cursors to be used in select.select() directly
def fileno(self):
@ -623,11 +603,11 @@ class ReplicationCursor(_replicationCursor):
# a dbtype and adapter for Python UUID type
class UUID_adapter:
class UUID_adapter(object):
"""Adapt Python's uuid.UUID__ type to PostgreSQL's uuid__.
.. __: https://docs.python.org/library/uuid.html
.. __: https://www.postgresql.org/docs/current/static/datatype-uuid.html
.. __: http://docs.python.org/library/uuid.html
.. __: http://www.postgresql.org/docs/current/static/datatype-uuid.html
"""
def __init__(self, uuid):
@ -638,10 +618,10 @@ class UUID_adapter:
return self
def getquoted(self):
return (f"'{self._uuid}'::uuid").encode('utf8')
return ("'%s'::uuid" % self._uuid).encode('utf8')
def __str__(self):
return f"'{self._uuid}'::uuid"
return "'%s'::uuid" % self._uuid
def register_uuid(oids=None, conn_or_curs=None):
@ -678,7 +658,7 @@ def register_uuid(oids=None, conn_or_curs=None):
# a type, dbtype and adapter for PostgreSQL inet type
class Inet:
class Inet(object):
"""Wrap a string to allow for correct SQL-quoting of inet values.
Note that this adapter does NOT check the passed value to make
@ -690,7 +670,7 @@ class Inet:
self.addr = addr
def __repr__(self):
return f"{self.__class__.__name__}({self.addr!r})"
return "%s(%r)" % (self.__class__.__name__, self.addr)
def prepare(self, conn):
self._conn = conn
@ -742,18 +722,30 @@ def register_inet(oid=None, conn_or_curs=None):
return _ext.INET
def register_tstz_w_secs(oids=None, conn_or_curs=None):
"""The function used to register an alternate type caster for
:sql:`TIMESTAMP WITH TIME ZONE` to deal with historical time zones with
seconds in the UTC offset.
These are now correctly handled by the default type caster, so currently
the function doesn't do anything.
"""
import warnings
warnings.warn("deprecated", DeprecationWarning)
def wait_select(conn):
"""Wait until a connection or cursor has data available.
The function is an example of a wait callback to be registered with
`~psycopg2.extensions.set_wait_callback()`. This function uses
:py:func:`~select.select()` to wait for data to become available, and
therefore is able to handle/receive SIGINT/KeyboardInterrupt.
:py:func:`~select.select()` to wait for data available.
"""
import select
from psycopg2.extensions import POLL_OK, POLL_READ, POLL_WRITE
while True:
while 1:
try:
state = conn.poll()
if state == POLL_OK:
@ -763,7 +755,7 @@ def wait_select(conn):
elif state == POLL_WRITE:
select.select([], [conn.fileno()], [])
else:
raise conn.OperationalError(f"bad state from poll: {state}")
raise conn.OperationalError("bad state from poll: %s" % state)
except KeyboardInterrupt:
conn.cancel()
# the loop will be broken by a server error
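
A sketch of how wait_select() is meant to be used: installed as the global wait callback with set_wait_callback(). On the master side of the comparison the callback also catches KeyboardInterrupt and cancels the running query. Placeholder DSN assumed.

import psycopg2
import psycopg2.extensions
import psycopg2.extras

psycopg2.extensions.set_wait_callback(psycopg2.extras.wait_select)
conn = psycopg2.connect("dbname=test")   # placeholder DSN
cur = conn.cursor()
cur.execute("SELECT pg_sleep(5)")        # interruptible with Ctrl-C on master
conn.close()
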
@ -785,7 +777,7 @@ def _solve_conn_curs(conn_or_curs):
return conn, curs
class HstoreAdapter:
class HstoreAdapter(object):
"""Adapt a Python dict to the hstore syntax."""
def __init__(self, wrapped):
self.wrapped = wrapped
@ -794,7 +786,7 @@ class HstoreAdapter:
self.conn = conn
# use an old-style getquoted implementation if required
if conn.info.server_version < 90000:
if conn.server_version < 90000:
self.getquoted = self._getquoted_8
def _getquoted_8(self):
@ -804,7 +796,7 @@ class HstoreAdapter:
adapt = _ext.adapt
rv = []
for k, v in self.wrapped.items():
for k, v in self.wrapped.iteritems():
k = adapt(k)
k.prepare(self.conn)
k = k.getquoted()
@ -826,9 +818,9 @@ class HstoreAdapter:
if not self.wrapped:
return b"''::hstore"
k = _ext.adapt(list(self.wrapped.keys()))
k = _ext.adapt(self.wrapped.keys())
k.prepare(self.conn)
v = _ext.adapt(list(self.wrapped.values()))
v = _ext.adapt(self.wrapped.values())
v.prepare(self.conn)
return b"hstore(" + k.getquoted() + b", " + v.getquoted() + b")"
@ -865,7 +857,7 @@ class HstoreAdapter:
for m in self._re_hstore.finditer(s):
if m is None or m.start() != start:
raise psycopg2.InterfaceError(
f"error parsing hstore pair at char {start}")
"error parsing hstore pair at char %d" % start)
k = _bsdec.sub(r'\1', m.group(1))
v = m.group(2)
if v is not None:
@ -876,7 +868,7 @@ class HstoreAdapter:
if start < len(s):
raise psycopg2.InterfaceError(
f"error parsing hstore: unparsed data after char {start}")
"error parsing hstore: unparsed data after char %d" % start)
return rv
@ -899,16 +891,17 @@ class HstoreAdapter:
conn_status = conn.status
# column typarray not available before PG 8.3
typarray = conn.info.server_version >= 80300 and "typarray" or "NULL"
typarray = conn.server_version >= 80300 and "typarray" or "NULL"
rv0, rv1 = [], []
# get the oid for the hstore
curs.execute(f"""SELECT t.oid, {typarray}
curs.execute("""\
SELECT t.oid, %s
FROM pg_type t JOIN pg_namespace ns
ON typnamespace = ns.oid
WHERE typname = 'hstore';
""")
""" % typarray)
for oids in curs:
rv0.append(oids[0])
rv1.append(oids[1])
@ -972,7 +965,12 @@ def register_hstore(conn_or_curs, globally=False, unicode=False,
array_oid = tuple([x for x in array_oid if x])
# create and register the typecaster
HSTORE = _ext.new_type(oid, "HSTORE", HstoreAdapter.parse)
if _sys.version_info[0] < 3 and unicode:
cast = HstoreAdapter.parse_unicode
else:
cast = HstoreAdapter.parse
HSTORE = _ext.new_type(oid, "HSTORE", cast)
_ext.register_type(HSTORE, not globally and conn_or_curs or None)
_ext.register_adapter(dict, HstoreAdapter)
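
A minimal sketch of register_hstore() in use: once registered, Python dicts adapt to hstore and hstore values are cast back to dicts. It assumes the hstore extension is installed in the target database and a placeholder DSN.

import psycopg2
import psycopg2.extras

conn = psycopg2.connect("dbname=test")   # placeholder DSN
psycopg2.extras.register_hstore(conn)
cur = conn.cursor()
cur.execute("SELECT %s::hstore", ({'a': '1', 'b': None},))
print(cur.fetchone()[0])                 # {'a': '1', 'b': None}
conn.close()
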
@ -981,7 +979,7 @@ def register_hstore(conn_or_curs, globally=False, unicode=False,
_ext.register_type(HSTOREARRAY, not globally and conn_or_curs or None)
class CompositeCaster:
class CompositeCaster(object):
"""Helps conversion of a PostgreSQL composite type into a Python object.
The class is usually created by the `register_composite()` function.
@ -1002,7 +1000,7 @@ class CompositeCaster:
self.typecaster = _ext.new_type((oid,), name, self.parse)
if array_oid:
self.array_typecaster = _ext.new_array_type(
(array_oid,), f"{name}ARRAY", self.typecaster)
(array_oid,), "%sARRAY" % name, self.typecaster)
else:
self.array_typecaster = None
@ -1046,7 +1044,7 @@ class CompositeCaster:
rv = []
for m in self._re_tokenize.finditer(s):
if m is None:
raise psycopg2.InterfaceError(f"can't parse type: {s!r}")
raise psycopg2.InterfaceError("can't parse type: %r" % s)
if m.group(1) is not None:
rv.append(None)
elif m.group(2) is not None:
@ -1057,9 +1055,14 @@ class CompositeCaster:
return rv
def _create_type(self, name, attnames):
name = _re_clean.sub('_', name)
self.type = namedtuple(name, attnames)
self._ctor = self.type._make
try:
from collections import namedtuple
except ImportError:
self.type = tuple
self._ctor = self.type
else:
self.type = namedtuple(name, attnames)
self._ctor = self.type._make
@classmethod
def _from_db(self, name, conn_or_curs):
@ -1080,7 +1083,7 @@ class CompositeCaster:
schema = 'public'
# column typarray not available before PG 8.3
typarray = conn.info.server_version >= 80300 and "typarray" or "NULL"
typarray = conn.server_version >= 80300 and "typarray" or "NULL"
# get the type oid and attributes
curs.execute("""\
@ -1095,46 +1098,14 @@ ORDER BY attnum;
recs = curs.fetchall()
if not recs:
# The above algorithm doesn't work for customized search_path
# (#1487) The implementation below works better, but, to guarantee
# backwards compatibility, use it only if the original one failed.
try:
savepoint = False
# Because we executed statements earlier, we are either INTRANS
# or we are IDLE only if the transaction is autocommit, in
# which case we don't need the savepoint anyway.
if conn.status == _ext.STATUS_IN_TRANSACTION:
curs.execute("SAVEPOINT register_type")
savepoint = True
curs.execute("""\
SELECT t.oid, %s, attname, atttypid, typname, nspname
FROM pg_type t
JOIN pg_namespace ns ON typnamespace = ns.oid
JOIN pg_attribute a ON attrelid = typrelid
WHERE t.oid = %%s::regtype
AND attnum > 0 AND NOT attisdropped
ORDER BY attnum;
""" % typarray, (name, ))
except psycopg2.ProgrammingError:
pass
else:
recs = curs.fetchall()
if recs:
tname = recs[0][4]
schema = recs[0][5]
finally:
if savepoint:
curs.execute("ROLLBACK TO SAVEPOINT register_type")
# revert the status of the connection as before the command
if conn_status != _ext.STATUS_IN_TRANSACTION and not conn.autocommit:
if (conn_status != _ext.STATUS_IN_TRANSACTION
and not conn.autocommit):
conn.rollback()
if not recs:
raise psycopg2.ProgrammingError(
f"PostgreSQL type '{name}' not found")
"PostgreSQL type '%s' not found" % name)
type_oid = recs[0][0]
array_oid = recs[0][1]
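
A sketch of how a CompositeCaster is normally obtained, through register_composite(). It assumes a composite type already exists in the database, e.g. CREATE TYPE card AS (value int, suit text), and a placeholder DSN.

import psycopg2
import psycopg2.extras

conn = psycopg2.connect("dbname=test")   # placeholder DSN
psycopg2.extras.register_composite('card', conn)
cur = conn.cursor()
cur.execute("SELECT (8, 'hearts')::card")
print(cur.fetchone()[0])                 # card(value=8, suit='hearts')
conn.close()
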
@ -1179,10 +1150,10 @@ def _paginate(seq, page_size):
"""
page = []
it = iter(seq)
while True:
while 1:
try:
for i in range(page_size):
page.append(next(it))
for i in xrange(page_size):
page.append(it.next())
yield page
page = []
except StopIteration:
@ -1207,16 +1178,13 @@ def execute_batch(cur, sql, argslist, page_size=100):
fewer multi-statement commands, each one containing at most *page_size*
statements, resulting in a reduced number of server roundtrips.
After the execution of the function the `cursor.rowcount` property will
**not** contain a total result.
"""
for page in _paginate(argslist, page_size=page_size):
sqls = [cur.mogrify(sql, args) for args in page]
cur.execute(b";".join(sqls))
def execute_values(cur, sql, argslist, template=None, page_size=100, fetch=False):
def execute_values(cur, sql, argslist, template=None, page_size=100):
'''Execute a statement using :sql:`VALUES` with a sequence of parameters.
:param cur: the cursor to use to execute the query.
@ -1230,15 +1198,10 @@ def execute_values(cur, sql, argslist, template=None, page_size=100, fetch=False
*template*.
:param template: the snippet to merge to every item in *argslist* to
compose the query.
- If the *argslist* items are sequences it should contain positional
placeholders (e.g. ``"(%s, %s, %s)"``, or ``"(%s, %s, 42)``" if there
are constants value...).
- If the *argslist* items are mappings it should contain named
placeholders (e.g. ``"(%(id)s, %(f1)s, 42)"``).
compose the query. If *argslist* items are sequences it should contain
positional placeholders (e.g. ``"(%s, %s, %s)"``, or ``"(%s, %s, 42)``"
if there are constants value...); If *argslist* is items are mapping
it should contain named placeholders (e.g. ``"(%(id)s, %(f1)s, 42)"``).
If not specified, assume the arguments are sequence and use a simple
positional template (i.e. ``(%s, %s, ...)``), with the number of
placeholders sniffed by the first element in *argslist*.
@ -1247,15 +1210,8 @@ def execute_values(cur, sql, argslist, template=None, page_size=100, fetch=False
statement. If there are more items the function will execute more than
one statement.
:param fetch: if `!True` return the query results into a list (like in a
`~cursor.fetchall()`). Useful for queries with :sql:`RETURNING`
clause.
.. __: https://www.postgresql.org/docs/current/static/queries-values.html
After the execution of the function the `cursor.rowcount` property will
**not** contain a total result.
While :sql:`INSERT` is an obvious candidate for this function it is
possible to use it with other statements, for example::
@ -1276,10 +1232,6 @@ def execute_values(cur, sql, argslist, template=None, page_size=100, fetch=False
[(1, 20, 3), (4, 50, 6), (7, 8, 9)])
'''
from psycopg2.sql import Composable
if isinstance(sql, Composable):
sql = sql.as_string(cur)
# we can't just use sql % vals because vals is bytes: if sql is bytes
# there will be some decoding error because of stupid codec used, and Py3
# doesn't implement % on bytes.
@ -1287,7 +1239,6 @@ def execute_values(cur, sql, argslist, template=None, page_size=100, fetch=False
sql = sql.encode(_ext.encodings[cur.connection.encoding])
pre, post = _split_sql(sql)
result = [] if fetch else None
for page in _paginate(argslist, page_size=page_size):
if template is None:
template = b'(' + b','.join([b'%s'] * len(page[0])) + b')'
@ -1297,10 +1248,6 @@ def execute_values(cur, sql, argslist, template=None, page_size=100, fetch=False
parts.append(b',')
parts[-1:] = post
cur.execute(b''.join(parts))
if fetch:
result.extend(cur.fetchall())
return result
def _split_sql(sql):
@ -1333,8 +1280,3 @@ def _split_sql(sql):
raise ValueError("the query doesn't contain any '%s' placeholder")
return pre, post
# ascii except alnum and underscore
_re_clean = _re.compile(
'[' + _re.escape(' !"#$%&\'()*+,-./:;<=>?@[\\]^`{|}~') + ']')
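
A sketch of the two fast-execution helpers documented above, using a temporary table so that only the placeholder DSN is an assumption. Note that the fetch argument of execute_values() exists only on the master side of this comparison.

import psycopg2
from psycopg2.extras import execute_batch, execute_values

conn = psycopg2.connect("dbname=test")   # placeholder DSN
cur = conn.cursor()
cur.execute("CREATE TEMP TABLE t (id int, v1 int, v2 int)")
# execute_batch() joins several statements per roundtrip
execute_batch(cur, "INSERT INTO t (id, v1, v2) VALUES (%s, %s, %s)",
              [(1, 2, 3), (4, 5, 6)])
# execute_values() expands a single VALUES list
execute_values(cur, "INSERT INTO t (id, v1, v2) VALUES %s",
               [(7, 8, 9), (10, 11, 12)])
conn.commit()
conn.close()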

View File

@ -4,8 +4,7 @@ This module implements thread-safe (and not) connection pools.
"""
# psycopg/pool.py - pooling code for psycopg
#
# Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
# Copyright (C) 2020-2021 The Psycopg Team
# Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@ -26,14 +25,14 @@ This module implements thread-safe (and not) connection pools.
# License for more details.
import psycopg2
from psycopg2 import extensions as _ext
import psycopg2.extensions as _ext
class PoolError(psycopg2.Error):
pass
class AbstractConnectionPool:
class AbstractConnectionPool(object):
"""Generic key-based pooling code."""
def __init__(self, minconn, maxconn, *args, **kwargs):
@ -96,17 +95,17 @@ class AbstractConnectionPool:
"""Put away a connection."""
if self.closed:
raise PoolError("connection pool is closed")
if key is None:
key = self._rused.get(id(conn))
if key is None:
raise PoolError("trying to put unkeyed connection")
if not key:
raise PoolError("trying to put unkeyed connection")
if len(self._pool) < self.minconn and not close:
# Return the connection into a consistent state before putting
# it back into the pool
if not conn.closed:
status = conn.info.transaction_status
status = conn.get_transaction_status()
if status == _ext.TRANSACTION_STATUS_UNKNOWN:
# server connection lost
conn.close()
@ -139,7 +138,7 @@ class AbstractConnectionPool:
for conn in self._pool + list(self._used.values()):
try:
conn.close()
except Exception:
except:
pass
self.closed = True
@ -185,3 +184,58 @@ class ThreadedConnectionPool(AbstractConnectionPool):
self._closeall()
finally:
self._lock.release()
class PersistentConnectionPool(AbstractConnectionPool):
"""A pool that assigns persistent connections to different threads.
Note that this connection pool generates by itself the required keys
using the current thread id. This means that until a thread puts away
a connection it will always get the same connection object by successive
`!getconn()` calls. This also means that a thread can't use more than one
single connection from the pool.
"""
def __init__(self, minconn, maxconn, *args, **kwargs):
"""Initialize the threading lock."""
import warnings
warnings.warn("deprecated: use ZPsycopgDA.pool implementation",
DeprecationWarning)
import threading
AbstractConnectionPool.__init__(
self, minconn, maxconn, *args, **kwargs)
self._lock = threading.Lock()
# we we'll need the thread module, to determine thread ids, so we
# import it here and copy it in an instance variable
import thread as _thread # work around for 2to3 bug - see ticket #348
self.__thread = _thread
def getconn(self):
"""Generate thread id and return a connection."""
key = self.__thread.get_ident()
self._lock.acquire()
try:
return self._getconn(key)
finally:
self._lock.release()
def putconn(self, conn=None, close=False):
"""Put away an unused connection."""
key = self.__thread.get_ident()
self._lock.acquire()
try:
if not conn:
conn = self._used[key]
self._putconn(conn, key, close)
finally:
self._lock.release()
def closeall(self):
"""Close all connections (even the one currently in use.)"""
self._lock.acquire()
try:
self._closeall()
finally:
self._lock.release()
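
Typical use of the thread-safe pool defined above, sketched with a placeholder DSN. The deprecated PersistentConnectionPool shown here exists only on the 2.7 side of the comparison.

from psycopg2.pool import ThreadedConnectionPool

pool = ThreadedConnectionPool(1, 5, dsn="dbname=test")  # placeholder DSN
conn = pool.getconn()
try:
    with conn.cursor() as cur:
        cur.execute("SELECT 1")
        print(cur.fetchone())
finally:
    pool.putconn(conn)   # return the connection instead of closing it
pool.closeall()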

96
lib/psycopg1.py Normal file
View File

@ -0,0 +1,96 @@
"""psycopg 1.1.x compatibility module
This module uses the new style connection and cursor types to build a psycopg
1.1.1.x compatibility layer. It should be considered a temporary hack to run
old code while porting to psycopg 2. Import it as follows::
from psycopg2 import psycopg1 as psycopg
"""
# psycopg/psycopg1.py - psycopg 1.1.x compatibility module
#
# Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# In addition, as a special exception, the copyright holders give
# permission to link this program with the OpenSSL library (or with
# modified versions of OpenSSL that use the same license as OpenSSL),
# and distribute linked combinations including the two.
#
# You must obey the GNU Lesser General Public License in all respects for
# all of the code used other than OpenSSL.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
import psycopg2._psycopg as _2psycopg # noqa
from psycopg2.extensions import cursor as _2cursor
from psycopg2.extensions import connection as _2connection
from psycopg2 import * # noqa
import psycopg2.extensions as _ext
_2connect = connect
def connect(*args, **kwargs):
"""connect(dsn, ...) -> new psycopg 1.1.x compatible connection object"""
kwargs['connection_factory'] = connection
conn = _2connect(*args, **kwargs)
conn.set_isolation_level(_ext.ISOLATION_LEVEL_READ_COMMITTED)
return conn
class connection(_2connection):
"""psycopg 1.1.x connection."""
def cursor(self):
"""cursor() -> new psycopg 1.1.x compatible cursor object"""
return _2connection.cursor(self, cursor_factory=cursor)
def autocommit(self, on_off=1):
"""autocommit(on_off=1) -> switch autocommit on (1) or off (0)"""
if on_off > 0:
self.set_isolation_level(_ext.ISOLATION_LEVEL_AUTOCOMMIT)
else:
self.set_isolation_level(_ext.ISOLATION_LEVEL_READ_COMMITTED)
class cursor(_2cursor):
"""psycopg 1.1.x cursor.
Note that this cursor implements the exact procedure used by psycopg 1 to
build dictionaries out of result rows. The DictCursor in the
psycopg.extras modules implements a much better and faster algorithm.
"""
def __build_dict(self, row):
res = {}
for i in range(len(self.description)):
res[self.description[i][0]] = row[i]
return res
def dictfetchone(self):
row = _2cursor.fetchone(self)
if row:
return self.__build_dict(row)
else:
return row
def dictfetchmany(self, size):
res = []
rows = _2cursor.fetchmany(self, size)
for row in rows:
res.append(self.__build_dict(row))
return res
def dictfetchall(self):
res = []
rows = _2cursor.fetchall(self)
for row in rows:
res.append(self.__build_dict(row))
return res
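
A short sketch of the compatibility layer above (present only on the 2.7 side of the comparison); it is a porting aid for psycopg 1.1.x code, not something to use in new projects. Placeholder DSN assumed.

from psycopg2 import psycopg1 as psycopg

conn = psycopg.connect("dbname=test")    # placeholder DSN
curs = conn.cursor()
curs.execute("SELECT 1 AS a, 2 AS b")
print(curs.dictfetchall())               # [{'a': 1, 'b': 2}]
conn.autocommit()                        # psycopg 1 style autocommit switch
conn.close()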

View File

@ -1,10 +1,9 @@
"""SQL composition utility module
"""
# psycopg/sql.py - SQL composition utility module
# psycopg/sql.py - Implementation of the JSON adaptation objects
#
# Copyright (C) 2016-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
# Copyright (C) 2020-2021 The Psycopg Team
# Copyright (C) 2016 Daniele Varrazzo <daniele.varrazzo@gmail.com>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@ -24,6 +23,7 @@
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
import sys
import string
from psycopg2 import extensions as ext
@ -32,13 +32,12 @@ from psycopg2 import extensions as ext
_formatter = string.Formatter()
class Composable:
class Composable(object):
"""
Abstract base class for objects that can be used to compose an SQL string.
`!Composable` objects can be passed directly to `~cursor.execute()`,
`~cursor.executemany()`, `~cursor.copy_expert()` in place of the query
string.
`!Composable` objects can be passed directly to `~cursor.execute()` and
`~cursor.executemany()` in place of the query string.
`!Composable` objects can be joined using the ``+`` operator: the result
will be a `Composed` instance containing the objects joined. The operator
@ -50,7 +49,7 @@ class Composable:
self._wrapped = wrapped
def __repr__(self):
return f"{self.__class__.__name__}({self._wrapped!r})"
return "%s(%r)" % (self.__class__.__name__, self._wrapped)
def as_string(self, context):
"""
@ -59,9 +58,9 @@ class Composable:
:param context: the context to evaluate the string into.
:type context: `connection` or `cursor`
The method is automatically invoked by `~cursor.execute()`,
`~cursor.executemany()`, `~cursor.copy_expert()` if a `!Composable` is
passed instead of the query string.
The method is automatically invoked by `~cursor.execute()` and
`~cursor.executemany()` if a `!Composable` is passed instead of the
query string.
"""
raise NotImplementedError
@ -85,11 +84,11 @@ class Composable:
class Composed(Composable):
"""
A `Composable` object made of a sequence of `!Composable`.
A `Composable` object made of a sequence of `Composable`.
The object is usually created using `!Composable` operators and methods.
The object is usually created using `Composable` operators and methods.
However it is possible to create a `!Composed` directly specifying a
sequence of `!Composable` as arguments.
sequence of `Composable` as arguments.
Example::
@ -106,10 +105,10 @@ class Composed(Composable):
for i in seq:
if not isinstance(i, Composable):
raise TypeError(
f"Composed elements must be Composable, got {i!r} instead")
"Composed elements must be Composable, got %r instead" % i)
wrapped.append(i)
super().__init__(wrapped)
super(Composed, self).__init__(wrapped)
@property
def seq(self):
@ -147,7 +146,7 @@ class Composed(Composable):
"foo", "bar"
"""
if isinstance(joiner, str):
if isinstance(joiner, basestring):
joiner = SQL(joiner)
elif not isinstance(joiner, SQL):
raise TypeError(
@ -179,9 +178,9 @@ class SQL(Composable):
select "foo", "bar" from "table"
"""
def __init__(self, string):
if not isinstance(string, str):
if not isinstance(string, basestring):
raise TypeError("SQL values must be strings")
super().__init__(string)
super(SQL, self).__init__(string)
@property
def string(self):
@ -203,12 +202,12 @@ class SQL(Composable):
:rtype: `Composed`
The method is similar to the Python `str.format()` method: the string
template supports auto-numbered (``{}``), numbered (``{0}``,
``{1}``...), and named placeholders (``{name}``), with positional
arguments replacing the numbered placeholders and keywords replacing
the named ones. However placeholder modifiers (``{0!r}``, ``{0:<10}``)
are not supported. Only `!Composable` objects can be passed to the
template.
template supports auto-numbered (``{}``, only available from Python
2.7), numbered (``{0}``, ``{1}``...), and named placeholders
(``{name}``), with positional arguments replacing the numbered
placeholders and keywords replacing the named ones. However placeholder
modifiers (``{0!r}``, ``{0:<10}``) are not supported. Only
`!Composable` objects can be passed to the template.
Example::
@ -276,7 +275,7 @@ class SQL(Composable):
rv = []
it = iter(seq)
try:
rv.append(next(it))
rv.append(it.next())
except StopIteration:
pass
else:
@ -289,11 +288,11 @@ class SQL(Composable):
class Identifier(Composable):
"""
A `Composable` representing an SQL identifier or a dot-separated sequence.
A `Composable` representing an SQL identifer.
Identifiers usually represent names of database objects, such as tables or
fields. PostgreSQL identifiers follow `different rules`__ than SQL string
literals for escaping (e.g. they use double quotes instead of single).
Identifiers usually represent names of database objects, such as tables
or fields. They follow `different rules`__ than SQL string literals for
escaping (e.g. they use double quotes).
.. __: https://www.postgresql.org/docs/current/static/sql-syntax-lexical.html# \
SQL-SYNTAX-IDENTIFIERS
@ -306,48 +305,20 @@ class Identifier(Composable):
>>> print(sql.SQL(', ').join([t1, t2, t3]).as_string(conn))
"foo", "ba'r", "ba""z"
Multiple strings can be passed to the object to represent a qualified name,
i.e. a dot-separated sequence of identifiers.
Example::
>>> query = sql.SQL("select {} from {}").format(
... sql.Identifier("table", "field"),
... sql.Identifier("schema", "table"))
>>> print(query.as_string(conn))
select "table"."field" from "schema"."table"
"""
def __init__(self, *strings):
if not strings:
raise TypeError("Identifier cannot be empty")
def __init__(self, string):
if not isinstance(string, basestring):
raise TypeError("SQL identifiers must be strings")
for s in strings:
if not isinstance(s, str):
raise TypeError("SQL identifier parts must be strings")
super().__init__(strings)
@property
def strings(self):
"""A tuple with the strings wrapped by the `Identifier`."""
return self._wrapped
super(Identifier, self).__init__(string)
@property
def string(self):
"""The string wrapped by the `Identifier`.
"""
if len(self._wrapped) == 1:
return self._wrapped[0]
else:
raise AttributeError(
"the Identifier wraps more than one than one string")
def __repr__(self):
return f"{self.__class__.__name__}({', '.join(map(repr, self._wrapped))})"
"""The string wrapped by the `Identifier`."""
return self._wrapped
def as_string(self, context):
return '.'.join(ext.quote_ident(s, context) for s in self._wrapped)
return ext.quote_ident(self._wrapped, context)
class Literal(Composable):
@ -389,7 +360,7 @@ class Literal(Composable):
a.prepare(conn)
rv = a.getquoted()
if isinstance(rv, bytes):
if sys.version_info[0] >= 3 and isinstance(rv, bytes):
rv = rv.decode(ext.encodings[conn.encoding])
return rv
@ -423,14 +394,14 @@ class Placeholder(Composable):
"""
def __init__(self, name=None):
if isinstance(name, str):
if isinstance(name, basestring):
if ')' in name:
raise ValueError(f"invalid name: {name!r}")
raise ValueError("invalid name: %r" % name)
elif name is not None:
raise TypeError(f"expected string or None as name, got {name!r}")
raise TypeError("expected string or None as name, got %r" % name)
super().__init__(name)
super(Placeholder, self).__init__(name)
@property
def name(self):
@ -438,14 +409,12 @@ class Placeholder(Composable):
return self._wrapped
def __repr__(self):
if self._wrapped is None:
return f"{self.__class__.__name__}()"
else:
return f"{self.__class__.__name__}({self._wrapped!r})"
return "Placeholder(%r)" % (
self._wrapped if self._wrapped is not None else '',)
def as_string(self, context):
if self._wrapped is not None:
return f"%({self._wrapped})s"
return "%%(%s)s" % self._wrapped
else:
return "%s"
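A minimal sketch of how the two placeholder forms above are typically combined with Identifier and join() to build a statement from a field list (names are illustrative):

    from psycopg2 import sql

    fields = ["first_name", "last_name"]
    query = sql.SQL("insert into {} ({}) values ({})").format(
        sql.Identifier("people"),
        sql.SQL(", ").join(map(sql.Identifier, fields)),
        sql.SQL(", ").join(map(sql.Placeholder, fields)))
    # query.as_string(conn) ->
    #   'insert into "people" ("first_name", "last_name")
    #    values (%(first_name)s, %(last_name)s)'
    # cur.execute(query, {"first_name": "Ada", "last_name": "Lovelace"})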

View File

@ -6,8 +6,7 @@ functions or used to set the .tzinfo_factory attribute in cursors.
"""
# psycopg/tz.py - tzinfo implementation
#
# Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
# Copyright (C) 2020-2021 The Psycopg Team
# Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@ -45,12 +44,7 @@ class FixedOffsetTimezone(datetime.tzinfo):
offset and name that instance will be returned. This saves memory and
improves comparability.
.. versionchanged:: 2.9
The constructor can take either a timedelta or a number of minutes of
offset. Previously only minutes were supported.
.. __: https://docs.python.org/library/datetime.html
.. __: http://docs.python.org/library/datetime.html#datetime-tzinfo
"""
_name = None
_offset = ZERO
@ -59,9 +53,7 @@ class FixedOffsetTimezone(datetime.tzinfo):
def __init__(self, offset=None, name=None):
if offset is not None:
if not isinstance(offset, datetime.timedelta):
offset = datetime.timedelta(minutes=offset)
self._offset = offset
self._offset = datetime.timedelta(minutes=offset)
if name is not None:
self._name = name
@ -72,28 +64,18 @@ class FixedOffsetTimezone(datetime.tzinfo):
try:
return cls._cache[key]
except KeyError:
tz = super().__new__(cls, offset, name)
tz = super(FixedOffsetTimezone, cls).__new__(cls, offset, name)
cls._cache[key] = tz
return tz
def __repr__(self):
offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60
return "psycopg2.tz.FixedOffsetTimezone(offset=%r, name=%r)" \
% (self._offset, self._name)
def __eq__(self, other):
if isinstance(other, FixedOffsetTimezone):
return self._offset == other._offset
else:
return NotImplemented
def __ne__(self, other):
if isinstance(other, FixedOffsetTimezone):
return self._offset != other._offset
else:
return NotImplemented
% (offset_mins, self._name)
def __getinitargs__(self):
return self._offset, self._name
offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60
return (offset_mins, self._name)
def utcoffset(self, dt):
return self._offset
@ -101,16 +83,14 @@ class FixedOffsetTimezone(datetime.tzinfo):
def tzname(self, dt):
if self._name is not None:
return self._name
minutes, seconds = divmod(self._offset.total_seconds(), 60)
hours, minutes = divmod(minutes, 60)
rv = "%+03d" % hours
if minutes or seconds:
rv += ":%02d" % minutes
if seconds:
rv += ":%02d" % seconds
return rv
else:
seconds = self._offset.seconds + self._offset.days * 86400
hours, seconds = divmod(seconds, 3600)
minutes = seconds / 60
if minutes:
return "%+03d:%d" % (hours, minutes)
else:
return "%+03d" % hours
def dst(self, dt):
return ZERO
@ -152,7 +132,6 @@ class LocalTimezone(datetime.tzinfo):
tt = time.localtime(stamp)
return tt.tm_isdst > 0
LOCAL = LocalTimezone()
# TODO: pre-generate some interesting time zones?
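A small usage sketch of the class above (a timedelta argument is only accepted by the newer constructor; a plain number of minutes works in both branches):

    import datetime
    from psycopg2.tz import FixedOffsetTimezone

    tz = FixedOffsetTimezone(offset=-300)    # -300 minutes, i.e. UTC-05
    dt = datetime.datetime(2017, 1, 1, 12, 0, tzinfo=tz)
    # tz.utcoffset(dt) -> a timedelta of -5 hours
    # tz.tzname(dt)    -> '-05'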

View File

@ -1,7 +1,6 @@
/* adapter_asis.c - adapt types as they are
*
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
* Copyright (C) 2020-2021 The Psycopg Team
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
*
* This file is part of psycopg.
*
@ -45,12 +44,14 @@ asis_getquoted(asisObject *self, PyObject *args)
}
else {
rv = PyObject_Str(self->wrapped);
/* unicode to bytes */
#if PY_MAJOR_VERSION > 2
/* unicode to bytes in Py3 */
if (rv) {
PyObject *tmp = PyUnicode_AsUTF8String(rv);
Py_DECREF(rv);
rv = tmp;
}
#endif
}
return rv;
@ -59,7 +60,7 @@ asis_getquoted(asisObject *self, PyObject *args)
static PyObject *
asis_str(asisObject *self)
{
return psyco_ensure_text(asis_getquoted(self, NULL));
return psycopg_ensure_text(asis_getquoted(self, NULL));
}
static PyObject *
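In effect the adapter above returns str() of the wrapped object, encoded to bytes on Python 3; a hedged sketch using the public AsIs wrapper:

    from psycopg2.extensions import AsIs

    print(AsIs(42).getquoted())        # b'42'
    print(AsIs("now()").getquoted())   # b'now()' -- passed through unescaped
    # cur.execute("insert into events (created) values (%s)", (AsIs("now()"),))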

View File

@ -1,7 +1,6 @@
/* adapter_asis.h - definition for the psycopg AsIs type wrapper
*
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
* Copyright (C) 2020-2021 The Psycopg Team
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
*
* This file is part of psycopg.
*

View File

@ -1,7 +1,6 @@
/* adapter_binary.c - Binary objects
*
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
* Copyright (C) 2020-2021 The Psycopg Team
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
*
* This file is part of psycopg.
*
@ -46,6 +45,9 @@ binary_escape(unsigned char *from, size_t from_length,
return PQescapeBytea(from, from_length, to_length);
}
#define HAS_BUFFER (PY_MAJOR_VERSION < 3)
#define HAS_MEMORYVIEW (PY_MAJOR_VERSION > 2 || PY_MINOR_VERSION >= 6)
/* binary_quote - do the quote process on plain and unicode strings */
static PyObject *
@ -56,8 +58,10 @@ binary_quote(binaryObject *self)
Py_ssize_t buffer_len;
size_t len = 0;
PyObject *rv = NULL;
#if HAS_MEMORYVIEW
Py_buffer view;
int got_view = 0;
#endif
/* Allow Binary(None) to work */
if (self->wrapped == Py_None) {
@ -67,6 +71,8 @@ binary_quote(binaryObject *self)
}
/* if we got a plain string or a buffer we escape it and save the buffer */
#if HAS_MEMORYVIEW
if (PyObject_CheckBuffer(self->wrapped)) {
if (0 > PyObject_GetBuffer(self->wrapped, &view, PyBUF_CONTIG_RO)) {
goto exit;
@ -75,6 +81,16 @@ binary_quote(binaryObject *self)
buffer = (const char *)(view.buf);
buffer_len = view.len;
}
#endif
#if HAS_BUFFER
if (!buffer && (Bytes_Check(self->wrapped) || PyBuffer_Check(self->wrapped))) {
if (PyObject_AsReadBuffer(self->wrapped, (const void **)&buffer,
&buffer_len) < 0) {
goto exit;
}
}
#endif
if (!buffer) {
goto exit;
@ -98,7 +114,9 @@ binary_quote(binaryObject *self)
exit:
if (to) { PQfreemem(to); }
#if HAS_MEMORYVIEW
if (got_view) { PyBuffer_Release(&view); }
#endif
/* if the wrapped object is not bytes or a buffer, this is an error */
if (!rv && !PyErr_Occurred()) {
@ -124,7 +142,7 @@ binary_getquoted(binaryObject *self, PyObject *args)
static PyObject *
binary_str(binaryObject *self)
{
return psyco_ensure_text(binary_getquoted(self, NULL));
return psycopg_ensure_text(binary_getquoted(self, NULL));
}
static PyObject *
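A short usage sketch of the quoting above, assuming an open connection ``conn``; the exact escaped form depends on the libpq version and bytea settings:

    from psycopg2 import Binary

    payload = Binary(b"\x00\x01\x02")   # also accepts memoryview/buffer objects
    # payload.prepare(conn)             # use connection-aware escaping
    # payload.getquoted() -> e.g. b"'\\x000102'::bytea"
    # cur.execute("insert into blobs (data) values (%s)", (payload,))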

View File

@ -1,7 +1,6 @@
/* adapter_binary.h - definition for the Binary type
*
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
* Copyright (C) 2020-2021 The Psycopg Team
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
*
* This file is part of psycopg.
*

View File

@ -1,7 +1,6 @@
/* adapter_datetime.c - python date/time objects
*
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
* Copyright (C) 2020-2021 The Psycopg Team
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
*
* This file is part of psycopg.
*
@ -36,9 +35,11 @@
#include <string.h>
RAISES_NEG int
adapter_datetime_init(void)
int
psyco_adapter_datetime_init(void)
{
Dprintf("psyco_adapter_datetime_init: datetime init");
PyDateTime_IMPORT;
if (!PyDateTimeAPI) {
@ -77,7 +78,7 @@ _pydatetime_string_date_time(pydatetimeObject *self)
break;
}
if (!(iso = psyco_ensure_bytes(
if (!(iso = psycopg_ensure_bytes(
PyObject_CallMethod(self->wrapped, "isoformat", NULL)))) {
goto error;
}
@ -99,7 +100,7 @@ _pydatetime_string_delta(pydatetimeObject *self)
char buffer[8];
int i;
int a = PyDateTime_DELTA_GET_MICROSECONDS(obj);
int a = obj->microseconds;
for (i=0; i < 6 ; i++) {
buffer[5-i] = '0' + (a % 10);
@ -108,9 +109,7 @@ _pydatetime_string_delta(pydatetimeObject *self)
buffer[6] = '\0';
return Bytes_FromFormat("'%d days %d.%s seconds'::interval",
PyDateTime_DELTA_GET_DAYS(obj),
PyDateTime_DELTA_GET_SECONDS(obj),
buffer);
obj->days, obj->seconds, buffer);
}
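The format above renders a timedelta as days plus seconds with a fixed six-digit fractional part; an illustrative check through the public adapt() entry point:

    from datetime import timedelta
    from psycopg2.extensions import adapt

    print(adapt(timedelta(days=1, seconds=5, microseconds=3)).getquoted())
    # b"'1 days 5.000003 seconds'::interval"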
static PyObject *
@ -127,7 +126,7 @@ pydatetime_getquoted(pydatetimeObject *self, PyObject *args)
static PyObject *
pydatetime_str(pydatetimeObject *self)
{
return psyco_ensure_text(pydatetime_getquoted(self, NULL));
return psycopg_ensure_text(pydatetime_getquoted(self, NULL));
}
static PyObject *
@ -264,6 +263,8 @@ PyTypeObject pydatetimeType = {
/** module-level functions **/
#ifdef PSYCOPG_DEFAULT_PYDATETIME
PyObject *
psyco_Date(PyObject *self, PyObject *args)
{
@ -423,8 +424,8 @@ psyco_TimeFromTicks(PyObject *self, PyObject *args)
PyObject *
psyco_TimestampFromTicks(PyObject *self, PyObject *args)
{
pydatetimeObject *wrapper = NULL;
PyObject *dt_aware = NULL;
PyObject *m = NULL;
PyObject *tz = NULL;
PyObject *res = NULL;
struct tm tm;
time_t t;
@ -433,6 +434,10 @@ psyco_TimestampFromTicks(PyObject *self, PyObject *args)
if (!PyArg_ParseTuple(args, "d", &ticks))
return NULL;
/* get psycopg2.tz.LOCAL from pythonland */
if (!(m = PyImport_ImportModule("psycopg2.tz"))) { goto exit; }
if (!(tz = PyObject_GetAttrString(m, "LOCAL"))) { goto exit; }
t = (time_t)floor(ticks);
ticks -= (double)t;
if (!localtime_r(&t, &tm)) {
@ -440,32 +445,19 @@ psyco_TimestampFromTicks(PyObject *self, PyObject *args)
goto exit;
}
/* Convert the tm to a wrapper containing a naive datetime.datetime */
if (!(wrapper = (pydatetimeObject *)_psyco_Timestamp(
tm.tm_year + 1900, tm.tm_mon + 1, tm.tm_mday,
tm.tm_hour, tm.tm_min, (double)tm.tm_sec + ticks, NULL))) {
goto exit;
}
/* Localize the datetime and assign it back to the wrapper */
if (!(dt_aware = PyObject_CallMethod(
wrapper->wrapped, "astimezone", NULL))) {
goto exit;
}
Py_CLEAR(wrapper->wrapped);
wrapper->wrapped = dt_aware;
dt_aware = NULL;
/* the wrapper is ready to be returned */
res = (PyObject *)wrapper;
wrapper = NULL;
res = _psyco_Timestamp(
tm.tm_year + 1900, tm.tm_mon + 1, tm.tm_mday,
tm.tm_hour, tm.tm_min, (double)tm.tm_sec + ticks,
tz);
exit:
Py_XDECREF(dt_aware);
Py_XDECREF(wrapper);
Py_XDECREF(tz);
Py_XDECREF(m);
return res;
}
#endif
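The two variants above localize the result differently: the newer code builds a naive datetime and calls astimezone() on it, while the older one imports psycopg2.tz.LOCAL and attaches it. A rough pure-Python restatement of the newer path (Python 3.6+):

    import time
    from datetime import datetime

    ticks = time.time()
    naive = datetime.fromtimestamp(ticks)   # local wall-clock time, no tzinfo
    aware = naive.astimezone()              # assumes local time, attaches the system zone
    # psycopg2.TimestampFromTicks(ticks) wraps an equivalent aware datetime for adaptation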
PyObject *
psyco_DateFromPy(PyObject *self, PyObject *args)
{

View File

@ -1,7 +1,6 @@
/* adapter_datetime.h - definition for the python date/time types
*
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
* Copyright (C) 2020-2021 The Psycopg Team
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
*
* This file is part of psycopg.
*
@ -46,7 +45,10 @@ typedef struct {
} pydatetimeObject;
RAISES_NEG HIDDEN int adapter_datetime_init(void);
HIDDEN int psyco_adapter_datetime_init(void);
/* functions exported to psycopgmodule.c */
#ifdef PSYCOPG_DEFAULT_PYDATETIME
HIDDEN PyObject *psyco_Date(PyObject *module, PyObject *args);
#define psyco_Date_doc \
@ -84,6 +86,8 @@ HIDDEN PyObject *psyco_TimestampFromTicks(PyObject *module, PyObject *args);
"Ticks are the number of seconds since the epoch; see the documentation " \
"of the standard Python time module for details)."
#endif /* PSYCOPG_DEFAULT_PYDATETIME */
HIDDEN PyObject *psyco_DateFromPy(PyObject *module, PyObject *args);
#define psyco_DateFromPy_doc \
"DateFromPy(datetime.date) -> new wrapper"

View File

@ -1,7 +1,6 @@
/* adapter_list.c - python list objects
*
* Copyright (C) 2004-2019 Federico Di Gregorio <fog@debian.org>
* Copyright (C) 2020-2021 The Psycopg Team
* Copyright (C) 2004-2010 Federico Di Gregorio <fog@debian.org>
*
* This file is part of psycopg.
*
@ -39,14 +38,13 @@ list_quote(listObject *self)
{
/* adapt the list by calling adapt() recursively and then wrapping
everything into "ARRAY[]" */
PyObject *res = NULL;
PyObject **qs = NULL;
Py_ssize_t bufsize = 0;
char *buf = NULL, *ptr;
PyObject *tmp = NULL, *str = NULL, *joined = NULL, *res = NULL;
/* list consisting of only NULL don't work with the ARRAY[] construct
* so we use the {NULL,...} syntax. The same syntax is also necessary
* to convert array of arrays containing only nulls. */
* so we use the {NULL,...} syntax. Note however that list of lists where
* some element is a list of only null still fails: for that we should use
* the '{...}' syntax uniformly but we cannot do it in the current
* infrastructure. TODO in psycopg3 */
int all_nulls = 1;
Py_ssize_t i, len;
@ -55,118 +53,54 @@ list_quote(listObject *self)
/* empty arrays are converted to NULLs (still searching for a way to
insert an empty array in postgresql) */
if (len == 0) {
/* it cannot be ARRAY[] because it would make empty lists unusable
* in any() without a cast. But we may convert it into ARRAY[] below */
res = Bytes_FromString("'{}'");
goto exit;
}
if (len == 0) return Bytes_FromString("'{}'");
if (!(qs = PyMem_New(PyObject *, len))) {
PyErr_NoMemory();
goto exit;
}
memset(qs, 0, len * sizeof(PyObject *));
tmp = PyTuple_New(len);
for (i = 0; i < len; i++) {
for (i=0; i<len; i++) {
PyObject *quoted;
PyObject *wrapped = PyList_GET_ITEM(self->wrapped, i);
if (wrapped == Py_None) {
Py_INCREF(psyco_null);
qs[i] = psyco_null;
quoted = psyco_null;
}
else {
if (!(qs[i] = microprotocol_getquoted(
wrapped, (connectionObject*)self->connection))) {
goto exit;
}
/* Lists of arrays containing only nulls are also not supported
* by the ARRAY construct so we should do some special casing */
if (PyList_Check(wrapped)) {
if (Bytes_AS_STRING(qs[i])[0] == 'A') {
all_nulls = 0;
}
else if (0 == strcmp(Bytes_AS_STRING(qs[i]), "'{}'")) {
/* case of issue #788: '{{}}' is not supported but
* array[array[]] is */
all_nulls = 0;
Py_CLEAR(qs[i]);
if (!(qs[i] = Bytes_FromString("ARRAY[]"))) {
goto exit;
}
}
}
else {
all_nulls = 0;
}
quoted = microprotocol_getquoted(wrapped,
(connectionObject*)self->connection);
if (quoted == NULL) goto error;
all_nulls = 0;
}
bufsize += Bytes_GET_SIZE(qs[i]) + 1; /* this, and a comma */
/* here we don't lose a refcnt: SET_ITEM does not change the
reference count and we are just transferring ownership of the tmp
object to the tuple */
PyTuple_SET_ITEM(tmp, i, quoted);
}
/* Create an array literal, usually ARRAY[...] but if the contents are
* all NULL or array of NULL we must use the '{...}' syntax
*/
if (!(ptr = buf = PyMem_Malloc(bufsize + 8))) {
PyErr_NoMemory();
goto exit;
}
/* now that we have a tuple of adapted objects we just need to join them
and put "ARRAY[]" around the result */
str = Bytes_FromString(", ");
joined = PyObject_CallMethod(str, "join", "(O)", tmp);
if (joined == NULL) goto error;
/* PG doesn't like ARRAY[NULL..] */
if (!all_nulls) {
strcpy(ptr, "ARRAY[");
ptr += 6;
for (i = 0; i < len; i++) {
Py_ssize_t sl;
sl = Bytes_GET_SIZE(qs[i]);
memcpy(ptr, Bytes_AS_STRING(qs[i]), sl);
ptr += sl;
*ptr++ = ',';
}
*(ptr - 1) = ']';
}
else {
*ptr++ = '\'';
*ptr++ = '{';
for (i = 0; i < len; i++) {
/* in case all the adapted things are nulls (or array of nulls),
* the quoted string is either NULL or an array of the form
* '{NULL,...}', in which case we have to strip the extra quotes */
char *s;
Py_ssize_t sl;
s = Bytes_AS_STRING(qs[i]);
sl = Bytes_GET_SIZE(qs[i]);
if (s[0] != '\'') {
memcpy(ptr, s, sl);
ptr += sl;
}
else {
memcpy(ptr, s + 1, sl - 2);
ptr += sl - 2;
}
*ptr++ = ',';
}
*(ptr - 1) = '}';
*ptr++ = '\'';
res = Bytes_FromFormat("ARRAY[%s]", Bytes_AsString(joined));
} else {
res = Bytes_FromFormat("'{%s}'", Bytes_AsString(joined));
}
res = Bytes_FromStringAndSize(buf, ptr - buf);
exit:
if (qs) {
for (i = 0; i < len; i++) {
PyObject *q = qs[i];
Py_XDECREF(q);
}
PyMem_Free(qs);
}
PyMem_Free(buf);
error:
Py_XDECREF(tmp);
Py_XDECREF(str);
Py_XDECREF(joined);
return res;
}
static PyObject *
list_str(listObject *self)
{
return psyco_ensure_text(list_quote(self));
return psycopg_ensure_text(list_quote(self));
}
static PyObject *
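An illustrative summary of the quoting rules discussed in the comments above: ARRAY[...] for ordinary content, the quoted '{...}' form when everything is NULL, and '{}' for an empty list (element spacing differs slightly between the two branches):

    from psycopg2.extensions import adapt

    print(adapt([10, 20, 30]).getquoted())   # e.g. b'ARRAY[10,20,30]'
    print(adapt([None, None]).getquoted())   # e.g. b"'{NULL,NULL}'"
    print(adapt([]).getquoted())             # b"'{}'"
    # strings, dates etc. inside the list need a connection first:
    # a = adapt(["a'b"]); a.prepare(conn); a.getquoted()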

View File

@ -1,7 +1,6 @@
/* adapter_list.h - definition for the python list types
*
* Copyright (C) 2004-2019 Federico Di Gregorio <fog@debian.org>
* Copyright (C) 2020-2021 The Psycopg Team
* Copyright (C) 2004-2010 Federico Di Gregorio <fog@debian.org>
*
* This file is part of psycopg.
*

View File

@ -0,0 +1,428 @@
/* adapter_mxdatetime.c - mx date/time objects
*
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
*
* This file is part of psycopg.
*
* psycopg2 is free software: you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition, as a special exception, the copyright holders give
* permission to link this program with the OpenSSL library (or with
* modified versions of OpenSSL that use the same license as OpenSSL),
* and distribute linked combinations including the two.
*
* You must obey the GNU Lesser General Public License in all respects for
* all of the code used other than OpenSSL.
*
* psycopg2 is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*/
#define PSYCOPG_MODULE
#include "psycopg/psycopg.h"
#include "psycopg/adapter_mxdatetime.h"
#include "psycopg/microprotocols_proto.h"
#include <mxDateTime.h>
#include <string.h>
/* Return 0 on success, -1 on failure, but don't set an exception */
int
psyco_adapter_mxdatetime_init(void)
{
Dprintf("psyco_adapter_mxdatetime_init: mx.DateTime init");
if (mxDateTime_ImportModuleAndAPI()) {
Dprintf("psyco_adapter_mxdatetime_init: mx.DateTime initialization failed");
PyErr_Clear();
return -1;
}
return 0;
}
/* mxdatetime_str, mxdatetime_getquoted - return result of quoting */
static PyObject *
mxdatetime_str(mxdatetimeObject *self)
{
mxDateTimeObject *dt;
mxDateTimeDeltaObject *dtd;
char buf[128] = { 0, };
switch (self->type) {
case PSYCO_MXDATETIME_DATE:
dt = (mxDateTimeObject *)self->wrapped;
if (dt->year >= 1)
PyOS_snprintf(buf, sizeof(buf) - 1, "'%04ld-%02d-%02d'::date",
dt->year, (int)dt->month, (int)dt->day);
else
PyOS_snprintf(buf, sizeof(buf) - 1, "'%04ld-%02d-%02d BC'::date",
1 - dt->year, (int)dt->month, (int)dt->day);
break;
case PSYCO_MXDATETIME_TIMESTAMP:
dt = (mxDateTimeObject *)self->wrapped;
if (dt->year >= 1)
PyOS_snprintf(buf, sizeof(buf) - 1,
"'%04ld-%02d-%02dT%02d:%02d:%09.6f'::timestamp",
dt->year, (int)dt->month, (int)dt->day,
(int)dt->hour, (int)dt->minute, dt->second);
else
PyOS_snprintf(buf, sizeof(buf) - 1,
"'%04ld-%02d-%02dT%02d:%02d:%09.6f BC'::timestamp",
1 - dt->year, (int)dt->month, (int)dt->day,
(int)dt->hour, (int)dt->minute, dt->second);
break;
case PSYCO_MXDATETIME_TIME:
case PSYCO_MXDATETIME_INTERVAL:
/* given the limitation of the mx.DateTime module that uses the same
type for both time and delta values we need to do some black magic
and make sure we're not using an adapt()ed interval as a simple
time */
dtd = (mxDateTimeDeltaObject *)self->wrapped;
if (0 <= dtd->seconds && dtd->seconds < 24*3600) {
PyOS_snprintf(buf, sizeof(buf) - 1, "'%02d:%02d:%09.6f'::time",
(int)dtd->hour, (int)dtd->minute, dtd->second);
} else {
double ss = dtd->hour*3600.0 + dtd->minute*60.0 + dtd->second;
if (dtd->seconds >= 0)
PyOS_snprintf(buf, sizeof(buf) - 1, "'%ld days %.6f seconds'::interval",
dtd->day, ss);
else
PyOS_snprintf(buf, sizeof(buf) - 1, "'-%ld days -%.6f seconds'::interval",
dtd->day, ss);
}
break;
}
return PyString_FromString(buf);
}
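The time/interval choice above hinges only on the delta's seconds value; a rough restatement of that rule, not part of the original module:

    def _mx_delta_kind(delta_seconds):
        # deltas within a single day are rendered as ::time,
        # anything negative or >= 24h as ::interval
        return "time" if 0 <= delta_seconds < 24 * 3600 else "interval"

    assert _mx_delta_kind(3661) == "time"
    assert _mx_delta_kind(-60) == "interval"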
static PyObject *
mxdatetime_getquoted(mxdatetimeObject *self, PyObject *args)
{
return mxdatetime_str(self);
}
static PyObject *
mxdatetime_conform(mxdatetimeObject *self, PyObject *args)
{
PyObject *res, *proto;
if (!PyArg_ParseTuple(args, "O", &proto)) return NULL;
if (proto == (PyObject*)&isqlquoteType)
res = (PyObject*)self;
else
res = Py_None;
Py_INCREF(res);
return res;
}
/** the MxDateTime object **/
/* object member list */
static struct PyMemberDef mxdatetimeObject_members[] = {
{"adapted", T_OBJECT, offsetof(mxdatetimeObject, wrapped), READONLY},
{"type", T_INT, offsetof(mxdatetimeObject, type), READONLY},
{NULL}
};
/* object method table */
static PyMethodDef mxdatetimeObject_methods[] = {
{"getquoted", (PyCFunction)mxdatetime_getquoted, METH_NOARGS,
"getquoted() -> wrapped object value as SQL date/time"},
{"__conform__", (PyCFunction)mxdatetime_conform, METH_VARARGS, NULL},
{NULL} /* Sentinel */
};
/* initialization and finalization methods */
static int
mxdatetime_setup(mxdatetimeObject *self, PyObject *obj, int type)
{
Dprintf("mxdatetime_setup: init mxdatetime object at %p, refcnt = "
FORMAT_CODE_PY_SSIZE_T,
self, Py_REFCNT(self)
);
self->type = type;
Py_INCREF(obj);
self->wrapped = obj;
Dprintf("mxdatetime_setup: good mxdatetime object at %p, refcnt = "
FORMAT_CODE_PY_SSIZE_T,
self, Py_REFCNT(self)
);
return 0;
}
static void
mxdatetime_dealloc(PyObject* obj)
{
mxdatetimeObject *self = (mxdatetimeObject *)obj;
Py_CLEAR(self->wrapped);
Dprintf("mxdatetime_dealloc: deleted mxdatetime object at %p, refcnt = "
FORMAT_CODE_PY_SSIZE_T,
obj, Py_REFCNT(obj)
);
Py_TYPE(obj)->tp_free(obj);
}
static int
mxdatetime_init(PyObject *obj, PyObject *args, PyObject *kwds)
{
PyObject *mx;
int type = -1; /* raise an error if type was not passed! */
if (!PyArg_ParseTuple(args, "O|i", &mx, &type))
return -1;
return mxdatetime_setup((mxdatetimeObject *)obj, mx, type);
}
static PyObject *
mxdatetime_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
return type->tp_alloc(type, 0);
}
/* object type */
#define mxdatetimeType_doc \
"MxDateTime(mx, type) -> new mx.DateTime wrapper object"
PyTypeObject mxdatetimeType = {
PyVarObject_HEAD_INIT(NULL, 0)
"psycopg2._psycopg.MxDateTime",
sizeof(mxdatetimeObject), 0,
mxdatetime_dealloc, /*tp_dealloc*/
0, /*tp_print*/
0, /*tp_getattr*/
0, /*tp_setattr*/
0, /*tp_compare*/
0, /*tp_repr*/
0, /*tp_as_number*/
0, /*tp_as_sequence*/
0, /*tp_as_mapping*/
0, /*tp_hash */
0, /*tp_call*/
(reprfunc)mxdatetime_str, /*tp_str*/
0, /*tp_getattro*/
0, /*tp_setattro*/
0, /*tp_as_buffer*/
Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE, /*tp_flags*/
mxdatetimeType_doc, /*tp_doc*/
0, /*tp_traverse*/
0, /*tp_clear*/
0, /*tp_richcompare*/
0, /*tp_weaklistoffset*/
0, /*tp_iter*/
0, /*tp_iternext*/
mxdatetimeObject_methods, /*tp_methods*/
mxdatetimeObject_members, /*tp_members*/
0, /*tp_getset*/
0, /*tp_base*/
0, /*tp_dict*/
0, /*tp_descr_get*/
0, /*tp_descr_set*/
0, /*tp_dictoffset*/
mxdatetime_init, /*tp_init*/
0, /*tp_alloc*/
mxdatetime_new, /*tp_new*/
};
/** module-level functions **/
#ifdef PSYCOPG_DEFAULT_MXDATETIME
PyObject *
psyco_Date(PyObject *self, PyObject *args)
{
PyObject *res, *mx;
int year, month, day;
if (!PyArg_ParseTuple(args, "iii", &year, &month, &day))
return NULL;
mx = mxDateTime.DateTime_FromDateAndTime(year, month, day, 0, 0, 0.0);
if (mx == NULL) return NULL;
res = PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
PSYCO_MXDATETIME_DATE);
Py_DECREF(mx);
return res;
}
PyObject *
psyco_Time(PyObject *self, PyObject *args)
{
PyObject *res, *mx;
int hours, minutes=0;
double seconds=0.0;
if (!PyArg_ParseTuple(args, "iid", &hours, &minutes, &seconds))
return NULL;
mx = mxDateTime.DateTimeDelta_FromTime(hours, minutes, seconds);
if (mx == NULL) return NULL;
res = PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
PSYCO_MXDATETIME_TIME);
Py_DECREF(mx);
return res;
}
PyObject *
psyco_Timestamp(PyObject *self, PyObject *args)
{
PyObject *res, *mx;
int year, month, day;
int hour=0, minute=0; /* default to midnight */
double second=0.0;
if (!PyArg_ParseTuple(args, "lii|iid", &year, &month, &day,
&hour, &minute, &second))
return NULL;
mx = mxDateTime.DateTime_FromDateAndTime(year, month, day,
hour, minute, second);
if (mx == NULL) return NULL;
res = PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
PSYCO_MXDATETIME_TIMESTAMP);
Py_DECREF(mx);
return res;
}
PyObject *
psyco_DateFromTicks(PyObject *self, PyObject *args)
{
PyObject *res, *mx;
double ticks;
if (!PyArg_ParseTuple(args,"d", &ticks))
return NULL;
if (!(mx = mxDateTime.DateTime_FromTicks(ticks)))
return NULL;
res = PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
PSYCO_MXDATETIME_DATE);
Py_DECREF(mx);
return res;
}
PyObject *
psyco_TimeFromTicks(PyObject *self, PyObject *args)
{
PyObject *res, *mx, *dt;
double ticks;
if (!PyArg_ParseTuple(args,"d", &ticks))
return NULL;
if (!(dt = mxDateTime.DateTime_FromTicks(ticks)))
return NULL;
if (!(mx = mxDateTime.DateTimeDelta_FromDaysAndSeconds(
0, ((mxDateTimeObject*)dt)->abstime)))
{
Py_DECREF(dt);
return NULL;
}
Py_DECREF(dt);
res = PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
PSYCO_MXDATETIME_TIME);
Py_DECREF(mx);
return res;
}
PyObject *
psyco_TimestampFromTicks(PyObject *self, PyObject *args)
{
PyObject *mx, *res;
double ticks;
if (!PyArg_ParseTuple(args, "d", &ticks))
return NULL;
if (!(mx = mxDateTime.DateTime_FromTicks(ticks)))
return NULL;
res = PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
PSYCO_MXDATETIME_TIMESTAMP);
Py_DECREF(mx);
return res;
}
#endif
PyObject *
psyco_DateFromMx(PyObject *self, PyObject *args)
{
PyObject *mx;
if (!PyArg_ParseTuple(args, "O!", mxDateTime.DateTime_Type, &mx))
return NULL;
return PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
PSYCO_MXDATETIME_DATE);
}
PyObject *
psyco_TimeFromMx(PyObject *self, PyObject *args)
{
PyObject *mx;
if (!PyArg_ParseTuple(args, "O!", mxDateTime.DateTimeDelta_Type, &mx))
return NULL;
return PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
PSYCO_MXDATETIME_TIME);
}
PyObject *
psyco_TimestampFromMx(PyObject *self, PyObject *args)
{
PyObject *mx;
if (!PyArg_ParseTuple(args, "O!", mxDateTime.DateTime_Type, &mx))
return NULL;
return PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
PSYCO_MXDATETIME_TIMESTAMP);
}
PyObject *
psyco_IntervalFromMx(PyObject *self, PyObject *args)
{
PyObject *mx;
if (!PyArg_ParseTuple(args, "O!", mxDateTime.DateTime_Type, &mx))
return NULL;
return PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
PSYCO_MXDATETIME_INTERVAL);
}

View File

@ -0,0 +1,98 @@
/* adapter_mxdatetime.h - definition for the mx date/time types
*
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
*
* This file is part of psycopg.
*
* psycopg2 is free software: you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition, as a special exception, the copyright holders give
* permission to link this program with the OpenSSL library (or with
* modified versions of OpenSSL that use the same license as OpenSSL),
* and distribute linked combinations including the two.
*
* You must obey the GNU Lesser General Public License in all respects for
* all of the code used other than OpenSSL.
*
* psycopg2 is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*/
#ifndef PSYCOPG_MXDATETIME_H
#define PSYCOPG_MXDATETIME_H 1
#ifdef __cplusplus
extern "C" {
#endif
extern HIDDEN PyTypeObject mxdatetimeType;
typedef struct {
PyObject_HEAD
PyObject *wrapped;
int type;
#define PSYCO_MXDATETIME_TIME 0
#define PSYCO_MXDATETIME_DATE 1
#define PSYCO_MXDATETIME_TIMESTAMP 2
#define PSYCO_MXDATETIME_INTERVAL 3
} mxdatetimeObject;
/* functions exported to psycopgmodule.c */
#ifdef PSYCOPG_DEFAULT_MXDATETIME
HIDDEN PyObject *psyco_Date(PyObject *module, PyObject *args);
#define psyco_Date_doc \
"Date(year, month, day) -> new date"
HIDDEN PyObject *psyco_Time(PyObject *module, PyObject *args);
#define psyco_Time_doc \
"Time(hour, minutes, seconds) -> new time"
HIDDEN PyObject *psyco_Timestamp(PyObject *module, PyObject *args);
#define psyco_Timestamp_doc \
"Timestamp(year, month, day, hour, minutes, seconds) -> new timestamp"
HIDDEN PyObject *psyco_DateFromTicks(PyObject *module, PyObject *args);
#define psyco_DateFromTicks_doc \
"DateFromTicks(ticks) -> new date"
HIDDEN PyObject *psyco_TimeFromTicks(PyObject *module, PyObject *args);
#define psyco_TimeFromTicks_doc \
"TimeFromTicks(ticks) -> new time"
HIDDEN PyObject *psyco_TimestampFromTicks(PyObject *module, PyObject *args);
#define psyco_TimestampFromTicks_doc \
"TimestampFromTicks(ticks) -> new timestamp"
#endif /* PSYCOPG_DEFAULT_MXDATETIME */
HIDDEN int psyco_adapter_mxdatetime_init(void);
HIDDEN PyObject *psyco_DateFromMx(PyObject *module, PyObject *args);
#define psyco_DateFromMx_doc \
"DateFromMx(mx) -> new date"
HIDDEN PyObject *psyco_TimeFromMx(PyObject *module, PyObject *args);
#define psyco_TimeFromMx_doc \
"TimeFromMx(mx) -> new time"
HIDDEN PyObject *psyco_TimestampFromMx(PyObject *module, PyObject *args);
#define psyco_TimestampFromMx_doc \
"TimestampFromMx(mx) -> new timestamp"
HIDDEN PyObject *psyco_IntervalFromMx(PyObject *module, PyObject *args);
#define psyco_IntervalFromMx_doc \
"IntervalFromMx(mx) -> new interval"
#ifdef __cplusplus
}
#endif
#endif /* !defined(PSYCOPG_MXDATETIME_H) */

View File

@ -1,7 +1,6 @@
/* adapter_pboolean.c - psycopg boolean type wrapper implementation
*
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
* Copyright (C) 2020-2021 The Psycopg Team
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
*
* This file is part of psycopg.
*
@ -49,7 +48,7 @@ pboolean_getquoted(pbooleanObject *self, PyObject *args)
static PyObject *
pboolean_str(pbooleanObject *self)
{
return psyco_ensure_text(pboolean_getquoted(self, NULL));
return psycopg_ensure_text(pboolean_getquoted(self, NULL));
}
static PyObject *

View File

@ -1,7 +1,6 @@
/* adapter_pboolean.h - definition for the psycopg boolean type wrapper
*
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
* Copyright (C) 2020-2021 The Psycopg Team
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
*
* This file is part of psycopg.
*

View File

@ -1,7 +1,6 @@
/* adapter_pdecimal.c - psycopg Decimal type wrapper implementation
*
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
* Copyright (C) 2020-2021 The Psycopg Team
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
*
* This file is part of psycopg.
*
@ -81,7 +80,8 @@ pdecimal_getquoted(pdecimalObject *self, PyObject *args)
/* res may be unicode and may suffer for issue #57 */
output:
/* unicode to bytes */
#if PY_MAJOR_VERSION > 2
/* unicode to bytes in Py3 */
{
PyObject *tmp = PyUnicode_AsUTF8String(res);
Py_DECREF(res);
@ -89,6 +89,7 @@ output:
goto end;
}
}
#endif
if ('-' == Bytes_AS_STRING(res)[0]) {
/* Prepend a space in front of negative numbers (ticket #57) */
@ -112,7 +113,7 @@ end:
static PyObject *
pdecimal_str(pdecimalObject *self)
{
return psyco_ensure_text(pdecimal_getquoted(self, NULL));
return psycopg_ensure_text(pdecimal_getquoted(self, NULL));
}
static PyObject *
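The leading space added to negative values (ticket #57, the same rule appears in the float adapter in the next file) keeps the minus sign from fusing with a preceding '-' operator into a '--' comment; an illustrative check:

    from decimal import Decimal
    from psycopg2.extensions import adapt

    print(adapt(Decimal("-10.5")).getquoted())   # b' -10.5' -- note the leading space
    print(adapt(Decimal("10.5")).getquoted())    # b'10.5'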

View File

@ -1,7 +1,6 @@
/* adapter_pdecimal.h - definition for the psycopg Decimal type wrapper
*
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
* Copyright (C) 2020-2021 The Psycopg Team
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
*
* This file is part of psycopg.
*

View File

@ -1,7 +1,6 @@
/* adapter_float.c - psycopg pfloat type wrapper implementation
*
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
* Copyright (C) 2020-2021 The Psycopg Team
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
*
* This file is part of psycopg.
*
@ -54,7 +53,8 @@ pfloat_getquoted(pfloatObject *self, PyObject *args)
goto exit;
}
/* unicode to bytes */
#if PY_MAJOR_VERSION > 2
/* unicode to bytes in Py3 */
{
PyObject *tmp = PyUnicode_AsUTF8String(rv);
Py_DECREF(rv);
@ -62,6 +62,7 @@ pfloat_getquoted(pfloatObject *self, PyObject *args)
goto exit;
}
}
#endif
if ('-' == Bytes_AS_STRING(rv)[0]) {
/* Prepend a space in front of negative numbers (ticket #57) */
@ -85,7 +86,7 @@ exit:
static PyObject *
pfloat_str(pfloatObject *self)
{
return psyco_ensure_text(pfloat_getquoted(self, NULL));
return psycopg_ensure_text(pfloat_getquoted(self, NULL));
}
static PyObject *

Some files were not shown because too many files have changed in this diff