Merge branch 'master' into master

Ashesh Vashi 2018-05-08 15:17:59 +05:30 committed by GitHub
commit 1bec2bdc43
136 changed files with 1508 additions and 1777 deletions


@ -13,52 +13,17 @@ environment:
matrix: matrix:
# For Python versions available on Appveyor, see # For Python versions available on Appveyor, see
# http://www.appveyor.com/docs/installed-software#python # http://www.appveyor.com/docs/installed-software#python
- {PYVER: "27", PYTHON_ARCH: "32"}
- {PYVER: "27", PYTHON_ARCH: "64"}
- {PYVER: "34", PYTHON_ARCH: "32"}
- {PYVER: "34", PYTHON_ARCH: "64"}
- {PYVER: "35", PYTHON_ARCH: "32"}
- {PYVER: "35", PYTHON_ARCH: "64"}
- {PYVER: "36", PYTHON_ARCH: "32"}
- {PYVER: "36", PYTHON_ARCH: "64"}
# Py 2.7 = VS Ver. 9.0 (VS 2008) OPENSSL_VERSION: "1_0_2n"
# Py 3.3, 3.4 = VS Ver. 10.0 (VS 2010) POSTGRES_VERSION: "10_1"
# Py 3.5, 3.6 = VS Ver. 14.0 (VS 2015)
- PYTHON: C:\Python27-x64
PYTHON_ARCH: 64
VS_VER: 9.0
- PYTHON: C:\Python27
PYTHON_ARCH: 32
VS_VER: 9.0
- PYTHON: C:\Python36-x64
PYTHON_ARCH: 64
VS_VER: 14.0
- PYTHON: C:\Python36
PYTHON_ARCH: 32
VS_VER: 14.0
- PYTHON: C:\Python35-x64
PYTHON_ARCH: 64
VS_VER: 14.0
- PYTHON: C:\Python35
PYTHON_ARCH: 32
VS_VER: 14.0
- PYTHON: C:\Python34-x64
DISTUTILS_USE_SDK: '1'
PYTHON_ARCH: 64
VS_VER: 10.0
- PYTHON: C:\Python34
PYTHON_ARCH: 32
VS_VER: 10.0
- PYTHON: C:\Python33-x64
DISTUTILS_USE_SDK: '1'
PYTHON_ARCH: 64
VS_VER: 10.0
- PYTHON: C:\Python33
PYTHON_ARCH: 32
VS_VER: 10.0
PSYCOPG2_TESTDB: psycopg2_test PSYCOPG2_TESTDB: psycopg2_test
PSYCOPG2_TESTDB_USER: postgres PSYCOPG2_TESTDB_USER: postgres
@ -73,17 +38,35 @@ matrix:
fast_finish: false fast_finish: false
services: services:
# Note: if you change this service also change the paths to match
# (see where Program Files\Postgres\9.6 is used)
- postgresql96 - postgresql96
cache: cache:
# Rebuild cache if following file changes # Rebuild cache if following file changes
# (See the file to zap the cache manually)
- C:\Others -> scripts\appveyor.cache_rebuild - C:\Others -> scripts\appveyor.cache_rebuild
# Script called before repo cloning # Script called before repo cloning
init: init:
# Uncomment next line to get RDP access during the build. # Uncomment next line to get RDP access during the build.
#- ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1')) #- ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
#
# Set env variable according to the build environment
- SET PYTHON=C:\Python%PYVER%
- IF "%PYTHON_ARCH%"=="64" SET PYTHON=%PYTHON%-x64
# Py 2.7 = VS Ver. 9.0 (VS 2008)
# Py 3.3, 3.4 = VS Ver. 10.0 (VS 2010)
# Py 3.5, 3.6 = VS Ver. 14.0 (VS 2015)
- IF "%PYVER%"=="27" SET VS_VER=9.0
- IF "%PYVER%"=="33" SET VS_VER=10.0
- IF "%PYVER%"=="34" SET VS_VER=10.0
- IF "%PYVER%"=="35" SET VS_VER=14.0
- IF "%PYVER%"=="36" SET VS_VER=14.0
- IF "%VS_VER%"=="10.0" IF "%PYTHON_ARCH%"=="64" SET DISTUTILS_USE_SDK=1
# Set Python to the path # Set Python to the path
- SET PATH=%PYTHON%;%PYTHON%\Scripts;C:\Program Files\Git\mingw64\bin;%PATH% - SET PATH=%PYTHON%;%PYTHON%\Scripts;C:\Program Files\Git\mingw64\bin;%PATH%
@ -113,6 +96,11 @@ init:
- IF "%PYTHON_ARCH%"=="32" (CALL "C:\\Program Files (x86)\\Microsoft Visual Studio %VS_VER%\\VC\\vcvarsall.bat" x86) - IF "%PYTHON_ARCH%"=="32" (CALL "C:\\Program Files (x86)\\Microsoft Visual Studio %VS_VER%\\VC\\vcvarsall.bat" x86)
- IF "%PYTHON_ARCH%"=="64" (CALL "C:\\Program Files (x86)\\Microsoft Visual Studio %VS_VER%\\VC\\vcvarsall.bat" amd64) - IF "%PYTHON_ARCH%"=="64" (CALL "C:\\Program Files (x86)\\Microsoft Visual Studio %VS_VER%\\VC\\vcvarsall.bat" amd64)
# The program rc.exe on 64bit with some versions looks in the wrong path
# location when building postgresql. This cheats by copying the x64 bit
# files to that location.
- IF "%PYTHON_ARCH%"=="64" (COPY /Y "C:\\Program Files\\Microsoft SDKs\\Windows\\v7.0\\Bin\\x64\\rc*" "C:\\Program Files (x86)\\Microsoft SDKs\\Windows\\v7.0A\\Bin")
# Change PostgreSQL config before service starts to allow > 1 prepared # Change PostgreSQL config before service starts to allow > 1 prepared
# transactions for test cases # transactions for test cases
- ECHO max_prepared_transactions = 10 >> "C:\\Program Files\\PostgreSQL\\9.6\\data\\postgresql.conf" - ECHO max_prepared_transactions = 10 >> "C:\\Program Files\\PostgreSQL\\9.6\\data\\postgresql.conf"
@ -154,8 +142,8 @@ install:
} }
# Download OpenSSL source # Download OpenSSL source
- CD C:\Others - CD C:\Others
- IF NOT EXIST OpenSSL_1_0_2l.zip ( - IF NOT EXIST OpenSSL_%OPENSSL_VERSION%.zip (
curl -fsSL -o OpenSSL_1_0_2l.zip https://github.com/openssl/openssl/archive/OpenSSL_1_0_2l.zip curl -fsSL -o OpenSSL_%OPENSSL_VERSION%.zip https://github.com/openssl/openssl/archive/OpenSSL_%OPENSSL_VERSION%.zip
) )
# To use OpenSSL >= 1.1.0, both libpq and psycopg build environments have # To use OpenSSL >= 1.1.0, both libpq and psycopg build environments have
@ -167,15 +155,15 @@ install:
# - nmake build_libs install_dev # - nmake build_libs install_dev
- IF NOT EXIST %OPENSSLTOP%\lib\ssleay32.lib ( - IF NOT EXIST %OPENSSLTOP%\lib\ssleay32.lib (
CD %BUILD_DIR% && CD %BUILD_DIR% &&
7z x C:\Others\OpenSSL_1_0_2l.zip && 7z x C:\Others\OpenSSL_%OPENSSL_VERSION%.zip &&
CD openssl-OpenSSL_1_0_2l && CD openssl-OpenSSL_%OPENSSL_VERSION% &&
perl Configure %TARGET% no-asm no-shared no-zlib --prefix=%OPENSSLTOP% --openssldir=%OPENSSLTOP% && perl Configure %TARGET% no-asm no-shared no-zlib --prefix=%OPENSSLTOP% --openssldir=%OPENSSLTOP% &&
CALL ms\%DO% && CALL ms\%DO% &&
nmake -f ms\nt.mak init headers lib && nmake -f ms\nt.mak init headers lib &&
COPY inc32\openssl\*.h %OPENSSLTOP%\include\openssl && COPY inc32\openssl\*.h %OPENSSLTOP%\include\openssl &&
COPY out32\*.lib %OPENSSLTOP%\lib && COPY out32\*.lib %OPENSSLTOP%\lib &&
CD %BASE_DIR% && CD %BASE_DIR% &&
RMDIR /S /Q %BUILD_DIR%\openssl-OpenSSL_1_0_2l RMDIR /S /Q %BUILD_DIR%\openssl-OpenSSL_%OPENSSL_VERSION%
) )
# Setup directories for building PostgreSQL libraries
@ -185,32 +173,45 @@ install:
- SET PGTOP=%BASE_DIR%\postgresql - SET PGTOP=%BASE_DIR%\postgresql
- IF NOT EXIST %PGTOP%\include MKDIR %PGTOP%\include - IF NOT EXIST %PGTOP%\include MKDIR %PGTOP%\include
- IF NOT EXIST %PGTOP%\lib MKDIR %PGTOP%\lib - IF NOT EXIST %PGTOP%\lib MKDIR %PGTOP%\lib
- IF NOT EXIST %PGTOP%\bin MKDIR %PGTOP%\bin
# Download PostgreSQL source # Download PostgreSQL source
- CD C:\Others - CD C:\Others
- IF NOT EXIST postgres-REL9_6_3.zip ( - IF NOT EXIST postgres-REL_%POSTGRES_VERSION%.zip (
curl -fsSL -o postgres-REL9_6_3.zip https://github.com/postgres/postgres/archive/REL9_6_3.zip curl -fsSL -o postgres-REL_%POSTGRES_VERSION%.zip https://github.com/postgres/postgres/archive/REL_%POSTGRES_VERSION%.zip
) )
# Setup build config file (config.pl) # Setup build config file (config.pl)
# Build libpgport first # Hack the Mkvcbuild.pm file so we build the lib version of libpq
# Build libpq # Build libpgport, libpgcommon, libpq
# Install includes
# Copy over built libraries
# Prepare local include directory for building from
# Build pg_config in place
# NOTE: Cannot set and use the same variable inside an IF # NOTE: Cannot set and use the same variable inside an IF
- SET PGBUILD=%BUILD_DIR%\postgres-REL9_6_3 - SET PGBUILD=%BUILD_DIR%\postgres-REL_%POSTGRES_VERSION%
- IF NOT EXIST %PGTOP%\lib\libpq.lib ( - IF NOT EXIST %PGTOP%\lib\libpq.lib (
CD %BUILD_DIR% && CD %BUILD_DIR% &&
7z x C:\Others\postgres-REL9_6_3.zip && 7z x C:\Others\postgres-REL_%POSTGRES_VERSION%.zip &&
CD postgres-REL9_6_3\src\tools\msvc && CD postgres-REL_%POSTGRES_VERSION%\src\tools\msvc &&
ECHO $config-^>{ldap} = 0; > config.pl && ECHO $config-^>{ldap} = 0; > config.pl &&
ECHO $config-^>{openssl} = "%OPENSSLTOP:\=\\%"; >> config.pl && ECHO $config-^>{openssl} = "%OPENSSLTOP:\=\\%"; >> config.pl &&
ECHO.>> config.pl && ECHO.>> config.pl &&
ECHO 1;>> config.pl && ECHO 1;>> config.pl &&
perl -pi.bak -e "s/'libpq', 'dll'/'libpq', 'lib'/g" Mkvcbuild.pm &&
build libpgport && build libpgport &&
XCOPY /E ..\..\include %PGTOP%\include && build libpgcommon &&
build libpq &&
ECHO "" > %PGBUILD%\src\backend\parser\gram.h &&
perl -pi.bak -e "s/qw\(Install\)/qw\(Install CopyIncludeFiles\)/g" Install.pm &&
perl -MInstall=CopyIncludeFiles -e"chdir('../../..'); CopyIncludeFiles('%PGTOP%')" &&
COPY %PGBUILD%\Release\libpgport\libpgport.lib %PGTOP%\lib && COPY %PGBUILD%\Release\libpgport\libpgport.lib %PGTOP%\lib &&
CD ..\..\interfaces\libpq && COPY %PGBUILD%\Release\libpgcommon\libpgcommon.lib %PGTOP%\lib &&
nmake -f win32.mak USE_OPENSSL=1 ENABLE_THREAD_SAFETY=1 SSL_INC=%OPENSSLTOP%\include SSL_LIB_PATH=%OPENSSLTOP%\lib config .\Release\libpq.lib && COPY %PGBUILD%\Release\libpq\libpq.lib %PGTOP%\lib &&
COPY *.h %PGTOP%\include && XCOPY /Y /S %PGBUILD%\src\include\port\win32\* %PGBUILD%\src\include &&
COPY Release\libpq.lib %PGTOP%\lib && XCOPY /Y /S %PGBUILD%\src\include\port\win32_msvc\* %PGBUILD%\src\include &&
CD %PGBUILD%\src\bin\pg_config &&
cl pg_config.c /MT /nologo /I%PGBUILD%\src\include /link /LIBPATH:%PGTOP%\lib libpgcommon.lib libpgport.lib advapi32.lib /NODEFAULTLIB:libcmt.lib /OUT:%PGTOP%\bin\pg_config.exe &&
CD %BASE_DIR% && CD %BASE_DIR% &&
RMDIR /S /Q %PGBUILD% RMDIR /S /Q %PGBUILD%
) )
@ -223,9 +224,10 @@ build_script:
# Add PostgreSQL binaries to the path # Add PostgreSQL binaries to the path
- PATH=C:\Program Files\PostgreSQL\9.6\bin\;%PATH% - PATH=C:\Program Files\PostgreSQL\9.6\bin\;%PATH%
- CD C:\Project - CD C:\Project
- "%PYTHON%\\python.exe setup.py build_ext --have-ssl -l libpgcommon -L %OPENSSLTOP%\\lib;%PGTOP%\\lib -I %OPENSSLTOP%\\include;%PGTOP%\\include" - "%PYTHON%\\python.exe setup.py build_ext --have-ssl --pg-config %PGTOP%\\bin\\pg_config.exe -l libpgcommon -l libpgport -L %OPENSSLTOP%\\lib -I %OPENSSLTOP%\\include"
- "%PYTHON%\\python.exe setup.py build" - "%PYTHON%\\python.exe setup.py build"
- "%PYTHON%\\python.exe setup.py install" - "%PYTHON%\\python.exe setup.py install"
- RD /S /Q psycopg2.egg-info
#after_build: #after_build:
@ -235,5 +237,8 @@ before_test:
- psql -d %PSYCOPG2_TESTDB% -c "CREATE EXTENSION HSTORE;" - psql -d %PSYCOPG2_TESTDB% -c "CREATE EXTENSION HSTORE;"
test_script: test_script:
# Print psycopg and libpq versions
- "%PYTHON%\\python.exe -c \"import psycopg2; print(psycopg2.__version__)\"" - "%PYTHON%\\python.exe -c \"import psycopg2; print(psycopg2.__version__)\""
- "%PYTHON%\\python.exe -c \"from psycopg2 import tests; tests.unittest.main(defaultTest='tests.test_suite')\" --verbose" - "%PYTHON%\\python.exe -c \"import psycopg2; print(psycopg2.__libpq_version__)\""
- "%PYTHON%\\python.exe -c \"import psycopg2; print(psycopg2.extensions.libpq_version())\""
- "%PYTHON%\\python.exe -c \"import tests; tests.unittest.main(defaultTest='tests.test_suite')\" --verbose"

.gitignore

@ -4,6 +4,7 @@ MANIFEST
*.pidb *.pidb
*.pyc *.pyc
*.sw[po] *.sw[po]
*.egg-info/
dist/* dist/*
build/* build/*
doc/src/_build/* doc/src/_build/*
@ -11,6 +12,8 @@ doc/html/*
doc/psycopg2.txt doc/psycopg2.txt
scripts/pypi_docs_upload.py scripts/pypi_docs_upload.py
env env
env?
.idea
.tox .tox
/rel /rel
/wheels /wheels


@ -7,14 +7,12 @@ language: python
python: python:
- 2.7 - 2.7
- 3.6 - 3.6
- 2.6
- 3.5 - 3.5
- 3.4 - 3.4
- 3.3
- 3.2
install: install:
- python setup.py install - python setup.py install
- rm -rf psycopg2.egg-info
- sudo scripts/travis_prepare.sh - sudo scripts/travis_prepare.sh
script: script:


@ -6,6 +6,5 @@ include doc/README.rst doc/SUCCESS doc/COPYING.LESSER doc/pep-0249.txt
include doc/Makefile doc/requirements.txt include doc/Makefile doc/requirements.txt
recursive-include doc/src *.rst *.py *.css Makefile recursive-include doc/src *.rst *.py *.css Makefile
recursive-include scripts *.py *.sh recursive-include scripts *.py *.sh
include scripts/maketypes.sh scripts/buildtypes.py
include AUTHORS README.rst INSTALL LICENSE NEWS include AUTHORS README.rst INSTALL LICENSE NEWS
include MANIFEST.in setup.py setup.cfg Makefile include MANIFEST.in setup.py setup.cfg Makefile


@ -29,8 +29,7 @@ SOURCE := $(SOURCE_C) $(SOURCE_PY) $(SOURCE_TESTS) $(SOURCE_DOC)
PACKAGE := $(BUILD_DIR)/psycopg2 PACKAGE := $(BUILD_DIR)/psycopg2
PLATLIB := $(PACKAGE)/_psycopg.so PLATLIB := $(PACKAGE)/_psycopg.so
PURELIB := $(patsubst lib/%,$(PACKAGE)/%,$(SOURCE_PY)) \ PURELIB := $(patsubst lib/%,$(PACKAGE)/%,$(SOURCE_PY))
$(patsubst tests/%,$(PACKAGE)/tests/%,$(SOURCE_TESTS))
BUILD_OPT := --build-lib=$(BUILD_DIR) BUILD_OPT := --build-lib=$(BUILD_DIR)
BUILD_EXT_OPT := --build-lib=$(BUILD_DIR) BUILD_EXT_OPT := --build-lib=$(BUILD_DIR)
@ -66,7 +65,7 @@ env:
$(MAKE) -C doc $@ $(MAKE) -C doc $@
check: check:
PYTHONPATH=$(BUILD_DIR):$(PYTHONPATH) $(PYTHON) -c "from psycopg2 import tests; tests.unittest.main(defaultTest='tests.test_suite')" --verbose PYTHONPATH=$(BUILD_DIR) $(PYTHON) -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')" --verbose
testdb: testdb:
@echo "* Creating $(TESTDB)" @echo "* Creating $(TESTDB)"

NEWS

@ -1,11 +1,64 @@
Current release Current release
--------------- ---------------
What's new in psycopg 2.8
-------------------------
Other changes:
- Dropped support for Python 2.6, 3.2, 3.3.
- Dropped `psycopg1` module.
- Dropped deprecated ``register_tstz_w_secs()`` (was previously a no-op).
- The ``psycopg2.test`` package is no longer installed by ``python setup.py
install``. The test source files now are compatible with Python 2 and 3
without using 2to3.
What's new in psycopg 2.7.5
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Fixed building on Solaris 11 and derivatives such as SmartOS and illumos
(:ticket:`#677`).
- Maybe fixed building on MSYS2 (as reported in :ticket:`#658`).
- Allow string subclasses in connection and other places (:ticket:`#679`).
What's new in psycopg 2.7.4
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Moving away from installing the wheel package by default.
Packages installed from wheel raise a warning on import. Added package
``psycopg2-binary`` to install from wheel instead (:ticket:`#543`).
- Convert fields names into valid Python identifiers in
`~psycopg2.extras.NamedTupleCursor` (:ticket:`#211`).
- Fixed Solaris 10 support (:ticket:`#532`).
- `cursor.mogrify()` can be called on closed cursors (:ticket:`#579`).
- Fixed setting session characteristics in corner cases on autocommit
connections (:ticket:`#580`).
- Fixed `~psycopg2.extras.MinTimeLoggingCursor` on Python 3 (:ticket:`#609`).
- Fixed parsing of array of points as floats (:ticket:`#613`).
- Fixed `~psycopg2.__libpq_version__` building with libpq >= 10.1
(:ticket:`#632`).
- Fixed `~cursor.rowcount` after `~cursor.executemany()` with :sql:`RETURNING`
statements (:ticket:`#633`).
- Fixed compatibility problem with pypy3 (:ticket:`#649`).
- Wheel packages compiled against PostgreSQL 10.1 libpq and OpenSSL 1.0.2n.
- Wheel packages for Python 2.6 no more available (support dropped from
wheel building infrastructure).
What's new in psycopg 2.7.3.2
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Wheel package compiled against PostgreSQL 10.0 libpq and OpenSSL 1.0.2l
(:tickets:`#601, #602`).
What's new in psycopg 2.7.3.1 What's new in psycopg 2.7.3.1
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Dropped libresolv from wheel package to avoid incompatibility with - Dropped libresolv from wheel package to avoid incompatibility with
glibc 2.26 (wheels ticket #2) glibc 2.26 (wheels ticket #2).
What's new in psycopg 2.7.3 What's new in psycopg 2.7.3
@ -100,9 +153,13 @@ New features:
Bug fixes: Bug fixes:
- Throw an exception trying to pass ``NULL`` chars as parameters
(:ticket:`#420`).
- Fixed error caused by missing decoding `~psycopg2.extras.LoggingConnection` - Fixed error caused by missing decoding `~psycopg2.extras.LoggingConnection`
(:ticket:`#483`). (:ticket:`#483`).
- Fixed integer overflow in :sql:`interval` seconds (:ticket:`#512`). - Fixed integer overflow in :sql:`interval` seconds (:ticket:`#512`).
- Make `~psycopg2.extras.Range` objects picklable (:ticket:`#462`).
- Fixed version parsing and building with PostgreSQL 10 (:ticket:`#489`).
Other changes: Other changes:
@ -116,14 +173,6 @@ Other changes:
(:ticket:`#506`) (:ticket:`#506`)
What's new in psycopg 2.6.3
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Throw an exception trying to pass ``NULL`` chars as parameters
(:ticket:`#420`).
- Make `~psycopg2.extras.Range` objects picklable (:ticket:`#462`).
What's new in psycopg 2.6.2 What's new in psycopg 2.6.2
^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^


@ -25,29 +25,40 @@ Documentation is included in the ``doc`` directory and is `available online`__.
.. __: http://initd.org/psycopg/docs/ .. __: http://initd.org/psycopg/docs/
For any other resource (source code repository, bug tracker, mailing list)
please check the `project homepage`__.
Installation Installation
------------ ------------
If your ``pip`` version supports wheel_ packages it should be possible to Building Psycopg requires a few prerequisites (a C compiler, some development
install a binary version of Psycopg including all the dependencies from PyPI_. packages): please check the install_ and the faq_ documents in the ``doc`` dir
Just run:: or online for the details.
If prerequisites are met, you can install psycopg like any other Python
package, using ``pip`` to download it from PyPI_::
$ pip install -U pip # make sure your pip is up-to-date
$ pip install psycopg2 $ pip install psycopg2
If you want to build Psycopg from source you will need some prerequisites (a C or using ``setup.py`` if you have downloaded the source package locally::
compiler, development packages): please check the install_ and the faq_
documents in the ``doc`` dir for the details. $ python setup.py build
$ sudo python setup.py install
You can also obtain a stand-alone package, not requiring a compiler or
external libraries, by installing the `psycopg2-binary`_ package from PyPI::
$ pip install psycopg2-binary
The binary package is a practical choice for development and testing but in
production it is advised to use the package built from sources.
.. _wheel: http://pythonwheels.com/
.. _PyPI: https://pypi.python.org/pypi/psycopg2 .. _PyPI: https://pypi.python.org/pypi/psycopg2
.. _psycopg2-binary: https://pypi.python.org/pypi/psycopg2-binary
.. _install: http://initd.org/psycopg/docs/install.html#install-from-source .. _install: http://initd.org/psycopg/docs/install.html#install-from-source
.. _faq: http://initd.org/psycopg/docs/faq.html#faq-compile .. _faq: http://initd.org/psycopg/docs/faq.html#faq-compile
For any other resource (source code repository, bug tracker, mailing list)
please check the `project homepage`__.
.. __: http://initd.org/psycopg/ .. __: http://initd.org/psycopg/


@ -1,3 +1,3 @@
# Packages only needed to build the docs # Packages only needed to build the docs
Pygments>=1.5 Pygments>=2.2,<2.3
Sphinx>=1.2,<=1.3 Sphinx>=1.6,<=1.7


@ -295,7 +295,9 @@ something to read::
print "Got NOTIFY:", notify.pid, notify.channel, notify.payload print "Got NOTIFY:", notify.pid, notify.channel, notify.payload
Running the script and executing a command such as :sql:`NOTIFY test, 'hello'` Running the script and executing a command such as :sql:`NOTIFY test, 'hello'`
in a separate :program:`psql` shell, the output may look similar to:: in a separate :program:`psql` shell, the output may look similar to:
.. code-block:: none
Waiting for notifications on channel 'test' Waiting for notifications on channel 'test'
Timeout Timeout


@ -57,7 +57,7 @@ try:
release = psycopg2.__version__.split()[0] release = psycopg2.__version__.split()[0]
version = '.'.join(release.split('.')[:2]) version = '.'.join(release.split('.')[:2])
except ImportError: except ImportError:
print "WARNING: couldn't import psycopg to read version." print("WARNING: couldn't import psycopg to read version.")
release = version release = version
intersphinx_mapping = { intersphinx_mapping = {
@ -101,6 +101,10 @@ default_role = 'obj'
# output. They are ignored by default. # output. They are ignored by default.
#show_authors = False #show_authors = False
# Using 'python' instead of the default gives warnings if parsing an example
# fails, instead of defaulting to none
highlight_language = 'python'
# The name of the Pygments (syntax highlighting) style to use. # The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx' pygments_style = 'sphinx'


@ -41,11 +41,6 @@ The ``connection`` class
previously only valid PostgreSQL identifiers were accepted as previously only valid PostgreSQL identifiers were accepted as
cursor name. cursor name.
.. warning::
It is unsafe to expose the *name* to an untrusted source, for
instance you shouldn't allow *name* to be read from a HTML form.
Consider it as part of the query, not as a query parameter.
The *cursor_factory* argument can be used to create non-standard The *cursor_factory* argument can be used to create non-standard
cursors. The class returned must be a subclass of cursors. The class returned must be a subclass of
`psycopg2.extensions.cursor`. See :ref:`subclassing-cursor` for `psycopg2.extensions.cursor`. See :ref:`subclassing-cursor` for
@ -551,7 +546,7 @@ The ``connection`` class
the session. the session.
.. doctest:: .. doctest::
:options: NORMALIZE_WHITESPACE :options: +NORMALIZE_WHITESPACE
>>> cur.execute("CREATE TABLE foo (id serial PRIMARY KEY);") >>> cur.execute("CREATE TABLE foo (id serial PRIMARY KEY);")
>>> pprint(conn.notices) >>> pprint(conn.notices)


@ -50,7 +50,7 @@ An example of the available constants defined in the module:
'42P01' '42P01'
Constants representing all the error values defined by PostgreSQL versions Constants representing all the error values defined by PostgreSQL versions
between 8.1 and 10 beta 1 are included in the module. between 8.1 and 10 are included in the module.
.. autofunction:: lookup(code) .. autofunction:: lookup(code)
@ -59,7 +59,7 @@ between 8.1 and 10 beta 1 are included in the module.
>>> try: >>> try:
... cur.execute("SELECT ouch FROM aargh;") ... cur.execute("SELECT ouch FROM aargh;")
... except Exception, e: ... except Exception as e:
... pass ... pass
... ...
>>> errorcodes.lookup(e.pgcode[:2]) >>> errorcodes.lookup(e.pgcode[:2])


@ -99,20 +99,6 @@ Real dictionary cursor
.. versionadded:: 2.3 .. versionadded:: 2.3
These objects require :py:func:`collections.namedtuple` to be found, so it is
available out-of-the-box only from Python 2.6. Anyway, the namedtuple
implementation is compatible with previous Python versions, so all you
have to do is to `download it`__ and make it available where we
expect it to be... ::
from somewhere import namedtuple
import collections
collections.namedtuple = namedtuple
from psycopg.extras import NamedTupleConnection
# ...
.. __: http://code.activestate.com/recipes/500261-named-tuples/
.. autoclass:: NamedTupleCursor .. autoclass:: NamedTupleCursor
.. autoclass:: NamedTupleConnection .. autoclass:: NamedTupleConnection
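For context, a minimal usage sketch of the classes documented above (the
connection DSN is a placeholder)::

    import psycopg2
    from psycopg2.extras import NamedTupleCursor

    conn = psycopg2.connect("dbname=test")
    # Rows are returned as named tuples, so fields can be read by attribute.
    cur = conn.cursor(cursor_factory=NamedTupleCursor)
    cur.execute("SELECT 1 AS id, 'foo' AS name")
    rec = cur.fetchone()
    print(rec.id, rec.name)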
@ -403,7 +389,7 @@ The individual messages in the replication stream are represented by
class LogicalStreamConsumer(object): class LogicalStreamConsumer(object):
... # ...
def __call__(self, msg): def __call__(self, msg):
self.process_message(msg.payload) self.process_message(msg.payload)
@ -501,7 +487,7 @@ The individual messages in the replication stream are represented by
from datetime import datetime from datetime import datetime
def consume(msg): def consume(msg):
... # ...
keepalive_interval = 10.0 keepalive_interval = 10.0
while True: while True:
@ -553,17 +539,13 @@ fields to JSON) you can use the `register_json()` function.
.. __: http://people.planetpostgresql.org/andrew/index.php?/archives/255-JSON-for-PG-9.2-...-and-now-for-9.1!.html .. __: http://people.planetpostgresql.org/andrew/index.php?/archives/255-JSON-for-PG-9.2-...-and-now-for-9.1!.html
The Python library used by default to convert Python objects to JSON and to The Python :py:mod:`json` module is used by default to convert Python objects
parse data from the database depends on the language version: with Python 2.6 to JSON and to parse data from the database.
and following the :py:mod:`json` module from the standard library is used;
with previous versions the `simplejson`_ module is used if available. Note
that the last `!simplejson` version supporting Python 2.4 is the 2.0.9.
.. _JSON: http://www.json.org/ .. _JSON: http://www.json.org/
.. |pgjson| replace:: :sql:`json` .. |pgjson| replace:: :sql:`json`
.. |jsonb| replace:: :sql:`jsonb` .. |jsonb| replace:: :sql:`jsonb`
.. _pgjson: http://www.postgresql.org/docs/current/static/datatype-json.html .. _pgjson: http://www.postgresql.org/docs/current/static/datatype-json.html
.. _simplejson: http://pypi.python.org/pypi/simplejson/
In order to pass a Python object to the database as query argument you can use In order to pass a Python object to the database as query argument you can use
the `Json` adapter:: the `Json` adapter::
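    # A minimal sketch: the DSN is a placeholder and the table is created here
    # only for illustration. Json wraps any object the json module can
    # serialize and adapts it to the PostgreSQL json type.
    import psycopg2
    from psycopg2.extras import Json

    conn = psycopg2.connect("dbname=test")
    curs = conn.cursor()
    curs.execute("CREATE TEMP TABLE mytable (jsondata json)")
    curs.execute("INSERT INTO mytable (jsondata) VALUES (%s)", [Json({'a': 100})])
    conn.commit()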
@ -1043,20 +1025,6 @@ parameters. By reducing the number of server roundtrips the performance can be
.. versionadded:: 2.7 .. versionadded:: 2.7
.. index::
single: Time zones; Fractional
Fractional time zones
---------------------
.. autofunction:: register_tstz_w_secs
.. versionadded:: 2.0.9
.. versionchanged:: 2.2.2
function is no-op: see :ref:`tz-handling`.
.. index:: .. index::
pair: Example; Coroutine; pair: Example; Coroutine;


@ -306,7 +306,9 @@ I can't compile `!psycopg2`: the compiler says *error: libpq-fe.h: No such file
API support (*i.e.* the libpq used at compile time was at least 9.3) but API support (*i.e.* the libpq used at compile time was at least 9.3) but
at runtime an older libpq dynamic library is found. at runtime an older libpq dynamic library is found.
You can use:: You can use:
.. code-block:: shell
$ ldd /path/to/packages/psycopg2/_psycopg.so | grep libpq $ ldd /path/to/packages/psycopg2/_psycopg.so | grep libpq
@ -332,4 +334,3 @@ Psycopg raises *ImportError: cannot import name tz* on import in mod_wsgi / ASP,
.. _egg: http://peak.telecommunity.com/DevCenter/PythonEggs .. _egg: http://peak.telecommunity.com/DevCenter/PythonEggs
.. __: http://stackoverflow.com/questions/2192323/what-is-the-python-egg-cache-python-egg-cache .. __: http://stackoverflow.com/questions/2192323/what-is-the-python-egg-cache-python-egg-cache
.. __: http://code.google.com/p/modwsgi/wiki/ConfigurationDirectives#WSGIPythonEggs .. __: http://code.google.com/p/modwsgi/wiki/ConfigurationDirectives#WSGIPythonEggs


@ -65,4 +65,3 @@ Psycopg 2 is both Unicode and Python 3 friendly.
**To Do items in the documentation** **To Do items in the documentation**
.. todolist:: .. todolist::


@ -12,16 +12,6 @@ to use Psycopg on a different Python implementation (PyPy, Jython, IronPython)
there is an experimental `porting of Psycopg for Ctypes`__, but it is not as there is an experimental `porting of Psycopg for Ctypes`__, but it is not as
mature as the C implementation yet. mature as the C implementation yet.
The current `!psycopg2` implementation supports:
..
NOTE: keep consistent with setup.py and the /features/ page.
- Python 2 versions from 2.6 to 2.7
- Python 3 versions from 3.2 to 3.6
- PostgreSQL server versions from 7.4 to 9.6
- PostgreSQL client library version from 9.1
.. _PostgreSQL: http://www.postgresql.org/ .. _PostgreSQL: http://www.postgresql.org/
.. _Python: http://www.python.org/ .. _Python: http://www.python.org/
.. _libpq: http://www.postgresql.org/docs/current/static/libpq.html .. _libpq: http://www.postgresql.org/docs/current/static/libpq.html
@ -32,77 +22,20 @@ The current `!psycopg2` implementation supports:
.. index:: .. index::
single: Install; from PyPI single: Prerequisites
Binary install from PyPI Prerequisites
------------------------ -------------
`!psycopg2` is `available on PyPI`__ in the form of wheel_ packages for the The current `!psycopg2` implementation supports:
most common platform (Linux, OSX, Windows): this should make you able to
install a binary version of the module including all the dependencies simply
using:
.. code-block:: console ..
NOTE: keep consistent with setup.py and the /features/ page.
$ pip install psycopg2 - Python version 2.7
- Python 3 versions from 3.4 to 3.6
Make sure to use an up-to-date version of :program:`pip` (you can upgrade it - PostgreSQL server versions from 7.4 to 10
using something like ``pip install -U pip``) - PostgreSQL client library version from 9.1
.. __: PyPI_
.. _PyPI: https://pypi.python.org/pypi/psycopg2/
.. _wheel: http://pythonwheels.com/
.. note::
The binary packages come with their own versions of a few C libraries,
among which ``libpq`` and ``libssl``, which will be used regardless of other
libraries available on the client: upgrading the system libraries will not
upgrade the libraries used by `!psycopg2`. Please build `!psycopg2` from
source if you want to maintain binary upgradeability.
.. warning::
Because the `!psycopg` wheel package uses its own ``libssl`` binary, it is
incompatible with other extension modules binding with ``libssl`` as well,
for instance the Python `ssl` module: the result will likely be a
segfault. If you need using both `!psycopg2` and other libraries using
``libssl`` please :ref:`install psycopg from source
<install-from-source>`.
If you prefer to use the system libraries available on your client you can use
the :command:`pip` ``--no-binary`` option:
.. code-block:: console
$ pip install --no-binary psycopg2
which can be specified in your :file:`requirements.txt` files too, e.g. use:
.. code-block:: none
psycopg2>=2.7,<2.8 --no-binary :all:
to use the last bugfix release of the `!psycopg2` 2.7 package, specifying to
always compile it from source. Of course in this case you will have to meet
the :ref:`build prerequisites <build-prerequisites>`.
.. index::
single: Install; from source
.. _install-from-source:
Install from source
-------------------
.. _source-package:
You can download a copy of Psycopg source files from the `Psycopg download
page`__ or from PyPI_.
.. __: http://initd.org/psycopg/download/
@ -111,8 +44,8 @@ page`__ or from PyPI_.
Build prerequisites Build prerequisites
^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^
These notes illustrate how to compile Psycopg on Linux. If you want to compile The build prerequisites are to be met in order to install Psycopg from source
Psycopg on other platforms you may have to adjust some details accordingly. code, either from a source distribution package or from PyPI.
Psycopg is a C wrapper around the libpq_ PostgreSQL client library. To install Psycopg is a C wrapper around the libpq_ PostgreSQL client library. To install
it from sources you will need: it from sources you will need:
@ -144,6 +77,12 @@ it from sources you will need:
Once everything is in place it's just a matter of running the standard: Once everything is in place it's just a matter of running the standard:
.. code-block:: console
$ pip install psycopg2
or, from the directory containing the source code:
.. code-block:: console .. code-block:: console
$ python setup.py build $ python setup.py build
@ -180,12 +119,92 @@ which is OS-dependent (for instance setting a suitable
.. index::
single: Install; from PyPI
single: Install; wheel
single: Wheel
Binary install from PyPI
------------------------
`!psycopg2` is also `available on PyPI`__ in the form of wheel_ packages for
the most common platform (Linux, OSX, Windows): this should make you able to
install a binary version of the module, not requiring the above build or
runtime prerequisites, simply using:
.. code-block:: console
$ pip install psycopg2-binary
Make sure to use an up-to-date version of :program:`pip` (you can upgrade it
using something like ``pip install -U pip``)
.. __: PyPI-binary_
.. _PyPI-binary: https://pypi.python.org/pypi/psycopg2-binary/
.. _wheel: http://pythonwheels.com/
.. note::
The binary packages come with their own versions of a few C libraries,
among which ``libpq`` and ``libssl``, which will be used regardless of other
libraries available on the client: upgrading the system libraries will not
upgrade the libraries used by `!psycopg2`. Please build `!psycopg2` from
source if you want to maintain binary upgradeability.
.. warning::
The `!psycopg2` wheel package comes packaged, among the others, with its
own ``libssl`` binary. This may create conflicts with other extension
modules binding with ``libssl`` as well, for instance with the Python
`ssl` module: in some cases, under concurrency, the interaction between
the two libraries may result in a segfault. In case of doubts you are
advised to use a package built from source.
.. index::
single: Install; disable wheel
single: Wheel; disable
.. _disable-wheel:
Disabling wheel packages for Psycopg 2.7
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
In version 2.7.x, `pip install psycopg2` would have tried to install the wheel
binary package of Psycopg. Because of the problems the wheel package has
displayed, `psycopg2-binary` has become a separate package, and from 2.8 it
has become the only way to install the binary package.
If you are using psycopg 2.7 and you want to disable the use of wheel binary
packages, relying on the system libraries available on your client, you
can use the :command:`pip` |--no-binary option|__, e.g.:
.. code-block:: console
$ pip install --no-binary :all: psycopg2
.. |--no-binary option| replace:: ``--no-binary`` option
.. __: https://pip.pypa.io/en/stable/reference/pip_install/#install-no-binary
which can be specified in your :file:`requirements.txt` files too, e.g. use:
.. code-block:: none
psycopg2>=2.7,<2.8 --no-binary psycopg2
to use the last bugfix release of the `!psycopg2` 2.7 package, specifying to
always compile it from source. Of course in this case you will have to meet
the :ref:`build prerequisites <build-prerequisites>`.
.. index:: .. index::
single: setup.py single: setup.py
single: setup.cfg single: setup.cfg
Non-standard builds Non-standard builds
^^^^^^^^^^^^^^^^^^^ -------------------
If you have less standard requirements such as: If you have less standard requirements such as:
@ -225,7 +244,7 @@ order to create a debug package:
- Edit the ``setup.cfg`` file adding the ``PSYCOPG_DEBUG`` flag to the - Edit the ``setup.cfg`` file adding the ``PSYCOPG_DEBUG`` flag to the
``define`` option. ``define`` option.
- :ref:`Compile and install <source-package>` the package. - :ref:`Compile and install <build-prerequisites>` the package.
- Set the :envvar:`PSYCOPG_DEBUG` environment variable: - Set the :envvar:`PSYCOPG_DEBUG` environment variable:
@ -250,11 +269,11 @@ Running the test suite
---------------------- ----------------------
Once `!psycopg2` is installed you can run the test suite to verify it is Once `!psycopg2` is installed you can run the test suite to verify it is
working correctly. You can run: working correctly. From the source directory, you can run:
.. code-block:: console .. code-block:: console
$ python -c "from psycopg2 import tests; tests.unittest.main(defaultTest='tests.test_suite')" --verbose $ python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')" --verbose
The tests run against a database called ``psycopg2_test`` on UNIX socket and The tests run against a database called ``psycopg2_test`` on UNIX socket and
the standard port. You can configure a different database to run the test by the standard port. You can configure a different database to run the test by


@ -24,13 +24,18 @@ directly in the client application.
.. method:: getconn(key=None) .. method:: getconn(key=None)
Get a free connection and assign it to *key* if not `!None`. Get a free connection from the pool.
The *key* parameter is optional: if used, the connection will be
associated to the key and calling `!getconn()` with the same key again
will return the same connection.
.. method:: putconn(conn, key=None, close=False) .. method:: putconn(conn, key=None, close=False)
Put away a connection. Put away a connection.
If *close* is `!True`, discard the connection from the pool. If *close* is `!True`, discard the connection from the pool.
*key* should be used consistently with `getconn()`.
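A minimal usage sketch, assuming a reachable database; `!ThreadedConnectionPool`
is one of the concrete pool implementations provided by the module::

    from psycopg2.pool import ThreadedConnectionPool

    pool = ThreadedConnectionPool(1, 5, dsn="dbname=test")

    conn = pool.getconn(key="worker-1")    # associate the connection with a key
    try:
        cur = conn.cursor()
        cur.execute("SELECT 1")
    finally:
        # return the connection using the same key;
        # close=True would discard it from the pool instead
        pool.putconn(conn, key="worker-1")

    pool.closeall()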
.. method:: closeall .. method:: closeall
@ -61,4 +66,3 @@ be used.
This pool class is mostly designed to interact with Zope and probably This pool class is mostly designed to interact with Zope and probably
not useful in generic applications. not useful in generic applications.


@ -12,7 +12,7 @@
from docutils import nodes from docutils import nodes
from sphinx.locale import _ from sphinx.locale import _
from sphinx.util.compat import Directive, make_admonition from docutils.parsers.rst import Directive
class extension_node(nodes.Admonition, nodes.Element): pass class extension_node(nodes.Admonition, nodes.Element): pass
@ -29,12 +29,11 @@ class Extension(Directive):
option_spec = {} option_spec = {}
def run(self): def run(self):
nodes = make_admonition(extension_node, node = extension_node('\n'.join(self.content))
self.name, [_('DB API extension')], self.options, node += nodes.title(_('DB API extension'), _('DB API extension'))
self.content, self.lineno, self.content_offset, self.state.nested_parse(self.content, self.content_offset, node)
self.block_text, self.state, self.state_machine) node['classes'].append('dbapi-extension')
nodes[0]['classes'].append('dbapi-extension') return [node]
return nodes
def visit_extension_node(self, node): def visit_extension_node(self, node):
@ -50,4 +49,3 @@ def setup(app):
text=(visit_extension_node, depart_extension_node)) text=(visit_extension_node, depart_extension_node))
app.add_directive('extension', Extension) app.add_directive('extension', Extension)


@ -18,4 +18,3 @@ def sql_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
def setup(app): def setup(app):
roles.register_local_role('sql', sql_role) roles.register_local_role('sql', sql_role)


@ -56,4 +56,3 @@ def setup(app):
app.add_config_value('ticket_remap_offset', None, 'env') app.add_config_value('ticket_remap_offset', None, 'env')
app.add_role('ticket', ticket_role) app.add_role('ticket', ticket_role)
app.add_role('tickets', ticket_role) app.add_role('tickets', ticket_role)


@ -5,6 +5,7 @@
import os import os
import sys import sys
def main(): def main():
if len(sys.argv) != 3: if len(sys.argv) != 3:
sys.stderr.write("usage: %s index.rst text-dir\n") sys.stderr.write("usage: %s index.rst text-dir\n")
@ -17,23 +18,20 @@ def main():
return 0 return 0
def iter_file_base(fn): def iter_file_base(fn):
f = open(fn) f = open(fn)
if sys.version_info[0] >= 3:
have_line = iter(f).__next__
else:
have_line = iter(f).next
while not have_line().startswith('.. toctree'): while not next(f).startswith('.. toctree'):
pass pass
while have_line().strip().startswith(':'): while next(f).strip().startswith(':'):
pass pass
yield os.path.splitext(os.path.basename(fn))[0] yield os.path.splitext(os.path.basename(fn))[0]
n = 0 n = 0
while True: while True:
line = have_line() line = next(f)
if line.isspace(): if line.isspace():
continue continue
if line.startswith(".."): if line.startswith(".."):
@ -47,6 +45,7 @@ def iter_file_base(fn):
# maybe format changed? # maybe format changed?
raise Exception("Not enough files found. Format change in index.rst?") raise Exception("Not enough files found. Format change in index.rst?")
def emit(basename, txt_dir): def emit(basename, txt_dir):
f = open(os.path.join(txt_dir, basename + ".txt")) f = open(os.path.join(txt_dir, basename + ".txt"))
for line in f: for line in f:
@ -60,4 +59,3 @@ def emit(basename, txt_dir):
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())


@ -13,4 +13,3 @@ cursors.
.. autoclass:: psycopg2.tz.FixedOffsetTimezone .. autoclass:: psycopg2.tz.FixedOffsetTimezone
.. autoclass:: psycopg2.tz.LocalTimezone .. autoclass:: psycopg2.tz.LocalTimezone


@ -48,7 +48,7 @@ The main entry points of Psycopg are:
- The class `connection` encapsulates a database session. It allows to: - The class `connection` encapsulates a database session. It allows to:
- create new `cursor`\s using the `~connection.cursor()` method to - create new `cursor` instances using the `~connection.cursor()` method to
execute database commands and queries, execute database commands and queries,
- terminate transactions using the methods `~connection.commit()` or - terminate transactions using the methods `~connection.commit()` or
@ -73,70 +73,97 @@ The main entry points of Psycopg are:
Passing parameters to SQL queries Passing parameters to SQL queries
--------------------------------- ---------------------------------
Psycopg casts Python variables to SQL literals by type. Many standard Python types Psycopg converts Python variables to SQL values using their types: the Python
are already `adapted to the correct SQL representation`__. type determines the function used to convert the object into a string
representation suitable for PostgreSQL. Many standard Python types are
already `adapted to the correct SQL representation`__.
.. __: python-types-adaptation_ .. __: python-types-adaptation_
Example: the Python function call:: Passing parameters to an SQL statement happens in functions such as
`cursor.execute()` by using ``%s`` placeholders in the SQL statement, and
passing a sequence of values as the second argument of the function. For
example the Python function call::
>>> cur.execute( >>> cur.execute("""
... """INSERT INTO some_table (an_int, a_date, a_string) ... INSERT INTO some_table (an_int, a_date, a_string)
... VALUES (%s, %s, %s);""", ... VALUES (%s, %s, %s);
... """,
... (10, datetime.date(2005, 11, 18), "O'Reilly")) ... (10, datetime.date(2005, 11, 18), "O'Reilly"))
is converted into the SQL command:: is converted into a SQL command similar to:
.. code-block:: sql
INSERT INTO some_table (an_int, a_date, a_string) INSERT INTO some_table (an_int, a_date, a_string)
VALUES (10, '2005-11-18', 'O''Reilly'); VALUES (10, '2005-11-18', 'O''Reilly');
Named arguments are supported too using :samp:`%({name})s` placeholders. Named arguments are supported too using :samp:`%({name})s` placeholders in the
Using named arguments the values can be passed to the query in any order and query and specifying the values into a mapping. Using named arguments allows
many placeholders can use the same values:: to specify the values in any order and to repeat the same value in several
places in the query::
>>> cur.execute( >>> cur.execute("""
... """INSERT INTO some_table (an_int, a_date, another_date, a_string) ... INSERT INTO some_table (an_int, a_date, another_date, a_string)
... VALUES (%(int)s, %(date)s, %(date)s, %(str)s);""", ... VALUES (%(int)s, %(date)s, %(date)s, %(str)s);
... """,
... {'int': 10, 'str': "O'Reilly", 'date': datetime.date(2005, 11, 18)}) ... {'int': 10, 'str': "O'Reilly", 'date': datetime.date(2005, 11, 18)})
Using characters ``%``, ``(``, ``)`` in the argument names is not supported.
When parameters are used, in order to include a literal ``%`` in the query you When parameters are used, in order to include a literal ``%`` in the query you
can use the ``%%`` string. Using characters ``%``, ``(``, ``)`` in the can use the ``%%`` string::
argument names is not supported.
>>> cur.execute("SELECT (%s % 2) = 0 AS even", (10,)) # WRONG
>>> cur.execute("SELECT (%s %% 2) = 0 AS even", (10,)) # correct
While the mechanism resembles regular Python strings manipulation, there are a While the mechanism resembles regular Python strings manipulation, there are a
few subtle differences you should care about when passing parameters to a few subtle differences you should care about when passing parameters to a
query: query.
- The Python string operator ``%`` is not used: the `~cursor.execute()` - The Python string operator ``%`` *must not be used*: the `~cursor.execute()`
method accepts a tuple or dictionary of values as second parameter. method accepts a tuple or dictionary of values as second parameter.
|sql-warn|__. |sql-warn|__:
.. |sql-warn| replace:: **Never** use ``%`` or ``+`` to merge values .. |sql-warn| replace:: **Never** use ``%`` or ``+`` to merge values
into queries into queries
.. __: sql-injection_ .. __: sql-injection_
- The variables placeholder must *always be a* ``%s``, even if a different >>> cur.execute("INSERT INTO numbers VALUES (%s, %s)" % (10, 20)) # WRONG
placeholder (such as a ``%d`` for integers or ``%f`` for floats) may look >>> cur.execute("INSERT INTO numbers VALUES (%s, %s)", (10, 20)) # correct
more appropriate::
>>> cur.execute("INSERT INTO numbers VALUES (%d)", (42,)) # WRONG
>>> cur.execute("INSERT INTO numbers VALUES (%s)", (42,)) # correct
- For positional variables binding, *the second argument must always be a - For positional variables binding, *the second argument must always be a
sequence*, even if it contains a single variable. And remember that Python sequence*, even if it contains a single variable (remember that Python
requires a comma to create a single element tuple:: requires a comma to create a single element tuple)::
>>> cur.execute("INSERT INTO foo VALUES (%s)", "bar") # WRONG >>> cur.execute("INSERT INTO foo VALUES (%s)", "bar") # WRONG
>>> cur.execute("INSERT INTO foo VALUES (%s)", ("bar")) # WRONG >>> cur.execute("INSERT INTO foo VALUES (%s)", ("bar")) # WRONG
>>> cur.execute("INSERT INTO foo VALUES (%s)", ("bar",)) # correct >>> cur.execute("INSERT INTO foo VALUES (%s)", ("bar",)) # correct
>>> cur.execute("INSERT INTO foo VALUES (%s)", ["bar"]) # correct >>> cur.execute("INSERT INTO foo VALUES (%s)", ["bar"]) # correct
- Only query values should be bound via this method: it shouldn't be used to - The placeholder *must not be quoted*. Psycopg will add quotes where needed::
merge table or field names to the query. If you need to generate dynamically
an SQL query (for instance choosing dynamically a table name) you can use
the facilities provided by the `psycopg2.sql` module.
>>> cur.execute("INSERT INTO numbers VALUES ('%s')", (10,)) # WRONG
>>> cur.execute("INSERT INTO numbers VALUES (%s)", (10,)) # correct
- The variables placeholder *must always be a* ``%s``, even if a different
placeholder (such as a ``%d`` for integers or ``%f`` for floats) may look
more appropriate::
>>> cur.execute("INSERT INTO numbers VALUES (%d)", (10,)) # WRONG
>>> cur.execute("INSERT INTO numbers VALUES (%s)", (10,)) # correct
- Only query values should be bound via this method: it shouldn't be used to
merge table or field names to the query (Psycopg will try quoting the table
name as a string value, generating invalid SQL). If you need to generate
dynamically SQL queries (for instance choosing dynamically a table name)
you can use the facilities provided by the `psycopg2.sql` module::
>>> cur.execute("INSERT INTO %s VALUES (%s)", ('numbers', 10)) # WRONG
>>> cur.execute( # correct
... SQL("INSERT INTO {} VALUES (%s)").format(Identifier('numbers')),
... (10,))
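The `SQL` and `Identifier` objects used above come from the `psycopg2.sql`
module; a self-contained sketch of the same statement (the DSN is a
placeholder, and the table is created here only for illustration)::

    import psycopg2
    from psycopg2 import sql

    conn = psycopg2.connect("dbname=test")
    cur = conn.cursor()
    cur.execute("CREATE TEMP TABLE numbers (n integer)")
    # The table name is composed safely with Identifier, while the value
    # still goes through regular parameter binding with %s.
    cur.execute(
        sql.SQL("INSERT INTO {} VALUES (%s)").format(sql.Identifier("numbers")),
        (10,))
    conn.commit()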
.. index:: Security, SQL injection .. index:: Security, SQL injection
@ -430,14 +457,12 @@ the connection or globally: see the function
Binary adaptation Binary adaptation
^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^
Python types representing binary objects are converted into Python types representing binary objects are converted into PostgreSQL binary
PostgreSQL binary string syntax, suitable for :sql:`bytea` fields. Such string syntax, suitable for :sql:`bytea` fields. Such types are `buffer`
types are `buffer` (only available in Python 2), `memoryview` (available (only available in Python 2), `memoryview`, `bytearray`, and `bytes` (only in
from Python 2.7), `bytearray` (available from Python 2.6) and `bytes` Python 3: the name is available in Python 2 but it's only an alias for the
(only from Python 3: the name is available from Python 2.6 but it's only an type `!str`). Any object implementing the `Revised Buffer Protocol`__ should
alias for the type `!str`). Any object implementing the `Revised Buffer be usable as binary type. Received data is returned as `!buffer` (in Python 2)
Protocol`__ should be usable as binary type where the protocol is supported
(i.e. from Python 2.6). Received data is returned as `!buffer` (in Python 2)
or `!memoryview` (in Python 3). or `!memoryview` (in Python 3).
.. __: http://www.python.org/dev/peps/pep-3118/ .. __: http://www.python.org/dev/peps/pep-3118/
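A minimal round-trip sketch under Python 3 (the connection DSN is a
placeholder)::

    import psycopg2

    conn = psycopg2.connect("dbname=test")
    cur = conn.cursor()
    cur.execute("CREATE TEMP TABLE blobs (data bytea)")
    # bytes (or memoryview/bytearray) values are adapted to the bytea syntax
    cur.execute("INSERT INTO blobs (data) VALUES (%s)", (b"\x00\x01\x02",))
    cur.execute("SELECT data FROM blobs")
    buf = cur.fetchone()[0]   # returned as a memoryview on Python 3
    print(bytes(buf))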
@ -535,8 +560,7 @@ rounded to the nearest minute, with an error of up to 30 seconds.
.. versionchanged:: 2.2.2 .. versionchanged:: 2.2.2
timezones with seconds are supported (with rounding). Previously such timezones with seconds are supported (with rounding). Previously such
timezones raised an error. In order to deal with them in previous timezones raised an error.
versions use `psycopg2.extras.register_tstz_w_secs()`.
.. index:: .. index::
@ -792,7 +816,9 @@ lifetime extends well after `~connection.commit()`, calling
It is also possible to use a named cursor to consume a cursor created It is also possible to use a named cursor to consume a cursor created
in some other way than using the |DECLARE| executed by in some other way than using the |DECLARE| executed by
`~cursor.execute()`. For example, you may have a PL/pgSQL function `~cursor.execute()`. For example, you may have a PL/pgSQL function
returning a cursor:: returning a cursor:
.. code-block:: postgres
CREATE FUNCTION reffunc(refcursor) RETURNS refcursor AS $$ CREATE FUNCTION reffunc(refcursor) RETURNS refcursor AS $$
BEGIN BEGIN
@ -990,4 +1016,3 @@ For further details see the documentation for the above methods.
.. __: http://www.opengroup.org/bookstore/catalog/c193.htm .. __: http://www.opengroup.org/bookstore/catalog/c193.htm
.. __: http://jdbc.postgresql.org/ .. __: http://jdbc.postgresql.org/


@ -17,6 +17,7 @@
DSN = 'dbname=test' DSN = 'dbname=test'
## don't modify anything below this line (except for experimenting) ## don't modify anything below this line (except for experimenting)
from __future__ import print_function
import sys import sys
import psycopg2 import psycopg2
@ -24,9 +25,9 @@ import psycopg2
if len(sys.argv) > 1: if len(sys.argv) > 1:
DSN = sys.argv[1] DSN = sys.argv[1]
print "Opening connection using dsn:", DSN print("Opening connection using dsn:", DSN)
conn = psycopg2.connect(DSN) conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding print("Encoding for this connection is", conn.encoding)
curs = conn.cursor() curs = conn.cursor()
try: try:
@ -52,20 +53,20 @@ curs.execute("""INSERT INTO test_binary
# now we try to extract the images as simple text strings # now we try to extract the images as simple text strings
print "Extracting the images as strings..." print("Extracting the images as strings...")
curs.execute("SELECT * FROM test_binary") curs.execute("SELECT * FROM test_binary")
for row in curs.fetchall(): for row in curs.fetchall():
name, ext = row[1].split('.') name, ext = row[1].split('.')
new_name = name + '_S.' + ext new_name = name + '_S.' + ext
print " writing %s to %s ..." % (name+'.'+ext, new_name), print(" writing %s to %s ..." % (name+'.'+ext, new_name), end=' ')
open(new_name, 'wb').write(row[2]) open(new_name, 'wb').write(row[2])
print "done" print("done")
print " python type of image data is", type(row[2]) print(" python type of image data is", type(row[2]))
# extract exactly the same data but using a binary cursor # extract exactly the same data but using a binary cursor
print "Extracting the images using a binary cursor:" print("Extracting the images using a binary cursor:")
curs.execute("""DECLARE zot CURSOR FOR curs.execute("""DECLARE zot CURSOR FOR
SELECT img, name FROM test_binary FOR READ ONLY""") SELECT img, name FROM test_binary FOR READ ONLY""")
@ -74,10 +75,10 @@ curs.execute("""FETCH ALL FROM zot""")
for row in curs.fetchall(): for row in curs.fetchall():
name, ext = row[1].split('.') name, ext = row[1].split('.')
new_name = name + '_B.' + ext new_name = name + '_B.' + ext
print " writing %s to %s ..." % (name+'.'+ext, new_name), print(" writing %s to %s ..." % (name+'.'+ext, new_name), end=' ')
open(new_name, 'wb').write(row[0]) open(new_name, 'wb').write(row[0])
print "done" print("done")
print " python type of image data is", type(row[0]) print(" python type of image data is", type(row[0]))
# this rollback is required because we can't drop a table with a binary cursor # this rollback is required because we can't drop a table with a binary cursor
# declared and still open # declared and still open
@ -86,4 +87,4 @@ conn.rollback()
curs.execute("DROP TABLE test_binary") curs.execute("DROP TABLE test_binary")
conn.commit() conn.commit()
print "\nNow try to load the new images, to check it worked!" print("\nNow try to load the new images, to check it worked!")


@ -27,9 +27,9 @@ import psycopg2
if len(sys.argv) > 1: if len(sys.argv) > 1:
DSN = sys.argv[1] DSN = sys.argv[1]
print "Opening connection using dsn:", DSN print("Opening connection using dsn:", DSN)
conn = psycopg2.connect(DSN) conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding print("Encoding for this connection is", conn.encoding)
curs = conn.cursor() curs = conn.cursor()
try: try:
@ -51,16 +51,16 @@ io.close()
io = open('copy_from.txt', 'r') io = open('copy_from.txt', 'r')
curs.copy_from(io, 'test_copy') curs.copy_from(io, 'test_copy')
print "1) Copy %d records from file object " % len(data) + \ print("1) Copy %d records from file object " % len(data) +
"using defaults (sep: \\t and null = \\N)" "using defaults (sep: \\t and null = \\N)")
io.close() io.close()
curs.execute("SELECT * FROM test_copy") curs.execute("SELECT * FROM test_copy")
rows = curs.fetchall() rows = curs.fetchall()
print " Select returned %d rows" % len(rows) print(" Select returned %d rows" % len(rows))
for r in rows: for r in rows:
print " %s %s\t%s" % (r[0], r[1], r[2]) print(" %s %s\t%s" % (r[0], r[1], r[2]))
curs.execute("delete from test_copy") curs.execute("delete from test_copy")
conn.commit() conn.commit()
@ -75,15 +75,15 @@ io.close()
io = open('copy_from.txt', 'r') io = open('copy_from.txt', 'r')
curs.copy_from(io, 'test_copy', ':') curs.copy_from(io, 'test_copy', ':')
print "2) Copy %d records from file object using sep = :" % len(data) print("2) Copy %d records from file object using sep = :" % len(data))
io.close() io.close()
curs.execute("SELECT * FROM test_copy") curs.execute("SELECT * FROM test_copy")
rows = curs.fetchall() rows = curs.fetchall()
print " Select returned %d rows" % len(rows) print(" Select returned %d rows" % len(rows))
for r in rows: for r in rows:
print " %s %s\t%s" % (r[0], r[1], r[2]) print(" %s %s\t%s" % (r[0], r[1], r[2]))
curs.execute("delete from test_copy") curs.execute("delete from test_copy")
conn.commit() conn.commit()
@ -98,15 +98,15 @@ io.close()
io = open('copy_from.txt', 'r') io = open('copy_from.txt', 'r')
curs.copy_from(io, 'test_copy', null='NULL') curs.copy_from(io, 'test_copy', null='NULL')
print "3) Copy %d records from file object using null = NULL" % len(data) print("3) Copy %d records from file object using null = NULL" % len(data))
io.close() io.close()
curs.execute("SELECT * FROM test_copy") curs.execute("SELECT * FROM test_copy")
rows = curs.fetchall() rows = curs.fetchall()
print " Select using cursor returned %d rows" % len(rows) print(" Select using cursor returned %d rows" % len(rows))
for r in rows: for r in rows:
print " %s %s\t%s" % (r[0], r[1], r[2]) print(" %s %s\t%s" % (r[0], r[1], r[2]))
curs.execute("delete from test_copy") curs.execute("delete from test_copy")
conn.commit() conn.commit()
@ -119,16 +119,16 @@ io.close()
io = open('copy_from.txt', 'r') io = open('copy_from.txt', 'r')
curs.copy_from(io, 'test_copy', ':', 'NULL') curs.copy_from(io, 'test_copy', ':', 'NULL')
print "4) Copy %d records from file object " % len(data) + \ print("4) Copy %d records from file object " % len(data) +
"using sep = : and null = NULL" "using sep = : and null = NULL")
io.close() io.close()
curs.execute("SELECT * FROM test_copy") curs.execute("SELECT * FROM test_copy")
rows = curs.fetchall() rows = curs.fetchall()
print " Select using cursor returned %d rows" % len(rows) print(" Select using cursor returned %d rows" % len(rows))
for r in rows: for r in rows:
print " %s %s\t%s" % (r[0], r[1], r[2]) print(" %s %s\t%s" % (r[0], r[1], r[2]))
curs.execute("delete from test_copy") curs.execute("delete from test_copy")
conn.commit() conn.commit()
@ -141,20 +141,20 @@ data.write('\n'.join(['Tom\tJenkins\t37',
data.seek(0) data.seek(0)
curs.copy_from(data, 'test_copy') curs.copy_from(data, 'test_copy')
print "5) Copy 3 records from StringIO object using defaults" print("5) Copy 3 records from StringIO object using defaults")
curs.execute("SELECT * FROM test_copy") curs.execute("SELECT * FROM test_copy")
rows = curs.fetchall() rows = curs.fetchall()
print " Select using cursor returned %d rows" % len(rows) print(" Select using cursor returned %d rows" % len(rows))
for r in rows: for r in rows:
print " %s %s\t%s" % (r[0], r[1], r[2]) print(" %s %s\t%s" % (r[0], r[1], r[2]))
curs.execute("delete from test_copy") curs.execute("delete from test_copy")
conn.commit() conn.commit()
# simple error test # simple error test
print "6) About to raise an error" print("6) About to raise an error")
data = StringIO.StringIO() data = StringIO.StringIO()
data.write('\n'.join(['Tom\tJenkins\t37', data.write('\n'.join(['Tom\tJenkins\t37',
'Madonna\t\N\t45', 'Madonna\t\N\t45',
@ -163,15 +163,12 @@ data.seek(0)
try: try:
curs.copy_from(data, 'test_copy') curs.copy_from(data, 'test_copy')
except StandardError, err: except StandardError as err:
conn.rollback() conn.rollback()
print " Caught error (as expected):\n", err print(" Caught error (as expected):\n", err)
conn.rollback() conn.rollback()
curs.execute("DROP TABLE test_copy") curs.execute("DROP TABLE test_copy")
os.unlink('copy_from.txt') os.unlink('copy_from.txt')
conn.commit() conn.commit()
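
The hunks above only port the copy_from() example to the print() function. For reference, a minimal Python 3 sketch of the same copy_from() pattern (DSN, table and sample rows here are illustrative, not taken from the diff):

    import io
    import psycopg2

    conn = psycopg2.connect("dbname=test")          # assumed DSN
    curs = conn.cursor()
    curs.execute("CREATE TABLE test_copy (fname text, lname text, dob date)")

    # copy_from() reads tab-separated rows from any file-like object
    data = io.StringIO("Tom\tJenkins\t1981-02-11\nDebra\tWalker\t1987-10-05\n")
    curs.copy_from(data, 'test_copy')               # defaults: sep='\t', null='\\N'

    curs.execute("SELECT count(*) FROM test_copy")
    print(curs.fetchone()[0])                       # -> 2
    conn.commit()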

View File

@ -18,6 +18,7 @@
DSN = 'dbname=test' DSN = 'dbname=test'
## don't modify anything below this line (except for experimenting) ## don't modify anything below this line (except for experimenting)
from __future__ import print_function
import sys import sys
import os import os
@ -27,9 +28,9 @@ import psycopg2
if len(sys.argv) > 1: if len(sys.argv) > 1:
DSN = sys.argv[1] DSN = sys.argv[1]
print "Opening connection using dsn:", DSN print("Opening connection using dsn:", DSN)
conn = psycopg2.connect(DSN) conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding print("Encoding for this connection is", conn.encoding)
curs = conn.cursor() curs = conn.cursor()
try: try:
@ -51,52 +52,52 @@ conn.commit()
# copy_to using defaults # copy_to using defaults
io = open('copy_to.txt', 'w') io = open('copy_to.txt', 'w')
curs.copy_to(io, 'test_copy') curs.copy_to(io, 'test_copy')
print "1) Copy %d records into file object using defaults: " % len (data) + \ print("1) Copy %d records into file object using defaults: " % len (data) + \
"sep = \\t and null = \\N" "sep = \\t and null = \\N")
io.close() io.close()
rows = open('copy_to.txt', 'r').readlines() rows = open('copy_to.txt', 'r').readlines()
print " File has %d rows:" % len(rows) print(" File has %d rows:" % len(rows))
for r in rows: for r in rows:
print " ", r, print(" ", r, end=' ')
# copy_to using custom separator # copy_to using custom separator
io = open('copy_to.txt', 'w') io = open('copy_to.txt', 'w')
curs.copy_to(io, 'test_copy', ':') curs.copy_to(io, 'test_copy', ':')
print "2) Copy %d records into file object using sep = :" % len(data) print("2) Copy %d records into file object using sep = :" % len(data))
io.close() io.close()
rows = open('copy_to.txt', 'r').readlines() rows = open('copy_to.txt', 'r').readlines()
print " File has %d rows:" % len(rows) print(" File has %d rows:" % len(rows))
for r in rows: for r in rows:
print " ", r, print(" ", r, end=' ')
# copy_to using custom null identifier # copy_to using custom null identifier
io = open('copy_to.txt', 'w') io = open('copy_to.txt', 'w')
curs.copy_to(io, 'test_copy', null='NULL') curs.copy_to(io, 'test_copy', null='NULL')
print "3) Copy %d records into file object using null = NULL" % len(data) print("3) Copy %d records into file object using null = NULL" % len(data))
io.close() io.close()
rows = open('copy_to.txt', 'r').readlines() rows = open('copy_to.txt', 'r').readlines()
print " File has %d rows:" % len(rows) print(" File has %d rows:" % len(rows))
for r in rows: for r in rows:
print " ", r, print(" ", r, end=' ')
# copy_to using custom separator and null identifier # copy_to using custom separator and null identifier
io = open('copy_to.txt', 'w') io = open('copy_to.txt', 'w')
curs.copy_to(io, 'test_copy', ':', 'NULL') curs.copy_to(io, 'test_copy', ':', 'NULL')
print "4) Copy %d records into file object using sep = : and null ) NULL" % \ print("4) Copy %d records into file object using sep = : and null ) NULL" % \
len(data) len(data))
io.close() io.close()
rows = open('copy_to.txt', 'r').readlines() rows = open('copy_to.txt', 'r').readlines()
print " File has %d rows:" % len(rows) print(" File has %d rows:" % len(rows))
for r in rows: for r in rows:
print " ", r, print(" ", r, end=' ')
curs.execute("DROP TABLE test_copy") curs.execute("DROP TABLE test_copy")
os.unlink('copy_to.txt') os.unlink('copy_to.txt')
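
A corresponding Python 3 sketch of the copy_to() pattern exercised above (DSN, table and sample row assumed):

    import io
    import psycopg2

    conn = psycopg2.connect("dbname=test")          # assumed DSN
    curs = conn.cursor()
    curs.execute("CREATE TABLE test_copy (fname text, lname text, dob date)")
    curs.execute("INSERT INTO test_copy VALUES ('Tom', 'Jenkins', '1981-02-11')")

    # copy_to() writes the table content to any file-like object
    out = io.StringIO()
    curs.copy_to(out, 'test_copy', sep=':', null='NULL')
    print(out.getvalue())                           # -> Tom:Jenkins:1981-02-11
    conn.rollback()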

View File

@ -25,9 +25,9 @@ import psycopg2.extensions
if len(sys.argv) > 1: if len(sys.argv) > 1:
DSN = sys.argv[1] DSN = sys.argv[1]
print "Opening connection using dsn:", DSN print("Opening connection using dsn:", DSN)
conn = psycopg2.connect(DSN) conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding print("Encoding for this connection is", conn.encoding)
class NoDataError(psycopg2.ProgrammingError): class NoDataError(psycopg2.ProgrammingError):
@ -52,12 +52,12 @@ class Cursor(psycopg2.extensions.cursor):
curs = conn.cursor(cursor_factory=Cursor) curs = conn.cursor(cursor_factory=Cursor)
curs.execute("SELECT 1 AS foo") curs.execute("SELECT 1 AS foo")
print "Result of fetchone():", curs.fetchone() print("Result of fetchone():", curs.fetchone())
# now let's raise the exception # now let's raise the exception
try: try:
curs.fetchone() curs.fetchone()
except NoDataError, err: except NoDataError as err:
print "Exception caught:", err print("Exception caught:", err)
conn.rollback() conn.rollback()
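
The example above wraps a cursor factory that raises a custom exception; a self-contained Python 3 sketch of that pattern (names assumed where not visible in the diff):

    import psycopg2
    import psycopg2.extensions

    class NoDataError(psycopg2.ProgrammingError):
        pass

    class Cursor(psycopg2.extensions.cursor):
        """Cursor raising NoDataError instead of returning None on an empty fetch."""
        def fetchone(self):
            res = super().fetchone()
            if res is None:
                raise NoDataError("no more data")
            return res

    conn = psycopg2.connect("dbname=test")          # assumed DSN
    curs = conn.cursor(cursor_factory=Cursor)
    curs.execute("SELECT 1 AS foo")
    print("Result of fetchone():", curs.fetchone())
    try:
        curs.fetchone()                             # result set exhausted
    except NoDataError as err:
        print("Exception caught:", err)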

View File

@ -17,13 +17,6 @@ from datetime import datetime
import psycopg2 import psycopg2
from psycopg2.extensions import adapt, register_adapter from psycopg2.extensions import adapt, register_adapter
try:
sorted()
except:
def sorted(seq):
seq.sort()
return seq
# Here is the adapter for every object that we may ever need to # Here is the adapter for every object that we may ever need to
# insert in the database. It receives the original object and does # insert in the database. It receives the original object and does
# its job on that instance # its job on that instance
@ -93,12 +86,12 @@ persistent_fields = {'Album': ['album_id', 'creation_time', 'binary_data'],
'Order': ['order_id', 'items', 'price'] 'Order': ['order_id', 'items', 'price']
} }
print adapt(Album()).generateInsert() print(adapt(Album()).generateInsert())
print adapt(Album()).generateInsert() print(adapt(Album()).generateInsert())
print adapt(Album()).generateInsert() print(adapt(Album()).generateInsert())
print adapt(Order()).generateInsert() print(adapt(Order()).generateInsert())
print adapt(Order()).generateInsert() print(adapt(Order()).generateInsert())
print adapt(Order()).generateInsert() print(adapt(Order()).generateInsert())
""" """
- Discussion - Discussion
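
The recipe above builds on adapt()/register_adapter(); a minimal sketch of the registration pattern with a hypothetical Tag class (QuotedString is just one possible way to emit the value, not the recipe's own adapter):

    from psycopg2.extensions import adapt, register_adapter, QuotedString

    class Tag:
        """Hypothetical application object to be adapted to SQL."""
        def __init__(self, name):
            self.name = name

    def adapt_tag(tag):
        # represent the object as a properly quoted string literal
        return QuotedString(tag.name)

    register_adapter(Tag, adapt_tag)

    print(adapt(Tag("psycopg")).getquoted())        # -> b"'psycopg'"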

View File

@ -25,41 +25,41 @@ import psycopg2.extras
if len(sys.argv) > 1: if len(sys.argv) > 1:
DSN = sys.argv[1] DSN = sys.argv[1]
print "Opening connection using dsn:", DSN print("Opening connection using dsn:", DSN)
conn = psycopg2.connect(DSN) conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding print("Encoding for this connection is", conn.encoding)
curs = conn.cursor(cursor_factory=psycopg2.extras.DictCursor) curs = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
curs.execute("SELECT 1 AS foo, 'cip' AS bar, date(now()) as zot") curs.execute("SELECT 1 AS foo, 'cip' AS bar, date(now()) as zot")
print "Cursor's row factory is", curs.row_factory print("Cursor's row factory is", curs.row_factory)
data = curs.fetchone() data = curs.fetchone()
print "The type of the data row is", type(data) print("The type of the data row is", type(data))
print "Some data accessed both as tuple and dict:" print("Some data accessed both as tuple and dict:")
print " ", data['foo'], data['bar'], data['zot'] print(" ", data['foo'], data['bar'], data['zot'])
print " ", data[0], data[1], data[2] print(" ", data[0], data[1], data[2])
# execute another query and demostrate we can still access the row # execute another query and demostrate we can still access the row
curs.execute("SELECT 2 AS foo") curs.execute("SELECT 2 AS foo")
print "The type of the data row is", type(data) print("The type of the data row is", type(data))
print "Some more data accessed both as tuple and dict:" print("Some more data accessed both as tuple and dict:")
print " ", data['foo'], data['bar'], data['zot'] print(" ", data['foo'], data['bar'], data['zot'])
print " ", data[0], data[1], data[2] print(" ", data[0], data[1], data[2])
curs = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) curs = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
curs.execute("SELECT 1 AS foo, 'cip' AS bar, date(now()) as zot") curs.execute("SELECT 1 AS foo, 'cip' AS bar, date(now()) as zot")
print "Cursor's row factory is", curs.row_factory print("Cursor's row factory is", curs.row_factory)
data = curs.fetchone() data = curs.fetchone()
print "The type of the data row is", type(data) print("The type of the data row is", type(data))
print "Some data accessed both as tuple and dict:" print("Some data accessed both as tuple and dict:")
print " ", data['foo'], data['bar'], data['zot'] print(" ", data['foo'], data['bar'], data['zot'])
print " ", "No access using indices: this is a specialized cursor." print(" ", "No access using indices: this is a specialized cursor.")
# execute another query and demostrate we can still access the row # execute another query and demostrate we can still access the row
curs.execute("SELECT 2 AS foo") curs.execute("SELECT 2 AS foo")
print "The type of the data row is", type(data) print("The type of the data row is", type(data))
print "Some more data accessed both as tuple and dict:" print("Some more data accessed both as tuple and dict:")
print " ", data['foo'], data['bar'], data['zot'] print(" ", data['foo'], data['bar'], data['zot'])
print " ", "No access using indices: this is a specialized cursor." print(" ", "No access using indices: this is a specialized cursor.")

View File

@ -28,7 +28,7 @@ from psycopg2.extensions import adapt
if len(sys.argv) > 1: if len(sys.argv) > 1:
DSN = sys.argv[1] DSN = sys.argv[1]
print "Opening connection using dsn:", DSN print("Opening connection using dsn:", DSN)
conn = psycopg2.connect(DSN) conn = psycopg2.connect(DSN)
curs = conn.cursor() curs = conn.cursor()
@ -52,9 +52,9 @@ mx1 = (
from psycopg2.extensions import adapt from psycopg2.extensions import adapt
import psycopg2.extras import psycopg2.extras
print adapt(mx1) print(adapt(mx1))
print "Inserting mx.DateTime values..." print("Inserting mx.DateTime values...")
curs.execute("INSERT INTO test_dt VALUES (%s, %s, %s, %s, %s)", mx1) curs.execute("INSERT INTO test_dt VALUES (%s, %s, %s, %s, %s)", mx1)
# build and insert some values using the datetime adapters # build and insert some values using the datetime adapters
@ -65,11 +65,11 @@ dt1 = (
datetime.datetime(2004, 10, 19, 0, 11, 17, 500000), datetime.datetime(2004, 10, 19, 0, 11, 17, 500000),
datetime.timedelta(13, 15*3600+17*60+59, 900000)) datetime.timedelta(13, 15*3600+17*60+59, 900000))
print "Inserting Python datetime values..." print("Inserting Python datetime values...")
curs.execute("INSERT INTO test_dt VALUES (%s, %s, %s, %s, %s)", dt1) curs.execute("INSERT INTO test_dt VALUES (%s, %s, %s, %s, %s)", dt1)
# now extract the row from database and print them # now extract the row from database and print them
print "Extracting values inserted with mx.DateTime wrappers:" print("Extracting values inserted with mx.DateTime wrappers:")
curs.execute("SELECT d, t, dt, z FROM test_dt WHERE k = 1") curs.execute("SELECT d, t, dt, z FROM test_dt WHERE k = 1")
for n, x in zip(mx1[1:], curs.fetchone()): for n, x in zip(mx1[1:], curs.fetchone()):
try: try:
@ -80,10 +80,10 @@ for n, x in zip(mx1[1:], curs.fetchone()):
except: except:
s = repr(n) + "\n -> " + str(adapt(n)) + \ s = repr(n) + "\n -> " + str(adapt(n)) + \
"\n -> " + repr(x) + "\n -> " + str(x) "\n -> " + repr(x) + "\n -> " + str(x)
print s print(s)
print print()
print "Extracting values inserted with Python datetime wrappers:" print("Extracting values inserted with Python datetime wrappers:")
curs.execute("SELECT d, t, dt, z FROM test_dt WHERE k = 2") curs.execute("SELECT d, t, dt, z FROM test_dt WHERE k = 2")
for n, x in zip(dt1[1:], curs.fetchone()): for n, x in zip(dt1[1:], curs.fetchone()):
try: try:
@ -92,8 +92,8 @@ for n, x in zip(dt1[1:], curs.fetchone()):
s = repr(n) + "\n -> " + repr(x) + "\n -> " + x.isoformat() s = repr(n) + "\n -> " + repr(x) + "\n -> " + x.isoformat()
except: except:
s = repr(n) + "\n -> " + repr(x) + "\n -> " + str(x) s = repr(n) + "\n -> " + repr(x) + "\n -> " + str(x)
print s print(s)
print print()
curs.execute("DROP TABLE test_dt") curs.execute("DROP TABLE test_dt")
conn.commit() conn.commit()
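
A minimal Python 3 sketch of the datetime adaptation demonstrated above (DSN assumed; the exact literal produced by adapt() may vary between versions):

    import datetime
    import psycopg2
    from psycopg2.extensions import adapt

    conn = psycopg2.connect("dbname=test")          # assumed DSN
    curs = conn.cursor()

    # datetime objects are adapted to the matching PostgreSQL literals
    print(adapt(datetime.date(2004, 10, 19)).getquoted())   # e.g. b"'2004-10-19'::date"

    curs.execute("SELECT %s::timestamp, %s::interval",
                 (datetime.datetime(2004, 10, 19, 10, 23, 54),
                  datetime.timedelta(days=13, seconds=7200)))
    print(curs.fetchone())      # comes back as (datetime.datetime, datetime.timedelta)
    conn.rollback()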

View File

@ -26,80 +26,80 @@ import psycopg2.extensions
if len(sys.argv) > 1: if len(sys.argv) > 1:
DSN = sys.argv[1] DSN = sys.argv[1]
print "Opening connection using dsn:", DSN print("Opening connection using dsn:", DSN)
conn = psycopg2.connect(DSN) conn = psycopg2.connect(DSN)
print "Initial encoding for this connection is", conn.encoding print("Initial encoding for this connection is", conn.encoding)
print "\n** This example is supposed to be run in a UNICODE terminal! **\n" print("\n** This example is supposed to be run in a UNICODE terminal! **\n")
print "Available encodings:" print("Available encodings:")
encs = psycopg2.extensions.encodings.items() encs = psycopg2.extensions.encodings.items()
encs.sort() encs.sort()
for a, b in encs: for a, b in encs:
print " ", a, "<->", b print(" ", a, "<->", b)
print "Using STRING typecaster" print("Using STRING typecaster")
print "Setting backend encoding to LATIN1 and executing queries:" print("Setting backend encoding to LATIN1 and executing queries:")
conn.set_client_encoding('LATIN1') conn.set_client_encoding('LATIN1')
curs = conn.cursor() curs = conn.cursor()
curs.execute("SELECT %s::TEXT AS foo", ('àèìòù',)) curs.execute("SELECT %s::TEXT AS foo", ('àèìòù',))
x = curs.fetchone()[0] x = curs.fetchone()[0]
print " ->", unicode(x, 'latin-1').encode('utf-8'), type(x) print(" ->", unicode(x, 'latin-1').encode('utf-8'), type(x))
curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù',)) curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù',))
x = curs.fetchone()[0] x = curs.fetchone()[0]
print " ->", unicode(x, 'latin-1').encode('utf-8'), type(x) print(" ->", unicode(x, 'latin-1').encode('utf-8'), type(x))
print "Setting backend encoding to UTF8 and executing queries:" print("Setting backend encoding to UTF8 and executing queries:")
conn.set_client_encoding('UNICODE') conn.set_client_encoding('UNICODE')
curs = conn.cursor() curs = conn.cursor()
curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù'.encode('utf-8'),)) curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù'.encode('utf-8'),))
x = curs.fetchone()[0] x = curs.fetchone()[0]
print " ->", x, type(x) print(" ->", x, type(x))
curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù',)) curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù',))
x = curs.fetchone()[0] x = curs.fetchone()[0]
print " ->", x, type(x) print(" ->", x, type(x))
print "Using UNICODE typecaster" print("Using UNICODE typecaster")
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
print "Setting backend encoding to LATIN1 and executing queries:" print("Setting backend encoding to LATIN1 and executing queries:")
conn.set_client_encoding('LATIN1') conn.set_client_encoding('LATIN1')
curs = conn.cursor() curs = conn.cursor()
curs.execute("SELECT %s::TEXT AS foo", ('àèìòù',)) curs.execute("SELECT %s::TEXT AS foo", ('àèìòù',))
x = curs.fetchone()[0] x = curs.fetchone()[0]
print " ->", x.encode('utf-8'), ":", type(x) print(" ->", x.encode('utf-8'), ":", type(x))
curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù',)) curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù',))
x = curs.fetchone()[0] x = curs.fetchone()[0]
print " ->", x.encode('utf-8'), ":", type(x) print(" ->", x.encode('utf-8'), ":", type(x))
print "Setting backend encoding to UTF8 and executing queries:" print("Setting backend encoding to UTF8 and executing queries:")
conn.set_client_encoding('UNICODE') conn.set_client_encoding('UNICODE')
curs = conn.cursor() curs = conn.cursor()
curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù'.encode('utf-8'),)) curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù'.encode('utf-8'),))
x = curs.fetchone()[0] x = curs.fetchone()[0]
print " ->", x.encode('utf-8'), ":", type(x) print(" ->", x.encode('utf-8'), ":", type(x))
curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù',)) curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù',))
x = curs.fetchone()[0] x = curs.fetchone()[0]
print " ->", x.encode('utf-8'), ":", type(x) print(" ->", x.encode('utf-8'), ":", type(x))
print "Executing full UNICODE queries" print("Executing full UNICODE queries")
print "Setting backend encoding to LATIN1 and executing queries:" print("Setting backend encoding to LATIN1 and executing queries:")
conn.set_client_encoding('LATIN1') conn.set_client_encoding('LATIN1')
curs = conn.cursor() curs = conn.cursor()
curs.execute(u"SELECT %s::TEXT AS foo", ('àèìòù',)) curs.execute(u"SELECT %s::TEXT AS foo", ('àèìòù',))
x = curs.fetchone()[0] x = curs.fetchone()[0]
print " ->", x.encode('utf-8'), ":", type(x) print(" ->", x.encode('utf-8'), ":", type(x))
curs.execute(u"SELECT %s::TEXT AS foo", (u'àèìòù',)) curs.execute(u"SELECT %s::TEXT AS foo", (u'àèìòù',))
x = curs.fetchone()[0] x = curs.fetchone()[0]
print " ->", x.encode('utf-8'), ":", type(x) print(" ->", x.encode('utf-8'), ":", type(x))
print "Setting backend encoding to UTF8 and executing queries:" print("Setting backend encoding to UTF8 and executing queries:")
conn.set_client_encoding('UNICODE') conn.set_client_encoding('UNICODE')
curs = conn.cursor() curs = conn.cursor()
curs.execute(u"SELECT %s::TEXT AS foo", (u'àèìòù'.encode('utf-8'),)) curs.execute(u"SELECT %s::TEXT AS foo", (u'àèìòù'.encode('utf-8'),))
x = curs.fetchone()[0] x = curs.fetchone()[0]
print " ->", x.encode('utf-8'), ":", type(x) print(" ->", x.encode('utf-8'), ":", type(x))
curs.execute(u"SELECT %s::TEXT AS foo", (u'àèìòù',)) curs.execute(u"SELECT %s::TEXT AS foo", (u'àèìòù',))
x = curs.fetchone()[0] x = curs.fetchone()[0]
print " ->", x.encode('utf-8'), ":", type(x) print(" ->", x.encode('utf-8'), ":", type(x))

View File

@ -24,9 +24,9 @@ import psycopg2
if len(sys.argv) > 1: if len(sys.argv) > 1:
DSN = sys.argv[1] DSN = sys.argv[1]
print "Opening connection using dsn:", DSN print("Opening connection using dsn:", DSN)
conn = psycopg2.connect(DSN) conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding print("Encoding for this connection is", conn.encoding)
curs = conn.cursor() curs = conn.cursor()
try: try:
@ -68,12 +68,12 @@ conn.commit()
ncurs = conn.cursor("crs") ncurs = conn.cursor("crs")
ncurs.execute("SELECT * FROM test_fetch") ncurs.execute("SELECT * FROM test_fetch")
print "First 10 rows:", flatten(ncurs.fetchmany(10)) print("First 10 rows:", flatten(ncurs.fetchmany(10)))
ncurs.scroll(-5) ncurs.scroll(-5)
print "Moved back cursor by 5 rows (to row 5.)" print("Moved back cursor by 5 rows (to row 5.)")
print "Another 10 rows:", flatten(ncurs.fetchmany(10)) print("Another 10 rows:", flatten(ncurs.fetchmany(10)))
print "Another one:", list(ncurs.fetchone()) print("Another one:", list(ncurs.fetchone()))
print "The remaining rows:", flatten(ncurs.fetchall()) print("The remaining rows:", flatten(ncurs.fetchall()))
conn.rollback() conn.rollback()
curs.execute("DROP TABLE test_fetch") curs.execute("DROP TABLE test_fetch")

View File

@ -23,7 +23,7 @@ import sys, psycopg2
if len(sys.argv) > 1: if len(sys.argv) > 1:
DSN = sys.argv[1] DSN = sys.argv[1]
print "Opening connection using dsn:", DSN print("Opening connection using dsn:", DSN)
conn = psycopg2.connect(DSN) conn = psycopg2.connect(DSN)
curs = conn.cursor() curs = conn.cursor()
@ -42,18 +42,18 @@ curs.execute("""INSERT INTO test_oid
VALUES (%(name)s, %(surname)s)""", data[0]) VALUES (%(name)s, %(surname)s)""", data[0])
foid = curs.lastrowid foid = curs.lastrowid
print "Oid for %(name)s %(surname)s" % data[0], "is", foid print("Oid for %(name)s %(surname)s" % data[0], "is", foid)
curs.execute("""INSERT INTO test_oid curs.execute("""INSERT INTO test_oid
VALUES (%(name)s, %(surname)s)""", data[1]) VALUES (%(name)s, %(surname)s)""", data[1])
moid = curs.lastrowid moid = curs.lastrowid
print "Oid for %(name)s %(surname)s" % data[1], "is", moid print("Oid for %(name)s %(surname)s" % data[1], "is", moid)
curs.execute("SELECT * FROM test_oid WHERE oid = %s", (foid,)) curs.execute("SELECT * FROM test_oid WHERE oid = %s", (foid,))
print "Oid", foid, "selected %s %s" % curs.fetchone() print("Oid", foid, "selected %s %s" % curs.fetchone())
curs.execute("SELECT * FROM test_oid WHERE oid = %s", (moid,)) curs.execute("SELECT * FROM test_oid WHERE oid = %s", (moid,))
print "Oid", moid, "selected %s %s" % curs.fetchone() print("Oid", moid, "selected %s %s" % curs.fetchone())
curs.execute("DROP TABLE test_oid") curs.execute("DROP TABLE test_oid")
conn.commit() conn.commit()

View File

@ -24,68 +24,68 @@ import psycopg2
if len(sys.argv) > 1: if len(sys.argv) > 1:
DSN = sys.argv[1] DSN = sys.argv[1]
print "Opening connection using dsn:", DSN print("Opening connection using dsn:", DSN)
conn = psycopg2.connect(DSN) conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding print("Encoding for this connection is", conn.encoding)
# this will create a large object with a new random oid, we'll # this will create a large object with a new random oid, we'll
# use it to make some basic tests about read/write and seek. # use it to make some basic tests about read/write and seek.
lobj = conn.lobject() lobj = conn.lobject()
loid = lobj.oid loid = lobj.oid
print "Created a new large object with oid", loid print("Created a new large object with oid", loid)
print "Manually importing some binary data into the object:" print("Manually importing some binary data into the object:")
data = open("somehackers.jpg").read() data = open("somehackers.jpg").read()
len = lobj.write(data) len = lobj.write(data)
print " imported", len, "bytes of data" print(" imported", len, "bytes of data")
conn.commit() conn.commit()
print "Trying to (re)open large object with oid", loid print("Trying to (re)open large object with oid", loid)
lobj = conn.lobject(loid) lobj = conn.lobject(loid)
print "Manually exporting the data from the lobject:" print("Manually exporting the data from the lobject:")
data1 = lobj.read() data1 = lobj.read()
len = lobj.tell() len = lobj.tell()
lobj.seek(0, 0) lobj.seek(0, 0)
data2 = lobj.read() data2 = lobj.read()
if data1 != data2: if data1 != data2:
print "ERROR: read after seek returned different data" print("ERROR: read after seek returned different data")
open("somehackers_lobject1.jpg", 'wb').write(data1) open("somehackers_lobject1.jpg", 'wb').write(data1)
print " written", len, "bytes of data to somehackers_lobject1.jpg" print(" written", len, "bytes of data to somehackers_lobject1.jpg")
lobj.unlink() lobj.unlink()
print "Large object with oid", loid, "removed" print("Large object with oid", loid, "removed")
conn.commit() conn.commit()
# now we try to use the import and export functions to do the same # now we try to use the import and export functions to do the same
lobj = conn.lobject(0, 'n', 0, "somehackers.jpg") lobj = conn.lobject(0, 'n', 0, "somehackers.jpg")
loid = lobj.oid loid = lobj.oid
print "Imported a new large object with oid", loid print("Imported a new large object with oid", loid)
conn.commit() conn.commit()
print "Trying to (re)open large object with oid", loid print("Trying to (re)open large object with oid", loid)
lobj = conn.lobject(loid, 'n') lobj = conn.lobject(loid, 'n')
print "Using export() to export the data from the large object:" print("Using export() to export the data from the large object:")
lobj.export("somehackers_lobject2.jpg") lobj.export("somehackers_lobject2.jpg")
print " exported large object to somehackers_lobject2.jpg" print(" exported large object to somehackers_lobject2.jpg")
lobj.unlink() lobj.unlink()
print "Large object with oid", loid, "removed" print("Large object with oid", loid, "removed")
conn.commit() conn.commit()
# this will create a very large object with a new random oid. # this will create a very large object with a new random oid.
lobj = conn.lobject() lobj = conn.lobject()
loid = lobj.oid loid = lobj.oid
print "Created a new large object with oid", loid print("Created a new large object with oid", loid)
print "Manually importing a lot of data into the object:" print("Manually importing a lot of data into the object:")
data = "data" * 1000000 data = "data" * 1000000
len = lobj.write(data) len = lobj.write(data)
print " imported", len, "bytes of data" print(" imported", len, "bytes of data")
conn.rollback() conn.rollback()
print "\nNow try to load the new images, to check it worked!" print("\nNow try to load the new images, to check it worked!")

View File

@ -24,10 +24,10 @@ import sys, psycopg2
if len(sys.argv) > 1: if len(sys.argv) > 1:
DSN = sys.argv[1] DSN = sys.argv[1]
print "Opening connection using dsn:", DSN print("Opening connection using dsn:", DSN)
conn = psycopg2.connect(DSN) conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding print("Encoding for this connection is", conn.encoding)
curs = conn.cursor() curs = conn.cursor()
curs.execute("SELECT %(foo)s AS foo", {'foo':'bar'}) curs.execute("SELECT %(foo)s AS foo", {'foo':'bar'})
@ -37,11 +37,11 @@ curs.execute("SELECT %(foo)s AS foo", {'foo':42})
curs.execute("SELECT %(foo)s AS foo", {'foo':u'yatt<EFBFBD>!'}) curs.execute("SELECT %(foo)s AS foo", {'foo':u'yatt<EFBFBD>!'})
curs.execute("SELECT %(foo)s AS foo", {'foo':u'bar'}) curs.execute("SELECT %(foo)s AS foo", {'foo':u'bar'})
print curs.mogrify("SELECT %(foo)s AS foo", {'foo':'bar'}) print(curs.mogrify("SELECT %(foo)s AS foo", {'foo':'bar'}))
print curs.mogrify("SELECT %(foo)s AS foo", {'foo':None}) print(curs.mogrify("SELECT %(foo)s AS foo", {'foo':None}))
print curs.mogrify("SELECT %(foo)s AS foo", {'foo':True}) print(curs.mogrify("SELECT %(foo)s AS foo", {'foo':True}))
print curs.mogrify("SELECT %(foo)s AS foo", {'foo':42}) print(curs.mogrify("SELECT %(foo)s AS foo", {'foo':42}))
print curs.mogrify("SELECT %(foo)s AS foo", {'foo':u'yatt<EFBFBD>!'}) print(curs.mogrify("SELECT %(foo)s AS foo", {'foo':u'yatt<EFBFBD>!'}))
print curs.mogrify("SELECT %(foo)s AS foo", {'foo':u'bar'}) print(curs.mogrify("SELECT %(foo)s AS foo", {'foo':u'bar'}))
conn.rollback() conn.rollback()
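
A short sketch of mogrify() on Python 3: it returns the byte string that execute() would send (DSN assumed):

    import psycopg2

    conn = psycopg2.connect("dbname=test")          # assumed DSN
    curs = conn.cursor()

    print(curs.mogrify("SELECT %(foo)s AS foo", {'foo': 'bar'}))   # b"SELECT 'bar' AS foo"
    print(curs.mogrify("SELECT %(foo)s AS foo", {'foo': None}))    # b"SELECT NULL AS foo"
    print(curs.mogrify("SELECT %(foo)s AS foo", {'foo': 42}))      # b"SELECT 42 AS foo"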

View File

@ -122,5 +122,5 @@ register_adapter(int, AsIs)
# the SQL_IN class by calling psycopg's adapt() directly: # the SQL_IN class by calling psycopg's adapt() directly:
if __name__ == '__main__': if __name__ == '__main__':
print "Note how the string will be SQL-quoted, but the number will not:" print("Note how the string will be SQL-quoted, but the number will not:")
print psycoadapt(("this is an 'sql quoted' str\\ing", 1, 2.0)) print(psycoadapt(("this is an 'sql quoted' str\\ing", 1, 2.0)))

View File

@ -26,20 +26,20 @@ from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
if len(sys.argv) > 1: if len(sys.argv) > 1:
DSN = sys.argv[1] DSN = sys.argv[1]
print "Opening connection using dsn:", DSN print("Opening connection using dsn:", DSN)
conn = psycopg2.connect(DSN) conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding print("Encoding for this connection is", conn.encoding)
conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
curs = conn.cursor() curs = conn.cursor()
curs.execute("listen test") curs.execute("listen test")
print "Waiting for 'NOTIFY test'" print("Waiting for 'NOTIFY test'")
while 1: while 1:
if select.select([conn],[],[],5)==([],[],[]): if select.select([conn],[],[],5)==([],[],[]):
print "Timeout" print("Timeout")
else: else:
conn.poll() conn.poll()
while conn.notifies: while conn.notifies:
print "Got NOTIFY:", conn.notifies.pop() print("Got NOTIFY:", conn.notifies.pop())

View File

@ -30,17 +30,17 @@ import psycopg2
if len(sys.argv) > 1: if len(sys.argv) > 1:
DSN = sys.argv[1] DSN = sys.argv[1]
print "Opening connection using dsn:", DSN print("Opening connection using dsn:", DSN)
conn = psycopg2.connect(DSN) conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding print("Encoding for this connection is", conn.encoding)
curs = conn.cursor() curs = conn.cursor()
curs.execute("SELECT 1 AS foo") curs.execute("SELECT 1 AS foo")
print curs.fetchone() print(curs.fetchone())
curs.execute("SELECT 1 AS foo") curs.execute("SELECT 1 AS foo")
print curs.fetchmany() print(curs.fetchmany())
curs.execute("SELECT 1 AS foo") curs.execute("SELECT 1 AS foo")
print curs.fetchall() print(curs.fetchall())
conn.rollback() conn.rollback()

View File

@ -45,7 +45,7 @@ if len(sys.argv) > 1:
if len(sys.argv) > 2: if len(sys.argv) > 2:
MODE = int(sys.argv[2]) MODE = int(sys.argv[2])
print "Opening connection using dsn:", DSN print("Opening connection using dsn:", DSN)
conn = psycopg2.connect(DSN) conn = psycopg2.connect(DSN)
curs = conn.cursor() curs = conn.cursor()
@ -77,16 +77,16 @@ def insert_func(conn_or_pool, rows):
if MODE == 1: if MODE == 1:
conn_or_pool.putconn(conn) conn_or_pool.putconn(conn)
s = name + ": COMMIT STEP " + str(i) s = name + ": COMMIT STEP " + str(i)
print s print(s)
if MODE == 1: if MODE == 1:
conn = conn_or_pool.getconn() conn = conn_or_pool.getconn()
c = conn.cursor() c = conn.cursor()
try: try:
c.execute("INSERT INTO test_threads VALUES (%s, %s, %s)", c.execute("INSERT INTO test_threads VALUES (%s, %s, %s)",
(str(i), i, float(i))) (str(i), i, float(i)))
except psycopg2.ProgrammingError, err: except psycopg2.ProgrammingError as err:
print name, ": an error occurred; skipping this insert" print(name, ": an error occurred; skipping this insert")
print err print(err)
conn.commit() conn.commit()
## a nice select function that prints the current number of rows in the ## a nice select function that prints the current number of rows in the
@ -112,10 +112,10 @@ def select_func(conn_or_pool, z):
if MODE == 1: if MODE == 1:
conn_or_pool.putconn(conn) conn_or_pool.putconn(conn)
s = name + ": number of rows fetched: " + str(len(l)) s = name + ": number of rows fetched: " + str(len(l))
print s print(s)
except psycopg2.ProgrammingError, err: except psycopg2.ProgrammingError as err:
print name, ": an error occurred; skipping this select" print(name, ": an error occurred; skipping this select")
print err print(err)
## create the connection pool or the connections ## create the connection pool or the connections
if MODE == 0: if MODE == 0:
@ -129,14 +129,14 @@ else:
## create the threads ## create the threads
threads = [] threads = []
print "Creating INSERT threads:" print("Creating INSERT threads:")
for name in INSERT_THREADS: for name in INSERT_THREADS:
t = threading.Thread(None, insert_func, 'Thread-'+name, t = threading.Thread(None, insert_func, 'Thread-'+name,
(conn_insert, ROWS)) (conn_insert, ROWS))
t.setDaemon(0) t.setDaemon(0)
threads.append(t) threads.append(t)
print "Creating SELECT threads:" print("Creating SELECT threads:")
for name in SELECT_THREADS: for name in SELECT_THREADS:
t = threading.Thread(None, select_func, 'Thread-'+name, t = threading.Thread(None, select_func, 'Thread-'+name,
(conn_select, SELECT_DIV)) (conn_select, SELECT_DIV))
@ -150,12 +150,12 @@ for t in threads:
# and wait for them to finish # and wait for them to finish
for t in threads: for t in threads:
t.join() t.join()
print t.getName(), "exited OK" print(t.getName(), "exited OK")
conn.commit() conn.commit()
curs.execute("SELECT count(name) FROM test_threads") curs.execute("SELECT count(name) FROM test_threads")
print "Inserted", curs.fetchone()[0], "rows." print("Inserted", curs.fetchone()[0], "rows.")
curs.execute("DROP TABLE test_threads") curs.execute("DROP TABLE test_threads")
conn.commit() conn.commit()
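
The threading example above juggles both plain connections and a pool; a minimal sketch of the pooled variant using ThreadedConnectionPool (DSN and worker body assumed):

    import threading
    import psycopg2
    from psycopg2.pool import ThreadedConnectionPool

    pool = ThreadedConnectionPool(2, 10, "dbname=test")     # assumed DSN

    def worker(n):
        conn = pool.getconn()                # borrow a connection from the pool
        try:
            curs = conn.cursor()
            curs.execute("SELECT %s", (n,))
            print(curs.fetchone())
            conn.commit()
        finally:
            pool.putconn(conn)               # always return it

    threads = [threading.Thread(target=worker, args=(i,)) for i in range(5)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    pool.closeall()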

View File

@ -29,14 +29,14 @@ import psycopg2.extensions
if len(sys.argv) > 1: if len(sys.argv) > 1:
DSN = sys.argv[1] DSN = sys.argv[1]
print "Opening connection using dsn:", DSN print("Opening connection using dsn:", DSN)
conn = psycopg2.connect(DSN) conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding print("Encoding for this connection is", conn.encoding)
curs = conn.cursor() curs = conn.cursor()
curs.execute("SELECT 'text'::text AS foo") curs.execute("SELECT 'text'::text AS foo")
textoid = curs.description[0][1] textoid = curs.description[0][1]
print "Oid for the text datatype is", textoid print("Oid for the text datatype is", textoid)
def castA(s, curs): def castA(s, curs):
if s is not None: return "(A) " + s if s is not None: return "(A) " + s
@ -48,20 +48,18 @@ TYPEB = psycopg2.extensions.new_type((textoid,), "TYPEB", castB)
curs = conn.cursor() curs = conn.cursor()
curs.execute("SELECT 'some text.'::text AS foo") curs.execute("SELECT 'some text.'::text AS foo")
print "Some text from plain connection:", curs.fetchone()[0] print("Some text from plain connection:", curs.fetchone()[0])
psycopg2.extensions.register_type(TYPEA, conn) psycopg2.extensions.register_type(TYPEA, conn)
curs = conn.cursor() curs = conn.cursor()
curs.execute("SELECT 'some text.'::text AS foo") curs.execute("SELECT 'some text.'::text AS foo")
print "Some text from connection with typecaster:", curs.fetchone()[0] print("Some text from connection with typecaster:", curs.fetchone()[0])
curs = conn.cursor() curs = conn.cursor()
psycopg2.extensions.register_type(TYPEB, curs) psycopg2.extensions.register_type(TYPEB, curs)
curs.execute("SELECT 'some text.'::text AS foo") curs.execute("SELECT 'some text.'::text AS foo")
print "Some text from cursor with typecaster:", curs.fetchone()[0] print("Some text from cursor with typecaster:", curs.fetchone()[0])
curs = conn.cursor() curs = conn.cursor()
curs.execute("SELECT 'some text.'::text AS foo") curs.execute("SELECT 'some text.'::text AS foo")
print "Some text from connection with typecaster again:", curs.fetchone()[0] print("Some text from connection with typecaster again:", curs.fetchone()[0])

View File

@ -28,7 +28,7 @@ from psycopg2.tz import ZERO, LOCAL, FixedOffsetTimezone
if len(sys.argv) > 1: if len(sys.argv) > 1:
DSN = sys.argv[1] DSN = sys.argv[1]
print "Opening connection using dsn:", DSN print("Opening connection using dsn:", DSN)
conn = psycopg2.connect(DSN) conn = psycopg2.connect(DSN)
curs = conn.cursor() curs = conn.cursor()
@ -42,27 +42,27 @@ conn.commit()
d = datetime.datetime(1971, 10, 19, 22, 30, 0, tzinfo=LOCAL) d = datetime.datetime(1971, 10, 19, 22, 30, 0, tzinfo=LOCAL)
curs.execute("INSERT INTO test_tz VALUES (%s)", (d,)) curs.execute("INSERT INTO test_tz VALUES (%s)", (d,))
print "Inserted timestamp with timezone:", d print("Inserted timestamp with timezone:", d)
print "Time zone:", d.tzinfo.tzname(d), "offset:", d.tzinfo.utcoffset(d) print("Time zone:", d.tzinfo.tzname(d), "offset:", d.tzinfo.utcoffset(d))
tz = FixedOffsetTimezone(-5*60, "EST") tz = FixedOffsetTimezone(-5*60, "EST")
d = datetime.datetime(1971, 10, 19, 22, 30, 0, tzinfo=tz) d = datetime.datetime(1971, 10, 19, 22, 30, 0, tzinfo=tz)
curs.execute("INSERT INTO test_tz VALUES (%s)", (d,)) curs.execute("INSERT INTO test_tz VALUES (%s)", (d,))
print "Inserted timestamp with timezone:", d print("Inserted timestamp with timezone:", d)
print "Time zone:", d.tzinfo.tzname(d), "offset:", d.tzinfo.utcoffset(d) print("Time zone:", d.tzinfo.tzname(d), "offset:", d.tzinfo.utcoffset(d))
curs.execute("SELECT * FROM test_tz") curs.execute("SELECT * FROM test_tz")
d = curs.fetchone()[0] d = curs.fetchone()[0]
curs.execute("INSERT INTO test_tz VALUES (%s)", (d,)) curs.execute("INSERT INTO test_tz VALUES (%s)", (d,))
print "Inserted SELECTed timestamp:", d print("Inserted SELECTed timestamp:", d)
print "Time zone:", d.tzinfo.tzname(d), "offset:", d.tzinfo.utcoffset(d) print("Time zone:", d.tzinfo.tzname(d), "offset:", d.tzinfo.utcoffset(d))
curs.execute("SELECT * FROM test_tz") curs.execute("SELECT * FROM test_tz")
for d in curs: for d in curs:
u = d[0].utcoffset() or ZERO u = d[0].utcoffset() or ZERO
print "UTC time: ", d[0] - u print("UTC time: ", d[0] - u)
print "Local time:", d[0] print("Local time:", d[0])
print "Time zone:", d[0].tzinfo.tzname(d[0]), d[0].tzinfo.utcoffset(d[0]) print("Time zone:", d[0].tzinfo.tzname(d[0]), d[0].tzinfo.utcoffset(d[0]))
curs.execute("DROP TABLE test_tz") curs.execute("DROP TABLE test_tz")

View File

@ -33,9 +33,9 @@ import psycopg2.extras
if len(sys.argv) > 1: if len(sys.argv) > 1:
DSN = sys.argv[1] DSN = sys.argv[1]
print "Opening connection using dsn:", DSN print("Opening connection using dsn:", DSN)
conn = psycopg2.connect(DSN) conn = psycopg2.connect(DSN)
print "Initial encoding for this connection is", conn.encoding print("Initial encoding for this connection is", conn.encoding)
curs = conn.cursor() curs = conn.cursor()
try: try:
@ -98,7 +98,7 @@ class Rect(object):
# here we select from the empty table, just to grab the description # here we select from the empty table, just to grab the description
curs.execute("SELECT b FROM test_cast WHERE 0=1") curs.execute("SELECT b FROM test_cast WHERE 0=1")
boxoid = curs.description[0][1] boxoid = curs.description[0][1]
print "Oid for the box datatype is", boxoid print("Oid for the box datatype is", boxoid)
# and build the user cast object # and build the user cast object
BOX = psycopg2.extensions.new_type((boxoid,), "BOX", Rect) BOX = psycopg2.extensions.new_type((boxoid,), "BOX", Rect)
@ -113,14 +113,14 @@ for i in range(100):
whrandom.randint(0,100), whrandom.randint(0,100)) whrandom.randint(0,100), whrandom.randint(0,100))
curs.execute("INSERT INTO test_cast VALUES ('%(p1)s', '%(p2)s', %(box)s)", curs.execute("INSERT INTO test_cast VALUES ('%(p1)s', '%(p2)s', %(box)s)",
{'box':b, 'p1':p1, 'p2':p2}) {'box':b, 'p1':p1, 'p2':p2})
print "Added 100 boxed to the database" print("Added 100 boxed to the database")
# select and print all boxes with at least one point inside # select and print all boxes with at least one point inside
curs.execute("SELECT b FROM test_cast WHERE p1 @ b OR p2 @ b") curs.execute("SELECT b FROM test_cast WHERE p1 @ b OR p2 @ b")
boxes = curs.fetchall() boxes = curs.fetchall()
print "Found %d boxes with at least a point inside:" % len(boxes) print("Found %d boxes with at least a point inside:" % len(boxes))
for box in boxes: for box in boxes:
print " ", box[0].show() print(" ", box[0].show())
curs.execute("DROP TABLE test_cast") curs.execute("DROP TABLE test_cast")
conn.commit() conn.commit()

View File

@ -72,14 +72,10 @@ _ext.register_adapter(type(None), _ext.NoneAdapter)
# Register the Decimal adapter here instead of in the C layer. # Register the Decimal adapter here instead of in the C layer.
# This way a new class is registered for each sub-interpreter. # This way a new class is registered for each sub-interpreter.
# See ticket #52 # See ticket #52
try: from decimal import Decimal
from decimal import Decimal from psycopg2._psycopg import Decimal as Adapter
except ImportError: _ext.register_adapter(Decimal, Adapter)
pass del Decimal, Adapter
else:
from psycopg2._psycopg import Decimal as Adapter
_ext.register_adapter(Decimal, Adapter)
del Decimal, Adapter
def connect(dsn=None, connection_factory=None, cursor_factory=None, **kwargs): def connect(dsn=None, connection_factory=None, cursor_factory=None, **kwargs):

View File

@ -27,22 +27,13 @@ extensions importing register_json from extras.
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details. # License for more details.
import json
import sys import sys
from psycopg2._psycopg import ISQLQuote, QuotedString from psycopg2._psycopg import ISQLQuote, QuotedString
from psycopg2._psycopg import new_type, new_array_type, register_type from psycopg2._psycopg import new_type, new_array_type, register_type
# import the best json implementation available
if sys.version_info[:2] >= (2, 6):
import json
else:
try:
import simplejson as json
except ImportError:
json = None
# oids from PostgreSQL 9.2 # oids from PostgreSQL 9.2
JSON_OID = 114 JSON_OID = 114
JSONARRAY_OID = 199 JSONARRAY_OID = 199
@ -58,22 +49,14 @@ class Json(object):
:sql:`json` data type. :sql:`json` data type.
`!Json` can be used to wrap any object supported by the provided *dumps* `!Json` can be used to wrap any object supported by the provided *dumps*
function. If none is provided, the standard :py:func:`json.dumps()` is function. If none is provided, the standard :py:func:`json.dumps()` is
used (`!simplejson` for Python < 2.6; used.
`~psycopg2.extensions.ISQLQuote.getquoted()` will raise `!ImportError` if
the module is not available).
""" """
def __init__(self, adapted, dumps=None): def __init__(self, adapted, dumps=None):
self.adapted = adapted self.adapted = adapted
self._conn = None self._conn = None
self._dumps = dumps or json.dumps
if dumps is not None:
self._dumps = dumps
elif json is not None:
self._dumps = json.dumps
else:
self._dumps = None
def __conform__(self, proto): def __conform__(self, proto):
if proto is ISQLQuote: if proto is ISQLQuote:
@ -86,13 +69,7 @@ class Json(object):
provided in the constructor. You can override this method to create a provided in the constructor. You can override this method to create a
customized JSON wrapper. customized JSON wrapper.
""" """
dumps = self._dumps return self._dumps(obj)
if dumps is not None:
return dumps(obj)
else:
raise ImportError(
"json module not available: "
"you should provide a dumps function")
def prepare(self, conn): def prepare(self, conn):
self._conn = conn self._conn = conn
@ -181,10 +158,7 @@ def register_default_jsonb(conn_or_curs=None, globally=False, loads=None):
def _create_json_typecasters(oid, array_oid, loads=None, name='JSON'): def _create_json_typecasters(oid, array_oid, loads=None, name='JSON'):
"""Create typecasters for json data type.""" """Create typecasters for json data type."""
if loads is None: if loads is None:
if json is None: loads = json.loads
raise ImportError("no json module available")
else:
loads = json.loads
def typecast_json(s, cur): def typecast_json(s, cur):
if s is None: if s is None:
@ -220,7 +194,7 @@ def _get_json_oids(conn_or_curs, name='json'):
r = curs.fetchone() r = curs.fetchone()
# revert the status of the connection as before the command # revert the status of the connection as before the command
if (conn_status != STATUS_IN_TRANSACTION and not conn.autocommit): if conn_status != STATUS_IN_TRANSACTION and not conn.autocommit:
conn.rollback() conn.rollback()
if not r: if not r:
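
With the simplification above, the stdlib json module is always available and Json always has a dumps function; a short usage sketch (DSN and table assumed):

    import json
    import psycopg2
    from psycopg2.extras import Json

    conn = psycopg2.connect("dbname=test")          # assumed DSN
    curs = conn.cursor()
    curs.execute("CREATE TABLE test_json (id serial PRIMARY KEY, data json)")

    # Json wraps any object the dumps function can serialize
    curs.execute("INSERT INTO test_json (data) VALUES (%s)",
                 [Json({'a': [1, 2], 'b': None})])

    # a custom dumps can be passed per object
    compact = Json({'a': 1}, dumps=lambda obj: json.dumps(obj, separators=(',', ':')))
    curs.execute("INSERT INTO test_json (data) VALUES (%s)", [compact])

    curs.execute("SELECT data FROM test_json ORDER BY id")
    print([r[0] for r in curs.fetchall()])          # json columns come back as Python objects
    conn.rollback()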

View File

@ -181,11 +181,8 @@ class Range(object):
return self.__gt__(other) return self.__gt__(other)
def __getstate__(self): def __getstate__(self):
return dict( return {slot: getattr(self, slot)
(slot, getattr(self, slot)) for slot in self.__slots__ if hasattr(self, slot)}
for slot in self.__slots__
if hasattr(self, slot)
)
def __setstate__(self, state): def __setstate__(self, state):
for slot, value in state.items(): for slot, value in state.items():

View File

@ -163,7 +163,7 @@ def make_dsn(dsn=None, **kwargs):
kwargs['dbname'] = kwargs.pop('database') kwargs['dbname'] = kwargs.pop('database')
# Drop the None arguments # Drop the None arguments
kwargs = dict((k, v) for (k, v) in kwargs.iteritems() if v is not None) kwargs = {k: v for (k, v) in kwargs.iteritems() if v is not None}
if dsn is not None: if dsn is not None:
tmp = parse_dsn(dsn) tmp = parse_dsn(dsn)

View File

@ -29,6 +29,7 @@ import os as _os
import sys as _sys import sys as _sys
import time as _time import time as _time
import re as _re import re as _re
from collections import namedtuple
try: try:
import logging as _logging import logging as _logging
@ -109,16 +110,16 @@ class DictCursorBase(_cursor):
try: try:
if self._prefetch: if self._prefetch:
res = super(DictCursorBase, self).__iter__() res = super(DictCursorBase, self).__iter__()
first = res.next() first = next(res)
if self._query_executed: if self._query_executed:
self._build_index() self._build_index()
if not self._prefetch: if not self._prefetch:
res = super(DictCursorBase, self).__iter__() res = super(DictCursorBase, self).__iter__()
first = res.next() first = next(res)
yield first yield first
while 1: while 1:
yield res.next() yield next(res)
except StopIteration: except StopIteration:
return return
@ -263,7 +264,7 @@ class RealDictCursor(DictCursorBase):
class RealDictRow(dict): class RealDictRow(dict):
"""A `!dict` subclass representing a data record.""" """A `!dict` subclass representing a data record."""
__slots__ = ('_column_mapping') __slots__ = ('_column_mapping',)
def __init__(self, cursor): def __init__(self, cursor):
dict.__init__(self) dict.__init__(self)
@ -279,7 +280,7 @@ class RealDictRow(dict):
return dict.__setitem__(self, name, value) return dict.__setitem__(self, name, value)
def __getstate__(self): def __getstate__(self):
return (self.copy(), self._column_mapping[:]) return self.copy(), self._column_mapping[:]
def __setstate__(self, data): def __setstate__(self, data):
self.update(data[0]) self.update(data[0])
@ -348,7 +349,7 @@ class NamedTupleCursor(_cursor):
def __iter__(self): def __iter__(self):
try: try:
it = super(NamedTupleCursor, self).__iter__() it = super(NamedTupleCursor, self).__iter__()
t = it.next() t = next(it)
nt = self.Record nt = self.Record
if nt is None: if nt is None:
@ -357,18 +358,22 @@ class NamedTupleCursor(_cursor):
yield nt._make(t) yield nt._make(t)
while 1: while 1:
yield nt._make(it.next()) yield nt._make(next(it))
except StopIteration: except StopIteration:
return return
try: def _make_nt(self):
from collections import namedtuple def f(s):
except ImportError, _exc: # NOTE: Python 3 actually allows unicode chars in fields
def _make_nt(self): s = _re.sub('[^a-zA-Z0-9_]', '_', s)
raise self._exc # Python identifier cannot start with numbers, namedtuple fields
else: # cannot start with underscore. So...
def _make_nt(self, namedtuple=namedtuple): if _re.match('^[0-9_]', s):
return namedtuple("Record", [d[0] for d in self.description or ()]) s = 'f' + s
return s
return namedtuple("Record", [f(d[0]) for d in self.description or ()])
class LoggingConnection(_connection): class LoggingConnection(_connection):
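
A small sketch of what the new _make_nt() sanitization means for NamedTupleCursor users (DSN assumed; the column names are chosen to trigger the renaming introduced above):

    import psycopg2
    import psycopg2.extras

    conn = psycopg2.connect("dbname=test")          # assumed DSN
    curs = conn.cursor(cursor_factory=psycopg2.extras.NamedTupleCursor)

    # column names become namedtuple fields; names that are not valid Python
    # identifiers are rewritten ("strange-name" -> strange_name, "3rd" -> f3rd)
    curs.execute('SELECT 1 AS foo, 2 AS "strange-name", 3 AS "3rd"')
    rec = curs.fetchone()
    print(rec.foo, rec.strange_name, rec.f3rd)
    conn.rollback()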
@ -455,6 +460,8 @@ class MinTimeLoggingConnection(LoggingConnection):
def filter(self, msg, curs): def filter(self, msg, curs):
t = (_time.time() - curs.timestamp) * 1000 t = (_time.time() - curs.timestamp) * 1000
if t > self._mintime: if t > self._mintime:
if _sys.version_info[0] >= 3 and isinstance(msg, bytes):
msg = msg.decode(_ext.encodings[self.encoding], 'replace')
return msg + _os.linesep + " (execution time: %d ms)" % t return msg + _os.linesep + " (execution time: %d ms)" % t
def cursor(self, *args, **kwargs): def cursor(self, *args, **kwargs):
@ -722,18 +729,6 @@ def register_inet(oid=None, conn_or_curs=None):
return _ext.INET return _ext.INET
def register_tstz_w_secs(oids=None, conn_or_curs=None):
"""The function used to register an alternate type caster for
:sql:`TIMESTAMP WITH TIME ZONE` to deal with historical time zones with
seconds in the UTC offset.
These are now correctly handled by the default type caster, so currently
the function doesn't do anything.
"""
import warnings
warnings.warn("deprecated", DeprecationWarning)
def wait_select(conn): def wait_select(conn):
"""Wait until a connection or cursor has data available. """Wait until a connection or cursor has data available.
@ -1055,14 +1050,8 @@ class CompositeCaster(object):
return rv return rv
def _create_type(self, name, attnames): def _create_type(self, name, attnames):
try: self.type = namedtuple(name, attnames)
from collections import namedtuple self._ctor = self.type._make
except ImportError:
self.type = tuple
self._ctor = self.type
else:
self.type = namedtuple(name, attnames)
self._ctor = self.type._make
@classmethod @classmethod
def _from_db(self, name, conn_or_curs): def _from_db(self, name, conn_or_curs):
@ -1153,7 +1142,7 @@ def _paginate(seq, page_size):
while 1: while 1:
try: try:
for i in xrange(page_size): for i in xrange(page_size):
page.append(it.next()) page.append(next(it))
yield page yield page
page = [] page = []
except StopIteration: except StopIteration:

View File

@ -1,96 +0,0 @@
"""psycopg 1.1.x compatibility module
This module uses the new style connection and cursor types to build a psycopg
1.1.1.x compatibility layer. It should be considered a temporary hack to run
old code while porting to psycopg 2. Import it as follows::
from psycopg2 import psycopg1 as psycopg
"""
# psycopg/psycopg1.py - psycopg 1.1.x compatibility module
#
# Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# In addition, as a special exception, the copyright holders give
# permission to link this program with the OpenSSL library (or with
# modified versions of OpenSSL that use the same license as OpenSSL),
# and distribute linked combinations including the two.
#
# You must obey the GNU Lesser General Public License in all respects for
# all of the code used other than OpenSSL.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
from psycopg2 import _psycopg as _2psycopg # noqa
from psycopg2.extensions import cursor as _2cursor
from psycopg2.extensions import connection as _2connection
from psycopg2 import * # noqa
from psycopg2 import extensions as _ext
_2connect = connect
def connect(*args, **kwargs):
"""connect(dsn, ...) -> new psycopg 1.1.x compatible connection object"""
kwargs['connection_factory'] = connection
conn = _2connect(*args, **kwargs)
conn.set_isolation_level(_ext.ISOLATION_LEVEL_READ_COMMITTED)
return conn
class connection(_2connection):
"""psycopg 1.1.x connection."""
def cursor(self):
"""cursor() -> new psycopg 1.1.x compatible cursor object"""
return _2connection.cursor(self, cursor_factory=cursor)
def autocommit(self, on_off=1):
"""autocommit(on_off=1) -> switch autocommit on (1) or off (0)"""
if on_off > 0:
self.set_isolation_level(_ext.ISOLATION_LEVEL_AUTOCOMMIT)
else:
self.set_isolation_level(_ext.ISOLATION_LEVEL_READ_COMMITTED)
class cursor(_2cursor):
"""psycopg 1.1.x cursor.
Note that this cursor implements the exact procedure used by psycopg 1 to
build dictionaries out of result rows. The DictCursor in the
psycopg.extras modules implements a much better and faster algorithm.
"""
def __build_dict(self, row):
res = {}
for i in range(len(self.description)):
res[self.description[i][0]] = row[i]
return res
def dictfetchone(self):
row = _2cursor.fetchone(self)
if row:
return self.__build_dict(row)
else:
return row
def dictfetchmany(self, size):
res = []
rows = _2cursor.fetchmany(self, size)
for row in rows:
res.append(self.__build_dict(row))
return res
def dictfetchall(self):
res = []
rows = _2cursor.fetchall(self)
for row in rows:
res.append(self.__build_dict(row))
return res

View File

@ -203,12 +203,12 @@ class SQL(Composable):
:rtype: `Composed` :rtype: `Composed`
The method is similar to the Python `str.format()` method: the string The method is similar to the Python `str.format()` method: the string
template supports auto-numbered (``{}``, only available from Python template supports auto-numbered (``{}``), numbered (``{0}``,
2.7), numbered (``{0}``, ``{1}``...), and named placeholders ``{1}``...), and named placeholders (``{name}``), with positional
(``{name}``), with positional arguments replacing the numbered arguments replacing the numbered placeholders and keywords replacing
placeholders and keywords replacing the named ones. However placeholder the named ones. However placeholder modifiers (``{0!r}``, ``{0:<10}``)
modifiers (``{0!r}``, ``{0:<10}``) are not supported. Only are not supported. Only `!Composable` objects can be passed to the
`!Composable` objects can be passed to the template. template.
Example:: Example::
@ -276,7 +276,7 @@ class SQL(Composable):
rv = [] rv = []
it = iter(seq) it = iter(seq)
try: try:
rv.append(it.next()) rv.append(next(it))
except StopIteration: except StopIteration:
pass pass
else: else:
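
A brief sketch of the SQL.format() behaviour described in the updated docstring (DSN, table and column names assumed):

    import psycopg2
    from psycopg2 import sql

    conn = psycopg2.connect("dbname=test")          # assumed DSN
    curs = conn.cursor()

    # identifiers and placeholders are composed safely, then passed to execute()
    query = sql.SQL("SELECT {fields} FROM {table} WHERE id = {id}").format(
        fields=sql.SQL(', ').join([sql.Identifier('foo'), sql.Identifier('bar')]),
        table=sql.Identifier('test_table'),
        id=sql.Placeholder())

    print(query.as_string(conn))   # SELECT "foo", "bar" FROM "test_table" WHERE id = %s
    curs.execute(query, (42,))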

View File

@ -75,7 +75,7 @@ class FixedOffsetTimezone(datetime.tzinfo):
def __getinitargs__(self): def __getinitargs__(self):
offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60 offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60
return (offset_mins, self._name) return offset_mins, self._name
def utcoffset(self, dt): def utcoffset(self, dt):
return self._offset return self._offset

View File

@ -100,7 +100,7 @@ _pydatetime_string_delta(pydatetimeObject *self)
char buffer[8]; char buffer[8];
int i; int i;
int a = obj->microseconds; int a = PyDateTime_DELTA_GET_MICROSECONDS(obj);
for (i=0; i < 6 ; i++) { for (i=0; i < 6 ; i++) {
buffer[5-i] = '0' + (a % 10); buffer[5-i] = '0' + (a % 10);
@ -109,7 +109,9 @@ _pydatetime_string_delta(pydatetimeObject *self)
buffer[6] = '\0'; buffer[6] = '\0';
return Bytes_FromFormat("'%d days %d.%s seconds'::interval", return Bytes_FromFormat("'%d days %d.%s seconds'::interval",
obj->days, obj->seconds, buffer); PyDateTime_DELTA_GET_DAYS(obj),
PyDateTime_DELTA_GET_SECONDS(obj),
buffer);
} }
static PyObject * static PyObject *

View File

@ -295,4 +295,3 @@ Bytes_Format(PyObject *format, PyObject *args)
} }
return NULL; return NULL;
} }

View File

@ -67,6 +67,9 @@ const char *srv_state_guc[] = {
}; };
const int SRV_STATE_UNCHANGED = -1;
/* Return a new "string" from a char* from the database. /* Return a new "string" from a char* from the database.
* *
* On Py2 just get a string, on Py3 decode it in the connection codec. * On Py2 just get a string, on Py3 decode it in the connection codec.
@ -1188,8 +1191,10 @@ conn_set_session(connectionObject *self, int autocommit,
int rv = -1; int rv = -1;
PGresult *pgres = NULL; PGresult *pgres = NULL;
char *error = NULL; char *error = NULL;
int want_autocommit = autocommit == SRV_STATE_UNCHANGED ?
self->autocommit : autocommit;
if (deferrable != self->deferrable && self->server_version < 90100) { if (deferrable != SRV_STATE_UNCHANGED && self->server_version < 90100) {
PyErr_SetString(ProgrammingError, PyErr_SetString(ProgrammingError,
"the 'deferrable' setting is only available" "the 'deferrable' setting is only available"
" from PostgreSQL 9.1"); " from PostgreSQL 9.1");
@ -1209,24 +1214,24 @@ conn_set_session(connectionObject *self, int autocommit,
Py_BEGIN_ALLOW_THREADS; Py_BEGIN_ALLOW_THREADS;
pthread_mutex_lock(&self->lock); pthread_mutex_lock(&self->lock);
if (autocommit) { if (want_autocommit) {
/* we are in autocommit state, so no BEGIN will be issued: /* we are or are going in autocommit state, so no BEGIN will be issued:
* configure the session with the characteristics requested */ * configure the session with the characteristics requested */
if (isolevel != self->isolevel) { if (isolevel != SRV_STATE_UNCHANGED) {
if (0 > pq_set_guc_locked(self, if (0 > pq_set_guc_locked(self,
"default_transaction_isolation", srv_isolevels[isolevel], "default_transaction_isolation", srv_isolevels[isolevel],
&pgres, &error, &_save)) { &pgres, &error, &_save)) {
goto endlock; goto endlock;
} }
} }
if (readonly != self->readonly) { if (readonly != SRV_STATE_UNCHANGED) {
if (0 > pq_set_guc_locked(self, if (0 > pq_set_guc_locked(self,
"default_transaction_read_only", srv_state_guc[readonly], "default_transaction_read_only", srv_state_guc[readonly],
&pgres, &error, &_save)) { &pgres, &error, &_save)) {
goto endlock; goto endlock;
} }
} }
if (deferrable != self->deferrable) { if (deferrable != SRV_STATE_UNCHANGED) {
if (0 > pq_set_guc_locked(self, if (0 > pq_set_guc_locked(self,
"default_transaction_deferrable", srv_state_guc[deferrable], "default_transaction_deferrable", srv_state_guc[deferrable],
&pgres, &error, &_save)) { &pgres, &error, &_save)) {
@ -1251,7 +1256,7 @@ conn_set_session(connectionObject *self, int autocommit,
goto endlock; goto endlock;
} }
} }
if (self->deferrable != STATE_DEFAULT) { if (self->server_version >= 90100 && self->deferrable != STATE_DEFAULT) {
if (0 > pq_set_guc_locked(self, if (0 > pq_set_guc_locked(self,
"default_transaction_deferrable", "default", "default_transaction_deferrable", "default",
&pgres, &error, &_save)) { &pgres, &error, &_save)) {
@ -1260,10 +1265,18 @@ conn_set_session(connectionObject *self, int autocommit,
} }
} }
self->autocommit = autocommit; if (autocommit != SRV_STATE_UNCHANGED) {
self->isolevel = isolevel; self->autocommit = autocommit;
self->readonly = readonly; }
self->deferrable = deferrable; if (isolevel != SRV_STATE_UNCHANGED) {
self->isolevel = isolevel;
}
if (readonly != SRV_STATE_UNCHANGED) {
self->readonly = readonly;
}
if (deferrable != SRV_STATE_UNCHANGED) {
self->deferrable = deferrable;
}
rv = 0; rv = 0;
endlock: endlock:

@ -39,6 +39,7 @@
extern HIDDEN const char *srv_isolevels[]; extern HIDDEN const char *srv_isolevels[];
extern HIDDEN const char *srv_readonly[]; extern HIDDEN const char *srv_readonly[];
extern HIDDEN const char *srv_deferrable[]; extern HIDDEN const char *srv_deferrable[];
extern HIDDEN const int SRV_STATE_UNCHANGED;
/** DBAPI methods **/ /** DBAPI methods **/
@ -561,10 +562,10 @@ psyco_conn_set_session(connectionObject *self, PyObject *args, PyObject *kwargs)
PyObject *deferrable = Py_None; PyObject *deferrable = Py_None;
PyObject *autocommit = Py_None; PyObject *autocommit = Py_None;
int c_isolevel = self->isolevel; int c_isolevel = SRV_STATE_UNCHANGED;
int c_readonly = self->readonly; int c_readonly = SRV_STATE_UNCHANGED;
int c_deferrable = self->deferrable; int c_deferrable = SRV_STATE_UNCHANGED;
int c_autocommit = self->autocommit; int c_autocommit = SRV_STATE_UNCHANGED;
static char *kwlist[] = static char *kwlist[] =
{"isolation_level", "readonly", "deferrable", "autocommit", NULL}; {"isolation_level", "readonly", "deferrable", "autocommit", NULL};
@ -637,7 +638,7 @@ psyco_conn_autocommit_set(connectionObject *self, PyObject *pyvalue)
if (!_psyco_set_session_check_setter_wrapper(self)) { return -1; } if (!_psyco_set_session_check_setter_wrapper(self)) { return -1; }
if (-1 == (value = PyObject_IsTrue(pyvalue))) { return -1; } if (-1 == (value = PyObject_IsTrue(pyvalue))) { return -1; }
if (0 > conn_set_session(self, value, if (0 > conn_set_session(self, value,
self->isolevel, self->readonly, self->deferrable)) { SRV_STATE_UNCHANGED, SRV_STATE_UNCHANGED, SRV_STATE_UNCHANGED)) {
return -1; return -1;
} }
@ -668,8 +669,8 @@ psyco_conn_isolation_level_set(connectionObject *self, PyObject *pyvalue)
if (!_psyco_set_session_check_setter_wrapper(self)) { return -1; } if (!_psyco_set_session_check_setter_wrapper(self)) { return -1; }
if (0 > (value = _psyco_conn_parse_isolevel(pyvalue))) { return -1; } if (0 > (value = _psyco_conn_parse_isolevel(pyvalue))) { return -1; }
if (0 > conn_set_session(self, self->autocommit, if (0 > conn_set_session(self, SRV_STATE_UNCHANGED,
value, self->readonly, self->deferrable)) { value, SRV_STATE_UNCHANGED, SRV_STATE_UNCHANGED)) {
return -1; return -1;
} }
@ -715,13 +716,13 @@ psyco_conn_set_isolation_level(connectionObject *self, PyObject *args)
if (level == 0) { if (level == 0) {
if (0 > conn_set_session(self, 1, if (0 > conn_set_session(self, 1,
self->isolevel, self->readonly, self->deferrable)) { SRV_STATE_UNCHANGED, SRV_STATE_UNCHANGED, SRV_STATE_UNCHANGED)) {
return NULL; return NULL;
} }
} }
else { else {
if (0 > conn_set_session(self, 0, if (0 > conn_set_session(self, 0,
level, self->readonly, self->deferrable)) { level, SRV_STATE_UNCHANGED, SRV_STATE_UNCHANGED)) {
return NULL; return NULL;
} }
} }
@ -767,8 +768,8 @@ psyco_conn_readonly_set(connectionObject *self, PyObject *pyvalue)
if (!_psyco_set_session_check_setter_wrapper(self)) { return -1; } if (!_psyco_set_session_check_setter_wrapper(self)) { return -1; }
if (0 > (value = _psyco_conn_parse_onoff(pyvalue))) { return -1; } if (0 > (value = _psyco_conn_parse_onoff(pyvalue))) { return -1; }
if (0 > conn_set_session(self, self->autocommit, if (0 > conn_set_session(self, SRV_STATE_UNCHANGED,
self->isolevel, value, self->deferrable)) { SRV_STATE_UNCHANGED, value, SRV_STATE_UNCHANGED)) {
return -1; return -1;
} }
@ -813,8 +814,8 @@ psyco_conn_deferrable_set(connectionObject *self, PyObject *pyvalue)
if (!_psyco_set_session_check_setter_wrapper(self)) { return -1; } if (!_psyco_set_session_check_setter_wrapper(self)) { return -1; }
if (0 > (value = _psyco_conn_parse_onoff(pyvalue))) { return -1; } if (0 > (value = _psyco_conn_parse_onoff(pyvalue))) { return -1; }
if (0 > conn_set_session(self, self->autocommit, if (0 > conn_set_session(self, SRV_STATE_UNCHANGED,
self->isolevel, self->readonly, value)) { SRV_STATE_UNCHANGED, SRV_STATE_UNCHANGED, value)) {
return -1; return -1;
} }

@ -592,8 +592,6 @@ psyco_curs_mogrify(cursorObject *self, PyObject *args, PyObject *kwargs)
return NULL; return NULL;
} }
EXC_IF_CURS_CLOSED(self);
return _psyco_curs_mogrify(self, operation, vars); return _psyco_curs_mogrify(self, operation, vars);
} }

@ -295,5 +295,3 @@ PyTypeObject notifyType = {
0, /*tp_alloc*/ 0, /*tp_alloc*/
notify_new, /*tp_new*/ notify_new, /*tp_new*/
}; };

@ -52,6 +52,10 @@
#include "win32_support.h" #include "win32_support.h"
#endif #endif
#if defined(__sun) && defined(__SVR4)
#include "solaris_support.h"
#endif
extern HIDDEN PyObject *psyco_DescriptionType; extern HIDDEN PyObject *psyco_DescriptionType;
extern HIDDEN const char *srv_isolevels[]; extern HIDDEN const char *srv_isolevels[];
extern HIDDEN const char *srv_readonly[]; extern HIDDEN const char *srv_readonly[];
@ -1948,8 +1952,9 @@ pq_fetch(cursorObject *curs, int no_result)
} }
else { else {
Dprintf("pq_fetch: got tuples, discarding them"); Dprintf("pq_fetch: got tuples, discarding them");
/* TODO: is there any case in which PQntuples == PQcmdTuples? */
_read_rowcount(curs);
CLEARPGRES(curs->pgres); CLEARPGRES(curs->pgres);
curs->rowcount = -1;
ex = 0; ex = 0;
} }
break; break;

@ -72,6 +72,10 @@ HIDDEN PyObject *psyco_null = NULL;
/* The type of the cursor.description items */ /* The type of the cursor.description items */
HIDDEN PyObject *psyco_DescriptionType = NULL; HIDDEN PyObject *psyco_DescriptionType = NULL;
/* macro trick to stringify a macro expansion */
#define xstr(s) str(s)
#define str(s) #s
/** connect module-level function **/ /** connect module-level function **/
#define psyco_connect_doc \ #define psyco_connect_doc \
"_connect(dsn, [connection_factory], [async]) -- New database connection.\n\n" "_connect(dsn, [connection_factory], [async]) -- New database connection.\n\n"
@ -990,7 +994,7 @@ INIT_MODULE(_psycopg)(void)
psycopg_debug_enabled = 1; psycopg_debug_enabled = 1;
#endif #endif
Dprintf("initpsycopg: initializing psycopg %s", PSYCOPG_VERSION); Dprintf("initpsycopg: initializing psycopg %s", xstr(PSYCOPG_VERSION));
/* initialize all the new types and then the module */ /* initialize all the new types and then the module */
Py_TYPE(&connectionType) = &PyType_Type; Py_TYPE(&connectionType) = &PyType_Type;
@ -1122,7 +1126,7 @@ INIT_MODULE(_psycopg)(void)
if (!(psyco_DescriptionType = psyco_make_description_type())) { goto exit; } if (!(psyco_DescriptionType = psyco_make_description_type())) { goto exit; }
/* set some module's parameters */ /* set some module's parameters */
PyModule_AddStringConstant(module, "__version__", PSYCOPG_VERSION); PyModule_AddStringConstant(module, "__version__", xstr(PSYCOPG_VERSION));
PyModule_AddStringConstant(module, "__doc__", "psycopg PostgreSQL driver"); PyModule_AddStringConstant(module, "__doc__", "psycopg PostgreSQL driver");
PyModule_AddIntConstant(module, "__libpq_version__", PG_VERSION_NUM); PyModule_AddIntConstant(module, "__libpq_version__", PG_VERSION_NUM);
PyModule_AddIntMacro(module, REPLICATION_PHYSICAL); PyModule_AddIntMacro(module, REPLICATION_PHYSICAL);

@ -31,8 +31,10 @@
#include <stringobject.h> #include <stringobject.h>
#endif #endif
#if PY_VERSION_HEX < 0x02060000 #if ((PY_VERSION_HEX < 0x02070000) \
# error "psycopg requires Python >= 2.6" || ((PY_VERSION_HEX >= 0x03000000) \
&& (PY_VERSION_HEX < 0x03040000)) )
# error "psycopg requires Python 2.7 or 3.4+"
#endif #endif
/* hash() return size changed around version 3.2a4 on 64bit platforms. Before /* hash() return size changed around version 3.2a4 on 64bit platforms. Before
@ -44,14 +46,6 @@ typedef long Py_hash_t;
typedef unsigned long Py_uhash_t; typedef unsigned long Py_uhash_t;
#endif #endif
/* Macros defined in Python 2.6 */
#ifndef Py_REFCNT
#define Py_REFCNT(ob) (((PyObject*)(ob))->ob_refcnt)
#define Py_TYPE(ob) (((PyObject*)(ob))->ob_type)
#define Py_SIZE(ob) (((PyVarObject*)(ob))->ob_size)
#define PyVarObject_HEAD_INIT(x,n) PyObject_HEAD_INIT(x) n,
#endif
/* FORMAT_CODE_PY_SSIZE_T is for Py_ssize_t: */ /* FORMAT_CODE_PY_SSIZE_T is for Py_ssize_t: */
#define FORMAT_CODE_PY_SSIZE_T "%" PY_FORMAT_SIZE_T "d" #define FORMAT_CODE_PY_SSIZE_T "%" PY_FORMAT_SIZE_T "d"
@ -93,6 +87,7 @@ typedef unsigned long Py_uhash_t;
#ifndef PyNumber_Int #ifndef PyNumber_Int
#define PyNumber_Int PyNumber_Long #define PyNumber_Int PyNumber_Long
#endif #endif
#endif /* PY_MAJOR_VERSION > 2 */ #endif /* PY_MAJOR_VERSION > 2 */
#if PY_MAJOR_VERSION < 3 #if PY_MAJOR_VERSION < 3
@ -110,6 +105,10 @@ typedef unsigned long Py_uhash_t;
#define Bytes_ConcatAndDel PyString_ConcatAndDel #define Bytes_ConcatAndDel PyString_ConcatAndDel
#define _Bytes_Resize _PyString_Resize #define _Bytes_Resize _PyString_Resize
#define PyDateTime_DELTA_GET_DAYS(o) (((PyDateTime_Delta*)o)->days)
#define PyDateTime_DELTA_GET_SECONDS(o) (((PyDateTime_Delta*)o)->seconds)
#define PyDateTime_DELTA_GET_MICROSECONDS(o) (((PyDateTime_Delta*)o)->microseconds)
#else #else
#define Bytes_Type PyBytes_Type #define Bytes_Type PyBytes_Type

57
psycopg/solaris_support.c Normal file
@ -0,0 +1,57 @@
/* solaris_support.c - emulate functions missing on Solaris
*
* Copyright (C) 2017 My Karlsson <mk@acc.umu.se>
* Copyright (c) 2018, Joyent, Inc.
*
* This file is part of psycopg.
*
* psycopg2 is free software: you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition, as a special exception, the copyright holders give
* permission to link this program with the OpenSSL library (or with
* modified versions of OpenSSL that use the same license as OpenSSL),
* and distribute linked combinations including the two.
*
* You must obey the GNU Lesser General Public License in all respects for
* all of the code used other than OpenSSL.
*
* psycopg2 is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*/
#define PSYCOPG_MODULE
#include "psycopg/psycopg.h"
#include "psycopg/solaris_support.h"
#if defined(__sun) && defined(__SVR4)
/* timeradd is missing on Solaris 10 */
#ifndef timeradd
void
timeradd(struct timeval *a, struct timeval *b, struct timeval *c)
{
c->tv_sec = a->tv_sec + b->tv_sec;
c->tv_usec = a->tv_usec + b->tv_usec;
if (c->tv_usec >= 1000000) {
c->tv_usec -= 1000000;
c->tv_sec += 1;
}
}
/* timersub is missing on Solaris */
void
timersub(struct timeval *a, struct timeval *b, struct timeval *c)
{
c->tv_sec = a->tv_sec - b->tv_sec;
c->tv_usec = a->tv_usec - b->tv_usec;
if (c->tv_usec < 0) {
c->tv_usec += 1000000;
c->tv_sec -= 1;
}
}
#endif /* timeradd */
#endif /* defined(__sun) && defined(__SVR4) */

40
psycopg/solaris_support.h Normal file
@ -0,0 +1,40 @@
/* solaris_support.h - definitions for solaris_support.c
*
* Copyright (C) 2017 My Karlsson <mk@acc.umu.se>
* Copyright (c) 2018, Joyent, Inc.
*
* This file is part of psycopg.
*
* psycopg2 is free software: you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition, as a special exception, the copyright holders give
* permission to link this program with the OpenSSL library (or with
* modified versions of OpenSSL that use the same license as OpenSSL),
* and distribute linked combinations including the two.
*
* You must obey the GNU Lesser General Public License in all respects for
* all of the code used other than OpenSSL.
*
* psycopg2 is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*/
#ifndef PSYCOPG_SOLARIS_SUPPORT_H
#define PSYCOPG_SOLARIS_SUPPORT_H
#include "psycopg/config.h"
#if defined(__sun) && defined(__SVR4)
#include <sys/time.h>
#ifndef timeradd
extern HIDDEN void timeradd(struct timeval *a, struct timeval *b, struct timeval *c);
extern HIDDEN void timersub(struct timeval *a, struct timeval *b, struct timeval *c);
#endif
#endif
#endif /* !defined(PSYCOPG_SOLARIS_SUPPORT_H) */

@ -312,4 +312,3 @@ psycopg_parse_escape(const char *bufin, Py_ssize_t sizein, Py_ssize_t *sizeout)
exit: exit:
return ret; return ret;
} }

@ -15,7 +15,7 @@ static long int typecast_BINARY_types[] = {17, 0};
static long int typecast_ROWID_types[] = {26, 0}; static long int typecast_ROWID_types[] = {26, 0};
static long int typecast_LONGINTEGERARRAY_types[] = {1016, 0}; static long int typecast_LONGINTEGERARRAY_types[] = {1016, 0};
static long int typecast_INTEGERARRAY_types[] = {1005, 1006, 1007, 0}; static long int typecast_INTEGERARRAY_types[] = {1005, 1006, 1007, 0};
static long int typecast_FLOATARRAY_types[] = {1017, 1021, 1022, 0}; static long int typecast_FLOATARRAY_types[] = {1021, 1022, 0};
static long int typecast_DECIMALARRAY_types[] = {1231, 0}; static long int typecast_DECIMALARRAY_types[] = {1231, 0};
static long int typecast_UNICODEARRAY_types[] = {1002, 1003, 1009, 1014, 1015, 0}; static long int typecast_UNICODEARRAY_types[] = {1002, 1003, 1009, 1014, 1015, 0};
static long int typecast_STRINGARRAY_types[] = {1002, 1003, 1009, 1014, 1015, 0}; static long int typecast_STRINGARRAY_types[] = {1002, 1003, 1009, 1014, 1015, 0};
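The FLOATARRAY hunk above drops OID 1017, which is point[] rather than a float array. A hedged sketch of the visible effect, assuming a local test database (the exact returned representation is illustrative):

    # With 1017 removed from typecast_FLOATARRAY_types, point[] values are no
    # longer parsed by the FLOAT array typecaster and fall back to the default
    # (string) cast.
    import psycopg2

    conn = psycopg2.connect("dbname=test")       # assumed DSN
    cur = conn.cursor()
    cur.execute("""SELECT '{"(1,2)"}'::point[]""")
    print(cur.fetchone()[0])                     # e.g. the string '{"(1,2)"}'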
@ -69,4 +69,3 @@ static typecastObject_initlist typecast_builtins[] = {
{"MACADDRARRAY", typecast_MACADDRARRAY_types, typecast_STRINGARRAY_cast, "STRING"}, {"MACADDRARRAY", typecast_MACADDRARRAY_types, typecast_STRINGARRAY_cast, "STRING"},
{NULL, NULL, NULL, NULL} {NULL, NULL, NULL, NULL}
}; };

@ -250,4 +250,3 @@ typecast_MXINTERVAL_cast(const char *str, Py_ssize_t len, PyObject *curs)
#define typecast_DATETIME_cast typecast_MXDATE_cast #define typecast_DATETIME_cast typecast_MXDATE_cast
#define typecast_DATETIMETZ_cast typecast_MXDATE_cast #define typecast_DATETIMETZ_cast typecast_MXDATE_cast
#endif #endif

@ -168,11 +168,11 @@ psycopg_ensure_bytes(PyObject *obj)
PyObject *rv = NULL; PyObject *rv = NULL;
if (!obj) { return NULL; } if (!obj) { return NULL; }
if (PyUnicode_CheckExact(obj)) { if (PyUnicode_Check(obj)) {
rv = PyUnicode_AsUTF8String(obj); rv = PyUnicode_AsUTF8String(obj);
Py_DECREF(obj); Py_DECREF(obj);
} }
else if (Bytes_CheckExact(obj)) { else if (Bytes_Check(obj)) {
rv = obj; rv = obj;
} }
else { else {
@ -282,7 +282,7 @@ exit:
/* Make a connection string out of a string and a dictionary of arguments. /* Make a connection string out of a string and a dictionary of arguments.
* *
* Helper to call psycopg2.extensions.make_dns() * Helper to call psycopg2.extensions.make_dsn()
*/ */
PyObject * PyObject *
psycopg_make_dsn(PyObject *dsn, PyObject *kwargs) psycopg_make_dsn(PyObject *dsn, PyObject *kwargs)

@ -73,7 +73,6 @@
<None Include="lib\extras.py" /> <None Include="lib\extras.py" />
<None Include="lib\__init__.py" /> <None Include="lib\__init__.py" />
<None Include="lib\pool.py" /> <None Include="lib\pool.py" />
<None Include="lib\psycopg1.py" />
<None Include="lib\tz.py" /> <None Include="lib\tz.py" />
<None Include="psycopg\adapter_asis.h" /> <None Include="psycopg\adapter_asis.h" />
<None Include="psycopg\adapter_binary.h" /> <None Include="psycopg\adapter_binary.h" />
@ -98,8 +97,6 @@
<None Include="psycopg\typecast.h" /> <None Include="psycopg\typecast.h" />
<None Include="psycopg\typecast_binary.h" /> <None Include="psycopg\typecast_binary.h" />
<None Include="psycopg\win32_support.h" /> <None Include="psycopg\win32_support.h" />
<None Include="scripts\buildtypes.py" />
<None Include="scripts\maketypes.sh" />
<None Include="ZPsycopgDA\dtml\add.dtml" /> <None Include="ZPsycopgDA\dtml\add.dtml" />
<None Include="ZPsycopgDA\dtml\browse.dtml" /> <None Include="ZPsycopgDA\dtml\browse.dtml" />
<None Include="ZPsycopgDA\dtml\edit.dtml" /> <None Include="ZPsycopgDA\dtml\edit.dtml" />

@ -1,44 +0,0 @@

Microsoft Visual Studio Solution File, Format Version 10.00
# Visual Studio 2008
Project("{2857B73E-F847-4B02-9238-064979017E93}") = "psycopg2", "psycopg2.cproj", "{CFD80D18-3EE5-49ED-992A-E6D433BC7641}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{CFD80D18-3EE5-49ED-992A-E6D433BC7641}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{CFD80D18-3EE5-49ED-992A-E6D433BC7641}.Debug|Any CPU.Build.0 = Debug|Any CPU
{CFD80D18-3EE5-49ED-992A-E6D433BC7641}.Release|Any CPU.ActiveCfg = Release|Any CPU
{CFD80D18-3EE5-49ED-992A-E6D433BC7641}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(MonoDevelopProperties) = preSolution
StartupItem = psycopg2.cproj
Policies = $0
$0.TextStylePolicy = $1
$1.FileWidth = 120
$1.TabWidth = 4
$1.inheritsSet = Mono
$1.inheritsScope = text/plain
$0.DotNetNamingPolicy = $2
$2.DirectoryNamespaceAssociation = None
$2.ResourceNamePolicy = FileName
$0.StandardHeader = $3
$3.Text =
$3.IncludeInNewFiles = False
$0.TextStylePolicy = $4
$4.FileWidth = 72
$4.NoTabsAfterNonTabs = True
$4.RemoveTrailingWhitespace = True
$4.inheritsSet = VisualStudio
$4.inheritsScope = text/plain
$4.scope = text/x-readme
$0.TextStylePolicy = $5
$5.inheritsSet = VisualStudio
$5.inheritsScope = text/plain
$5.scope = text/plain
name = psycopg2
EndGlobalSection
EndGlobal

@ -27,5 +27,4 @@ curs = conn.cursor()
#print curs.fetchone() #print curs.fetchone()
curs.execute("SELECT %s", ([1,2,None],)) curs.execute("SELECT %s", ([1,2,None],))
print curs.fetchone() print(curs.fetchone())

@ -15,22 +15,21 @@ curs = conn.cursor()
def sleep(curs): def sleep(curs):
while not curs.isready(): while not curs.isready():
print "." print(".")
time.sleep(.1) time.sleep(.1)
#curs.execute(""" #curs.execute("""
# DECLARE zz INSENSITIVE SCROLL CURSOR WITH HOLD FOR # DECLARE zz INSENSITIVE SCROLL CURSOR WITH HOLD FOR
# SELECT now(); # SELECT now();
# FOR READ ONLY;""", async = 1) # FOR READ ONLY;""", async = 1)
curs.execute("SELECT now() AS foo", async=1); curs.execute("SELECT now() AS foo", async=1)
sleep(curs) sleep(curs)
print curs.fetchall() print(curs.fetchall())
#curs.execute(""" #curs.execute("""
# FETCH FORWARD 1 FROM zz;""", async = 1) # FETCH FORWARD 1 FROM zz;""", async = 1)
curs.execute("SELECT now() AS bar", async=1); curs.execute("SELECT now() AS bar", async=1)
print curs.fetchall() print(curs.fetchall())
curs.execute("SELECT now() AS bar"); curs.execute("SELECT now() AS bar")
sleep(curs) sleep(curs)

@ -17,7 +17,7 @@ def query_worker(dsn):
break break
if len(sys.argv) != 2: if len(sys.argv) != 2:
print 'usage: %s DSN' % sys.argv[0] print('usage: %s DSN' % sys.argv[0])
sys.exit(1) sys.exit(1)
th = threading.Thread(target=query_worker, args=(sys.argv[1],)) th = threading.Thread(target=query_worker, args=(sys.argv[1],))
th.setDaemon(True) th.setDaemon(True)

@ -12,4 +12,4 @@ o = psycopg2.connect("dbname=test")
c = o.cursor() c = o.cursor()
c.execute("SELECT NULL::decimal(10,2)") c.execute("SELECT NULL::decimal(10,2)")
n = c.fetchone()[0] n = c.fetchone()[0]
print n, type(n) print(n, type(n))

@ -4,15 +4,15 @@ con = psycopg2.connect("dbname=test")
cur = con.cursor() cur = con.cursor()
cur.execute("SELECT %s::regtype::oid", ('bytea', )) cur.execute("SELECT %s::regtype::oid", ('bytea', ))
print cur.fetchone()[0] print(cur.fetchone()[0])
# 17 # 17
cur.execute("CREATE DOMAIN thing AS bytea") cur.execute("CREATE DOMAIN thing AS bytea")
cur.execute("SELECT %s::regtype::oid", ('thing', )) cur.execute("SELECT %s::regtype::oid", ('thing', ))
print cur.fetchone()[0] print(cur.fetchone()[0])
#62148 #62148
cur.execute("CREATE TABLE thingrel (thingcol thing)") cur.execute("CREATE TABLE thingrel (thingcol thing)")
cur.execute("SELECT * FROM thingrel") cur.execute("SELECT * FROM thingrel")
print cur.description print(cur.description)
#(('thingcol', 17, None, -1, None, None, None),) #(('thingcol', 17, None, -1, None, None, None),)

@ -5,15 +5,14 @@ c = o.cursor()
def sql(): def sql():
c.execute("SELECT 1.23 AS foo") c.execute("SELECT 1.23 AS foo")
print 1, c.fetchone() print(1, c.fetchone())
#print c.description #print c.description
c.execute("SELECT 1.23::float AS foo") c.execute("SELECT 1.23::float AS foo")
print 2, c.fetchone() print(2, c.fetchone())
#print c.description #print c.description
print "BEFORE" print("BEFORE")
sql() sql()
import gtk import gtk
print "AFTER" print("AFTER")
sql() sql()

@ -6,9 +6,8 @@ curs = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
curs.execute("SELECT '2005-2-12'::date AS foo, 'boo!' as bar") curs.execute("SELECT '2005-2-12'::date AS foo, 'boo!' as bar")
for x in curs.fetchall(): for x in curs.fetchall():
print type(x), x[0], x[1], x['foo'], x['bar'] print(type(x), x[0], x[1], x['foo'], x['bar'])
curs.execute("SELECT '2005-2-12'::date AS foo, 'boo!' as bar") curs.execute("SELECT '2005-2-12'::date AS foo, 'boo!' as bar")
for x in curs: for x in curs:
print type(x), x[0], x[1], x['foo'], x['bar'] print(type(x), x[0], x[1], x['foo'], x['bar'])

@ -43,7 +43,7 @@ def leak():
row = {'foo': i} row = {'foo': i}
rows.append(row) rows.append(row)
count += 1 count += 1
print "loop count:", count print("loop count:", count)
cursor.executemany(insert, rows) cursor.executemany(insert, rows)
connection.commit() connection.commit()
except psycopg2.IntegrityError: except psycopg2.IntegrityError:
@ -59,7 +59,7 @@ def noleak():
while 1: while 1:
try: try:
count += 1 count += 1
print "loop count:", count print("loop count:", count)
cursor.executemany(insert, rows) cursor.executemany(insert, rows)
connection.commit() connection.commit()
except psycopg2.IntegrityError: except psycopg2.IntegrityError:
@ -72,12 +72,11 @@ try:
elif 'noleak' == sys.argv[1]: elif 'noleak' == sys.argv[1]:
run_function = noleak run_function = noleak
else: else:
print usage print(usage)
sys.exit() sys.exit()
except IndexError: except IndexError:
print usage print(usage)
sys.exit() sys.exit()
# Run leak() or noleak(), whichever was indicated on the command line # Run leak() or noleak(), whichever was indicated on the command line
run_function() run_function()

@ -18,8 +18,8 @@ class O(object):
o = O('R%', second='S%') o = O('R%', second='S%')
print o[0] print(o[0])
print o['second'] print(o['second'])
#------------------------------------------------------------------------------- #-------------------------------------------------------------------------------
@ -40,6 +40,4 @@ cursor.execute("""
""", (o,)) """, (o,))
for row in cursor: for row in cursor:
print row print(row)

@ -15,10 +15,10 @@ curs = conn.cursor()
curs.execute("SELECT reffunc2()") curs.execute("SELECT reffunc2()")
portal = curs.fetchone()[0] portal = curs.fetchone()[0]
print portal.fetchone() print(portal.fetchone())
print portal.fetchmany(2) print(portal.fetchmany(2))
portal.scroll(0, 'absolute') portal.scroll(0, 'absolute')
print portal.fetchall() print(portal.fetchall())
#print curs.rowcount #print curs.rowcount

@ -3,11 +3,10 @@ class B(object):
if x: self._o = True if x: self._o = True
else: self._o = False else: self._o = False
def __getattribute__(self, attr): def __getattribute__(self, attr):
print "ga called", attr print("ga called", attr)
return object.__getattribute__(self, attr) return object.__getattribute__(self, attr)
def _sqlquote(self): def _sqlquote(self):
if self._o == True: if self._o:
return 'It is True' return 'It is True'
else: else:
return 'It is False' return 'It is False'

@ -8,4 +8,4 @@ for i in range(20000):
datafile = os.popen('ps -p %s -o rss' % os.getpid()) datafile = os.popen('ps -p %s -o rss' % os.getpid())
line = datafile.readlines(2)[1].strip() line = datafile.readlines(2)[1].strip()
datafile.close() datafile.close()
print str(i) + '\t' + line print(str(i) + '\t' + line)

@ -33,7 +33,7 @@ def g():
line = datafile.readlines(2)[1].strip() line = datafile.readlines(2)[1].strip()
datafile.close() datafile.close()
n = 30 n = 30
print str(k*n) + '\t' + line print(str(k*n) + '\t' + line)
k += 1 k += 1
while threading.activeCount()>1: while threading.activeCount()>1:

@ -10,26 +10,26 @@ conn = psycopg2.connect("dbname=test")
#conn.set_client_encoding("xxx") #conn.set_client_encoding("xxx")
curs = conn.cursor() curs = conn.cursor()
curs.execute("SELECT '2005-2-12'::date AS foo") curs.execute("SELECT '2005-2-12'::date AS foo")
print curs.fetchall() print(curs.fetchall())
curs.execute("SELECT '10:23:60'::time AS foo") curs.execute("SELECT '10:23:60'::time AS foo")
print curs.fetchall() print(curs.fetchall())
curs.execute("SELECT '10:23:59.895342'::time AS foo") curs.execute("SELECT '10:23:59.895342'::time AS foo")
print curs.fetchall() print(curs.fetchall())
curs.execute("SELECT '0:0:12.31423'::time with time zone AS foo") curs.execute("SELECT '0:0:12.31423'::time with time zone AS foo")
print curs.fetchall() print(curs.fetchall())
curs.execute("SELECT '0:0:12+01:30'::time with time zone AS foo") curs.execute("SELECT '0:0:12+01:30'::time with time zone AS foo")
print curs.fetchall() print(curs.fetchall())
curs.execute("SELECT '2005-2-12 10:23:59.895342'::timestamp AS foo") curs.execute("SELECT '2005-2-12 10:23:59.895342'::timestamp AS foo")
print curs.fetchall() print(curs.fetchall())
curs.execute("SELECT '2005-2-12 10:23:59.895342'::timestamp with time zone AS foo") curs.execute("SELECT '2005-2-12 10:23:59.895342'::timestamp with time zone AS foo")
print curs.fetchall() print(curs.fetchall())
#print curs.fetchmany(2) #print curs.fetchmany(2)
#print curs.fetchall() #print curs.fetchall()
def sleep(curs): def sleep(curs):
while not curs.isready(): while not curs.isready():
print "." print(".")
time.sleep(.1) time.sleep(.1)
#curs.execute(""" #curs.execute("""
@ -47,4 +47,3 @@ def sleep(curs):
#curs.execute("SELECT now() AS bar"); #curs.execute("SELECT now() AS bar");
#sleep(curs) #sleep(curs)

@ -4,6 +4,5 @@ import psycopg2.extras
conn = psycopg2.connect("dbname=test") conn = psycopg2.connect("dbname=test")
curs = conn.cursor() curs = conn.cursor()
curs.execute("SELECT true AS foo WHERE 'a' in %s", (("aa", "bb"),)) curs.execute("SELECT true AS foo WHERE 'a' in %s", (("aa", "bb"),))
print curs.fetchall() print(curs.fetchall())
print curs.query print(curs.query)

@ -40,4 +40,3 @@ dbconn.commit()
cursor.close() cursor.close()
dbconn.close() dbconn.close()

@ -52,7 +52,7 @@ signal.signal(signal.SIGHUP, handler)
def worker(): def worker():
while 1: while 1:
print "I'm working" print("I'm working")
sleep(1) sleep(1)
eventlet.spawn(worker) eventlet.spawn(worker)
@ -61,21 +61,21 @@ eventlet.spawn(worker)
# You can unplug the network cable etc. here. # You can unplug the network cable etc. here.
# Kill -HUP will raise an exception in the callback. # Kill -HUP will raise an exception in the callback.
print "PID", os.getpid() print("PID", os.getpid())
conn = psycopg2.connect(DSN) conn = psycopg2.connect(DSN)
curs = conn.cursor() curs = conn.cursor()
try: try:
for i in range(1000): for i in range(1000):
curs.execute("select %s, pg_sleep(1)", (i,)) curs.execute("select %s, pg_sleep(1)", (i,))
r = curs.fetchone() r = curs.fetchone()
print "selected", r print("selected", r)
except BaseException, e: except BaseException, e:
print "got exception:", e.__class__.__name__, e print("got exception:", e.__class__.__name__, e)
if conn.closed: if conn.closed:
print "the connection is closed" print("the connection is closed")
else: else:
conn.rollback() conn.rollback()
curs.execute("select 1") curs.execute("select 1")
print curs.fetchone() print(curs.fetchone())

@ -5,27 +5,27 @@ import signal
import warnings import warnings
import psycopg2 import psycopg2
print "Testing psycopg2 version %s" % psycopg2.__version__ print("Testing psycopg2 version %s" % psycopg2.__version__)
dbname = os.environ.get('PSYCOPG2_TESTDB', 'psycopg2_test') dbname = os.environ.get('PSYCOPG2_TESTDB', 'psycopg2_test')
conn = psycopg2.connect("dbname=%s" % dbname) conn = psycopg2.connect("dbname=%s" % dbname)
curs = conn.cursor() curs = conn.cursor()
curs.isready() curs.isready()
print "Now restart the test postgresql server to drop all connections, press enter when done." print("Now restart the test postgresql server to drop all connections, press enter when done.")
raw_input() raw_input()
try: try:
curs.isready() # No need to test return value curs.isready() # No need to test return value
curs.isready() curs.isready()
except: except:
print "Test passed" print("Test passed")
sys.exit(0) sys.exit(0)
if curs.isready(): if curs.isready():
print "Warning: looks like the connection didn't get killed. This test is probably in-effective" print("Warning: looks like the connection didn't get killed. This test is probably in-effective")
print "Test inconclusive" print("Test inconclusive")
sys.exit(1) sys.exit(1)
gc.collect() # used to error here gc.collect() # used to error here
print "Test Passed" print("Test Passed")

@ -5,5 +5,4 @@ o = psycopg2.connect("dbname=test")
c = o.cursor() c = o.cursor()
c.execute("SELECT 1.23::float AS foo") c.execute("SELECT 1.23::float AS foo")
x = c.fetchone()[0] x = c.fetchone()[0]
print x, type(x) print(x, type(x))

@ -42,7 +42,7 @@ cur = conn.cursor()
gc_thread.start() gc_thread.start()
# Now do lots of "cursor.copy_from" calls: # Now do lots of "cursor.copy_from" calls:
print "copy_from" print("copy_from")
for i in range(1000): for i in range(1000):
f = StringIO("42\tfoo\n74\tbar\n") f = StringIO("42\tfoo\n74\tbar\n")
cur.copy_from(f, 'test', columns=('num', 'data')) cur.copy_from(f, 'test', columns=('num', 'data'))
@ -51,7 +51,7 @@ for i in range(1000):
# python: Modules/gcmodule.c:277: visit_decref: Assertion `gc->gc.gc_refs != 0' failed. # python: Modules/gcmodule.c:277: visit_decref: Assertion `gc->gc.gc_refs != 0' failed.
# Also exercise the copy_to code path # Also exercise the copy_to code path
print "copy_to" print("copy_to")
cur.execute("truncate test") cur.execute("truncate test")
f = StringIO("42\tfoo\n74\tbar\n") f = StringIO("42\tfoo\n74\tbar\n")
cur.copy_from(f, 'test', columns=('num', 'data')) cur.copy_from(f, 'test', columns=('num', 'data'))
@ -60,7 +60,7 @@ for i in range(1000):
cur.copy_to(f, 'test', columns=('num', 'data')) cur.copy_to(f, 'test', columns=('num', 'data'))
# And copy_expert too # And copy_expert too
print "copy_expert" print("copy_expert")
cur.execute("truncate test") cur.execute("truncate test")
for i in range(1000): for i in range(1000):
f = StringIO("42\tfoo\n74\tbar\n") f = StringIO("42\tfoo\n74\tbar\n")
@ -71,5 +71,3 @@ done = 1
cur.close() cur.close()
conn.close() conn.close()

@ -6,7 +6,7 @@ db = psycopg2.connect('dbname=test')
cursor = db.cursor() cursor = db.cursor()
print 'Creating tables and sample data' print('Creating tables and sample data')
cursor.execute(''' cursor.execute('''
CREATE TEMPORARY TABLE foo ( CREATE TEMPORARY TABLE foo (
@ -23,22 +23,22 @@ cursor.execute('INSERT INTO bar VALUES (1, 1)')
db.commit() db.commit()
print 'Deferring constraint and breaking referential integrity' print('Deferring constraint and breaking referential integrity')
cursor.execute('SET CONSTRAINTS bar_foo_fk DEFERRED') cursor.execute('SET CONSTRAINTS bar_foo_fk DEFERRED')
cursor.execute('UPDATE bar SET foo_id = 42 WHERE id = 1') cursor.execute('UPDATE bar SET foo_id = 42 WHERE id = 1')
print 'Committing (this should fail)' print('Committing (this should fail)')
try: try:
db.commit() db.commit()
except: except:
traceback.print_exc() traceback.print_exc()
print 'Rolling back connection' print('Rolling back connection')
db.rollback() db.rollback()
print 'Running a trivial query' print('Running a trivial query')
try: try:
cursor.execute('SELECT TRUE') cursor.execute('SELECT TRUE')
except: except:
traceback.print_exc() traceback.print_exc()
print 'db.closed:', db.closed print('db.closed:', db.closed)

@ -1,3 +1,5 @@
from __future__ import print_function
import psycopg2, psycopg2.extensions import psycopg2, psycopg2.extensions
import threading import threading
import gc import gc
@ -20,9 +22,9 @@ class db_user(threading.Thread):
# the conn2 desctructor will block indefinitely # the conn2 desctructor will block indefinitely
# on the completion of the query # on the completion of the query
# (and it will not be holding the GIL during that time) # (and it will not be holding the GIL during that time)
print >> sys.stderr, "begin conn2 del" print("begin conn2 del", file=sys.stderr)
del cursor, conn2 del cursor, conn2
print >> sys.stderr, "end conn2 del" print("end conn2 del", file=sys.stderr)
def main(): def main():
# lock out a db row # lock out a db row
@ -43,7 +45,7 @@ def main():
# as it will avoid conn_close() # as it will avoid conn_close()
for i in range(10): for i in range(10):
if gc.collect(): if gc.collect():
print >> sys.stderr, "garbage collection done" print("garbage collection done", file=sys.stderr)
break break
time.sleep(1) time.sleep(1)
@ -52,9 +54,9 @@ def main():
# concurrent thread destructor of conn2 to # concurrent thread destructor of conn2 to
# continue and it will end up trying to free # continue and it will end up trying to free
# self->dsn a second time. # self->dsn a second time.
print >> sys.stderr, "begin conn1 del" print("begin conn1 del", file=sys.stderr)
del cursor, conn1 del cursor, conn1
print >> sys.stderr, "end conn1 del" print("end conn1 del", file=sys.stderr)
if __name__ == '__main__': if __name__ == '__main__':

@ -1,7 +1,7 @@
import psycopg2.extensions import psycopg2.extensions
print dir(psycopg2._psycopg) print(dir(psycopg2._psycopg))
print psycopg2.extensions.new_type( print(psycopg2.extensions.new_type(
(600,), "POINT", lambda oids, name, fun: None) (600,), "POINT", lambda oids, name, fun: None))
print "ciccia ciccia" print("ciccia ciccia")
print psycopg2._psycopg print(psycopg2._psycopg)

@ -6,4 +6,4 @@ conn = psycopg2.connect("dbname=test")
curs = conn.cursor() curs = conn.cursor()
curs.execute("set timezone = 'Asia/Calcutta'") curs.execute("set timezone = 'Asia/Calcutta'")
curs.execute("SELECT now()") curs.execute("SELECT now()")
print curs.fetchone()[0] print(curs.fetchone()[0])

@ -9,7 +9,14 @@ To invalidate the cache, update this file and check it into git.
Currently used modules built in the cache: Currently used modules built in the cache:
OpenSSL OpenSSL
Version: 1.0.2l Version: 1.0.2n
PostgreSQL PostgreSQL
Version: 9.6.3 Version: 10.1
NOTE: to zap the cache manually you can also use:
curl -X DELETE -H "Authorization: Bearer $APPVEYOR_TOKEN" -H "Content-Type: application/json" https://ci.appveyor.com/api/projects/psycopg/psycopg2/buildcache
with the token from https://ci.appveyor.com/api-token

@ -1,119 +0,0 @@
# -*- python -*-
#
# Copyright (C) 2001-2003 Federico Di Gregorio <fog@debian.org>
#
# This file is part of the psycopg module.
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
#
# this a little script that analyze a file with (TYPE, NUMBER) tuples
# and write out C code ready for inclusion in psycopg. the generated
# code defines the DBAPITypeObject fundamental types and warns for
# undefined types.
import sys
from string import split, strip
# here is the list of the foundamental types we want to import from
# postgresql header files
basic_types = (['NUMBER', ['INT8', 'INT4', 'INT2', 'FLOAT8', 'FLOAT4',
'NUMERIC']],
['LONGINTEGER', ['INT8']],
['INTEGER', ['INT4', 'INT2']],
['FLOAT', ['FLOAT8', 'FLOAT4']],
['DECIMAL', ['NUMERIC']],
['UNICODE', ['NAME', 'CHAR', 'TEXT', 'BPCHAR',
'VARCHAR']],
['STRING', ['NAME', 'CHAR', 'TEXT', 'BPCHAR',
'VARCHAR']],
['BOOLEAN', ['BOOL']],
['DATETIME', ['TIMESTAMP', 'TIMESTAMPTZ',
'TINTERVAL', 'INTERVAL']],
['TIME', ['TIME', 'TIMETZ']],
['DATE', ['DATE']],
['INTERVAL', ['TINTERVAL', 'INTERVAL']],
['BINARY', ['BYTEA']],
['ROWID', ['OID']])
# unfortunately we don't have a nice way to extract array information
# from postgresql headers; we'll have to do it hard-coding :/
array_types = (['LONGINTEGER', [1016]],
['INTEGER', [1005, 1006, 1007]],
['FLOAT', [1017, 1021, 1022]],
['DECIMAL', [1231]],
['UNICODE', [1002, 1003, 1009, 1014, 1015]],
['STRING', [1002, 1003, 1009, 1014, 1015]],
['BOOLEAN', [1000]],
['DATETIME', [1115, 1185]],
['TIME', [1183, 1270]],
['DATE', [1182]],
['INTERVAL', [1187]],
['BINARY', [1001]],
['ROWID', [1028, 1013]])
# this is the header used to compile the data in the C module
HEADER = """
typecastObject_initlist typecast_builtins[] = {
"""
# then comes the footer
FOOTER = """ {NULL, NULL, NULL, NULL}\n};\n"""
# useful error reporting function
def error(msg):
"""Report an error on stderr."""
sys.stderr.write(msg + '\n')
# read couples from stdin and build list
read_types = []
for l in sys.stdin.readlines():
oid, val = split(l)
read_types.append((strip(oid)[:-3], strip(val)))
# look for the wanted types in the read touples
found_types = {}
for t in basic_types:
k = t[0]
found_types[k] = []
for v in t[1]:
found = filter(lambda x, y=v: x[0] == y, read_types)
if len(found) == 0:
error(v + ': value not found')
elif len(found) > 1:
error(v + ': too many values')
else:
found_types[k].append(int(found[0][1]))
# now outputs to stdout the right C-style definitions
stypes = sstruct = ""
for t in basic_types:
k = t[0]
s = str(found_types[k])
s = '{' + s[1:-1] + ', 0}'
stypes = stypes + ('static long int typecast_%s_types[] = %s;\n' % (k, s))
sstruct += (' {"%s", typecast_%s_types, typecast_%s_cast, NULL},\n'
% (k, k, k))
for t in array_types:
kt = t[0]
ka = t[0] + 'ARRAY'
s = str(t[1])
s = '{' + s[1:-1] + ', 0}'
stypes = stypes + ('static long int typecast_%s_types[] = %s;\n' % (ka, s))
sstruct += (' {"%s", typecast_%s_types, typecast_%s_cast, "%s"},\n'
% (ka, ka, ka, kt))
sstruct = HEADER + sstruct + FOOTER
print stypes
print sstruct

@ -15,6 +15,7 @@ The script can be run at a new PostgreSQL release to refresh the module.
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details. # License for more details.
from __future__ import print_function
import re import re
import sys import sys
@ -26,22 +27,22 @@ from BeautifulSoup import BeautifulSoup as BS
def main(): def main():
if len(sys.argv) != 2: if len(sys.argv) != 2:
print >>sys.stderr, "usage: %s /path/to/errorcodes.py" % sys.argv[0] print("usage: %s /path/to/errorcodes.py" % sys.argv[0], file=sys.stderr)
return 2 return 2
filename = sys.argv[1] filename = sys.argv[1]
file_start = read_base_file(filename) file_start = read_base_file(filename)
# If you add a version to the list fix the docs (errorcodes.rst, err.rst) # If you add a version to the list fix the docs (in errorcodes.rst)
classes, errors = fetch_errors( classes, errors = fetch_errors(
['8.1', '8.2', '8.3', '8.4', '9.0', '9.1', '9.2', '9.3', '9.4', '9.5', ['8.1', '8.2', '8.3', '8.4', '9.0', '9.1', '9.2', '9.3', '9.4', '9.5',
'9.6', '10 b1']) '9.6', '10'])
f = open(filename, "w") f = open(filename, "w")
for line in file_start: for line in file_start:
print >>f, line print(line, file=f)
for line in generate_module_data(classes, errors): for line in generate_module_data(classes, errors):
print >>f, line print(line, file=f)
def read_base_file(filename): def read_base_file(filename):
@ -141,18 +142,23 @@ def fetch_errors(versions):
errors = defaultdict(dict) errors = defaultdict(dict)
for version in versions: for version in versions:
print >> sys.stderr, version print(version, file=sys.stderr)
tver = tuple(map(int, version.split()[0].split('.'))) tver = tuple(map(int, version.split()[0].split('.')))
if tver < (9, 1): if tver < (9, 1):
c1, e1 = parse_errors_sgml(errors_sgml_url % version) c1, e1 = parse_errors_sgml(errors_sgml_url % version)
else: else:
# TODO: move to 10 stable when released. tag = '%s%s_STABLE' % (
if version == '10 b1': (tver[0] >= 10 and 'REL_' or 'REL'),
tag = 'REL_10_BETA1' version.replace('.', '_'))
else:
tag = 'REL%s_STABLE' % version.replace('.', '_')
c1, e1 = parse_errors_txt(errors_txt_url % tag) c1, e1 = parse_errors_txt(errors_txt_url % tag)
classes.update(c1) classes.update(c1)
# TODO: this error was added in PG 10 beta 1 but dropped in the
# final release. It doesn't harm leaving it in the file. Check if it
# will be added back in PG 11.
# https://github.com/postgres/postgres/commit/28e0727076
errors['55']['55P04'] = 'UNSAFE_NEW_ENUM_VALUE_USAGE'
for c, cerrs in e1.iteritems(): for c, cerrs in e1.iteritems():
errors[c].update(cerrs) errors[c].update(cerrs)
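The tag computation above replaces the special-cased '10 b1' handling with a rule derived from the version number (Postgres 10+ tags gain an extra underscore). A small sketch of what the expression yields, with an illustrative helper name:

    # Reproduces the tag expression from fetch_errors() above.
    def stable_tag(version):
        tver = tuple(map(int, version.split()[0].split('.')))
        return '%s%s_STABLE' % (
            (tver[0] >= 10 and 'REL_' or 'REL'),
            version.replace('.', '_'))

    print(stable_tag('9.6'))   # REL9_6_STABLE
    print(stable_tag('10'))    # REL_10_STABLE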

@ -1,41 +0,0 @@
#!/bin/sh
SCRIPTSDIR="`dirname $0`"
SRCDIR="`dirname $SCRIPTSDIR`/psycopg"
if [ -z "$1" ] ; then
echo Usage: $0 '<postgresql include directory>'
exit 1
fi
echo -n checking for pg_type.h ...
if [ -f "$1/catalog/pg_type.h" ] ; then
PGTYPE="$1/catalog/pg_type.h"
else
if [ -f "$1/server/catalog/pg_type.h" ] ; then
PGTYPE="$1/server/catalog/pg_type.h"
else
echo
echo "error: can't find pg_type.h under $1"
exit 2
fi
fi
echo " found"
PGVERSION="`sed -n -e 's/.*PG_VERSION \"\([0-9]\.[0-9]\).*\"/\1/p' $1/pg_config.h`"
PGMAJOR="`echo $PGVERSION | cut -d. -f1`"
PGMINOR="`echo $PGVERSION | cut -d. -f2`"
echo checking for postgresql major: $PGMAJOR
echo checking for postgresql minor: $PGMINOR
echo -n generating pgtypes.h ...
awk '/#define .+OID/ {print "#define " $2 " " $3}' "$PGTYPE" \
> $SRCDIR/pgtypes.h
echo " done"
echo -n generating typecast_builtins.c ...
awk '/#define .+OID/ {print $2 " " $3}' "$PGTYPE" | \
python $SCRIPTSDIR/buildtypes.py >$SRCDIR/typecast_builtins.c
echo " done"

@ -17,6 +17,7 @@ script exits with error 1.
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details. # License for more details.
from __future__ import print_function
import gc import gc
import sys import sys
@ -29,8 +30,8 @@ from collections import defaultdict
def main(): def main():
opt = parse_args() opt = parse_args()
import psycopg2.tests import tests
test = psycopg2.tests test = tests
if opt.suite: if opt.suite:
test = getattr(test, opt.suite) test = getattr(test, opt.suite)
@ -43,7 +44,7 @@ def main():
dump(i, opt) dump(i, opt)
f1 = open('debug-%02d.txt' % (opt.nruns - 1)).readlines() f1 = open('debug-%02d.txt' % (opt.nruns - 1)).readlines()
f2 = open('debug-%02d.txt' % (opt.nruns)).readlines() f2 = open('debug-%02d.txt' % opt.nruns).readlines()
for line in difflib.unified_diff(f1, f2, for line in difflib.unified_diff(f1, f2,
"run %d" % (opt.nruns - 1), "run %d" % opt.nruns): "run %d" % (opt.nruns - 1), "run %d" % opt.nruns):
sys.stdout.write(line) sys.stdout.write(line)
@ -52,7 +53,7 @@ def main():
if opt.objs: if opt.objs:
f1 = open('objs-%02d.txt' % (opt.nruns - 1)).readlines() f1 = open('objs-%02d.txt' % (opt.nruns - 1)).readlines()
f2 = open('objs-%02d.txt' % (opt.nruns)).readlines() f2 = open('objs-%02d.txt' % opt.nruns).readlines()
for line in difflib.unified_diff(f1, f2, for line in difflib.unified_diff(f1, f2,
"run %d" % (opt.nruns - 1), "run %d" % opt.nruns): "run %d" % (opt.nruns - 1), "run %d" % opt.nruns):
sys.stdout.write(line) sys.stdout.write(line)

@ -105,11 +105,11 @@ cd /
# Postgres versions supported by Travis CI # Postgres versions supported by Travis CI
if [[ -z "$DONT_TEST_PRESENT" ]]; then if [[ -z "$DONT_TEST_PRESENT" ]]; then
create 10
create 9.6 create 9.6
create 9.5 create 9.5
create 9.4 create 9.4
create 9.3 create 9.3
create 9.2
fi fi
# Unsupported postgres versions that we still support # Unsupported postgres versions that we still support
@ -123,9 +123,10 @@ if [[ -n "$TEST_PAST" ]]; then
create 8.4 create 8.4
create 9.0 create 9.0
create 9.1 create 9.1
create 9.2
fi fi
# Postgres built from master # Postgres built from master
if [[ -n "$TEST_FUTURE" ]]; then if [[ -n "$TEST_FUTURE" ]]; then
create 10 10beta1 create 11 11-master
fi fi

@ -34,23 +34,23 @@ run_test () {
export PSYCOPG2_TEST_REPL_DSN= export PSYCOPG2_TEST_REPL_DSN=
unset PSYCOPG2_TEST_GREEN unset PSYCOPG2_TEST_GREEN
python -c \ python -c \
"from psycopg2 import tests; tests.unittest.main(defaultTest='tests.test_suite')" \ "import tests; tests.unittest.main(defaultTest='tests.test_suite')" \
$VERBOSE $VERBOSE
printf "\n\nRunning tests against PostgreSQL $VERSION (green mode)\n\n" printf "\n\nRunning tests against PostgreSQL $VERSION (green mode)\n\n"
export PSYCOPG2_TEST_GREEN=1 export PSYCOPG2_TEST_GREEN=1
python -c \ python -c \
"from psycopg2 import tests; tests.unittest.main(defaultTest='tests.test_suite')" \ "import tests; tests.unittest.main(defaultTest='tests.test_suite')" \
$VERBOSE $VERBOSE
} }
# Postgres versions supported by Travis CI # Postgres versions supported by Travis CI
if [[ -z "$DONT_TEST_PRESENT" ]]; then if [[ -z "$DONT_TEST_PRESENT" ]]; then
run_test 10
run_test 9.6 run_test 9.6
run_test 9.5 run_test 9.5
run_test 9.4 run_test 9.4
run_test 9.3 run_test 9.3
run_test 9.2
fi fi
# Unsupported postgres versions that we still support # Unsupported postgres versions that we still support
@ -64,9 +64,10 @@ if [[ -n "$TEST_PAST" ]]; then
run_test 8.4 run_test 8.4
run_test 9.0 run_test 9.0
run_test 9.1 run_test 9.1
run_test 9.2
fi fi
# Postgres built from master # Postgres built from master
if [[ -n "$TEST_FUTURE" ]]; then if [[ -n "$TEST_FUTURE" ]]; then
run_test 10 run_test 11
fi fi

@ -27,3 +27,6 @@ static_libpq=0
# Add here eventual extra libraries required to link the module. # Add here eventual extra libraries required to link the module.
libraries= libraries=
[metadata]
license_file = LICENSE

@ -39,6 +39,7 @@ except ImportError:
from distutils.command.build_ext import build_ext from distutils.command.build_ext import build_ext
from distutils.sysconfig import get_python_inc from distutils.sysconfig import get_python_inc
from distutils.ccompiler import get_default_compiler from distutils.ccompiler import get_default_compiler
from distutils.errors import CompileError
from distutils.util import get_platform from distutils.util import get_platform
try: try:
@ -64,7 +65,7 @@ except ImportError:
# Take a look at http://www.python.org/dev/peps/pep-0440/ # Take a look at http://www.python.org/dev/peps/pep-0440/
# for a consistent versioning pattern. # for a consistent versioning pattern.
PSYCOPG_VERSION = '2.7.4.dev0' PSYCOPG_VERSION = '2.8.dev0'
# note: if you are changing the list of supported Python version please fix # note: if you are changing the list of supported Python version please fix
@ -75,14 +76,13 @@ Intended Audience :: Developers
License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)
License :: OSI Approved :: Zope Public License License :: OSI Approved :: Zope Public License
Programming Language :: Python Programming Language :: Python
Programming Language :: Python :: 2.6 Programming Language :: Python :: 2
Programming Language :: Python :: 2.7 Programming Language :: Python :: 2.7
Programming Language :: Python :: 3 Programming Language :: Python :: 3
Programming Language :: Python :: 3.2
Programming Language :: Python :: 3.3
Programming Language :: Python :: 3.4 Programming Language :: Python :: 3.4
Programming Language :: Python :: 3.5 Programming Language :: Python :: 3.5
Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.6
Programming Language :: Python :: Implementation :: CPython
Programming Language :: C Programming Language :: C
Programming Language :: SQL Programming Language :: SQL
Topic :: Database Topic :: Database
@ -105,15 +105,23 @@ class PostgresConfig:
if not self.pg_config_exe: if not self.pg_config_exe:
self.pg_config_exe = self.autodetect_pg_config_path() self.pg_config_exe = self.autodetect_pg_config_path()
if self.pg_config_exe is None: if self.pg_config_exe is None:
sys.stderr.write("""\ sys.stderr.write("""
Error: pg_config executable not found. Error: pg_config executable not found.
Please add the directory containing pg_config to the PATH pg_config is required to build psycopg2 from source. Please add the directory
or specify the full executable path with the option: containing pg_config to the $PATH or specify the full executable path with the
option:
python setup.py build_ext --pg-config /path/to/pg_config build ... python setup.py build_ext --pg-config /path/to/pg_config build ...
or with the pg_config option in 'setup.cfg'. or with the pg_config option in 'setup.cfg'.
If you prefer to avoid building psycopg2 from source, please install the PyPI
'psycopg2-binary' package instead.
For further information please check the 'doc/src/install.rst' file (also at
<http://initd.org/psycopg/docs/install.html>).
""") """)
sys.exit(1) sys.exit(1)
@ -195,8 +203,7 @@ or with the pg_config option in 'setup.cfg'.
return None return None
pg_first_inst_key = winreg.OpenKey(reg, pg_first_inst_key = winreg.OpenKey(reg,
'SOFTWARE\\PostgreSQL\\Installations\\' 'SOFTWARE\\PostgreSQL\\Installations\\' + first_sub_key_name)
+ first_sub_key_name)
try: try:
pg_inst_base_dir = winreg.QueryValueEx( pg_inst_base_dir = winreg.QueryValueEx(
pg_first_inst_key, 'Base Directory')[0] pg_first_inst_key, 'Base Directory')[0]
@ -213,8 +220,7 @@ or with the pg_config option in 'setup.cfg'.
# Support unicode paths, if this version of Python provides the # Support unicode paths, if this version of Python provides the
# necessary infrastructure: # necessary infrastructure:
if sys.version_info[0] < 3 \ if sys.version_info[0] < 3:
and hasattr(sys, 'getfilesystemencoding'):
pg_config_path = pg_config_path.encode( pg_config_path = pg_config_path.encode(
sys.getfilesystemencoding()) sys.getfilesystemencoding())
@ -289,13 +295,42 @@ class psycopg_build_ext(build_ext):
else: else:
return build_ext.get_export_symbols(self, extension) return build_ext.get_export_symbols(self, extension)
built_files = 0
def build_extension(self, extension): def build_extension(self, extension):
build_ext.build_extension(self, extension) # Count files compiled to print the binary blurb only if the first fails
compile_orig = getattr(self.compiler, '_compile', None)
if compile_orig is not None:
def _compile(*args, **kwargs):
rv = compile_orig(*args, **kwargs)
psycopg_build_ext.built_files += 1
return rv
self.compiler._compile = _compile
try:
build_ext.build_extension(self, extension)
psycopg_build_ext.built_files += 1
except CompileError:
if self.built_files == 0:
sys.stderr.write("""
It appears you are missing some prerequisite to build the package from source.
You may install a binary package by installing 'psycopg2-binary' from PyPI.
If you want to install psycopg2 from source, please install the packages
required for the build and try again.
For further information please check the 'doc/src/install.rst' file (also at
<http://initd.org/psycopg/docs/install.html>).
""")
raise
sysVer = sys.version_info[:2] sysVer = sys.version_info[:2]
# For Python versions that use MSVC compiler 2008, re-insert the # For Python versions that use MSVC compiler 2008, re-insert the
# manifest into the resulting .pyd file. # manifest into the resulting .pyd file.
if self.compiler_is_msvc() and sysVer in ((2, 6), (2, 7), (3, 0), (3, 1), (3, 2)): if self.compiler_is_msvc() and sysVer == (2, 7):
platform = get_platform() platform = get_platform()
# Default to the x86 manifest # Default to the x86 manifest
manifest = '_psycopg.vc9.x86.manifest' manifest = '_psycopg.vc9.x86.manifest'
@ -317,7 +352,6 @@ class psycopg_build_ext(build_ext):
def finalize_win32(self): def finalize_win32(self):
"""Finalize build system configuration on win32 platform.""" """Finalize build system configuration on win32 platform."""
sysVer = sys.version_info[:2]
# Add compiler-specific arguments: # Add compiler-specific arguments:
extra_compiler_args = [] extra_compiler_args = []
@ -333,17 +367,6 @@ class psycopg_build_ext(build_ext):
# API code. # API code.
extra_compiler_args.append('-fno-strict-aliasing') extra_compiler_args.append('-fno-strict-aliasing')
# Force correct C runtime library linkage:
if sysVer <= (2, 3):
# Yes: 'msvcr60', rather than 'msvcrt', is the correct value
# on the line below:
self.libraries.append('msvcr60')
elif sysVer in ((2, 4), (2, 5)):
self.libraries.append('msvcr71')
# Beyond Python 2.5, we take our chances on the default C runtime
# library, because we don't know what compiler those future
# versions of Python will use.
for extension in ext: # ext is a global list of Extension objects for extension in ext: # ext is a global list of Extension objects
extension.extra_compile_args.extend(extra_compiler_args) extension.extra_compile_args.extend(extra_compiler_args)
# End of add-compiler-specific arguments section. # End of add-compiler-specific arguments section.
@ -412,7 +435,7 @@ class psycopg_build_ext(build_ext):
# *at least* PostgreSQL 7.4 is available (this is the only # *at least* PostgreSQL 7.4 is available (this is the only
# 7.x series supported by psycopg 2) # 7.x series supported by psycopg 2)
pgversion = pg_config_helper.query("version").split()[1] pgversion = pg_config_helper.query("version").split()[1]
except: except Exception:
pgversion = "7.4.0" pgversion = "7.4.0"
verre = re.compile( verre = re.compile(
@ -420,11 +443,14 @@ class psycopg_build_ext(build_ext):
m = verre.match(pgversion) m = verre.match(pgversion)
if m: if m:
pgmajor, pgminor, pgpatch = m.group(1, 2, 3) pgmajor, pgminor, pgpatch = m.group(1, 2, 3)
# Postgres >= 10 doesn't have pgminor anymore.
pgmajor = int(pgmajor)
if pgmajor >= 10:
pgminor, pgpatch = None, pgminor
if pgminor is None or not pgminor.isdigit(): if pgminor is None or not pgminor.isdigit():
pgminor = 0 pgminor = 0
if pgpatch is None or not pgpatch.isdigit(): if pgpatch is None or not pgpatch.isdigit():
pgpatch = 0 pgpatch = 0
pgmajor = int(pgmajor)
pgminor = int(pgminor) pgminor = int(pgminor)
pgpatch = int(pgpatch) pgpatch = int(pgpatch)
else: else:
@ -480,7 +506,7 @@ data_files = []
sources = [ sources = [
'psycopgmodule.c', 'psycopgmodule.c',
'green.c', 'pqpath.c', 'utils.c', 'bytes_format.c', 'green.c', 'pqpath.c', 'utils.c', 'bytes_format.c',
'libpq_support.c', 'win32_support.c', 'libpq_support.c', 'win32_support.c', 'solaris_support.c',
'connection_int.c', 'connection_type.c', 'connection_int.c', 'connection_type.c',
'cursor_int.c', 'cursor_type.c', 'cursor_int.c', 'cursor_type.c',
@ -570,10 +596,7 @@ if version_flags:
else: else:
PSYCOPG_VERSION_EX = PSYCOPG_VERSION PSYCOPG_VERSION_EX = PSYCOPG_VERSION
if not PLATFORM_IS_WINDOWS: define_macros.append(('PSYCOPG_VERSION', PSYCOPG_VERSION_EX))
define_macros.append(('PSYCOPG_VERSION', '"' + PSYCOPG_VERSION_EX + '"'))
else:
define_macros.append(('PSYCOPG_VERSION', '\\"' + PSYCOPG_VERSION_EX + '\\"'))
if parser.has_option('build_ext', 'have_ssl'): if parser.has_option('build_ext', 'have_ssl'):
have_ssl = int(parser.get('build_ext', 'have_ssl')) have_ssl = int(parser.get('build_ext', 'have_ssl'))
@ -617,7 +640,7 @@ try:
f = open("README.rst") f = open("README.rst")
readme = f.read() readme = f.read()
f.close() f.close()
except: except Exception:
print("failed to read readme: ignoring...") print("failed to read readme: ignoring...")
readme = __doc__ readme = __doc__
@ -631,12 +654,13 @@ setup(name="psycopg2",
download_url=download_url, download_url=download_url,
license="LGPL with exceptions or ZPL", license="LGPL with exceptions or ZPL",
platforms=["any"], platforms=["any"],
python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*',
description=readme.split("\n")[0], description=readme.split("\n")[0],
long_description="\n".join(readme.split("\n")[2:]).lstrip(), long_description="\n".join(readme.split("\n")[2:]).lstrip(),
classifiers=[x for x in classifiers.split("\n") if x], classifiers=[x for x in classifiers.split("\n") if x],
data_files=data_files, data_files=data_files,
package_dir={'psycopg2': 'lib', 'psycopg2.tests': 'tests'}, package_dir={'psycopg2': 'lib'},
packages=['psycopg2', 'psycopg2.tests'], packages=['psycopg2'],
cmdclass={ cmdclass={
'build_ext': psycopg_build_ext, 'build_ext': psycopg_build_ext,
'build_py': build_py, }, 'build_py': build_py, },

Some files were not shown because too many files have changed in this diff.