Mirror of https://github.com/psycopg/psycopg2.git
Merge branch 'master' into master
This commit is contained in: commit 1bec2bdc43
.appveyor.yml (135 changed lines)

@@ -13,52 +13,17 @@ environment:
matrix:
# For Python versions available on Appveyor, see
# http://www.appveyor.com/docs/installed-software#python
- {PYVER: "27", PYTHON_ARCH: "32"}
- {PYVER: "27", PYTHON_ARCH: "64"}
- {PYVER: "34", PYTHON_ARCH: "32"}
- {PYVER: "34", PYTHON_ARCH: "64"}
- {PYVER: "35", PYTHON_ARCH: "32"}
- {PYVER: "35", PYTHON_ARCH: "64"}
- {PYVER: "36", PYTHON_ARCH: "32"}
- {PYVER: "36", PYTHON_ARCH: "64"}

# Py 2.7 = VS Ver. 9.0 (VS 2008)
# Py 3.3, 3.4 = VS Ver. 10.0 (VS 2010)
# Py 3.5, 3.6 = VS Ver. 14.0 (VS 2015)

- PYTHON: C:\Python27-x64
PYTHON_ARCH: 64
VS_VER: 9.0

- PYTHON: C:\Python27
PYTHON_ARCH: 32
VS_VER: 9.0

- PYTHON: C:\Python36-x64
PYTHON_ARCH: 64
VS_VER: 14.0

- PYTHON: C:\Python36
PYTHON_ARCH: 32
VS_VER: 14.0

- PYTHON: C:\Python35-x64
PYTHON_ARCH: 64
VS_VER: 14.0

- PYTHON: C:\Python35
PYTHON_ARCH: 32
VS_VER: 14.0

- PYTHON: C:\Python34-x64
DISTUTILS_USE_SDK: '1'
PYTHON_ARCH: 64
VS_VER: 10.0

- PYTHON: C:\Python34
PYTHON_ARCH: 32
VS_VER: 10.0

- PYTHON: C:\Python33-x64
DISTUTILS_USE_SDK: '1'
PYTHON_ARCH: 64
VS_VER: 10.0

- PYTHON: C:\Python33
PYTHON_ARCH: 32
VS_VER: 10.0
OPENSSL_VERSION: "1_0_2n"
POSTGRES_VERSION: "10_1"

PSYCOPG2_TESTDB: psycopg2_test
PSYCOPG2_TESTDB_USER: postgres

@@ -73,17 +38,35 @@ matrix:
fast_finish: false

services:
# Note: if you change this service also change the paths to match
# (see where Program Files\Postgres\9.6 is used)
- postgresql96

cache:
# Rebuild cache if following file changes
# (See the file to zap the cache manually)
- C:\Others -> scripts\appveyor.cache_rebuild

# Script called before repo cloning
init:
# Uncomment next line to get RDP access during the build.
#- ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
#

# Set env variable according to the build environment
- SET PYTHON=C:\Python%PYVER%
- IF "%PYTHON_ARCH%"=="64" SET PYTHON=%PYTHON%-x64

# Py 2.7 = VS Ver. 9.0 (VS 2008)
# Py 3.3, 3.4 = VS Ver. 10.0 (VS 2010)
# Py 3.5, 3.6 = VS Ver. 14.0 (VS 2015)
- IF "%PYVER%"=="27" SET VS_VER=9.0
- IF "%PYVER%"=="33" SET VS_VER=10.0
- IF "%PYVER%"=="34" SET VS_VER=10.0
- IF "%PYVER%"=="35" SET VS_VER=14.0
- IF "%PYVER%"=="36" SET VS_VER=14.0

- IF "%VS_VER%"=="10.0" IF "%PYTHON_ARCH%"=="64" SET DISTUTILS_USE_SDK=1

# Set Python to the path
- SET PATH=%PYTHON%;%PYTHON%\Scripts;C:\Program Files\Git\mingw64\bin;%PATH%

@@ -113,6 +96,11 @@ init:
- IF "%PYTHON_ARCH%"=="32" (CALL "C:\\Program Files (x86)\\Microsoft Visual Studio %VS_VER%\\VC\\vcvarsall.bat" x86)
- IF "%PYTHON_ARCH%"=="64" (CALL "C:\\Program Files (x86)\\Microsoft Visual Studio %VS_VER%\\VC\\vcvarsall.bat" amd64)

# The program rc.exe on 64bit with some versions look in the wrong path
# location when building postgresql. This cheats by copying the x64 bit
# files to that location.
- IF "%PYTHON_ARCH%"=="64" (COPY /Y "C:\\Program Files\\Microsoft SDKs\\Windows\\v7.0\\Bin\\x64\\rc*" "C:\\Program Files (x86)\\Microsoft SDKs\\Windows\\v7.0A\\Bin")

# Change PostgreSQL config before service starts to allow > 1 prepared
# transactions for test cases
- ECHO max_prepared_transactions = 10 >> "C:\\Program Files\\PostgreSQL\\9.6\\data\\postgresql.conf"

@@ -154,8 +142,8 @@ install:
}
# Download OpenSSL source
- CD C:\Others
- IF NOT EXIST OpenSSL_1_0_2l.zip (
curl -fsSL -o OpenSSL_1_0_2l.zip https://github.com/openssl/openssl/archive/OpenSSL_1_0_2l.zip
- IF NOT EXIST OpenSSL_%OPENSSL_VERSION%.zip (
curl -fsSL -o OpenSSL_%OPENSSL_VERSION%.zip https://github.com/openssl/openssl/archive/OpenSSL_%OPENSSL_VERSION%.zip
)

# To use OpenSSL >= 1.1.0, both libpq and psycopg build environments have

@@ -167,15 +155,15 @@ install:
# - nmake build_libs install_dev
- IF NOT EXIST %OPENSSLTOP%\lib\ssleay32.lib (
CD %BUILD_DIR% &&
7z x C:\Others\OpenSSL_1_0_2l.zip &&
CD openssl-OpenSSL_1_0_2l &&
7z x C:\Others\OpenSSL_%OPENSSL_VERSION%.zip &&
CD openssl-OpenSSL_%OPENSSL_VERSION% &&
perl Configure %TARGET% no-asm no-shared no-zlib --prefix=%OPENSSLTOP% --openssldir=%OPENSSLTOP% &&
CALL ms\%DO% &&
nmake -f ms\nt.mak init headers lib &&
COPY inc32\openssl\*.h %OPENSSLTOP%\include\openssl &&
COPY out32\*.lib %OPENSSLTOP%\lib &&
CD %BASE_DIR% &&
RMDIR /S /Q %BUILD_DIR%\openssl-OpenSSL_1_0_2l
RMDIR /S /Q %BUILD_DIR%\openssl-OpenSSL_%OPENSSL_VERSION%
)

# Setup directories for building PostgreSQL librarires

@@ -185,32 +173,45 @@ install:
- SET PGTOP=%BASE_DIR%\postgresql
- IF NOT EXIST %PGTOP%\include MKDIR %PGTOP%\include
- IF NOT EXIST %PGTOP%\lib MKDIR %PGTOP%\lib
- IF NOT EXIST %PGTOP%\bin MKDIR %PGTOP%\bin

# Download PostgreSQL source
- CD C:\Others
- IF NOT EXIST postgres-REL9_6_3.zip (
curl -fsSL -o postgres-REL9_6_3.zip https://github.com/postgres/postgres/archive/REL9_6_3.zip
- IF NOT EXIST postgres-REL_%POSTGRES_VERSION%.zip (
curl -fsSL -o postgres-REL_%POSTGRES_VERSION%.zip https://github.com/postgres/postgres/archive/REL_%POSTGRES_VERSION%.zip
)

# Setup build config file (config.pl)
# Build libpgport first
# Build libpq
# Hack the Mkvcbuild.pm file so we build the lib version of libpq
# Build libpgport, libpgcommon, libpq
# Install includes
# Copy over built libraries
# Prepare local include directory for building from
# Build pg_config in place
# NOTE: Cannot set and use the same variable inside an IF
- SET PGBUILD=%BUILD_DIR%\postgres-REL9_6_3
- SET PGBUILD=%BUILD_DIR%\postgres-REL_%POSTGRES_VERSION%
- IF NOT EXIST %PGTOP%\lib\libpq.lib (
CD %BUILD_DIR% &&
7z x C:\Others\postgres-REL9_6_3.zip &&
CD postgres-REL9_6_3\src\tools\msvc &&
7z x C:\Others\postgres-REL_%POSTGRES_VERSION%.zip &&
CD postgres-REL_%POSTGRES_VERSION%\src\tools\msvc &&
ECHO $config-^>{ldap} = 0; > config.pl &&
ECHO $config-^>{openssl} = "%OPENSSLTOP:\=\\%"; >> config.pl &&
ECHO.>> config.pl &&
ECHO 1;>> config.pl &&
perl -pi.bak -e "s/'libpq', 'dll'/'libpq', 'lib'/g" Mkvcbuild.pm &&
build libpgport &&
XCOPY /E ..\..\include %PGTOP%\include &&
build libpgcommon &&
build libpq &&
ECHO "" > %PGBUILD%\src\backend\parser\gram.h &&
perl -pi.bak -e "s/qw\(Install\)/qw\(Install CopyIncludeFiles\)/g" Install.pm &&
perl -MInstall=CopyIncludeFiles -e"chdir('../../..'); CopyIncludeFiles('%PGTOP%')" &&
COPY %PGBUILD%\Release\libpgport\libpgport.lib %PGTOP%\lib &&
CD ..\..\interfaces\libpq &&
nmake -f win32.mak USE_OPENSSL=1 ENABLE_THREAD_SAFETY=1 SSL_INC=%OPENSSLTOP%\include SSL_LIB_PATH=%OPENSSLTOP%\lib config .\Release\libpq.lib &&
COPY *.h %PGTOP%\include &&
COPY Release\libpq.lib %PGTOP%\lib &&
COPY %PGBUILD%\Release\libpgcommon\libpgcommon.lib %PGTOP%\lib &&
COPY %PGBUILD%\Release\libpq\libpq.lib %PGTOP%\lib &&
XCOPY /Y /S %PGBUILD%\src\include\port\win32\* %PGBUILD%\src\include &&
XCOPY /Y /S %PGBUILD%\src\include\port\win32_msvc\* %PGBUILD%\src\include &&
CD %PGBUILD%\src\bin\pg_config &&
cl pg_config.c /MT /nologo /I%PGBUILD%\src\include /link /LIBPATH:%PGTOP%\lib libpgcommon.lib libpgport.lib advapi32.lib /NODEFAULTLIB:libcmt.lib /OUT:%PGTOP%\bin\pg_config.exe &&
CD %BASE_DIR% &&
RMDIR /S /Q %PGBUILD%
)

@@ -223,9 +224,10 @@ build_script:
# Add PostgreSQL binaries to the path
- PATH=C:\Program Files\PostgreSQL\9.6\bin\;%PATH%
- CD C:\Project
- "%PYTHON%\\python.exe setup.py build_ext --have-ssl -l libpgcommon -L %OPENSSLTOP%\\lib;%PGTOP%\\lib -I %OPENSSLTOP%\\include;%PGTOP%\\include"
- "%PYTHON%\\python.exe setup.py build_ext --have-ssl --pg-config %PGTOP%\\bin\\pg_config.exe -l libpgcommon -l libpgport -L %OPENSSLTOP%\\lib -I %OPENSSLTOP%\\include"
- "%PYTHON%\\python.exe setup.py build"
- "%PYTHON%\\python.exe setup.py install"
- RD /S /Q psycopg2.egg-info

#after_build:

@@ -235,5 +237,8 @@ before_test:
- psql -d %PSYCOPG2_TESTDB% -c "CREATE EXTENSION HSTORE;"

test_script:
# Print psycopg and libpq versions
- "%PYTHON%\\python.exe -c \"import psycopg2; print(psycopg2.__version__)\""
- "%PYTHON%\\python.exe -c \"from psycopg2 import tests; tests.unittest.main(defaultTest='tests.test_suite')\" --verbose"
- "%PYTHON%\\python.exe -c \"import psycopg2; print(psycopg2.__libpq_version__)\""
- "%PYTHON%\\python.exe -c \"import psycopg2; print(psycopg2.extensions.libpq_version())\""
- "%PYTHON%\\python.exe -c \"import tests; tests.unittest.main(defaultTest='tests.test_suite')\" --verbose"
.gitignore (vendored, 3 changed lines)

@@ -4,6 +4,7 @@ MANIFEST
*.pidb
*.pyc
*.sw[po]
*.egg-info/
dist/*
build/*
doc/src/_build/*

@@ -11,6 +12,8 @@ doc/html/*
doc/psycopg2.txt
scripts/pypi_docs_upload.py
env
env?
.idea
.tox
/rel
/wheels
@@ -7,14 +7,12 @@ language: python
python:
- 2.7
- 3.6
- 2.6
- 3.5
- 3.4
- 3.3
- 3.2

install:
- python setup.py install
- rm -rf psycopg2.egg-info
- sudo scripts/travis_prepare.sh

script:
AUTHORS (2 changed lines)

@@ -6,7 +6,7 @@ For the win32 port:
Jason Erickson <jerickso@indian.com>

Additional Help:

Peter Fein contributed a logging connection/cursor class that even if it
was not used directly heavily influenced the implementation currently in
psycopg2.extras.
LICENSE (4 changed lines)

@@ -47,8 +47,8 @@ psycopg/microprotocol*.{h,c}:
claim that you wrote the original software. If you use this
software in a product, an acknowledgment in the product documentation
would be appreciated but is not required.

2. Altered source versions must be plainly marked as such, and must not
be misrepresented as being the original software.

3. This notice may not be removed or altered from any source distribution.
@@ -6,6 +6,5 @@ include doc/README.rst doc/SUCCESS doc/COPYING.LESSER doc/pep-0249.txt
include doc/Makefile doc/requirements.txt
recursive-include doc/src *.rst *.py *.css Makefile
recursive-include scripts *.py *.sh
include scripts/maketypes.sh scripts/buildtypes.py
include AUTHORS README.rst INSTALL LICENSE NEWS
include MANIFEST.in setup.py setup.cfg Makefile
Makefile (5 changed lines)

@@ -29,8 +29,7 @@ SOURCE := $(SOURCE_C) $(SOURCE_PY) $(SOURCE_TESTS) $(SOURCE_DOC)

PACKAGE := $(BUILD_DIR)/psycopg2
PLATLIB := $(PACKAGE)/_psycopg.so
PURELIB := $(patsubst lib/%,$(PACKAGE)/%,$(SOURCE_PY)) \
$(patsubst tests/%,$(PACKAGE)/tests/%,$(SOURCE_TESTS))
PURELIB := $(patsubst lib/%,$(PACKAGE)/%,$(SOURCE_PY))

BUILD_OPT := --build-lib=$(BUILD_DIR)
BUILD_EXT_OPT := --build-lib=$(BUILD_DIR)

@@ -66,7 +65,7 @@ env:
$(MAKE) -C doc $@

check:
PYTHONPATH=$(BUILD_DIR):$(PYTHONPATH) $(PYTHON) -c "from psycopg2 import tests; tests.unittest.main(defaultTest='tests.test_suite')" --verbose
PYTHONPATH=$(BUILD_DIR) $(PYTHON) -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')" --verbose

testdb:
@echo "* Creating $(TESTDB)"
NEWS (67 changed lines)

@@ -1,11 +1,64 @@
Current release
---------------

What's new in psycopg 2.8
-------------------------

Other changes:

- Dropped support for Python 2.6, 3.2, 3.3.
- Dropped `psycopg1` module.
- Dropped deprecated ``register_tstz_w_secs()`` (was previously a no-op).
- The ``psycopg2.test`` package is no longer installed by ``python setup.py
install``. The test source files now are compatible with Python 2 and 3
without using 2to3.


What's new in psycopg 2.7.5
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Fixed building on Solaris 11 and derivatives such as SmartOS and illumos
(:ticket:`#677`).
- Maybe fixed building on MSYS2 (as reported in :ticket:`#658`).
- Allow string subclasses in connection and other places (:ticket:`#679`).


What's new in psycopg 2.7.4
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Moving away from installing the wheel package by default.
Packages installed from wheel raise a warning on import. Added package
``psycopg2-binary`` to install from wheel instead (:ticket:`#543`).
- Convert fields names into valid Python identifiers in
`~psycopg2.extras.NamedTupleCursor` (:ticket:`#211`).
- Fixed Solaris 10 support (:ticket:`#532`).
- `cursor.mogrify()` can be called on closed cursors (:ticket:`#579`).
- Fixed setting session characteristics in corner cases on autocommit
connections (:ticket:`#580`).
- Fixed `~psycopg2.extras.MinTimeLoggingCursor` on Python 3 (:ticket:`#609`).
- Fixed parsing of array of points as floats (:ticket:`#613`).
- Fixed `~psycopg2.__libpq_version__` building with libpq >= 10.1
(:ticket:`632`).
- Fixed `~cursor.rowcount` after `~cursor.executemany()` with :sql:`RETURNING`
statements (:ticket:`633`).
- Fixed compatibility problem with pypy3 (:ticket:`#649`).
- Wheel packages compiled against PostgreSQL 10.1 libpq and OpenSSL 1.0.2n.
- Wheel packages for Python 2.6 no more available (support dropped from
wheel building infrastructure).


What's new in psycopg 2.7.3.2
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Wheel package compiled against PostgreSQL 10.0 libpq and OpenSSL 1.0.2l
(:tickets:`#601, #602`).


What's new in psycopg 2.7.3.1
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Dropped libresolv from wheel package to avoid incompatibility with
glibc 2.26 (wheels ticket #2)
glibc 2.26 (wheels ticket #2).


What's new in psycopg 2.7.3

@@ -100,9 +153,13 @@ New features:

Bug fixes:

- Throw an exception trying to pass ``NULL`` chars as parameters
(:ticket:`#420`).
- Fixed error caused by missing decoding `~psycopg2.extras.LoggingConnection`
(:ticket:`#483`).
- Fixed integer overflow in :sql:`interval` seconds (:ticket:`#512`).
- Make `~psycopg2.extras.Range` objects picklable (:ticket:`#462`).
- Fixed version parsing and building with PostgreSQL 10 (:ticket:`#489`).

Other changes:

@@ -116,14 +173,6 @@ Other changes:
(:ticket:`#506`)


What's new in psycopg 2.6.3
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Throw an exception trying to pass ``NULL`` chars as parameters
(:ticket:`#420`).
- Make `~psycopg2.extras.Range` objects picklable (:ticket:`#462`).


What's new in psycopg 2.6.2
^^^^^^^^^^^^^^^^^^^^^^^^^^^
README.rst (33 changed lines)

@@ -25,29 +25,40 @@ Documentation is included in the ``doc`` directory and is `available online`__.

.. __: http://initd.org/psycopg/docs/

For any other resource (source code repository, bug tracker, mailing list)
please check the `project homepage`__.


Installation
------------

If your ``pip`` version supports wheel_ packages it should be possible to
install a binary version of Psycopg including all the dependencies from PyPI_.
Just run::
Building Psycopg requires a few prerequisites (a C compiler, some development
packages): please check the install_ and the faq_ documents in the ``doc`` dir
or online for the details.

If prerequisites are met, you can install psycopg like any other Python
package, using ``pip`` to download it from PyPI_::

    $ pip install -U pip  # make sure your pip is up-to-date
    $ pip install psycopg2

If you want to build Psycopg from source you will need some prerequisites (a C
compiler, development packages): please check the install_ and the faq_
documents in the ``doc`` dir for the details.
or using ``setup.py`` if you have downloaded the source package locally::

    $ python setup.py build
    $ sudo python setup.py install

You can also obtain a stand-alone package, not requiring a compiler or
external libraries, by installing the `psycopg2-binary`_ package from PyPI::

    $ pip install psycopg2-binary

The binary package is a practical choice for development and testing but in
production it is advised to use the package built from sources.

.. _wheel: http://pythonwheels.com/
.. _PyPI: https://pypi.python.org/pypi/psycopg2
.. _psycopg2-binary: https://pypi.python.org/pypi/psycopg2-binary
.. _install: http://initd.org/psycopg/docs/install.html#install-from-source
.. _faq: http://initd.org/psycopg/docs/faq.html#faq-compile

For any other resource (source code repository, bug tracker, mailing list)
please check the `project homepage`__.

.. __: http://initd.org/psycopg/
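For a quick check that the installed package works, a minimal psycopg2 session looks like the sketch below; the connection parameters are placeholders (a local ``psycopg2_test`` database, as used by the test suite elsewhere in this commit), not values taken from the repository::

    import psycopg2

    # Placeholder connection parameters; adjust for your own server.
    conn = psycopg2.connect(dbname="psycopg2_test", user="postgres", host="localhost")
    try:
        with conn:  # commits on success, rolls back on error
            with conn.cursor() as cur:
                cur.execute("SELECT version(), %s", ("hello",))
                print(cur.fetchone())
    finally:
        conn.close()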
@@ -10,7 +10,7 @@
the terms and conditions of version 3 of the GNU General Public
License, supplemented by the additional permissions listed below.

0. Additional Definitions.
0. Additional Definitions.

As used herein, "this License" refers to version 3 of the GNU Lesser
General Public License, and the "GNU GPL" refers to version 3 of the GNU

@@ -111,7 +111,7 @@ the following:
a copy of the Library already present on the user's computer
system, and (b) will operate properly with a modified version
of the Library that is interface-compatible with the Linked
Version.
Version.

e) Provide Installation Information, but only if you would otherwise
be required to provide such information under section 6 of the
doc/SUCCESS (22 changed lines)

@@ -1,10 +1,10 @@
From: Jack Moffitt <jack@xiph.org>
To: Psycopg Mailing List <psycopg@lists.initd.org>
Subject: Re: [Psycopg] preparing for 1.0
Date: 22 Oct 2001 11:16:21 -0600
Date: 22 Oct 2001 11:16:21 -0600

www.vorbis.com is serving from 5-10k pages per day with psycopg serving
data for most of that.
www.vorbis.com is serving from 5-10k pages per day with psycopg serving
data for most of that.

I plan to use it for several of our other sites, so that number will
increase.

@@ -19,7 +19,7 @@ jack.
From: Yury Don <gercon@vpcit.ru>
To: Psycopg Mailing List <psycopg@lists.initd.org>
Subject: Re: [Psycopg] preparing for 1.0
Date: 23 Oct 2001 09:53:11 +0600
Date: 23 Oct 2001 09:53:11 +0600

We use psycopg and psycopg zope adapter since fisrt public
release (it seems version 0.4). Now it works on 3 our sites and in intranet

@@ -32,7 +32,7 @@ to solve the problem, even thouth my knowledge of c were poor.
BTW, segfault with dictfetchall on particular data set (see [Psycopg]
dictfetchXXX() problems) disappeared in 0.99.8pre2.

--
--
Best regards,
Yury Don

@@ -42,7 +42,7 @@ To: Federico Di Gregorio <fog@debian.org>
Cc: Psycopg Mailing List <psycopg@lists.initd.org>
Subject: Re: [Psycopg] preparing for 1.0
Date: 23 Oct 2001 08:25:52 -0400


The US Govt Department of Labor's Office of Disability Employment
Policy's DisabilityDirect website is run on zope and zpsycopg.

@@ -50,7 +50,7 @@ Policy's DisabilityDirect website is run on zope and zpsycopg.
From: Scott Leerssen <sleerssen@racemi.com>
To: Federico Di Gregorio <fog@debian.org>
Subject: Re: [Psycopg] preparing for 1.0
Date: 23 Oct 2001 09:56:10 -0400
Date: 23 Oct 2001 09:56:10 -0400

Racemi's load management software infrastructure uses psycopg to handle
complex server allocation decisions, plus storage and access of

@@ -66,10 +66,10 @@ From: Andre Schubert <andre.schubert@geyer.kabeljournal.de>
To: Federico Di Gregorio <fog@debian.org>
Cc: Psycopg Mailing List <psycopg@lists.initd.org>
Subject: Re: [Psycopg] preparing for 1.0
Date: 23 Oct 2001 11:46:07 +0200
Date: 23 Oct 2001 11:46:07 +0200

i have changed the psycopg version to 0.99.8pre2 on all devel-machines
and all segfaults are gone. after my holiday i wil change to 0.99.8pre2
and all segfaults are gone. after my holiday i wil change to 0.99.8pre2
or 1.0 on our production-server.
this server contains several web-sites which are all connected to
postgres over ZPsycopgDA.

@@ -81,7 +81,7 @@ From: Fred Wilson Horch <fhorch@ecoaccess.org>
To: <psycopg@lists.initd.org>
Subject: [Psycopg] Success story for psycopg
Date: 23 Oct 2001 10:59:17 -0400


Due to various quirks of PyGreSQL and PoPy, EcoAccess has been looking for
a reliable, fast and relatively bug-free Python-PostgreSQL interface for
our project.

@@ -98,7 +98,7 @@ reports and feature requests, and we're looking forward to using psycopg
as the Python interface for additional database-backed web applications.

Keep up the good work!
--
--
Fred Wilson Horch mailto:fhorch@ecoaccess.org
Executive Director, EcoAccess http://ecoaccess.org/
doc/pep-0249.txt (294 changed lines)

@@ -9,15 +9,15 @@ Replaces: 248
Release-Date: 07 Apr 1999

Introduction

This API has been defined to encourage similarity between the
Python modules that are used to access databases. By doing this,
we hope to achieve a consistency leading to more easily understood
modules, code that is generally more portable across databases,
and a broader reach of database connectivity from Python.

The interface specification consists of several sections:

* Module Interface
* Connection Objects
* Cursor Objects

@@ -25,7 +25,7 @@ Introduction
* Type Objects and Constructors
* Implementation Hints
* Major Changes from 1.0 to 2.0

Comments and questions about this specification may be directed
to the SIG for Database Interfacing with Python
(db-sig@python.org).

@@ -41,7 +41,7 @@ Introduction
basis for new interfaces.

Module Interface

Access to the database is made available through connection
objects. The module must provide the following constructor for
these:

@@ -51,17 +51,17 @@ Module Interface
Constructor for creating a connection to the database.
Returns a Connection Object. It takes a number of
parameters which are database dependent. [1]

These module globals must be defined:

apilevel

String constant stating the supported DB API level.
Currently only the strings '1.0' and '2.0' are allowed.

If not given, a DB-API 1.0 level interface should be
assumed.

threadsafety

Integer constant stating the level of thread safety the

@@ -81,33 +81,33 @@ Module Interface
or other external sources that are beyond your control.

paramstyle

String constant stating the type of parameter marker
formatting expected by the interface. Possible values are
[2]:

'qmark' Question mark style,
'qmark' Question mark style,
e.g. '...WHERE name=?'
'numeric' Numeric, positional style,
'numeric' Numeric, positional style,
e.g. '...WHERE name=:1'
'named' Named style,
'named' Named style,
e.g. '...WHERE name=:name'
'format' ANSI C printf format codes,
'format' ANSI C printf format codes,
e.g. '...WHERE name=%s'
'pyformat' Python extended format codes,
'pyformat' Python extended format codes,
e.g. '...WHERE name=%(name)s'

The module should make all error information available through
these exceptions or subclasses thereof:

Warning

Warning

Exception raised for important warnings like data
truncations while inserting, etc. It must be a subclass of
the Python StandardError (defined in the module
exceptions).

Error

Error

Exception that is the base class of all other error
exceptions. You can use this to catch all errors with one

@@ -115,7 +115,7 @@ Module Interface
errors and thus should not use this class as base. It must
be a subclass of the Python StandardError (defined in the
module exceptions).

InterfaceError

Exception raised for errors that are related to the

@@ -126,50 +126,50 @@ Module Interface
Exception raised for errors that are related to the
database. It must be a subclass of Error.

DataError

Exception raised for errors that are due to problems with
the processed data like division by zero, numeric value
out of range, etc. It must be a subclass of DatabaseError.

OperationalError

Exception raised for errors that are related to the
database's operation and not necessarily under the control
of the programmer, e.g. an unexpected disconnect occurs,
the data source name is not found, a transaction could not
be processed, a memory allocation error occurred during
processing, etc. It must be a subclass of DatabaseError.

IntegrityError

IntegrityError

Exception raised when the relational integrity of the
database is affected, e.g. a foreign key check fails. It
must be a subclass of DatabaseError.

InternalError

InternalError

Exception raised when the database encounters an internal
error, e.g. the cursor is not valid anymore, the
transaction is out of sync, etc. It must be a subclass of
DatabaseError.

ProgrammingError

Exception raised for programming errors, e.g. table not
found or already exists, syntax error in the SQL
statement, wrong number of parameters specified, etc. It
must be a subclass of DatabaseError.

NotSupportedError

Exception raised in case a method or database API was used
which is not supported by the database, e.g. requesting a
.rollback() on a connection that does not support
transaction or has transactions turned off. It must be a
subclass of DatabaseError.

This is the exception inheritance layout:

StandardError

@@ -183,17 +183,17 @@ Module Interface
|__InternalError
|__ProgrammingError
|__NotSupportedError

Note: The values of these exceptions are not defined. They should
give the user a fairly good idea of what went wrong, though.


Connection Objects

Connection Objects should respond to the following methods:

.close()

.close()

Close the connection now (rather than whenever __del__ is
called). The connection will be unusable from this point
forward; an Error (or subclass) exception will be raised

@@ -203,52 +203,52 @@ Connection Objects
committing the changes first will cause an implicit
rollback to be performed.

.commit()

Commit any pending transaction to the database. Note that
if the database supports an auto-commit feature, this must
be initially off. An interface method may be provided to
turn it back on.

Database modules that do not support transactions should
implement this method with void functionality.

.rollback()

.rollback()

This method is optional since not all databases provide
transaction support. [3]

In case a database does provide transactions this method
causes the the database to roll back to the start of any
pending transaction. Closing a connection without
committing the changes first will cause an implicit
rollback to be performed.

.cursor()

Return a new Cursor Object using the connection. If the
database does not provide a direct cursor concept, the
module will have to emulate cursors using other means to
the extent needed by this specification. [4]


Cursor Objects

These objects represent a database cursor, which is used to
manage the context of a fetch operation. Cursors created from
manage the context of a fetch operation. Cursors created from
the same connection are not isolated, i.e., any changes
done to the database by a cursor are immediately visible by the
other cursors. Cursors created from different connections can
or can not be isolated, depending on how the transaction support
is implemented (see also the connection's rollback() and commit()
is implemented (see also the connection's rollback() and commit()
methods.)

Cursor Objects should respond to the following methods and
attributes:

.description

.description

This read-only attribute is a sequence of 7-item
sequences. Each of these sequences contains information
describing one result column: (name, type_code,

@@ -260,17 +260,17 @@ Cursor Objects
This attribute will be None for operations that
do not return rows or if the cursor has not had an
operation invoked via the executeXXX() method yet.

The type_code can be interpreted by comparing it to the
Type Objects specified in the section below.

.rowcount

.rowcount

This read-only attribute specifies the number of rows that
the last executeXXX() produced (for DQL statements like
'select') or affected (for DML statements like 'update' or
'insert').

The attribute is -1 in case no executeXXX() has been
performed on the cursor or the rowcount of the last
operation is not determinable by the interface. [7]

@@ -278,96 +278,96 @@ Cursor Objects
Note: Future versions of the DB API specification could
redefine the latter case to have the object return None
instead of -1.

.callproc(procname[,parameters])

(This method is optional since not all databases provide
stored procedures. [3])

Call a stored database procedure with the given name. The
sequence of parameters must contain one entry for each
argument that the procedure expects. The result of the
call is returned as modified copy of the input
sequence. Input parameters are left untouched, output and
input/output parameters replaced with possibly new values.

The procedure may also provide a result set as
output. This must then be made available through the
standard fetchXXX() methods.

.close()

Close the cursor now (rather than whenever __del__ is
called). The cursor will be unusable from this point
forward; an Error (or subclass) exception will be raised
if any operation is attempted with the cursor.

.execute(operation[,parameters])

.execute(operation[,parameters])

Prepare and execute a database operation (query or
command). Parameters may be provided as sequence or
mapping and will be bound to variables in the operation.
Variables are specified in a database-specific notation
(see the module's paramstyle attribute for details). [5]

A reference to the operation will be retained by the
cursor. If the same operation object is passed in again,
then the cursor can optimize its behavior. This is most
effective for algorithms where the same operation is used,
but different parameters are bound to it (many times).

For maximum efficiency when reusing an operation, it is
best to use the setinputsizes() method to specify the
parameter types and sizes ahead of time. It is legal for
a parameter to not match the predefined information; the
implementation should compensate, possibly with a loss of
efficiency.

The parameters may also be specified as list of tuples to
e.g. insert multiple rows in a single operation, but this
kind of usage is depreciated: executemany() should be used
instead.

Return values are not defined.

.executemany(operation,seq_of_parameters)

.executemany(operation,seq_of_parameters)

Prepare a database operation (query or command) and then
execute it against all parameter sequences or mappings
found in the sequence seq_of_parameters.

Modules are free to implement this method using multiple
calls to the execute() method or by using array operations
to have the database process the sequence as a whole in
one call.

Use of this method for an operation which produces one or
more result sets constitutes undefined behavior, and the
implementation is permitted (but not required) to raise
implementation is permitted (but not required) to raise
an exception when it detects that a result set has been
created by an invocation of the operation.

The same comments as for execute() also apply accordingly
to this method.

Return values are not defined.

.fetchone()

.fetchone()

Fetch the next row of a query result set, returning a
single sequence, or None when no more data is
available. [6]

An Error (or subclass) exception is raised if the previous
call to executeXXX() did not produce any result set or no
call was issued yet.

fetchmany([size=cursor.arraysize])

Fetch the next set of rows of a query result, returning a
sequence of sequences (e.g. a list of tuples). An empty
sequence is returned when no more rows are available.

The number of rows to fetch per call is specified by the
parameter. If it is not given, the cursor's arraysize
determines the number of rows to be fetched. The method

@@ -375,62 +375,62 @@ Cursor Objects
parameter. If this is not possible due to the specified
number of rows not being available, fewer rows may be
returned.

An Error (or subclass) exception is raised if the previous
call to executeXXX() did not produce any result set or no
call was issued yet.

Note there are performance considerations involved with
the size parameter. For optimal performance, it is
usually best to use the arraysize attribute. If the size
parameter is used, then it is best for it to retain the
same value from one fetchmany() call to the next.

.fetchall()

.fetchall()

Fetch all (remaining) rows of a query result, returning
them as a sequence of sequences (e.g. a list of tuples).
Note that the cursor's arraysize attribute can affect the
performance of this operation.

An Error (or subclass) exception is raised if the previous
call to executeXXX() did not produce any result set or no
call was issued yet.

.nextset()

.nextset()

(This method is optional since not all databases support
multiple result sets. [3])

This method will make the cursor skip to the next
available set, discarding any remaining rows from the
current set.

If there are no more sets, the method returns
None. Otherwise, it returns a true value and subsequent
calls to the fetch methods will return rows from the next
result set.

An Error (or subclass) exception is raised if the previous
call to executeXXX() did not produce any result set or no
call was issued yet.

.arraysize

This read/write attribute specifies the number of rows to
fetch at a time with fetchmany(). It defaults to 1 meaning
to fetch a single row at a time.

Implementations must observe this value with respect to
the fetchmany() method, but are free to interact with the
database a single row at a time. It may also be used in
the implementation of executemany().

.setinputsizes(sizes)

This can be used before a call to executeXXX() to
predefine memory areas for the operation's parameters.

sizes is specified as a sequence -- one item for each
input parameter. The item should be a Type Object that
corresponds to the input that will be used, or it should

@@ -438,27 +438,27 @@ Cursor Objects
parameter. If the item is None, then no predefined memory
area will be reserved for that column (this is useful to
avoid predefined areas for large inputs).

This method would be used before the executeXXX() method
is invoked.

Implementations are free to have this method do nothing
and users are free to not use it.

.setoutputsize(size[,column])

Set a column buffer size for fetches of large columns
(e.g. LONGs, BLOBs, etc.). The column is specified as an
index into the result sequence. Not specifying the column
will set the default size for all large columns in the
cursor.

This method would be used before the executeXXX() method
is invoked.

Implementations are free to have this method do nothing
and users are free to not use it.


Type Objects and Constructors

@@ -485,15 +485,15 @@ Type Objects and Constructors
Implementation Hints below for details).

The module exports the following constructors and singletons:

Date(year,month,day)

This function constructs an object holding a date value.

Time(hour,minute,second)

This function constructs an object holding a time value.

Timestamp(year,month,day,hour,minute,second)

This function constructs an object holding a time stamp

@@ -507,12 +507,12 @@ Type Objects and Constructors
module for details).

TimeFromTicks(ticks)

This function constructs an object holding a time value
from the given ticks value (number of seconds since the
epoch; see the documentation of the standard Python time
module for details).

TimestampFromTicks(ticks)

This function constructs an object holding a time stamp

@@ -521,10 +521,10 @@ Type Objects and Constructors
time module for details).

Binary(string)

This function constructs an object capable of holding a
binary (long) string value.


STRING

@@ -535,22 +535,22 @@ Type Objects and Constructors

This type object is used to describe (long) binary columns
in a database (e.g. LONG, RAW, BLOBs).

NUMBER

This type object is used to describe numeric columns in a
database.

DATETIME

This type object is used to describe date/time columns in
a database.

ROWID

This type object is used to describe the "Row ID" column
in a database.

SQL NULL values are represented by the Python None singleton on
input and output.

@@ -563,7 +563,7 @@ Implementation Hints for Module Authors
* The preferred object types for the date/time objects are those
defined in the mxDateTime package. It provides all necessary
constructors and methods both at Python and C level.

* The preferred object type for Binary objects are the
buffer types available in standard Python starting with
version 1.5.2. Please see the Python documentation for

@@ -577,7 +577,7 @@ Implementation Hints for Module Authors
processing. However, it should be noted that this does not
expose a C API like mxDateTime does which means that integration
with C based database modules is more difficult.

* Here is a sample implementation of the Unix ticks based
constructors for date/time delegating work to the generic
constructors:

@@ -645,7 +645,7 @@ Implementation Hints for Module Authors

class NotSupportedError(DatabaseError):
pass

In C you can use the PyErr_NewException(fullname,
base, NULL) API to create the exception objects.

@@ -760,7 +760,7 @@ Optional DB API Extensions
Warning Message: "DB-API extension connection.messages used"

Cursor Method .next()

Return the next row from the currently executing SQL statement
using the same semantics as .fetchone(). A StopIteration
exception is raised when the result set is exhausted for Python

@@ -790,13 +790,13 @@ Optional DB API Extensions

Warning Message: "DB-API extension cursor.lastrowid used"


Optional Error Handling Extension

The core DB API specification only introduces a set of exceptions
which can be raised to report errors to the user. In some cases,
exceptions may be too disruptive for the flow of a program or even
render execution impossible.
render execution impossible.

For these cases and in order to simplify error handling when
dealing with databases, database module authors may choose to

@@ -806,7 +806,7 @@ Optional Error Handling Extension
Cursor/Connection Attribute .errorhandler

Read/write attribute which references an error handler to call
in case an error condition is met.
in case an error condition is met.

The handler must be a Python callable taking the following
arguments: errorhandler(connection, cursor, errorclass,

@@ -836,7 +836,7 @@ Frequently Asked Questions
specification. This section covers some of the issues people
sometimes have with the specification.

Question:
Question:

How can I construct a dictionary out of the tuples returned by
.fetchxxx():

@@ -855,7 +855,7 @@ Frequently Asked Questions
* Some databases don't support case-sensitive column names or
auto-convert them to all lowercase or all uppercase
characters.

* Columns in the result set which are generated by the query
(e.g. using SQL functions) don't map to table column names
and databases usually generate names for these columns in a

@@ -872,9 +872,9 @@ Major Changes from Version 1.0 to Version 2.0
compared to the 1.0 version. Because some of these changes will
cause existing DB API 1.0 based scripts to break, the major
version number was adjusted to reflect this change.

These are the most important changes from 1.0 to 2.0:

* The need for a separate dbi module was dropped and the
functionality merged into the module interface itself.

@@ -886,10 +886,10 @@ Major Changes from Version 1.0 to Version 2.0
* New constants (apilevel, threadlevel, paramstyle) and
methods (executemany, nextset) were added to provide better
database bindings.

* The semantics of .callproc() needed to call stored
procedures are now clearly defined.

* The definition of the .execute() return value changed.
Previously, the return value was based on the SQL statement
type (which was hard to implement right) -- it is undefined

@@ -898,7 +898,7 @@ Major Changes from Version 1.0 to Version 2.0
values, but these are no longer mandated by the
specification and should be considered database interface
dependent.

* Class based exceptions were incorporated into the
specification. Module implementors are free to extend the
exception layout defined in this specification by

@@ -916,10 +916,10 @@ Open Issues
questions that were left open in the 1.0 version, there are still
some remaining issues which should be addressed in future
versions:

* Define a useful return value for .nextset() for the case where
a new result set is available.

* Create a fixed point numeric type for use as loss-less
monetary and decimal interchange format.

@@ -929,17 +929,17 @@ Footnotes
[1] As a guideline the connection constructor parameters should be
implemented as keyword parameters for more intuitive use and
follow this order of parameters:

dsn Data source name as string
user User name as string (optional)
password Password as string (optional)
host Hostname (optional)
database Database name (optional)

E.g. a connect could look like this:

connect(dsn='myhost:MYDB',user='guido',password='234$')

[2] Module implementors should prefer 'numeric', 'named' or
'pyformat' over the other formats because these offer more
clarity and flexibility.

@@ -947,41 +947,41 @@ Footnotes
[3] If the database does not support the functionality required
by the method, the interface should throw an exception in
case the method is used.

The preferred approach is to not implement the method and
thus have Python generate an AttributeError in
case the method is requested. This allows the programmer to
check for database capabilities using the standard
hasattr() function.

For some dynamically configured interfaces it may not be
appropriate to require dynamically making the method
available. These interfaces should then raise a
NotSupportedError to indicate the non-ability
to perform the roll back when the method is invoked.

[4] a database interface may choose to support named cursors by
allowing a string argument to the method. This feature is
not part of the specification, since it complicates
semantics of the .fetchXXX() methods.

[5] The module will use the __getitem__ method of the parameters
object to map either positions (integers) or names (strings)
to parameter values. This allows for both sequences and
mappings to be used as input.

The term "bound" refers to the process of binding an input
value to a database execution buffer. In practical terms,
this means that the input value is directly used as a value
in the operation. The client should not be required to
"escape" the value so that it can be used -- the value
should be equal to the actual database value.

[6] Note that the interface may implement row fetching using
arrays and other optimizations. It is not
guaranteed that a call to this method will only move the
associated cursor forward by one row.

[7] The rowcount attribute may be coded in a way that updates
its value dynamically. This can be useful for databases that
return usable rowcount values only after the first call to
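The hunks above quote the spec's exception layout and refer to its sample ticks-based constructors without showing them in full. A minimal sketch of what such a module-level skeleton typically looks like, with illustrative bodies (modern code roots the exceptions at ``Exception`` rather than the ``StandardError`` named in the text)::

    import time
    import datetime

    # Exception layout described in the specification.
    class Warning(Exception): pass
    class Error(Exception): pass
    class InterfaceError(Error): pass
    class DatabaseError(Error): pass
    class DataError(DatabaseError): pass
    class OperationalError(DatabaseError): pass
    class IntegrityError(DatabaseError): pass
    class InternalError(DatabaseError): pass
    class ProgrammingError(DatabaseError): pass
    class NotSupportedError(DatabaseError): pass

    # Generic constructors (plain datetime types here).
    Date = datetime.date
    Time = datetime.time
    Timestamp = datetime.datetime

    # Ticks-based constructors delegating to the generic ones.
    def DateFromTicks(ticks):
        return Date(*time.localtime(ticks)[:3])

    def TimeFromTicks(ticks):
        return Time(*time.localtime(ticks)[3:6])

    def TimestampFromTicks(ticks):
        return Timestamp(*time.localtime(ticks)[:6])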
@@ -36,7 +36,7 @@ How to make a psycopg2 release
- Create a signed tag with the content of the relevant NEWS bit and push it.
E.g.::

    $ git tag -a -s 2_7
    $ git tag -a -s 2_7

    Psycopg 2.7 released
@@ -1,3 +1,3 @@
# Packages only needed to build the docs
Pygments>=1.5
Sphinx>=1.2,<=1.3
Pygments>=2.2,<2.3
Sphinx>=1.6,<=1.7
@@ -188,7 +188,7 @@ representation into the previously defined `!Point` class:
...         return Point(float(m.group(1)), float(m.group(2)))
...     else:
...         raise InterfaceError("bad point representation: %r" % value)


In order to create a mapping from a PostgreSQL type (either standard or
user-defined), its OID must be known. It can be retrieved either by the second
|
|||
print "Got NOTIFY:", notify.pid, notify.channel, notify.payload
|
||||
|
||||
Running the script and executing a command such as :sql:`NOTIFY test, 'hello'`
|
||||
in a separate :program:`psql` shell, the output may look similar to::
|
||||
in a separate :program:`psql` shell, the output may look similar to:
|
||||
|
||||
.. code-block:: none
|
||||
|
||||
Waiting for notifications on channel 'test'
|
||||
Timeout
|
||||
|
|
|
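The "Waiting for notifications" / "Timeout" output quoted above comes from a polling loop along these lines; the DSN is a placeholder and the channel name matches the docs::

    import select
    import psycopg2
    import psycopg2.extensions

    conn = psycopg2.connect("dbname=psycopg2_test")  # placeholder DSN
    conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)

    cur = conn.cursor()
    cur.execute("LISTEN test;")

    print("Waiting for notifications on channel 'test'")
    while True:
        # Block for up to 5 seconds waiting for the connection socket to become readable.
        if select.select([conn], [], [], 5) == ([], [], []):
            print("Timeout")
        else:
            conn.poll()
            while conn.notifies:
                notify = conn.notifies.pop(0)
                print("Got NOTIFY:", notify.pid, notify.channel, notify.payload)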
@@ -57,7 +57,7 @@ try:
    release = psycopg2.__version__.split()[0]
    version = '.'.join(release.split('.')[:2])
except ImportError:
    print "WARNING: couldn't import psycopg to read version."
    print("WARNING: couldn't import psycopg to read version.")
    release = version

intersphinx_mapping = {

@@ -101,6 +101,10 @@ default_role = 'obj'
# output. They are ignored by default.
#show_authors = False

# Using 'python' instead of the default gives warnings if parsing an example
# fails, instead of defaulting to none
highlight_language = 'python'

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
@@ -22,7 +22,7 @@ The ``connection`` class
:ref:`thread-safety` for details.

.. method:: cursor(name=None, cursor_factory=None, scrollable=None, withhold=False)

Return a new `cursor` object using the connection.

If *name* is specified, the returned cursor will be a :ref:`server

@@ -41,11 +41,6 @@ The ``connection`` class
previously only valid PostgreSQL identifiers were accepted as
cursor name.

.. warning::
It is unsafe to expose the *name* to an untrusted source, for
instance you shouldn't allow *name* to be read from a HTML form.
Consider it as part of the query, not as a query parameter.

The *cursor_factory* argument can be used to create non-standard
cursors. The class returned must be a subclass of
`psycopg2.extensions.cursor`. See :ref:`subclassing-cursor` for

@@ -274,8 +269,8 @@ The ``connection`` class

.. __: http://jdbc.postgresql.org/

Xids returned by `!tpc_recover()` also have extra attributes
`~psycopg2.extensions.Xid.prepared`, `~psycopg2.extensions.Xid.owner`,
Xids returned by `!tpc_recover()` also have extra attributes
`~psycopg2.extensions.Xid.prepared`, `~psycopg2.extensions.Xid.owner`,
`~psycopg2.extensions.Xid.database` populated with the values read
from the server.

@@ -551,7 +546,7 @@ The ``connection`` class
the session.

.. doctest::
    :options: NORMALIZE_WHITESPACE
    :options: +NORMALIZE_WHITESPACE

    >>> cur.execute("CREATE TABLE foo (id serial PRIMARY KEY);")
    >>> pprint(conn.notices)

@@ -626,7 +621,7 @@ The ``connection`` class
pair: Server; Parameters

.. method:: get_parameter_status(parameter)

Look up a current parameter setting of the server.

Potential values for ``parameter`` are: ``server_version``,

@@ -735,7 +730,7 @@ The ``connection`` class
The number is formed by converting the major, minor, and revision
numbers into two-decimal-digit numbers and appending them together.
For example, version 8.1.5 will be returned as ``80105``.

.. seealso:: libpq docs for `PQserverVersion()`__ for details.

.. __: http://www.postgresql.org/docs/current/static/libpq-status.html#LIBPQ-PQSERVERVERSION

@@ -749,7 +744,7 @@ The ``connection`` class
.. attribute:: status

A read-only integer representing the status of the connection.
Symbolic constants for the values are defined in the module
Symbolic constants for the values are defined in the module
`psycopg2.extensions`: see :ref:`connection-status-constants`
for the available values.
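A short illustration of the connection attributes touched in these hunks; the printed values are examples, not output captured from this build::

    import psycopg2
    import psycopg2.extensions as ext

    conn = psycopg2.connect("dbname=psycopg2_test")  # placeholder DSN

    print(conn.get_parameter_status("server_version"))  # e.g. "9.6.3"
    print(conn.server_version)                          # e.g. 90603, using the encoding described above
    print(conn.status == ext.STATUS_READY)              # True for an idle connection

    conn.close()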
@ -34,10 +34,10 @@ The ``cursor`` class
|
|||
many cursors from the same connection and should use each cursor from
|
||||
a single thread. See :ref:`thread-safety` for details.
|
||||
|
||||
|
||||
.. attribute:: description
|
||||
|
||||
This read-only attribute is a sequence of 7-item sequences.
|
||||
.. attribute:: description
|
||||
|
||||
This read-only attribute is a sequence of 7-item sequences.
|
||||
|
||||
Each of these sequences is a named tuple (a regular tuple if
|
||||
:func:`collections.namedtuple` is not available) containing information
|
||||
|
@ -65,7 +65,7 @@ The ``cursor`` class
|
|||
This attribute will be `!None` for operations that do not return rows
|
||||
or if the cursor has not had an operation invoked via the
|
||||
|execute*|_ methods yet.
|
||||
|
||||
|
||||
.. |pg_type| replace:: :sql:`pg_type`
|
||||
.. _pg_type: http://www.postgresql.org/docs/current/static/catalog-pg-type.html
|
||||
.. _PQgetlength: http://www.postgresql.org/docs/current/static/libpq-exec.html#LIBPQ-PQGETLENGTH
|
||||
|
@ -78,7 +78,7 @@ The ``cursor`` class
|
|||
regular tuples.
|
||||
|
||||
.. method:: close()
|
||||
|
||||
|
||||
Close the cursor now (rather than whenever `del` is executed).
|
||||
The cursor will be unusable from this point forward; an
|
||||
`~psycopg2.InterfaceError` will be raised if any operation is
|
||||
|
@ -88,7 +88,7 @@ The ``cursor`` class
|
|||
the method is automatically called at the end of the ``with``
|
||||
block.
|
||||
|
||||
|
||||
|
||||
.. attribute:: closed
|
||||
|
||||
Read-only boolean attribute: specifies if the cursor is closed
|
||||
|
@ -235,7 +235,7 @@ The ``cursor`` class
|
|||
The `mogrify()` method is a Psycopg extension to the |DBAPI|.
|
||||
|
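A quick illustration, reusing the ``test`` table from the examples above (on
Python 3 the returned value is a bytes string)::

    >>> cur.mogrify("INSERT INTO test (num, data) VALUES (%s, %s)", (42, 'bar'))
    b"INSERT INTO test (num, data) VALUES (42, 'bar')"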
||||
.. method:: setinputsizes(sizes)
|
||||
|
||||
|
||||
This method is exposed in compliance with the |DBAPI|. It currently
|
||||
does nothing but it is safe to call it.
|
||||
|
||||
|
@ -281,17 +281,17 @@ The ``cursor`` class
|
|||
>>> cur.execute("SELECT * FROM test WHERE id = %s", (3,))
|
||||
>>> cur.fetchone()
|
||||
(3, 42, 'bar')
|
||||
|
||||
|
||||
A `~psycopg2.ProgrammingError` is raised if the previous call
|
||||
to |execute*|_ did not produce any result set or no call was issued
|
||||
yet.
|
||||
|
||||
|
||||
.. method:: fetchmany([size=cursor.arraysize])
|
||||
|
||||
|
||||
Fetch the next set of rows of a query result, returning a list of
|
||||
tuples. An empty list is returned when no more rows are available.
|
||||
|
||||
|
||||
The number of rows to fetch per call is specified by the parameter.
|
||||
If it is not given, the cursor's `~cursor.arraysize` determines
|
||||
the number of rows to be fetched. The method should try to fetch as
|
||||
|
@ -309,7 +309,7 @@ The ``cursor`` class
|
|||
|
||||
A `~psycopg2.ProgrammingError` is raised if the previous call to
|
||||
|execute*|_ did not produce any result set or no call was issued yet.
|
||||
|
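A typical pattern is to consume a large result in fixed-size batches (the
table name and ``process()`` below are placeholders)::

    cur.execute("SELECT * FROM big_table")
    while True:
        rows = cur.fetchmany(size=500)
        if not rows:
            break
        for row in rows:
            process(row)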
||||
|
||||
Note there are performance considerations involved with the size
|
||||
parameter. For optimal performance, it is usually best to use the
|
||||
`~cursor.arraysize` attribute. If the size parameter is used,
|
||||
|
@ -344,7 +344,7 @@ The ``cursor`` class
|
|||
`~psycopg2.ProgrammingError` is raised and the cursor position is
|
||||
not changed.
|
||||
|
||||
.. note::
|
||||
.. note::
|
||||
|
||||
According to the |DBAPI|_, the exception raised for a cursor out
|
||||
of bound should have been `!IndexError`. The best option is
|
||||
|
@ -364,7 +364,7 @@ The ``cursor`` class
|
|||
|
||||
|
||||
.. attribute:: arraysize
|
||||
|
||||
|
||||
This read/write attribute specifies the number of rows to fetch at a
|
||||
time with `~cursor.fetchmany()`. It defaults to 1 meaning to fetch
|
||||
a single row at a time.
|
||||
|
@ -378,20 +378,20 @@ The ``cursor`` class
|
|||
default is 2000.
|
||||
|
||||
.. versionadded:: 2.4
|
||||
|
||||
|
||||
.. extension::
|
||||
|
||||
The `itersize` attribute is a Psycopg extension to the |DBAPI|.
|
||||
|
||||
|
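A minimal sketch with a named (server-side) cursor, fetching 2000 rows per
network round trip (the cursor and table names are arbitrary)::

    cur = conn.cursor(name='fetch_large')
    cur.itersize = 2000
    cur.execute("SELECT * FROM big_table")
    for row in cur:
        process(row)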
||||
.. attribute:: rowcount
|
||||
|
||||
.. attribute:: rowcount
|
||||
|
||||
This read-only attribute specifies the number of rows that the last
|
||||
|execute*|_ produced (for :abbr:`DQL (Data Query Language)` statements
|
||||
like :sql:`SELECT`) or affected (for
|
||||
like :sql:`SELECT`) or affected (for
|
||||
:abbr:`DML (Data Manipulation Language)` statements like :sql:`UPDATE`
|
||||
or :sql:`INSERT`).
|
||||
|
||||
|
||||
The attribute is -1 in case no |execute*| has been performed on
|
||||
the cursor or the row count of the last operation if it can't be
|
||||
determined by the interface.
|
||||
|
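For example, reusing the ``test`` table from the examples above::

    cur.execute("UPDATE test SET data = 'baz' WHERE num = %s", (42,))
    print(cur.rowcount)     # rows affected by the UPDATE, e.g. 1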
@ -400,7 +400,7 @@ The ``cursor`` class
|
|||
The |DBAPI|_ interface reserves to redefine the latter case to
|
||||
have the object return `!None` instead of -1 in future versions
|
||||
of the specification.
|
||||
|
||||
|
||||
|
||||
.. attribute:: rownumber
|
||||
|
||||
|
@ -457,7 +457,7 @@ The ``cursor`` class
|
|||
command:
|
||||
|
||||
>>> cur.execute("INSERT INTO test (num, data) VALUES (%s, %s)", (42, 'bar'))
|
||||
>>> cur.statusmessage
|
||||
>>> cur.statusmessage
|
||||
'INSERT 0 1'
|
||||
|
||||
.. extension::
|
||||
|
@ -490,13 +490,13 @@ The ``cursor`` class
|
|||
|
||||
|
||||
.. method:: nextset()
|
||||
|
||||
|
||||
This method is not supported (PostgreSQL does not have multiple data
|
||||
sets) and will raise a `~psycopg2.NotSupportedError` exception.
|
||||
|
||||
|
||||
.. method:: setoutputsize(size [, column])
|
||||
|
||||
|
||||
This method is exposed in compliance with the |DBAPI|. It currently
|
||||
does nothing but it is safe to call it.
|
||||
|
||||
|
|
|
@ -50,7 +50,7 @@ An example of the available constants defined in the module:
|
|||
'42P01'
|
||||
|
||||
Constants representing all the error values defined by PostgreSQL versions
|
||||
between 8.1 and 10 beta 1 are included in the module.
|
||||
between 8.1 and 10 are included in the module.
|
||||
|
||||
|
||||
.. autofunction:: lookup(code)
|
||||
|
@ -59,7 +59,7 @@ between 8.1 and 10 beta 1 are included in the module.
|
|||
|
||||
>>> try:
|
||||
... cur.execute("SELECT ouch FROM aargh;")
|
||||
... except Exception, e:
|
||||
... except Exception as e:
|
||||
... pass
|
||||
...
|
||||
>>> errorcodes.lookup(e.pgcode[:2])
|
||||
|
|
|
@ -99,20 +99,6 @@ Real dictionary cursor
|
|||
|
||||
.. versionadded:: 2.3
|
||||
|
||||
These objects require :py:func:`collections.namedtuple` to be found, so it is
|
||||
available out-of-the-box only from Python 2.6. Anyway, the namedtuple
|
||||
implementation is compatible with previous Python versions, so all you
|
||||
have to do is to `download it`__ and make it available where we
|
||||
expect it to be... ::
|
||||
|
||||
from somewhere import namedtuple
|
||||
import collections
|
||||
collections.namedtuple = namedtuple
|
||||
from psycopg.extras import NamedTupleConnection
|
||||
# ...
|
||||
|
||||
.. __: http://code.activestate.com/recipes/500261-named-tuples/
|
||||
|
||||
.. autoclass:: NamedTupleCursor
|
||||
|
||||
.. autoclass:: NamedTupleConnection
|
||||
|
@ -403,7 +389,7 @@ The individual messages in the replication stream are represented by
|
|||
|
||||
class LogicalStreamConsumer(object):
|
||||
|
||||
...
|
||||
# ...
|
||||
|
||||
def __call__(self, msg):
|
||||
self.process_message(msg.payload)
|
||||
|
@ -501,7 +487,7 @@ The individual messages in the replication stream are represented by
|
|||
from datetime import datetime
|
||||
|
||||
def consume(msg):
|
||||
...
|
||||
# ...
|
||||
|
||||
keepalive_interval = 10.0
|
||||
while True:
|
||||
|
@ -553,17 +539,13 @@ fields to JSON) you can use the `register_json()` function.
|
|||
|
||||
.. __: http://people.planetpostgresql.org/andrew/index.php?/archives/255-JSON-for-PG-9.2-...-and-now-for-9.1!.html
|
||||
|
||||
The Python library used by default to convert Python objects to JSON and to
|
||||
parse data from the database depends on the language version: with Python 2.6
|
||||
and following the :py:mod:`json` module from the standard library is used;
|
||||
with previous versions the `simplejson`_ module is used if available. Note
|
||||
that the last `!simplejson` version supporting Python 2.4 is the 2.0.9.
|
||||
The Python :py:mod:`json` module is used by default to convert Python objects
|
||||
to JSON and to parse data from the database.
|
||||
|
||||
.. _JSON: http://www.json.org/
|
||||
.. |pgjson| replace:: :sql:`json`
|
||||
.. |jsonb| replace:: :sql:`jsonb`
|
||||
.. _pgjson: http://www.postgresql.org/docs/current/static/datatype-json.html
|
||||
.. _simplejson: http://pypi.python.org/pypi/simplejson/
|
||||
|
||||
In order to pass a Python object to the database as query argument you can use
|
||||
the `Json` adapter::
|
||||
|
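(A sketch of such usage, assuming a table ``mytable`` with a :sql:`json` or
:sql:`jsonb` column named ``jsondata``)::

    from psycopg2.extras import Json

    cur.execute("INSERT INTO mytable (jsondata) VALUES (%s)", [Json({'a': 100})])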
@ -1043,20 +1025,6 @@ parameters. By reducing the number of server roundtrips the performance can be
|
|||
.. versionadded:: 2.7
|
||||
|
||||
|
||||
|
||||
.. index::
|
||||
single: Time zones; Fractional
|
||||
|
||||
Fractional time zones
|
||||
---------------------
|
||||
|
||||
.. autofunction:: register_tstz_w_secs
|
||||
|
||||
.. versionadded:: 2.0.9
|
||||
|
||||
.. versionchanged:: 2.2.2
|
||||
function is no-op: see :ref:`tz-handling`.
|
||||
|
||||
.. index::
|
||||
pair: Example; Coroutine;
|
||||
|
||||
|
|
|
@ -306,7 +306,9 @@ I can't compile `!psycopg2`: the compiler says *error: libpq-fe.h: No such file
|
|||
API support (*i.e.* the libpq used at compile time was at least 9.3) but
|
||||
at runtime an older libpq dynamic library is found.
|
||||
|
||||
You can use::
|
||||
You can use:
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ ldd /path/to/packages/psycopg2/_psycopg.so | grep libpq
|
||||
|
||||
|
@ -332,4 +334,3 @@ Psycopg raises *ImportError: cannot import name tz* on import in mod_wsgi / ASP,
|
|||
.. _egg: http://peak.telecommunity.com/DevCenter/PythonEggs
|
||||
.. __: http://stackoverflow.com/questions/2192323/what-is-the-python-egg-cache-python-egg-cache
|
||||
.. __: http://code.google.com/p/modwsgi/wiki/ConfigurationDirectives#WSGIPythonEggs
|
||||
|
||||
|
|
|
@ -65,4 +65,3 @@ Psycopg 2 is both Unicode and Python 3 friendly.
|
|||
**To Do items in the documentation**
|
||||
|
||||
.. todolist::
|
||||
|
||||
|
|
|
@ -12,16 +12,6 @@ to use Psycopg on a different Python implementation (PyPy, Jython, IronPython)
|
|||
there is an experimental `porting of Psycopg for Ctypes`__, but it is not as
|
||||
mature as the C implementation yet.
|
||||
|
||||
The current `!psycopg2` implementation supports:
|
||||
|
||||
..
|
||||
NOTE: keep consistent with setup.py and the /features/ page.
|
||||
|
||||
- Python 2 versions from 2.6 to 2.7
|
||||
- Python 3 versions from 3.2 to 3.6
|
||||
- PostgreSQL server versions from 7.4 to 9.6
|
||||
- PostgreSQL client library version from 9.1
|
||||
|
||||
.. _PostgreSQL: http://www.postgresql.org/
|
||||
.. _Python: http://www.python.org/
|
||||
.. _libpq: http://www.postgresql.org/docs/current/static/libpq.html
|
||||
|
@ -32,77 +22,20 @@ The current `!psycopg2` implementation supports:
|
|||
|
||||
|
||||
.. index::
|
||||
single: Install; from PyPI
|
||||
single: Prerequisites
|
||||
|
||||
Binary install from PyPI
|
||||
------------------------
|
||||
Prerequisites
|
||||
-------------
|
||||
|
||||
`!psycopg2` is `available on PyPI`__ in the form of wheel_ packages for the
|
||||
most common platform (Linux, OSX, Windows): this should make you able to
|
||||
install a binary version of the module including all the dependencies simply
|
||||
using:
|
||||
The current `!psycopg2` implementation supports:
|
||||
|
||||
.. code-block:: console
|
||||
..
|
||||
NOTE: keep consistent with setup.py and the /features/ page.
|
||||
|
||||
$ pip install psycopg2
|
||||
|
||||
Make sure to use an up-to-date version of :program:`pip` (you can upgrade it
|
||||
using something like ``pip install -U pip``)
|
||||
|
||||
.. __: PyPI_
|
||||
.. _PyPI: https://pypi.python.org/pypi/psycopg2/
|
||||
.. _wheel: http://pythonwheels.com/
|
||||
|
||||
.. note::
|
||||
|
||||
The binary packages come with their own versions of a few C libraries,
|
||||
among which ``libpq`` and ``libssl``, which will be used regardless of other
|
||||
libraries available on the client: upgrading the system libraries will not
|
||||
upgrade the libraries used by `!psycopg2`. Please build `!psycopg2` from
|
||||
source if you want to maintain binary upgradeability.
|
||||
|
||||
.. warning::
|
||||
|
||||
Because the `!psycopg` wheel package uses its own ``libssl`` binary, it is
|
||||
incompatible with other extension modules binding with ``libssl`` as well,
|
||||
for instance the Python `ssl` module: the result will likely be a
|
||||
segfault. If you need using both `!psycopg2` and other libraries using
|
||||
``libssl`` please :ref:`install psycopg from source
|
||||
<install-from-source>`.
|
||||
|
||||
If you prefer to use the system libraries available on your client you can use
|
||||
the :command:`pip` ``--no-binary`` option:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ pip install --no-binary psycopg2
|
||||
|
||||
which can be specified in your :file:`requirements.txt` files too, e.g. use:
|
||||
|
||||
.. code-block:: none
|
||||
|
||||
psycopg2>=2.7,<2.8 --no-binary :all:
|
||||
|
||||
to use the last bugfix release of the `!psycopg2` 2.7 package, specifying to
|
||||
always compile it from source. Of course in this case you will have to meet
|
||||
the :ref:`build prerequisites <build-prerequisites>`.
|
||||
|
||||
|
||||
|
||||
.. index::
|
||||
single: Install; from source
|
||||
|
||||
.. _install-from-source:
|
||||
|
||||
Install from source
|
||||
-------------------
|
||||
|
||||
.. _source-package:
|
||||
|
||||
You can download a copy of Psycopg source files from the `Psycopg download
|
||||
page`__ or from PyPI_.
|
||||
|
||||
.. __: http://initd.org/psycopg/download/
|
||||
- Python version 2.7
|
||||
- Python 3 versions from 3.4 to 3.6
|
||||
- PostgreSQL server versions from 7.4 to 10
|
||||
- PostgreSQL client library version from 9.1
|
||||
|
||||
|
||||
|
||||
|
@ -111,8 +44,8 @@ page`__ or from PyPI_.
|
|||
Build prerequisites
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
These notes illustrate how to compile Psycopg on Linux. If you want to compile
|
||||
Psycopg on other platforms you may have to adjust some details accordingly.
|
||||
The build prerequisites are to be met in order to install Psycopg from source
|
||||
code, either from a source distribution package or from PyPI.
|
||||
|
||||
Psycopg is a C wrapper around the libpq_ PostgreSQL client library. To install
|
||||
it from sources you will need:
|
||||
|
@ -144,6 +77,12 @@ it from sources you will need:
|
|||
|
||||
Once everything is in place it's just a matter of running the standard:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ pip install psycopg2
|
||||
|
||||
or, from the directory containing the source code:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ python setup.py build
|
||||
|
@ -180,12 +119,92 @@ which is OS-dependent (for instance setting a suitable
|
|||
|
||||
|
||||
|
||||
.. index::
|
||||
single: Install; from PyPI
|
||||
single: Install; wheel
|
||||
single: Wheel
|
||||
|
||||
Binary install from PyPI
|
||||
------------------------
|
||||
|
||||
`!psycopg2` is also `available on PyPI`__ in the form of wheel_ packages for
|
||||
the most common platforms (Linux, OSX, Windows): this should allow you to
|
||||
install a binary version of the module, without requiring the above build or
|
||||
runtime prerequisites, simply using:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ pip install psycopg2-binary
|
||||
|
||||
Make sure to use an up-to-date version of :program:`pip` (you can upgrade it
|
||||
using something like ``pip install -U pip``)
|
||||
|
||||
.. __: PyPI-binary_
|
||||
.. _PyPI-binary: https://pypi.python.org/pypi/psycopg2-binary/
|
||||
.. _wheel: http://pythonwheels.com/
|
||||
|
||||
.. note::
|
||||
|
||||
The binary packages come with their own versions of a few C libraries,
|
||||
among which ``libpq`` and ``libssl``, which will be used regardless of other
|
||||
libraries available on the client: upgrading the system libraries will not
|
||||
upgrade the libraries used by `!psycopg2`. Please build `!psycopg2` from
|
||||
source if you want to maintain binary upgradeability.
|
||||
|
||||
.. warning::
|
||||
|
||||
The `!psycopg2` wheel package comes packaged, among others, with its
|
||||
own ``libssl`` binary. This may create conflicts with other extension
|
||||
modules binding with ``libssl`` as well, for instance with the Python
|
||||
`ssl` module: in some cases, under concurrency, the interaction between
|
||||
the two libraries may result in a segfault. In case of doubt you are
|
||||
advised to use a package built from source.
|
||||
|
||||
|
||||
|
||||
.. index::
|
||||
single: Install; disable wheel
|
||||
single: Wheel; disable
|
||||
|
||||
.. _disable-wheel:
|
||||
|
||||
Disabling wheel packages for Psycopg 2.7
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
In version 2.7.x, `pip install psycopg2` would have tried to install the wheel
|
||||
binary package of Psycopg. Because of the problems the wheel package has
|
||||
displayed, `psycopg2-binary` has become a separate package, and from 2.8 it
|
||||
has become the only way to install the binary package.
|
||||
|
||||
If you are using psycopg 2.7 and you want to disable the use of wheel binary
|
||||
packages, relying on the system libraries available on your client, you
|
||||
can use the :command:`pip` |--no-binary option|__, e.g.:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ pip install --no-binary :all: psycopg2
|
||||
|
||||
.. |--no-binary option| replace:: ``--no-binary`` option
|
||||
.. __: https://pip.pypa.io/en/stable/reference/pip_install/#install-no-binary
|
||||
|
||||
which can be specified in your :file:`requirements.txt` files too, e.g. use:
|
||||
|
||||
.. code-block:: none
|
||||
|
||||
psycopg2>=2.7,<2.8 --no-binary psycopg2
|
||||
|
||||
to use the last bugfix release of the `!psycopg2` 2.7 package, specifying to
|
||||
always compile it from source. Of course in this case you will have to meet
|
||||
the :ref:`build prerequisites <build-prerequisites>`.
|
||||
|
||||
|
||||
|
||||
.. index::
|
||||
single: setup.py
|
||||
single: setup.cfg
|
||||
|
||||
Non-standard builds
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
-------------------
|
||||
|
||||
If you have less standard requirements such as:
|
||||
|
||||
|
@ -225,7 +244,7 @@ order to create a debug package:
|
|||
- Edit the ``setup.cfg`` file adding the ``PSYCOPG_DEBUG`` flag to the
|
||||
``define`` option.
|
||||
|
||||
- :ref:`Compile and install <source-package>` the package.
|
||||
- :ref:`Compile and install <build-prerequisites>` the package.
|
||||
|
||||
- Set the :envvar:`PSYCOPG_DEBUG` environment variable:
|
||||
|
||||
|
@ -250,11 +269,11 @@ Running the test suite
|
|||
----------------------
|
||||
|
||||
Once `!psycopg2` is installed you can run the test suite to verify it is
|
||||
working correctly. You can run:
|
||||
working correctly. From the source directory, you can run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ python -c "from psycopg2 import tests; tests.unittest.main(defaultTest='tests.test_suite')" --verbose
|
||||
$ python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')" --verbose
|
||||
|
||||
The tests run against a database called ``psycopg2_test`` on UNIX socket and
|
||||
the standard port. You can configure a different database to run the test by
|
||||
|
|
|
@ -24,13 +24,18 @@ directly in the client application.
|
|||
|
||||
.. method:: getconn(key=None)
|
||||
|
||||
Get a free connection and assign it to *key* if not `!None`.
|
||||
Get a free connection from the pool.
|
||||
|
||||
The *key* parameter is optional: if used, the connection will be
|
||||
associated with the key and calling `!getconn()` with the same key again
|
||||
will return the same connection.
|
||||
|
||||
.. method:: putconn(conn, key=None, close=False)
|
||||
|
||||
Put away a connection.
|
||||
|
||||
If *close* is `!True`, discard the connection from the pool.
|
||||
*key* should be used consistently with `getconn()`.
|
||||
|
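A minimal usage sketch, assuming a local ``test`` database (pool sizes are
arbitrary)::

    from psycopg2 import pool

    p = pool.ThreadedConnectionPool(minconn=1, maxconn=5, dsn="dbname=test")
    conn = p.getconn()
    try:
        with conn.cursor() as cur:
            cur.execute("SELECT 1")
    finally:
        p.putconn(conn)
    p.closeall()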
||||
.. method:: closeall
|
||||
|
||||
|
@ -57,8 +62,7 @@ be used.
|
|||
|
||||
.. autoclass:: PersistentConnectionPool
|
||||
|
||||
.. note::
|
||||
.. note::
|
||||
|
||||
This pool class is mostly designed to interact with Zope and probably
|
||||
not useful in generic applications.
|
||||
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
from docutils import nodes
|
||||
|
||||
from sphinx.locale import _
|
||||
from sphinx.util.compat import Directive, make_admonition
|
||||
from docutils.parsers.rst import Directive
|
||||
|
||||
class extension_node(nodes.Admonition, nodes.Element): pass
|
||||
|
||||
|
@ -29,12 +29,11 @@ class Extension(Directive):
|
|||
option_spec = {}
|
||||
|
||||
def run(self):
|
||||
nodes = make_admonition(extension_node,
|
||||
self.name, [_('DB API extension')], self.options,
|
||||
self.content, self.lineno, self.content_offset,
|
||||
self.block_text, self.state, self.state_machine)
|
||||
nodes[0]['classes'].append('dbapi-extension')
|
||||
return nodes
|
||||
node = extension_node('\n'.join(self.content))
|
||||
node += nodes.title(_('DB API extension'), _('DB API extension'))
|
||||
self.state.nested_parse(self.content, self.content_offset, node)
|
||||
node['classes'].append('dbapi-extension')
|
||||
return [node]
|
||||
|
||||
|
||||
def visit_extension_node(self, node):
|
||||
|
@ -50,4 +49,3 @@ def setup(app):
|
|||
text=(visit_extension_node, depart_extension_node))
|
||||
|
||||
app.add_directive('extension', Extension)
|
||||
|
||||
|
|
|
@ -12,10 +12,9 @@ from docutils import nodes, utils
|
|||
from docutils.parsers.rst import roles
|
||||
|
||||
def sql_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
|
||||
text = utils.unescape(text)
|
||||
text = utils.unescape(text)
|
||||
options['classes'] = ['sql']
|
||||
return [nodes.literal(rawtext, text, **options)], []
|
||||
|
||||
|
||||
def setup(app):
|
||||
roles.register_local_role('sql', sql_role)
|
||||
|
||||
|
|
|
@ -56,4 +56,3 @@ def setup(app):
|
|||
app.add_config_value('ticket_remap_offset', None, 'env')
|
||||
app.add_role('ticket', ticket_role)
|
||||
app.add_role('tickets', ticket_role)
|
||||
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def main():
|
||||
if len(sys.argv) != 3:
|
||||
sys.stderr.write("usage: %s index.rst text-dir\n" % sys.argv[0])
|
||||
|
@ -17,23 +18,20 @@ def main():
|
|||
|
||||
return 0
|
||||
|
||||
|
||||
def iter_file_base(fn):
|
||||
f = open(fn)
|
||||
if sys.version_info[0] >= 3:
|
||||
have_line = iter(f).__next__
|
||||
else:
|
||||
have_line = iter(f).next
|
||||
|
||||
while not have_line().startswith('.. toctree'):
|
||||
while not next(f).startswith('.. toctree'):
|
||||
pass
|
||||
while have_line().strip().startswith(':'):
|
||||
while next(f).strip().startswith(':'):
|
||||
pass
|
||||
|
||||
yield os.path.splitext(os.path.basename(fn))[0]
|
||||
|
||||
n = 0
|
||||
while True:
|
||||
line = have_line()
|
||||
line = next(f)
|
||||
if line.isspace():
|
||||
continue
|
||||
if line.startswith(".."):
|
||||
|
@ -47,6 +45,7 @@ def iter_file_base(fn):
|
|||
# maybe format changed?
|
||||
raise Exception("Not enough files found. Format change in index.rst?")
|
||||
|
||||
|
||||
def emit(basename, txt_dir):
|
||||
f = open(os.path.join(txt_dir, basename + ".txt"))
|
||||
for line in f:
|
||||
|
@ -57,7 +56,6 @@ def emit(basename, txt_dir):
|
|||
# some space between sections
|
||||
sys.stdout.write("\n\n")
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
|
||||
|
|
|
@ -8,9 +8,8 @@
|
|||
This module holds two different tzinfo implementations that can be used as the
|
||||
`tzinfo` argument to `~datetime.datetime` constructors, directly passed to
|
||||
Psycopg functions or used to set the `cursor.tzinfo_factory` attribute in
|
||||
cursors.
|
||||
cursors.
|
||||
|
||||
.. autoclass:: psycopg2.tz.FixedOffsetTimezone
|
||||
|
||||
.. autoclass:: psycopg2.tz.LocalTimezone
|
||||
|
||||
|
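A small usage sketch for `!FixedOffsetTimezone` (the offset is expressed in
minutes east of UTC; the name is arbitrary)::

    from datetime import datetime
    from psycopg2.tz import FixedOffsetTimezone

    tz = FixedOffsetTimezone(offset=120, name="UTC+2")
    dt = datetime(2018, 1, 1, 12, 30, tzinfo=tz)   # an "aware" datetime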
|
|
@ -48,7 +48,7 @@ The main entry points of Psycopg are:
|
|||
|
||||
- The class `connection` encapsulates a database session. It allows you to:
|
||||
|
||||
- create new `cursor`\s using the `~connection.cursor()` method to
|
||||
- create new `cursor` instances using the `~connection.cursor()` method to
|
||||
execute database commands and queries,
|
||||
|
||||
- terminate transactions using the methods `~connection.commit()` or
|
||||
|
@ -73,70 +73,97 @@ The main entry points of Psycopg are:
|
|||
Passing parameters to SQL queries
|
||||
---------------------------------
|
||||
|
||||
Psycopg casts Python variables to SQL literals by type. Many standard Python types
|
||||
are already `adapted to the correct SQL representation`__.
|
||||
Psycopg converts Python variables to SQL values using their types: the Python
|
||||
type determines the function used to convert the object into a string
|
||||
representation suitable for PostgreSQL. Many standard Python types are
|
||||
already `adapted to the correct SQL representation`__.
|
||||
|
||||
.. __: python-types-adaptation_
|
||||
|
||||
Example: the Python function call::
|
||||
Passing parameters to an SQL statement happens in functions such as
|
||||
`cursor.execute()` by using ``%s`` placeholders in the SQL statement, and
|
||||
passing a sequence of values as the second argument of the function. For
|
||||
example the Python function call::
|
||||
|
||||
>>> cur.execute(
|
||||
... """INSERT INTO some_table (an_int, a_date, a_string)
|
||||
... VALUES (%s, %s, %s);""",
|
||||
>>> cur.execute("""
|
||||
... INSERT INTO some_table (an_int, a_date, a_string)
|
||||
... VALUES (%s, %s, %s);
|
||||
... """,
|
||||
... (10, datetime.date(2005, 11, 18), "O'Reilly"))
|
||||
|
||||
is converted into the SQL command::
|
||||
is converted into a SQL command similar to:
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
INSERT INTO some_table (an_int, a_date, a_string)
|
||||
VALUES (10, '2005-11-18', 'O''Reilly');
|
||||
VALUES (10, '2005-11-18', 'O''Reilly');
|
||||
|
||||
Named arguments are supported too using :samp:`%({name})s` placeholders.
|
||||
Using named arguments the values can be passed to the query in any order and
|
||||
many placeholders can use the same values::
|
||||
Named arguments are supported too, using :samp:`%({name})s` placeholders in the
|
||||
query and passing the values in a mapping. Using named arguments allows you
|
||||
to specify the values in any order and to repeat the same value in several
|
||||
places in the query::
|
||||
|
||||
>>> cur.execute(
|
||||
... """INSERT INTO some_table (an_int, a_date, another_date, a_string)
|
||||
... VALUES (%(int)s, %(date)s, %(date)s, %(str)s);""",
|
||||
>>> cur.execute("""
|
||||
... INSERT INTO some_table (an_int, a_date, another_date, a_string)
|
||||
... VALUES (%(int)s, %(date)s, %(date)s, %(str)s);
|
||||
... """,
|
||||
... {'int': 10, 'str': "O'Reilly", 'date': datetime.date(2005, 11, 18)})
|
||||
|
||||
Using characters ``%``, ``(``, ``)`` in the argument names is not supported.
|
||||
|
||||
When parameters are used, in order to include a literal ``%`` in the query you
|
||||
can use the ``%%`` string. Using characters ``%``, ``(``, ``)`` in the
|
||||
argument names is not supported.
|
||||
can use the ``%%`` string::
|
||||
|
||||
>>> cur.execute("SELECT (%s % 2) = 0 AS even", (10,)) # WRONG
|
||||
>>> cur.execute("SELECT (%s %% 2) = 0 AS even", (10,)) # correct
|
||||
|
||||
While the mechanism resembles regular Python strings manipulation, there are a
|
||||
few subtle differences you should care about when passing parameters to a
|
||||
query:
|
||||
query.
|
||||
|
||||
- The Python string operator ``%`` is not used: the `~cursor.execute()`
|
||||
- The Python string operator ``%`` *must not be used*: the `~cursor.execute()`
|
||||
method accepts a tuple or dictionary of values as second parameter.
|
||||
|sql-warn|__.
|
||||
|sql-warn|__:
|
||||
|
||||
.. |sql-warn| replace:: **Never** use ``%`` or ``+`` to merge values
|
||||
into queries
|
||||
|
||||
.. __: sql-injection_
|
||||
|
||||
- The variables placeholder must *always be a* ``%s``, even if a different
|
||||
placeholder (such as a ``%d`` for integers or ``%f`` for floats) may look
|
||||
more appropriate::
|
||||
|
||||
>>> cur.execute("INSERT INTO numbers VALUES (%d)", (42,)) # WRONG
|
||||
>>> cur.execute("INSERT INTO numbers VALUES (%s)", (42,)) # correct
|
||||
>>> cur.execute("INSERT INTO numbers VALUES (%s, %s)" % (10, 20)) # WRONG
|
||||
>>> cur.execute("INSERT INTO numbers VALUES (%s, %s)", (10, 20)) # correct
|
||||
|
||||
- For positional variables binding, *the second argument must always be a
|
||||
sequence*, even if it contains a single variable. And remember that Python
|
||||
requires a comma to create a single element tuple::
|
||||
sequence*, even if it contains a single variable (remember that Python
|
||||
requires a comma to create a single element tuple)::
|
||||
|
||||
>>> cur.execute("INSERT INTO foo VALUES (%s)", "bar") # WRONG
|
||||
>>> cur.execute("INSERT INTO foo VALUES (%s)", ("bar")) # WRONG
|
||||
>>> cur.execute("INSERT INTO foo VALUES (%s)", ("bar",)) # correct
|
||||
>>> cur.execute("INSERT INTO foo VALUES (%s)", ["bar"]) # correct
|
||||
|
||||
- Only query values should be bound via this method: it shouldn't be used to
|
||||
merge table or field names to the query. If you need to generate dynamically
|
||||
an SQL query (for instance choosing dynamically a table name) you can use
|
||||
the facilities provided by the `psycopg2.sql` module.
|
||||
- The placeholder *must not be quoted*. Psycopg will add quotes where needed::
|
||||
|
||||
>>> cur.execute("INSERT INTO numbers VALUES ('%s')", (10,)) # WRONG
|
||||
>>> cur.execute("INSERT INTO numbers VALUES (%s)", (10,)) # correct
|
||||
|
||||
- The variables placeholder *must always be a* ``%s``, even if a different
|
||||
placeholder (such as a ``%d`` for integers or ``%f`` for floats) may look
|
||||
more appropriate::
|
||||
|
||||
>>> cur.execute("INSERT INTO numbers VALUES (%d)", (10,)) # WRONG
|
||||
>>> cur.execute("INSERT INTO numbers VALUES (%s)", (10,)) # correct
|
||||
|
||||
- Only query values should be bound via this method: it shouldn't be used to
|
||||
merge table or field names to the query (Psycopg will try quoting the table
|
||||
name as a string value, generating invalid SQL). If you need to generate
|
||||
SQL queries dynamically (for instance choosing a table name at run time)
|
||||
you can use the facilities provided by the `psycopg2.sql` module::
|
||||
|
||||
>>> cur.execute("INSERT INTO %s VALUES (%s)", ('numbers', 10)) # WRONG
|
||||
>>> cur.execute( # correct
|
||||
... SQL("INSERT INTO {} VALUES (%s)").format(Identifier('numbers')),
|
||||
... (10,))
|
||||
|
||||
|
||||
.. index:: Security, SQL injection
|
||||
|
@ -430,14 +457,12 @@ the connection or globally: see the function
|
|||
Binary adaptation
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
Python types representing binary objects are converted into
|
||||
PostgreSQL binary string syntax, suitable for :sql:`bytea` fields. Such
|
||||
types are `buffer` (only available in Python 2), `memoryview` (available
|
||||
from Python 2.7), `bytearray` (available from Python 2.6) and `bytes`
|
||||
(only from Python 3: the name is available from Python 2.6 but it's only an
|
||||
alias for the type `!str`). Any object implementing the `Revised Buffer
|
||||
Protocol`__ should be usable as binary type where the protocol is supported
|
||||
(i.e. from Python 2.6). Received data is returned as `!buffer` (in Python 2)
|
||||
Python types representing binary objects are converted into PostgreSQL binary
|
||||
string syntax, suitable for :sql:`bytea` fields. Such types are `buffer`
|
||||
(only available in Python 2), `memoryview`, `bytearray`, and `bytes` (only in
|
||||
Python 3: the name is available in Python 2 but it's only an alias for the
|
||||
type `!str`). Any object implementing the `Revised Buffer Protocol`__ should
|
||||
be usable as binary type. Received data is returned as `!buffer` (in Python 2)
|
||||
or `!memoryview` (in Python 3).
|
||||
|
||||
.. __: http://www.python.org/dev/peps/pep-3118/
|
||||
|
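A minimal sketch, assuming a table ``files`` with a :sql:`bytea` column
``data``::

    with open('image.png', 'rb') as f:
        cur.execute("INSERT INTO files (data) VALUES (%s)",
                    (psycopg2.Binary(f.read()),))

    cur.execute("SELECT data FROM files")
    blob = cur.fetchone()[0]    # buffer on Python 2, memoryview on Python 3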
@ -535,8 +560,7 @@ rounded to the nearest minute, with an error of up to 30 seconds.
|
|||
|
||||
.. versionchanged:: 2.2.2
|
||||
timezones with seconds are supported (with rounding). Previously such
|
||||
timezones raised an error. In order to deal with them in previous
|
||||
versions use `psycopg2.extras.register_tstz_w_secs()`.
|
||||
timezones raised an error.
|
||||
|
||||
|
||||
.. index::
|
||||
|
@ -792,7 +816,9 @@ lifetime extends well after `~connection.commit()`, calling
|
|||
It is also possible to use a named cursor to consume a cursor created
|
||||
in some other way than using the |DECLARE| executed by
|
||||
`~cursor.execute()`. For example, you may have a PL/pgSQL function
|
||||
returning a cursor::
|
||||
returning a cursor:
|
||||
|
||||
.. code-block:: postgres
|
||||
|
||||
CREATE FUNCTION reffunc(refcursor) RETURNS refcursor AS $$
|
||||
BEGIN
|
||||
|
@ -990,4 +1016,3 @@ For further details see the documentation for the above methods.
|
|||
|
||||
.. __: http://www.opengroup.org/bookstore/catalog/c193.htm
|
||||
.. __: http://jdbc.postgresql.org/
|
||||
|
||||
|
|
|
@ -17,6 +17,7 @@
|
|||
DSN = 'dbname=test'
|
||||
|
||||
## don't modify anything below this line (except for experimenting)
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
import psycopg2
|
||||
|
@ -24,9 +25,9 @@ import psycopg2
|
|||
if len(sys.argv) > 1:
|
||||
DSN = sys.argv[1]
|
||||
|
||||
print "Opening connection using dsn:", DSN
|
||||
print("Opening connection using dsn:", DSN)
|
||||
conn = psycopg2.connect(DSN)
|
||||
print "Encoding for this connection is", conn.encoding
|
||||
print("Encoding for this connection is", conn.encoding)
|
||||
|
||||
curs = conn.cursor()
|
||||
try:
|
||||
|
@ -52,20 +53,20 @@ curs.execute("""INSERT INTO test_binary
|
|||
|
||||
# now we try to extract the images as simple text strings
|
||||
|
||||
print "Extracting the images as strings..."
|
||||
print("Extracting the images as strings...")
|
||||
curs.execute("SELECT * FROM test_binary")
|
||||
|
||||
for row in curs.fetchall():
|
||||
name, ext = row[1].split('.')
|
||||
new_name = name + '_S.' + ext
|
||||
print " writing %s to %s ..." % (name+'.'+ext, new_name),
|
||||
print(" writing %s to %s ..." % (name+'.'+ext, new_name), end=' ')
|
||||
open(new_name, 'wb').write(row[2])
|
||||
print "done"
|
||||
print " python type of image data is", type(row[2])
|
||||
|
||||
print("done")
|
||||
print(" python type of image data is", type(row[2]))
|
||||
|
||||
# extract exactly the same data but using a binary cursor
|
||||
|
||||
print "Extracting the images using a binary cursor:"
|
||||
print("Extracting the images using a binary cursor:")
|
||||
|
||||
curs.execute("""DECLARE zot CURSOR FOR
|
||||
SELECT img, name FROM test_binary FOR READ ONLY""")
|
||||
|
@ -74,11 +75,11 @@ curs.execute("""FETCH ALL FROM zot""")
|
|||
for row in curs.fetchall():
|
||||
name, ext = row[1].split('.')
|
||||
new_name = name + '_B.' + ext
|
||||
print " writing %s to %s ..." % (name+'.'+ext, new_name),
|
||||
print(" writing %s to %s ..." % (name+'.'+ext, new_name), end=' ')
|
||||
open(new_name, 'wb').write(row[0])
|
||||
print "done"
|
||||
print " python type of image data is", type(row[0])
|
||||
|
||||
print("done")
|
||||
print(" python type of image data is", type(row[0]))
|
||||
|
||||
# this rollback is required because we can't drop a table with a binary cursor
|
||||
# declared and still open
|
||||
conn.rollback()
|
||||
|
@ -86,4 +87,4 @@ conn.rollback()
|
|||
curs.execute("DROP TABLE test_binary")
|
||||
conn.commit()
|
||||
|
||||
print "\nNow try to load the new images, to check it worked!"
|
||||
print("\nNow try to load the new images, to check it worked!")
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# copy_from.py -- example about copy_from
|
||||
# copy_from.py -- example about copy_from
|
||||
#
|
||||
# Copyright (C) 2002 Tom Jenkins <tjenkins@devis.com>
|
||||
# Copyright (C) 2005 Federico Di Gregorio <fog@initd.org>
|
||||
|
@ -27,9 +27,9 @@ import psycopg2
|
|||
if len(sys.argv) > 1:
|
||||
DSN = sys.argv[1]
|
||||
|
||||
print "Opening connection using dsn:", DSN
|
||||
print("Opening connection using dsn:", DSN)
|
||||
conn = psycopg2.connect(DSN)
|
||||
print "Encoding for this connection is", conn.encoding
|
||||
print("Encoding for this connection is", conn.encoding)
|
||||
|
||||
curs = conn.cursor()
|
||||
try:
|
||||
|
@ -51,16 +51,16 @@ io.close()
|
|||
|
||||
io = open('copy_from.txt', 'r')
|
||||
curs.copy_from(io, 'test_copy')
|
||||
print "1) Copy %d records from file object " % len(data) + \
|
||||
"using defaults (sep: \\t and null = \\N)"
|
||||
print("1) Copy %d records from file object " % len(data) +
|
||||
"using defaults (sep: \\t and null = \\N)")
|
||||
io.close()
|
||||
|
||||
curs.execute("SELECT * FROM test_copy")
|
||||
rows = curs.fetchall()
|
||||
print " Select returned %d rows" % len(rows)
|
||||
print(" Select returned %d rows" % len(rows))
|
||||
|
||||
for r in rows:
|
||||
print " %s %s\t%s" % (r[0], r[1], r[2])
|
||||
print(" %s %s\t%s" % (r[0], r[1], r[2]))
|
||||
curs.execute("delete from test_copy")
|
||||
conn.commit()
|
||||
|
||||
|
@ -75,15 +75,15 @@ io.close()
|
|||
|
||||
io = open('copy_from.txt', 'r')
|
||||
curs.copy_from(io, 'test_copy', ':')
|
||||
print "2) Copy %d records from file object using sep = :" % len(data)
|
||||
print("2) Copy %d records from file object using sep = :" % len(data))
|
||||
io.close()
|
||||
|
||||
curs.execute("SELECT * FROM test_copy")
|
||||
rows = curs.fetchall()
|
||||
print " Select returned %d rows" % len(rows)
|
||||
print(" Select returned %d rows" % len(rows))
|
||||
|
||||
for r in rows:
|
||||
print " %s %s\t%s" % (r[0], r[1], r[2])
|
||||
print(" %s %s\t%s" % (r[0], r[1], r[2]))
|
||||
curs.execute("delete from test_copy")
|
||||
conn.commit()
|
||||
|
||||
|
@ -98,15 +98,15 @@ io.close()
|
|||
|
||||
io = open('copy_from.txt', 'r')
|
||||
curs.copy_from(io, 'test_copy', null='NULL')
|
||||
print "3) Copy %d records from file object using null = NULL" % len(data)
|
||||
print("3) Copy %d records from file object using null = NULL" % len(data))
|
||||
io.close()
|
||||
|
||||
curs.execute("SELECT * FROM test_copy")
|
||||
rows = curs.fetchall()
|
||||
print " Select using cursor returned %d rows" % len(rows)
|
||||
print(" Select using cursor returned %d rows" % len(rows))
|
||||
|
||||
for r in rows:
|
||||
print " %s %s\t%s" % (r[0], r[1], r[2])
|
||||
print(" %s %s\t%s" % (r[0], r[1], r[2]))
|
||||
curs.execute("delete from test_copy")
|
||||
conn.commit()
|
||||
|
||||
|
@ -119,16 +119,16 @@ io.close()
|
|||
|
||||
io = open('copy_from.txt', 'r')
|
||||
curs.copy_from(io, 'test_copy', ':', 'NULL')
|
||||
print "4) Copy %d records from file object " % len(data) + \
|
||||
"using sep = : and null = NULL"
|
||||
print("4) Copy %d records from file object " % len(data) +
|
||||
"using sep = : and null = NULL")
|
||||
io.close()
|
||||
|
||||
curs.execute("SELECT * FROM test_copy")
|
||||
rows = curs.fetchall()
|
||||
print " Select using cursor returned %d rows" % len(rows)
|
||||
print(" Select using cursor returned %d rows" % len(rows))
|
||||
|
||||
for r in rows:
|
||||
print " %s %s\t%s" % (r[0], r[1], r[2])
|
||||
print(" %s %s\t%s" % (r[0], r[1], r[2]))
|
||||
curs.execute("delete from test_copy")
|
||||
conn.commit()
|
||||
|
||||
|
@ -141,20 +141,20 @@ data.write('\n'.join(['Tom\tJenkins\t37',
|
|||
data.seek(0)
|
||||
|
||||
curs.copy_from(data, 'test_copy')
|
||||
print "5) Copy 3 records from StringIO object using defaults"
|
||||
print("5) Copy 3 records from StringIO object using defaults")
|
||||
|
||||
curs.execute("SELECT * FROM test_copy")
|
||||
rows = curs.fetchall()
|
||||
print " Select using cursor returned %d rows" % len(rows)
|
||||
print(" Select using cursor returned %d rows" % len(rows))
|
||||
|
||||
for r in rows:
|
||||
print " %s %s\t%s" % (r[0], r[1], r[2])
|
||||
print(" %s %s\t%s" % (r[0], r[1], r[2]))
|
||||
curs.execute("delete from test_copy")
|
||||
conn.commit()
|
||||
|
||||
# simple error test
|
||||
|
||||
print "6) About to raise an error"
|
||||
print("6) About to raise an error")
|
||||
data = StringIO.StringIO()
|
||||
data.write('\n'.join(['Tom\tJenkins\t37',
|
||||
'Madonna\t\N\t45',
|
||||
|
@ -163,15 +163,12 @@ data.seek(0)
|
|||
|
||||
try:
|
||||
curs.copy_from(data, 'test_copy')
|
||||
except StandardError, err:
|
||||
except StandardError as err:
|
||||
conn.rollback()
|
||||
print " Caught error (as expected):\n", err
|
||||
print(" Caught error (as expected):\n", err)
|
||||
|
||||
conn.rollback()
|
||||
|
||||
curs.execute("DROP TABLE test_copy")
|
||||
os.unlink('copy_from.txt')
|
||||
conn.commit()
|
||||
|
||||
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# copy_to.py -- example about copy_to
|
||||
# copy_to.py -- example about copy_to
|
||||
#
|
||||
# Copyright (C) 2002 Tom Jenkins <tjenkins@devis.com>
|
||||
# Copyright (C) 2005 Federico Di Gregorio <fog@initd.org>
|
||||
|
@ -18,6 +18,7 @@
|
|||
DSN = 'dbname=test'
|
||||
|
||||
## don't modify anything below this line (except for experimenting)
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
@ -27,9 +28,9 @@ import psycopg2
|
|||
if len(sys.argv) > 1:
|
||||
DSN = sys.argv[1]
|
||||
|
||||
print "Opening connection using dsn:", DSN
|
||||
print("Opening connection using dsn:", DSN)
|
||||
conn = psycopg2.connect(DSN)
|
||||
print "Encoding for this connection is", conn.encoding
|
||||
print("Encoding for this connection is", conn.encoding)
|
||||
|
||||
curs = conn.cursor()
|
||||
try:
|
||||
|
@ -51,52 +52,52 @@ conn.commit()
|
|||
# copy_to using defaults
|
||||
io = open('copy_to.txt', 'w')
|
||||
curs.copy_to(io, 'test_copy')
|
||||
print "1) Copy %d records into file object using defaults: " % len (data) + \
|
||||
"sep = \\t and null = \\N"
|
||||
print("1) Copy %d records into file object using defaults: " % len (data) + \
|
||||
"sep = \\t and null = \\N")
|
||||
io.close()
|
||||
|
||||
rows = open('copy_to.txt', 'r').readlines()
|
||||
print " File has %d rows:" % len(rows)
|
||||
print(" File has %d rows:" % len(rows))
|
||||
|
||||
for r in rows:
|
||||
print " ", r,
|
||||
print(" ", r, end=' ')
|
||||
|
||||
# copy_to using custom separator
|
||||
io = open('copy_to.txt', 'w')
|
||||
curs.copy_to(io, 'test_copy', ':')
|
||||
print "2) Copy %d records into file object using sep = :" % len(data)
|
||||
print("2) Copy %d records into file object using sep = :" % len(data))
|
||||
io.close()
|
||||
|
||||
rows = open('copy_to.txt', 'r').readlines()
|
||||
print " File has %d rows:" % len(rows)
|
||||
print(" File has %d rows:" % len(rows))
|
||||
|
||||
for r in rows:
|
||||
print " ", r,
|
||||
print(" ", r, end=' ')
|
||||
|
||||
# copy_to using custom null identifier
|
||||
io = open('copy_to.txt', 'w')
|
||||
curs.copy_to(io, 'test_copy', null='NULL')
|
||||
print "3) Copy %d records into file object using null = NULL" % len(data)
|
||||
print("3) Copy %d records into file object using null = NULL" % len(data))
|
||||
io.close()
|
||||
|
||||
rows = open('copy_to.txt', 'r').readlines()
|
||||
print " File has %d rows:" % len(rows)
|
||||
print(" File has %d rows:" % len(rows))
|
||||
|
||||
for r in rows:
|
||||
print " ", r,
|
||||
print(" ", r, end=' ')
|
||||
|
||||
# copy_to using custom separator and null identifier
|
||||
io = open('copy_to.txt', 'w')
|
||||
curs.copy_to(io, 'test_copy', ':', 'NULL')
|
||||
print "4) Copy %d records into file object using sep = : and null ) NULL" % \
|
||||
len(data)
|
||||
print("4) Copy %d records into file object using sep = : and null ) NULL" % \
|
||||
len(data))
|
||||
io.close()
|
||||
|
||||
rows = open('copy_to.txt', 'r').readlines()
|
||||
print " File has %d rows:" % len(rows)
|
||||
print(" File has %d rows:" % len(rows))
|
||||
|
||||
for r in rows:
|
||||
print " ", r,
|
||||
print(" ", r, end=' ')
|
||||
|
||||
curs.execute("DROP TABLE test_copy")
|
||||
os.unlink('copy_to.txt')
|
||||
|
|
|
@ -25,9 +25,9 @@ import psycopg2.extensions
|
|||
if len(sys.argv) > 1:
|
||||
DSN = sys.argv[1]
|
||||
|
||||
print "Opening connection using dsn:", DSN
|
||||
print("Opening connection using dsn:", DSN)
|
||||
conn = psycopg2.connect(DSN)
|
||||
print "Encoding for this connection is", conn.encoding
|
||||
print("Encoding for this connection is", conn.encoding)
|
||||
|
||||
|
||||
class NoDataError(psycopg2.ProgrammingError):
|
||||
|
@ -49,15 +49,15 @@ class Cursor(psycopg2.extensions.cursor):
|
|||
if d is None:
|
||||
raise NoDataError("no more data")
|
||||
return d
|
||||
|
||||
|
||||
curs = conn.cursor(cursor_factory=Cursor)
|
||||
curs.execute("SELECT 1 AS foo")
|
||||
print "Result of fetchone():", curs.fetchone()
|
||||
print("Result of fetchone():", curs.fetchone())
|
||||
|
||||
# now let's raise the exception
|
||||
try:
|
||||
curs.fetchone()
|
||||
except NoDataError, err:
|
||||
print "Exception caught:", err
|
||||
except NoDataError as err:
|
||||
print("Exception caught:", err)
|
||||
|
||||
conn.rollback()
|
||||
|
|
|
@ -6,25 +6,18 @@ Mapping arbitrary objects to a PostgreSQL database with psycopg2
|
|||
- Problem
|
||||
|
||||
You need to store arbitrary objects in a PostgreSQL database without being
|
||||
intrusive for your classes (don't want inheritance from an 'Item' or
|
||||
intrusive for your classes (don't want inheritance from an 'Item' or
|
||||
'Persistent' object).
|
||||
|
||||
- Solution
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
import psycopg2
|
||||
from psycopg2.extensions import adapt, register_adapter
|
||||
|
||||
try:
|
||||
sorted()
|
||||
except:
|
||||
def sorted(seq):
|
||||
seq.sort()
|
||||
return seq
|
||||
|
||||
# Here is the adapter for every object that we may ever need to
|
||||
# Here is the adapter for every object that we may ever need to
|
||||
# insert in the database. It receives the original object and does
|
||||
# its job on that instance
|
||||
|
||||
|
@ -33,7 +26,7 @@ class ObjectMapper(object):
|
|||
self.orig = orig
|
||||
self.tmp = {}
|
||||
self.items, self.fields = self._gatherState()
|
||||
|
||||
|
||||
def _gatherState(self):
|
||||
adaptee_name = self.orig.__class__.__name__
|
||||
fields = sorted([(field, getattr(self.orig, field))
|
||||
|
@ -42,19 +35,19 @@ class ObjectMapper(object):
|
|||
for item, value in fields:
|
||||
items.append(item)
|
||||
return items, fields
|
||||
|
||||
|
||||
def getTableName(self):
|
||||
return self.orig.__class__.__name__
|
||||
|
||||
|
||||
def getMappedValues(self):
|
||||
tmp = []
|
||||
for i in self.items:
|
||||
tmp.append("%%(%s)s"%i)
|
||||
return ", ".join(tmp)
|
||||
|
||||
|
||||
def getValuesDict(self):
|
||||
return dict(self.fields)
|
||||
|
||||
|
||||
def getFields(self):
|
||||
return self.items
|
||||
|
||||
|
@ -66,14 +59,14 @@ class ObjectMapper(object):
|
|||
return qry, self.getValuesDict()
|
||||
|
||||
# Here are the objects
|
||||
class Album(object):
|
||||
id = 0
|
||||
class Album(object):
|
||||
id = 0
|
||||
def __init__(self):
|
||||
self.creation_time = datetime.now()
|
||||
self.album_id = self.id
|
||||
Album.id = Album.id + 1
|
||||
self.binary_data = buffer('12312312312121')
|
||||
|
||||
|
||||
class Order(object):
|
||||
id = 0
|
||||
def __init__(self):
|
||||
|
@ -84,7 +77,7 @@ class Order(object):
|
|||
|
||||
register_adapter(Album, ObjectMapper)
|
||||
register_adapter(Order, ObjectMapper)
|
||||
|
||||
|
||||
# Describe what is needed to save on each object
|
||||
# This is actually just configuration, you can use xml with a parser if you
|
||||
# like to have plenty of wasted CPU cycles ;P.
|
||||
|
@ -92,53 +85,53 @@ register_adapter(Order, ObjectMapper)
|
|||
persistent_fields = {'Album': ['album_id', 'creation_time', 'binary_data'],
|
||||
'Order': ['order_id', 'items', 'price']
|
||||
}
|
||||
|
||||
print adapt(Album()).generateInsert()
|
||||
print adapt(Album()).generateInsert()
|
||||
print adapt(Album()).generateInsert()
|
||||
print adapt(Order()).generateInsert()
|
||||
print adapt(Order()).generateInsert()
|
||||
print adapt(Order()).generateInsert()
|
||||
|
||||
print(adapt(Album()).generateInsert())
|
||||
print(adapt(Album()).generateInsert())
|
||||
print(adapt(Album()).generateInsert())
|
||||
print(adapt(Order()).generateInsert())
|
||||
print(adapt(Order()).generateInsert())
|
||||
print(adapt(Order()).generateInsert())
|
||||
|
||||
"""
|
||||
- Discussion
|
||||
|
||||
Psycopg 2 has a great new feature: adaptation. The big thing about
|
||||
adaptation is that it enables the programmer to glue most of the
|
||||
Psycopg 2 has a great new feature: adaptation. The big thing about
|
||||
adaptation is that it enables the programmer to glue most of the
|
||||
code out there without many difficulties.
|
||||
|
||||
This recipe tries to focus attention on a way to generate SQL queries to
|
||||
insert completely new objects inside a database. As you can see objects do
|
||||
not know anything about the code that is handling them. We specify all the
|
||||
This recipe tries to focus attention on a way to generate SQL queries to
|
||||
insert completely new objects inside a database. As you can see objects do
|
||||
not know anything about the code that is handling them. We specify all the
|
||||
fields that we need for each object through the persistent_fields dict.
|
||||
|
||||
The most important lines of this recipe are:
|
||||
register_adapter(Album, ObjectMapper)
|
||||
register_adapter(Order, ObjectMapper)
|
||||
|
||||
In these lines we notify the system that when we call adapt with an Album instance
|
||||
as an argument we want it to istantiate ObjectMapper passing the Album instance
|
||||
In these lines we notify the system that when we call adapt with an Album instance
|
||||
as an argument we want it to instantiate ObjectMapper passing the Album instance
|
||||
as argument (self.orig in the ObjectMapper class).
|
||||
|
||||
The output is something like this (for each call to generateInsert):
|
||||
|
||||
('INSERT INTO Album (album_id, binary_data, creation_time) VALUES
|
||||
(%(album_id)s, %(binary_data)s, %(creation_time)s)',
|
||||
|
||||
{'binary_data': <read-only buffer for 0x402de070, ...>,
|
||||
'creation_time': datetime.datetime(2004, 9, 10, 20, 48, 29, 633728),
|
||||
|
||||
('INSERT INTO Album (album_id, binary_data, creation_time) VALUES
|
||||
(%(album_id)s, %(binary_data)s, %(creation_time)s)',
|
||||
|
||||
{'binary_data': <read-only buffer for 0x402de070, ...>,
|
||||
'creation_time': datetime.datetime(2004, 9, 10, 20, 48, 29, 633728),
|
||||
'album_id': 1}
|
||||
)
|
||||
|
||||
This is a tuple of {SQL_QUERY, FILLING_DICT}, and all the quoting/converting
|
||||
stuff (from python's datetime to postgres s and from python's buffer to
|
||||
postgres' blob) is handled with the same adaptation process hunder the hood
|
||||
This is a tuple of {SQL_QUERY, FILLING_DICT}, and all the quoting/converting
|
||||
stuff (from python's datetime to postgres s and from python's buffer to
|
||||
postgres' blob) is handled with the same adaptation process under the hood
|
||||
by psycopg2.
|
||||
|
||||
At last, just notice that ObjectMapper is working for both Album and Order
|
||||
instances without any glitches at all, and both classes could have easily been
|
||||
coming from closed source libraries or C coded ones (which are not easily
|
||||
modified), whereas a common pattern in todays ORMs or OODBs is to provide
|
||||
a basic 'Persistent' object that already knows how to store itself in the
|
||||
At last, just notice that ObjectMapper is working for both Album and Order
|
||||
instances without any glitches at all, and both classes could have easily been
|
||||
coming from closed source libraries or C coded ones (which are not easily
|
||||
modified), whereas a common pattern in today's ORMs or OODBs is to provide
|
||||
a basic 'Persistent' object that already knows how to store itself in the
|
||||
database.
|
||||
"""
|
||||
|
|
|
@ -25,41 +25,41 @@ import psycopg2.extras
|
|||
if len(sys.argv) > 1:
|
||||
DSN = sys.argv[1]
|
||||
|
||||
print "Opening connection using dsn:", DSN
|
||||
print("Opening connection using dsn:", DSN)
|
||||
conn = psycopg2.connect(DSN)
|
||||
print "Encoding for this connection is", conn.encoding
|
||||
print("Encoding for this connection is", conn.encoding)
|
||||
|
||||
|
||||
|
||||
curs = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
|
||||
curs.execute("SELECT 1 AS foo, 'cip' AS bar, date(now()) as zot")
|
||||
print "Cursor's row factory is", curs.row_factory
|
||||
print("Cursor's row factory is", curs.row_factory)
|
||||
|
||||
data = curs.fetchone()
|
||||
print "The type of the data row is", type(data)
|
||||
print "Some data accessed both as tuple and dict:"
|
||||
print " ", data['foo'], data['bar'], data['zot']
|
||||
print " ", data[0], data[1], data[2]
|
||||
print("The type of the data row is", type(data))
|
||||
print("Some data accessed both as tuple and dict:")
|
||||
print(" ", data['foo'], data['bar'], data['zot'])
|
||||
print(" ", data[0], data[1], data[2])
|
||||
|
||||
# execute another query and demonstrate we can still access the row
|
||||
curs.execute("SELECT 2 AS foo")
|
||||
print "The type of the data row is", type(data)
|
||||
print "Some more data accessed both as tuple and dict:"
|
||||
print " ", data['foo'], data['bar'], data['zot']
|
||||
print " ", data[0], data[1], data[2]
|
||||
print("The type of the data row is", type(data))
|
||||
print("Some more data accessed both as tuple and dict:")
|
||||
print(" ", data['foo'], data['bar'], data['zot'])
|
||||
print(" ", data[0], data[1], data[2])
|
||||
|
||||
curs = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
|
||||
curs.execute("SELECT 1 AS foo, 'cip' AS bar, date(now()) as zot")
|
||||
print "Cursor's row factory is", curs.row_factory
|
||||
print("Cursor's row factory is", curs.row_factory)
|
||||
|
||||
data = curs.fetchone()
|
||||
print "The type of the data row is", type(data)
|
||||
print "Some data accessed both as tuple and dict:"
|
||||
print " ", data['foo'], data['bar'], data['zot']
|
||||
print " ", "No access using indices: this is a specialized cursor."
|
||||
print("The type of the data row is", type(data))
|
||||
print("Some data accessed both as tuple and dict:")
|
||||
print(" ", data['foo'], data['bar'], data['zot'])
|
||||
print(" ", "No access using indices: this is a specialized cursor.")
|
||||
|
||||
# execute another query and demonstrate we can still access the row
|
||||
curs.execute("SELECT 2 AS foo")
|
||||
print "The type of the data row is", type(data)
|
||||
print "Some more data accessed both as tuple and dict:"
|
||||
print " ", data['foo'], data['bar'], data['zot']
|
||||
print " ", "No access using indices: this is a specialized cursor."
|
||||
print("The type of the data row is", type(data))
|
||||
print("Some more data accessed both as tuple and dict:")
|
||||
print(" ", data['foo'], data['bar'], data['zot'])
|
||||
print(" ", "No access using indices: this is a specialized cursor.")
|
||||
|
|
|
@ -28,7 +28,7 @@ from psycopg2.extensions import adapt
|
|||
if len(sys.argv) > 1:
|
||||
DSN = sys.argv[1]
|
||||
|
||||
print "Opening connection using dsn:", DSN
|
||||
print("Opening connection using dsn:", DSN)
|
||||
conn = psycopg2.connect(DSN)
|
||||
curs = conn.cursor()
|
||||
|
||||
|
@ -52,9 +52,9 @@ mx1 = (
|
|||
|
||||
from psycopg2.extensions import adapt
|
||||
import psycopg2.extras
|
||||
print adapt(mx1)
|
||||
print(adapt(mx1))
|
||||
|
||||
print "Inserting mx.DateTime values..."
|
||||
print("Inserting mx.DateTime values...")
|
||||
curs.execute("INSERT INTO test_dt VALUES (%s, %s, %s, %s, %s)", mx1)
|
||||
|
||||
# build and insert some values using the datetime adapters
|
||||
|
@ -65,11 +65,11 @@ dt1 = (
|
|||
datetime.datetime(2004, 10, 19, 0, 11, 17, 500000),
|
||||
datetime.timedelta(13, 15*3600+17*60+59, 900000))
|
||||
|
||||
print "Inserting Python datetime values..."
|
||||
print("Inserting Python datetime values...")
|
||||
curs.execute("INSERT INTO test_dt VALUES (%s, %s, %s, %s, %s)", dt1)
|
||||
|
||||
# now extract the row from database and print them
|
||||
print "Extracting values inserted with mx.DateTime wrappers:"
|
||||
print("Extracting values inserted with mx.DateTime wrappers:")
|
||||
curs.execute("SELECT d, t, dt, z FROM test_dt WHERE k = 1")
|
||||
for n, x in zip(mx1[1:], curs.fetchone()):
|
||||
try:
|
||||
|
@ -80,10 +80,10 @@ for n, x in zip(mx1[1:], curs.fetchone()):
|
|||
except:
|
||||
s = repr(n) + "\n -> " + str(adapt(n)) + \
|
||||
"\n -> " + repr(x) + "\n -> " + str(x)
|
||||
print s
|
||||
print
|
||||
print(s)
|
||||
print()
|
||||
|
||||
print "Extracting values inserted with Python datetime wrappers:"
|
||||
print("Extracting values inserted with Python datetime wrappers:")
|
||||
curs.execute("SELECT d, t, dt, z FROM test_dt WHERE k = 2")
|
||||
for n, x in zip(dt1[1:], curs.fetchone()):
|
||||
try:
|
||||
|
@ -92,8 +92,8 @@ for n, x in zip(dt1[1:], curs.fetchone()):
|
|||
s = repr(n) + "\n -> " + repr(x) + "\n -> " + x.isoformat()
|
||||
except:
|
||||
s = repr(n) + "\n -> " + repr(x) + "\n -> " + str(x)
|
||||
print s
|
||||
print
|
||||
print(s)
|
||||
print()
|
||||
|
||||
curs.execute("DROP TABLE test_dt")
|
||||
conn.commit()
|
||||
|
|
|
@ -26,80 +26,80 @@ import psycopg2.extensions
|
|||
if len(sys.argv) > 1:
|
||||
DSN = sys.argv[1]
|
||||
|
||||
print "Opening connection using dsn:", DSN
|
||||
print("Opening connection using dsn:", DSN)
|
||||
conn = psycopg2.connect(DSN)
|
||||
print "Initial encoding for this connection is", conn.encoding
|
||||
print("Initial encoding for this connection is", conn.encoding)
|
||||
|
||||
print "\n** This example is supposed to be run in a UNICODE terminal! **\n"
|
||||
print("\n** This example is supposed to be run in a UNICODE terminal! **\n")
|
||||
|
||||
print "Available encodings:"
|
||||
print("Available encodings:")
|
||||
encs = psycopg2.extensions.encodings.items()
|
||||
encs.sort()
|
||||
for a, b in encs:
|
||||
print " ", a, "<->", b
|
||||
print(" ", a, "<->", b)
|
||||
|
||||
print "Using STRING typecaster"
|
||||
print "Setting backend encoding to LATIN1 and executing queries:"
|
||||
print("Using STRING typecaster")
|
||||
print("Setting backend encoding to LATIN1 and executing queries:")
|
||||
conn.set_client_encoding('LATIN1')
|
||||
curs = conn.cursor()
|
||||
curs.execute("SELECT %s::TEXT AS foo", ('àèìòù',))
|
||||
x = curs.fetchone()[0]
|
||||
print " ->", unicode(x, 'latin-1').encode('utf-8'), type(x)
|
||||
print(" ->", unicode(x, 'latin-1').encode('utf-8'), type(x))
|
||||
curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù',))
|
||||
x = curs.fetchone()[0]
|
||||
print " ->", unicode(x, 'latin-1').encode('utf-8'), type(x)
|
||||
print(" ->", unicode(x, 'latin-1').encode('utf-8'), type(x))
|
||||
|
||||
print "Setting backend encoding to UTF8 and executing queries:"
|
||||
print("Setting backend encoding to UTF8 and executing queries:")
|
||||
conn.set_client_encoding('UNICODE')
|
||||
curs = conn.cursor()
|
||||
curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù'.encode('utf-8'),))
|
||||
x = curs.fetchone()[0]
|
||||
print " ->", x, type(x)
|
||||
print(" ->", x, type(x))
|
||||
curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù',))
|
||||
x = curs.fetchone()[0]
|
||||
print " ->", x, type(x)
|
||||
print(" ->", x, type(x))
|
||||
|
||||
print "Using UNICODE typecaster"
|
||||
print("Using UNICODE typecaster")
|
||||
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
|
||||
|
||||
print "Setting backend encoding to LATIN1 and executing queries:"
|
||||
print("Setting backend encoding to LATIN1 and executing queries:")
|
||||
conn.set_client_encoding('LATIN1')
|
||||
curs = conn.cursor()
|
||||
curs.execute("SELECT %s::TEXT AS foo", ('àèìòù',))
|
||||
x = curs.fetchone()[0]
|
||||
print " ->", x.encode('utf-8'), ":", type(x)
|
||||
print(" ->", x.encode('utf-8'), ":", type(x))
|
||||
curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù',))
|
||||
x = curs.fetchone()[0]
|
||||
print " ->", x.encode('utf-8'), ":", type(x)
|
||||
print(" ->", x.encode('utf-8'), ":", type(x))
|
||||
|
||||
print "Setting backend encoding to UTF8 and executing queries:"
|
||||
print("Setting backend encoding to UTF8 and executing queries:")
|
||||
conn.set_client_encoding('UNICODE')
|
||||
curs = conn.cursor()
|
||||
curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù'.encode('utf-8'),))
|
||||
x = curs.fetchone()[0]
|
||||
print " ->", x.encode('utf-8'), ":", type(x)
|
||||
print(" ->", x.encode('utf-8'), ":", type(x))
|
||||
curs.execute("SELECT %s::TEXT AS foo", (u'àèìòù',))
|
||||
x = curs.fetchone()[0]
|
||||
print " ->", x.encode('utf-8'), ":", type(x)
|
||||
print(" ->", x.encode('utf-8'), ":", type(x))
|
||||
|
||||
print "Executing full UNICODE queries"
|
||||
print("Executing full UNICODE queries")
|
||||
|
||||
print "Setting backend encoding to LATIN1 and executing queries:"
|
||||
print("Setting backend encoding to LATIN1 and executing queries:")
|
||||
conn.set_client_encoding('LATIN1')
|
||||
curs = conn.cursor()
|
||||
curs.execute(u"SELECT %s::TEXT AS foo", ('àèìòù',))
|
||||
x = curs.fetchone()[0]
|
||||
print " ->", x.encode('utf-8'), ":", type(x)
|
||||
print(" ->", x.encode('utf-8'), ":", type(x))
|
||||
curs.execute(u"SELECT %s::TEXT AS foo", (u'àèìòù',))
|
||||
x = curs.fetchone()[0]
|
||||
print " ->", x.encode('utf-8'), ":", type(x)
|
||||
print(" ->", x.encode('utf-8'), ":", type(x))
|
||||
|
||||
print "Setting backend encoding to UTF8 and executing queries:"
|
||||
print("Setting backend encoding to UTF8 and executing queries:")
|
||||
conn.set_client_encoding('UNICODE')
|
||||
curs = conn.cursor()
|
||||
curs.execute(u"SELECT %s::TEXT AS foo", (u'àèìòù'.encode('utf-8'),))
|
||||
x = curs.fetchone()[0]
|
||||
print " ->", x.encode('utf-8'), ":", type(x)
|
||||
print(" ->", x.encode('utf-8'), ":", type(x))
|
||||
curs.execute(u"SELECT %s::TEXT AS foo", (u'àèìòù',))
|
||||
x = curs.fetchone()[0]
|
||||
print " ->", x.encode('utf-8'), ":", type(x)
|
||||
print(" ->", x.encode('utf-8'), ":", type(x))
|
||||
|
|
|
@ -24,9 +24,9 @@ import psycopg2
|
|||
if len(sys.argv) > 1:
|
||||
DSN = sys.argv[1]
|
||||
|
||||
print "Opening connection using dsn:", DSN
|
||||
print("Opening connection using dsn:", DSN)
|
||||
conn = psycopg2.connect(DSN)
|
||||
print "Encoding for this connection is", conn.encoding
|
||||
print("Encoding for this connection is", conn.encoding)
|
||||
|
||||
curs = conn.cursor()
|
||||
try:
|
||||
|
@ -68,12 +68,12 @@ conn.commit()
|
|||
|
||||
ncurs = conn.cursor("crs")
|
||||
ncurs.execute("SELECT * FROM test_fetch")
|
||||
print "First 10 rows:", flatten(ncurs.fetchmany(10))
|
||||
print("First 10 rows:", flatten(ncurs.fetchmany(10)))
|
||||
ncurs.scroll(-5)
|
||||
print "Moved back cursor by 5 rows (to row 5.)"
|
||||
print "Another 10 rows:", flatten(ncurs.fetchmany(10))
|
||||
print "Another one:", list(ncurs.fetchone())
|
||||
print "The remaining rows:", flatten(ncurs.fetchall())
|
||||
print("Moved back cursor by 5 rows (to row 5.)")
|
||||
print("Another 10 rows:", flatten(ncurs.fetchmany(10)))
|
||||
print("Another one:", list(ncurs.fetchone()))
|
||||
print("The remaining rows:", flatten(ncurs.fetchall()))
|
||||
conn.rollback()
|
||||
|
||||
curs.execute("DROP TABLE test_fetch")
|
||||
|
|
|
@ -23,7 +23,7 @@ import sys, psycopg2
|
|||
if len(sys.argv) > 1:
|
||||
DSN = sys.argv[1]
|
||||
|
||||
print "Opening connection using dsn:", DSN
|
||||
print("Opening connection using dsn:", DSN)
|
||||
conn = psycopg2.connect(DSN)
|
||||
curs = conn.cursor()
|
||||
|
||||
|
@ -42,18 +42,18 @@ curs.execute("""INSERT INTO test_oid
|
|||
VALUES (%(name)s, %(surname)s)""", data[0])
|
||||
|
||||
foid = curs.lastrowid
|
||||
print "Oid for %(name)s %(surname)s" % data[0], "is", foid
|
||||
print("Oid for %(name)s %(surname)s" % data[0], "is", foid)
|
||||
|
||||
curs.execute("""INSERT INTO test_oid
|
||||
VALUES (%(name)s, %(surname)s)""", data[1])
|
||||
moid = curs.lastrowid
|
||||
print "Oid for %(name)s %(surname)s" % data[1], "is", moid
|
||||
print("Oid for %(name)s %(surname)s" % data[1], "is", moid)
|
||||
|
||||
curs.execute("SELECT * FROM test_oid WHERE oid = %s", (foid,))
|
||||
print "Oid", foid, "selected %s %s" % curs.fetchone()
|
||||
print("Oid", foid, "selected %s %s" % curs.fetchone())
|
||||
|
||||
curs.execute("SELECT * FROM test_oid WHERE oid = %s", (moid,))
|
||||
print "Oid", moid, "selected %s %s" % curs.fetchone()
|
||||
print("Oid", moid, "selected %s %s" % curs.fetchone())
|
||||
|
||||
curs.execute("DROP TABLE test_oid")
|
||||
conn.commit()
|
||||
|
|
|
@ -24,68 +24,68 @@ import psycopg2
|
|||
if len(sys.argv) > 1:
|
||||
DSN = sys.argv[1]
|
||||
|
||||
print "Opening connection using dsn:", DSN
|
||||
print("Opening connection using dsn:", DSN)
|
||||
conn = psycopg2.connect(DSN)
|
||||
print "Encoding for this connection is", conn.encoding
|
||||
print("Encoding for this connection is", conn.encoding)
|
||||
|
||||
# this will create a large object with a new random oid, we'll
|
||||
# use it to make some basic tests about read/write and seek.
|
||||
lobj = conn.lobject()
|
||||
loid = lobj.oid
|
||||
print "Created a new large object with oid", loid
|
||||
print("Created a new large object with oid", loid)
|
||||
|
||||
print "Manually importing some binary data into the object:"
|
||||
print("Manually importing some binary data into the object:")
|
||||
data = open("somehackers.jpg").read()
|
||||
len = lobj.write(data)
|
||||
print " imported", len, "bytes of data"
|
||||
print(" imported", len, "bytes of data")
|
||||
|
||||
conn.commit()
|
||||
|
||||
print "Trying to (re)open large object with oid", loid
|
||||
print("Trying to (re)open large object with oid", loid)
|
||||
lobj = conn.lobject(loid)
|
||||
print "Manually exporting the data from the lobject:"
|
||||
print("Manually exporting the data from the lobject:")
|
||||
data1 = lobj.read()
|
||||
len = lobj.tell()
|
||||
lobj.seek(0, 0)
|
||||
data2 = lobj.read()
|
||||
if data1 != data2:
|
||||
print "ERROR: read after seek returned different data"
|
||||
print("ERROR: read after seek returned different data")
|
||||
open("somehackers_lobject1.jpg", 'wb').write(data1)
|
||||
print " written", len, "bytes of data to somehackers_lobject1.jpg"
|
||||
print(" written", len, "bytes of data to somehackers_lobject1.jpg")
|
||||
|
||||
lobj.unlink()
|
||||
print "Large object with oid", loid, "removed"
|
||||
print("Large object with oid", loid, "removed")
|
||||
|
||||
conn.commit()
|
||||
|
||||
# now we try to use the import and export functions to do the same
|
||||
lobj = conn.lobject(0, 'n', 0, "somehackers.jpg")
|
||||
loid = lobj.oid
|
||||
print "Imported a new large object with oid", loid
|
||||
print("Imported a new large object with oid", loid)
|
||||
|
||||
conn.commit()
|
||||
|
||||
print "Trying to (re)open large object with oid", loid
|
||||
print("Trying to (re)open large object with oid", loid)
|
||||
lobj = conn.lobject(loid, 'n')
|
||||
print "Using export() to export the data from the large object:"
|
||||
print("Using export() to export the data from the large object:")
|
||||
lobj.export("somehackers_lobject2.jpg")
|
||||
print " exported large object to somehackers_lobject2.jpg"
|
||||
print(" exported large object to somehackers_lobject2.jpg")
|
||||
|
||||
lobj.unlink()
|
||||
print "Large object with oid", loid, "removed"
|
||||
print("Large object with oid", loid, "removed")
|
||||
|
||||
conn.commit()
|
||||
|
||||
# this will create a very large object with a new random oid.
|
||||
lobj = conn.lobject()
|
||||
loid = lobj.oid
|
||||
print "Created a new large object with oid", loid
|
||||
print("Created a new large object with oid", loid)
|
||||
|
||||
print "Manually importing a lot of data into the object:"
|
||||
print("Manually importing a lot of data into the object:")
|
||||
data = "data" * 1000000
|
||||
len = lobj.write(data)
|
||||
print " imported", len, "bytes of data"
|
||||
print(" imported", len, "bytes of data")
|
||||
|
||||
conn.rollback()
|
||||
|
||||
print "\nNow try to load the new images, to check it worked!"
|
||||
print("\nNow try to load the new images, to check it worked!")
|
||||
|
|
|
@ -24,10 +24,10 @@ import sys, psycopg2
|
|||
if len(sys.argv) > 1:
|
||||
DSN = sys.argv[1]
|
||||
|
||||
print "Opening connection using dsn:", DSN
|
||||
print("Opening connection using dsn:", DSN)
|
||||
|
||||
conn = psycopg2.connect(DSN)
|
||||
print "Encoding for this connection is", conn.encoding
|
||||
print("Encoding for this connection is", conn.encoding)
|
||||
|
||||
curs = conn.cursor()
|
||||
curs.execute("SELECT %(foo)s AS foo", {'foo':'bar'})
|
||||
|
@ -37,11 +37,11 @@ curs.execute("SELECT %(foo)s AS foo", {'foo':42})
|
|||
curs.execute("SELECT %(foo)s AS foo", {'foo':u'yatt<EFBFBD>!'})
|
||||
curs.execute("SELECT %(foo)s AS foo", {'foo':u'bar'})
|
||||
|
||||
print curs.mogrify("SELECT %(foo)s AS foo", {'foo':'bar'})
|
||||
print curs.mogrify("SELECT %(foo)s AS foo", {'foo':None})
|
||||
print curs.mogrify("SELECT %(foo)s AS foo", {'foo':True})
|
||||
print curs.mogrify("SELECT %(foo)s AS foo", {'foo':42})
|
||||
print curs.mogrify("SELECT %(foo)s AS foo", {'foo':u'yatt<EFBFBD>!'})
|
||||
print curs.mogrify("SELECT %(foo)s AS foo", {'foo':u'bar'})
|
||||
print(curs.mogrify("SELECT %(foo)s AS foo", {'foo':'bar'}))
|
||||
print(curs.mogrify("SELECT %(foo)s AS foo", {'foo':None}))
|
||||
print(curs.mogrify("SELECT %(foo)s AS foo", {'foo':True}))
|
||||
print(curs.mogrify("SELECT %(foo)s AS foo", {'foo':42}))
|
||||
print(curs.mogrify("SELECT %(foo)s AS foo", {'foo':u'yatt<EFBFBD>!'}))
|
||||
print(curs.mogrify("SELECT %(foo)s AS foo", {'foo':u'bar'}))
|
||||
|
||||
conn.rollback()
|
||||
|
|
|
@ -34,7 +34,7 @@ a not-yet well defined protocol that we can call ISQLQuote:
|
|||
|
||||
def getbinary(self):
|
||||
"Returns a binary quoted string representing the bound variable."
|
||||
|
||||
|
||||
def getbuffer(self):
|
||||
"Returns the wrapped object itself."
|
||||
|
||||
|
@ -86,10 +86,10 @@ class AsIs(object):
|
|||
self.__obj = obj
|
||||
def getquoted(self):
|
||||
return self.__obj
|
||||
|
||||
|
||||
class SQL_IN(object):
|
||||
"""Adapt a tuple to an SQL quotable object."""
|
||||
|
||||
|
||||
def __init__(self, seq):
|
||||
self._seq = seq
|
||||
|
||||
|
@ -103,10 +103,10 @@ class SQL_IN(object):
|
|||
qobjs = [str(psycoadapt(o).getquoted()) for o in self._seq]
|
||||
|
||||
return '(' + ', '.join(qobjs) + ')'
|
||||
|
||||
|
||||
__str__ = getquoted
|
||||
|
||||
|
||||
|
||||
# add our new adapter class to psycopg list of adapters
|
||||
register_adapter(tuple, SQL_IN)
|
||||
register_adapter(float, AsIs)
|
||||
|
@ -117,10 +117,10 @@ register_adapter(int, AsIs)
|
|||
# conn = psycopg.connect("...")
|
||||
# curs = conn.cursor()
|
||||
# curs.execute("SELECT ...", (("this", "is", "the", "tuple"),))
|
||||
#
|
||||
#
|
||||
# but we have no connection to a database right now, so we just check
|
||||
# the SQL_IN class by calling psycopg's adapt() directly:
|
||||
|
||||
if __name__ == '__main__':
|
||||
print "Note how the string will be SQL-quoted, but the number will not:"
|
||||
print psycoadapt(("this is an 'sql quoted' str\\ing", 1, 2.0))
|
||||
print("Note how the string will be SQL-quoted, but the number will not:")
|
||||
print(psycoadapt(("this is an 'sql quoted' str\\ing", 1, 2.0)))
|
||||
|
|
|
@ -26,20 +26,20 @@ from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
|
|||
if len(sys.argv) > 1:
|
||||
DSN = sys.argv[1]
|
||||
|
||||
print "Opening connection using dsn:", DSN
|
||||
print("Opening connection using dsn:", DSN)
|
||||
conn = psycopg2.connect(DSN)
|
||||
print "Encoding for this connection is", conn.encoding
|
||||
print("Encoding for this connection is", conn.encoding)
|
||||
|
||||
conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
|
||||
curs = conn.cursor()
|
||||
|
||||
curs.execute("listen test")
|
||||
|
||||
print "Waiting for 'NOTIFY test'"
|
||||
print("Waiting for 'NOTIFY test'")
|
||||
while 1:
|
||||
if select.select([conn],[],[],5)==([],[],[]):
|
||||
print "Timeout"
|
||||
print("Timeout")
|
||||
else:
|
||||
conn.poll()
|
||||
while conn.notifies:
|
||||
print "Got NOTIFY:", conn.notifies.pop()
|
||||
print("Got NOTIFY:", conn.notifies.pop())
|
||||
|
|
|
@ -30,17 +30,17 @@ import psycopg2
|
|||
if len(sys.argv) > 1:
|
||||
DSN = sys.argv[1]
|
||||
|
||||
print "Opening connection using dsn:", DSN
|
||||
print("Opening connection using dsn:", DSN)
|
||||
conn = psycopg2.connect(DSN)
|
||||
print "Encoding for this connection is", conn.encoding
|
||||
print("Encoding for this connection is", conn.encoding)
|
||||
|
||||
curs = conn.cursor()
|
||||
curs.execute("SELECT 1 AS foo")
|
||||
print curs.fetchone()
|
||||
print(curs.fetchone())
|
||||
curs.execute("SELECT 1 AS foo")
|
||||
print curs.fetchmany()
|
||||
print(curs.fetchmany())
|
||||
curs.execute("SELECT 1 AS foo")
|
||||
print curs.fetchall()
|
||||
print(curs.fetchall())
|
||||
|
||||
conn.rollback()
|
||||
|
||||
|
|
|
@ -44,8 +44,8 @@ if len(sys.argv) > 1:
|
|||
DSN = sys.argv[1]
|
||||
if len(sys.argv) > 2:
|
||||
MODE = int(sys.argv[2])
|
||||
|
||||
print "Opening connection using dsn:", DSN
|
||||
|
||||
print("Opening connection using dsn:", DSN)
|
||||
conn = psycopg2.connect(DSN)
|
||||
curs = conn.cursor()
|
||||
|
||||
|
@ -70,35 +70,35 @@ def insert_func(conn_or_pool, rows):
|
|||
conn = conn_or_pool
|
||||
else:
|
||||
conn = conn_or_pool.getconn()
|
||||
|
||||
|
||||
for i in range(rows):
|
||||
if divmod(i, COMMIT_STEP)[1] == 0:
|
||||
conn.commit()
|
||||
if MODE == 1:
|
||||
conn_or_pool.putconn(conn)
|
||||
s = name + ": COMMIT STEP " + str(i)
|
||||
print s
|
||||
print(s)
|
||||
if MODE == 1:
|
||||
conn = conn_or_pool.getconn()
|
||||
c = conn.cursor()
|
||||
try:
|
||||
c.execute("INSERT INTO test_threads VALUES (%s, %s, %s)",
|
||||
(str(i), i, float(i)))
|
||||
except psycopg2.ProgrammingError, err:
|
||||
print name, ": an error occurred; skipping this insert"
|
||||
print err
|
||||
except psycopg2.ProgrammingError as err:
|
||||
print(name, ": an error occurred; skipping this insert")
|
||||
print(err)
|
||||
conn.commit()
|
||||
|
||||
## a nice select function that prints the current number of rows in the
|
||||
## database (and transfer them, putting some pressure on the network)
|
||||
|
||||
|
||||
def select_func(conn_or_pool, z):
|
||||
name = threading.currentThread().getName()
|
||||
|
||||
if MODE == 0:
|
||||
conn = conn_or_pool
|
||||
conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
|
||||
|
||||
|
||||
for i in range(SELECT_SIZE):
|
||||
if divmod(i, SELECT_STEP)[1] == 0:
|
||||
try:
|
||||
|
@ -112,10 +112,10 @@ def select_func(conn_or_pool, z):
|
|||
if MODE == 1:
|
||||
conn_or_pool.putconn(conn)
|
||||
s = name + ": number of rows fetched: " + str(len(l))
|
||||
print s
|
||||
except psycopg2.ProgrammingError, err:
|
||||
print name, ": an error occurred; skipping this select"
|
||||
print err
|
||||
print(s)
|
||||
except psycopg2.ProgrammingError as err:
|
||||
print(name, ": an error occurred; skipping this select")
|
||||
print(err)
|
||||
|
||||
## create the connection pool or the connections
|
||||
if MODE == 0:
|
||||
|
@ -125,18 +125,18 @@ else:
|
|||
m = len(INSERT_THREADS) + len(SELECT_THREADS)
|
||||
n = m/2
|
||||
conn_insert = conn_select = ThreadedConnectionPool(n, m, DSN)
|
||||
|
||||
|
||||
## create the threads
|
||||
threads = []
|
||||
|
||||
print "Creating INSERT threads:"
|
||||
print("Creating INSERT threads:")
|
||||
for name in INSERT_THREADS:
|
||||
t = threading.Thread(None, insert_func, 'Thread-'+name,
|
||||
(conn_insert, ROWS))
|
||||
t.setDaemon(0)
|
||||
threads.append(t)
|
||||
|
||||
print "Creating SELECT threads:"
|
||||
print("Creating SELECT threads:")
|
||||
for name in SELECT_THREADS:
|
||||
t = threading.Thread(None, select_func, 'Thread-'+name,
|
||||
(conn_select, SELECT_DIV))
|
||||
|
@ -150,12 +150,12 @@ for t in threads:
|
|||
# and wait for them to finish
|
||||
for t in threads:
|
||||
t.join()
|
||||
print t.getName(), "exited OK"
|
||||
print(t.getName(), "exited OK")
|
||||
|
||||
|
||||
conn.commit()
|
||||
curs.execute("SELECT count(name) FROM test_threads")
|
||||
print "Inserted", curs.fetchone()[0], "rows."
|
||||
print("Inserted", curs.fetchone()[0], "rows.")
|
||||
|
||||
curs.execute("DROP TABLE test_threads")
|
||||
conn.commit()
|
||||
|
|
|
@ -29,14 +29,14 @@ import psycopg2.extensions
|
|||
if len(sys.argv) > 1:
|
||||
DSN = sys.argv[1]
|
||||
|
||||
print "Opening connection using dsn:", DSN
|
||||
print("Opening connection using dsn:", DSN)
|
||||
conn = psycopg2.connect(DSN)
|
||||
print "Encoding for this connection is", conn.encoding
|
||||
print("Encoding for this connection is", conn.encoding)
|
||||
|
||||
curs = conn.cursor()
|
||||
curs.execute("SELECT 'text'::text AS foo")
|
||||
textoid = curs.description[0][1]
|
||||
print "Oid for the text datatype is", textoid
|
||||
print("Oid for the text datatype is", textoid)
|
||||
|
||||
def castA(s, curs):
|
||||
if s is not None: return "(A) " + s
|
||||
|
@ -48,20 +48,18 @@ TYPEB = psycopg2.extensions.new_type((textoid,), "TYPEB", castB)
|
|||
|
||||
curs = conn.cursor()
|
||||
curs.execute("SELECT 'some text.'::text AS foo")
|
||||
print "Some text from plain connection:", curs.fetchone()[0]
|
||||
print("Some text from plain connection:", curs.fetchone()[0])
|
||||
|
||||
psycopg2.extensions.register_type(TYPEA, conn)
|
||||
curs = conn.cursor()
|
||||
curs.execute("SELECT 'some text.'::text AS foo")
|
||||
print "Some text from connection with typecaster:", curs.fetchone()[0]
|
||||
print("Some text from connection with typecaster:", curs.fetchone()[0])
|
||||
|
||||
curs = conn.cursor()
|
||||
psycopg2.extensions.register_type(TYPEB, curs)
|
||||
curs.execute("SELECT 'some text.'::text AS foo")
|
||||
print "Some text from cursor with typecaster:", curs.fetchone()[0]
|
||||
print("Some text from cursor with typecaster:", curs.fetchone()[0])
|
||||
|
||||
curs = conn.cursor()
|
||||
curs.execute("SELECT 'some text.'::text AS foo")
|
||||
print "Some text from connection with typecaster again:", curs.fetchone()[0]
|
||||
|
||||
|
||||
print("Some text from connection with typecaster again:", curs.fetchone()[0])
|
||||
|
|
|
@ -28,7 +28,7 @@ from psycopg2.tz import ZERO, LOCAL, FixedOffsetTimezone
|
|||
if len(sys.argv) > 1:
|
||||
DSN = sys.argv[1]
|
||||
|
||||
print "Opening connection using dsn:", DSN
|
||||
print("Opening connection using dsn:", DSN)
|
||||
conn = psycopg2.connect(DSN)
|
||||
curs = conn.cursor()
|
||||
|
||||
|
@ -42,28 +42,28 @@ conn.commit()
|
|||
|
||||
d = datetime.datetime(1971, 10, 19, 22, 30, 0, tzinfo=LOCAL)
|
||||
curs.execute("INSERT INTO test_tz VALUES (%s)", (d,))
|
||||
print "Inserted timestamp with timezone:", d
|
||||
print "Time zone:", d.tzinfo.tzname(d), "offset:", d.tzinfo.utcoffset(d)
|
||||
print("Inserted timestamp with timezone:", d)
|
||||
print("Time zone:", d.tzinfo.tzname(d), "offset:", d.tzinfo.utcoffset(d))
|
||||
|
||||
tz = FixedOffsetTimezone(-5*60, "EST")
|
||||
d = datetime.datetime(1971, 10, 19, 22, 30, 0, tzinfo=tz)
|
||||
curs.execute("INSERT INTO test_tz VALUES (%s)", (d,))
|
||||
print "Inserted timestamp with timezone:", d
|
||||
print "Time zone:", d.tzinfo.tzname(d), "offset:", d.tzinfo.utcoffset(d)
|
||||
print("Inserted timestamp with timezone:", d)
|
||||
print("Time zone:", d.tzinfo.tzname(d), "offset:", d.tzinfo.utcoffset(d))
|
||||
|
||||
curs.execute("SELECT * FROM test_tz")
|
||||
d = curs.fetchone()[0]
|
||||
curs.execute("INSERT INTO test_tz VALUES (%s)", (d,))
|
||||
print "Inserted SELECTed timestamp:", d
|
||||
print "Time zone:", d.tzinfo.tzname(d), "offset:", d.tzinfo.utcoffset(d)
|
||||
print("Inserted SELECTed timestamp:", d)
|
||||
print("Time zone:", d.tzinfo.tzname(d), "offset:", d.tzinfo.utcoffset(d))
|
||||
|
||||
curs.execute("SELECT * FROM test_tz")
|
||||
for d in curs:
|
||||
u = d[0].utcoffset() or ZERO
|
||||
print "UTC time: ", d[0] - u
|
||||
print "Local time:", d[0]
|
||||
print "Time zone:", d[0].tzinfo.tzname(d[0]), d[0].tzinfo.utcoffset(d[0])
|
||||
|
||||
print("UTC time: ", d[0] - u)
|
||||
print("Local time:", d[0])
|
||||
print("Time zone:", d[0].tzinfo.tzname(d[0]), d[0].tzinfo.utcoffset(d[0]))
|
||||
|
||||
|
||||
curs.execute("DROP TABLE test_tz")
|
||||
conn.commit()
|
||||
|
|
|
@ -33,9 +33,9 @@ import psycopg2.extras
|
|||
if len(sys.argv) > 1:
|
||||
DSN = sys.argv[1]
|
||||
|
||||
print "Opening connection using dsn:", DSN
|
||||
print("Opening connection using dsn:", DSN)
|
||||
conn = psycopg2.connect(DSN)
|
||||
print "Initial encoding for this connection is", conn.encoding
|
||||
print("Initial encoding for this connection is", conn.encoding)
|
||||
|
||||
curs = conn.cursor()
|
||||
try:
|
||||
|
@ -58,7 +58,7 @@ class Rect(object):
|
|||
and eventually as a type-caster for the data extracted from the database
|
||||
(that's why __init__ takes the curs argument.)
|
||||
"""
|
||||
|
||||
|
||||
def __init__(self, s=None, curs=None):
|
||||
"""Init the rectangle from the optional string s."""
|
||||
self.x = self.y = self.width = self.height = 0.0
|
||||
|
@ -68,7 +68,7 @@ class Rect(object):
|
|||
"""This is a terrible hack, just ignore proto and return self."""
|
||||
if proto == psycopg2.extensions.ISQLQuote:
|
||||
return self
|
||||
|
||||
|
||||
def from_points(self, x0, y0, x1, y1):
|
||||
"""Init the rectangle from points."""
|
||||
if x0 > x1: (x0, x1) = (x1, x0)
|
||||
|
@ -94,11 +94,11 @@ class Rect(object):
|
|||
s = "X: %d\tY: %d\tWidth: %d\tHeight: %d" % (
|
||||
self.x, self.y, self.width, self.height)
|
||||
return s
|
||||
|
||||
|
||||
# here we select from the empty table, just to grab the description
|
||||
curs.execute("SELECT b FROM test_cast WHERE 0=1")
|
||||
boxoid = curs.description[0][1]
|
||||
print "Oid for the box datatype is", boxoid
|
||||
print("Oid for the box datatype is", boxoid)
|
||||
|
||||
# and build the user cast object
|
||||
BOX = psycopg2.extensions.new_type((boxoid,), "BOX", Rect)
|
||||
|
@ -113,14 +113,14 @@ for i in range(100):
|
|||
whrandom.randint(0,100), whrandom.randint(0,100))
|
||||
curs.execute("INSERT INTO test_cast VALUES ('%(p1)s', '%(p2)s', %(box)s)",
|
||||
{'box':b, 'p1':p1, 'p2':p2})
|
||||
print "Added 100 boxed to the database"
|
||||
print("Added 100 boxed to the database")
|
||||
|
||||
# select and print all boxes with at least one point inside
|
||||
curs.execute("SELECT b FROM test_cast WHERE p1 @ b OR p2 @ b")
|
||||
boxes = curs.fetchall()
|
||||
print "Found %d boxes with at least a point inside:" % len(boxes)
|
||||
print("Found %d boxes with at least a point inside:" % len(boxes))
|
||||
for box in boxes:
|
||||
print " ", box[0].show()
|
||||
print(" ", box[0].show())
|
||||
|
||||
curs.execute("DROP TABLE test_cast")
|
||||
conn.commit()
|
||||
|
|
|
@@ -72,14 +72,10 @@ _ext.register_adapter(type(None), _ext.NoneAdapter)
 # Register the Decimal adapter here instead of in the C layer.
 # This way a new class is registered for each sub-interpreter.
 # See ticket #52
-try:
-    from decimal import Decimal
-except ImportError:
-    pass
-else:
-    from psycopg2._psycopg import Decimal as Adapter
-    _ext.register_adapter(Decimal, Adapter)
-    del Decimal, Adapter
+from decimal import Decimal
+from psycopg2._psycopg import Decimal as Adapter
+_ext.register_adapter(Decimal, Adapter)
+del Decimal, Adapter
 
 
 def connect(dsn=None, connection_factory=None, cursor_factory=None, **kwargs):
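The hunk above now registers the Decimal adapter unconditionally, since the decimal module is always available on the Python versions this branch still supports. A minimal usage sketch of the effect; the DSN is a placeholder and not part of this change:

    from decimal import Decimal
    import psycopg2

    conn = psycopg2.connect("dbname=psycopg2_test")  # hypothetical DSN
    curs = conn.cursor()
    # Decimal parameters are adapted to NUMERIC, and NUMERIC results come
    # back as Decimal objects through the adapter registered above.
    curs.execute("SELECT %s::numeric + 1", (Decimal("3.14"),))
    print(curs.fetchone()[0])  # Decimal('4.14')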
lib/_json.py | 40
@@ -27,22 +27,13 @@ extensions importing register_json from extras.
 # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
 # License for more details.
 
+import json
 import sys
 
 from psycopg2._psycopg import ISQLQuote, QuotedString
 from psycopg2._psycopg import new_type, new_array_type, register_type
 
 
-# import the best json implementation available
-if sys.version_info[:2] >= (2, 6):
-    import json
-else:
-    try:
-        import simplejson as json
-    except ImportError:
-        json = None
-
-
 # oids from PostgreSQL 9.2
 JSON_OID = 114
 JSONARRAY_OID = 199
@@ -58,22 +49,14 @@ class Json(object):
     :sql:`json` data type.
 
     `!Json` can be used to wrap any object supported by the provided *dumps*
-    function. If none is provided, the standard :py:func:`json.dumps()` is
-    used (`!simplejson` for Python < 2.6;
-    `~psycopg2.extensions.ISQLQuote.getquoted()` will raise `!ImportError` if
-    the module is not available).
+    function. If none is provided, the standard :py:func:`json.dumps()` is
+    used.
 
     """
     def __init__(self, adapted, dumps=None):
         self.adapted = adapted
         self._conn = None
-
-        if dumps is not None:
-            self._dumps = dumps
-        elif json is not None:
-            self._dumps = json.dumps
-        else:
-            self._dumps = None
+        self._dumps = dumps or json.dumps
 
     def __conform__(self, proto):
         if proto is ISQLQuote:

@@ -86,13 +69,7 @@ class Json(object):
         provided in the constructor. You can override this method to create a
         customized JSON wrapper.
         """
-        dumps = self._dumps
-        if dumps is not None:
-            return dumps(obj)
-        else:
-            raise ImportError(
-                "json module not available: "
-                "you should provide a dumps function")
+        return self._dumps(obj)
 
     def prepare(self, conn):
         self._conn = conn
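For reference, a hedged usage sketch of the simplified Json wrapper above; the cursor and the target table are assumed to exist and are not part of this diff:

    import json
    from functools import partial
    from psycopg2.extras import Json

    # By default Json now always serializes with the standard library
    # json.dumps; a custom dumps callable can still be passed explicitly.
    curs.execute("INSERT INTO mytable (jsondata) VALUES (%s)",
                 [Json({'a': 100, 'b': ['x', 'y']})])
    curs.execute("INSERT INTO mytable (jsondata) VALUES (%s)",
                 [Json({'msg': 'àèìòù'},
                       dumps=partial(json.dumps, ensure_ascii=False))])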
@@ -181,10 +158,7 @@ def register_default_jsonb(conn_or_curs=None, globally=False, loads=None):
 def _create_json_typecasters(oid, array_oid, loads=None, name='JSON'):
     """Create typecasters for json data type."""
     if loads is None:
-        if json is None:
-            raise ImportError("no json module available")
-        else:
-            loads = json.loads
+        loads = json.loads
 
     def typecast_json(s, cur):
         if s is None:

@@ -220,7 +194,7 @@ def _get_json_oids(conn_or_curs, name='json'):
     r = curs.fetchone()
 
     # revert the status of the connection as before the command
-    if (conn_status != STATUS_IN_TRANSACTION and not conn.autocommit):
+    if conn_status != STATUS_IN_TRANSACTION and not conn.autocommit:
         conn.rollback()
 
     if not r:
@@ -181,11 +181,8 @@ class Range(object):
         return self.__gt__(other)
 
     def __getstate__(self):
-        return dict(
-            (slot, getattr(self, slot))
-            for slot in self.__slots__
-            if hasattr(self, slot)
-        )
+        return {slot: getattr(self, slot)
+                for slot in self.__slots__ if hasattr(self, slot)}
 
     def __setstate__(self, state):
         for slot, value in state.items():

@@ -163,7 +163,7 @@ def make_dsn(dsn=None, **kwargs):
         kwargs['dbname'] = kwargs.pop('database')
 
     # Drop the None arguments
-    kwargs = dict((k, v) for (k, v) in kwargs.iteritems() if v is not None)
+    kwargs = {k: v for (k, v) in kwargs.iteritems() if v is not None}
 
     if dsn is not None:
         tmp = parse_dsn(dsn)
@@ -29,6 +29,7 @@ import os as _os
 import sys as _sys
 import time as _time
 import re as _re
+from collections import namedtuple
 
 try:
     import logging as _logging

@@ -109,16 +110,16 @@ class DictCursorBase(_cursor):
         try:
             if self._prefetch:
                 res = super(DictCursorBase, self).__iter__()
-                first = res.next()
+                first = next(res)
             if self._query_executed:
                 self._build_index()
             if not self._prefetch:
                 res = super(DictCursorBase, self).__iter__()
-                first = res.next()
+                first = next(res)
 
             yield first
             while 1:
-                yield res.next()
+                yield next(res)
         except StopIteration:
             return
 
@@ -263,7 +264,7 @@ class RealDictCursor(DictCursorBase):
 class RealDictRow(dict):
     """A `!dict` subclass representing a data record."""
 
-    __slots__ = ('_column_mapping')
+    __slots__ = ('_column_mapping',)
 
     def __init__(self, cursor):
         dict.__init__(self)

@@ -279,7 +280,7 @@ class RealDictRow(dict):
         return dict.__setitem__(self, name, value)
 
     def __getstate__(self):
-        return (self.copy(), self._column_mapping[:])
+        return self.copy(), self._column_mapping[:]
 
     def __setstate__(self, data):
         self.update(data[0])

@@ -348,7 +349,7 @@ class NamedTupleCursor(_cursor):
     def __iter__(self):
         try:
             it = super(NamedTupleCursor, self).__iter__()
-            t = it.next()
+            t = next(it)
 
             nt = self.Record
             if nt is None:
@@ -357,18 +358,22 @@ class NamedTupleCursor(_cursor):
             yield nt._make(t)
 
             while 1:
-                yield nt._make(it.next())
+                yield nt._make(next(it))
         except StopIteration:
             return
 
-    try:
-        from collections import namedtuple
-    except ImportError, _exc:
-        def _make_nt(self):
-            raise self._exc
-    else:
-        def _make_nt(self, namedtuple=namedtuple):
-            return namedtuple("Record", [d[0] for d in self.description or ()])
+    def _make_nt(self):
+        def f(s):
+            # NOTE: Python 3 actually allows unicode chars in fields
+            s = _re.sub('[^a-zA-Z0-9_]', '_', s)
+            # Python identifier cannot start with numbers, namedtuple fields
+            # cannot start with underscore. So...
+            if _re.match('^[0-9_]', s):
+                s = 'f' + s
+
+            return s
+
+        return namedtuple("Record", [f(d[0]) for d in self.description or ()])
 
 
 class LoggingConnection(_connection):

@@ -455,6 +460,8 @@ class MinTimeLoggingConnection(LoggingConnection):
     def filter(self, msg, curs):
         t = (_time.time() - curs.timestamp) * 1000
         if t > self._mintime:
+            if _sys.version_info[0] >= 3 and isinstance(msg, bytes):
+                msg = msg.decode(_ext.encodings[self.encoding], 'replace')
             return msg + _os.linesep + " (execution time: %d ms)" % t
 
     def cursor(self, *args, **kwargs):
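A hedged sketch of what the new field-name sanitizer above does in practice; the connection parameters are placeholders and the column aliases are deliberately not valid Python identifiers:

    import psycopg2
    import psycopg2.extras

    conn = psycopg2.connect("dbname=psycopg2_test")  # hypothetical DSN
    curs = conn.cursor(cursor_factory=psycopg2.extras.NamedTupleCursor)
    curs.execute('SELECT 1 AS "foo.bar", 2 AS "42"')
    rec = curs.fetchone()
    # f() replaces invalid characters with "_" and prefixes names that
    # start with a digit or underscore with "f".
    print(rec.foo_bar, rec.f42)  # -> 1 2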
@ -722,18 +729,6 @@ def register_inet(oid=None, conn_or_curs=None):
|
|||
return _ext.INET
|
||||
|
||||
|
||||
def register_tstz_w_secs(oids=None, conn_or_curs=None):
|
||||
"""The function used to register an alternate type caster for
|
||||
:sql:`TIMESTAMP WITH TIME ZONE` to deal with historical time zones with
|
||||
seconds in the UTC offset.
|
||||
|
||||
These are now correctly handled by the default type caster, so currently
|
||||
the function doesn't do anything.
|
||||
"""
|
||||
import warnings
|
||||
warnings.warn("deprecated", DeprecationWarning)
|
||||
|
||||
|
||||
def wait_select(conn):
|
||||
"""Wait until a connection or cursor has data available.
|
||||
|
||||
|
@ -1055,14 +1050,8 @@ class CompositeCaster(object):
|
|||
return rv
|
||||
|
||||
def _create_type(self, name, attnames):
|
||||
try:
|
||||
from collections import namedtuple
|
||||
except ImportError:
|
||||
self.type = tuple
|
||||
self._ctor = self.type
|
||||
else:
|
||||
self.type = namedtuple(name, attnames)
|
||||
self._ctor = self.type._make
|
||||
self.type = namedtuple(name, attnames)
|
||||
self._ctor = self.type._make
|
||||
|
||||
@classmethod
|
||||
def _from_db(self, name, conn_or_curs):
|
||||
|
@ -1153,7 +1142,7 @@ def _paginate(seq, page_size):
|
|||
while 1:
|
||||
try:
|
||||
for i in xrange(page_size):
|
||||
page.append(it.next())
|
||||
page.append(next(it))
|
||||
yield page
|
||||
page = []
|
||||
except StopIteration:
|
||||
|
|
|
@ -1,96 +0,0 @@
|
|||
"""psycopg 1.1.x compatibility module
|
||||
|
||||
This module uses the new style connection and cursor types to build a psycopg
|
||||
1.1.1.x compatibility layer. It should be considered a temporary hack to run
|
||||
old code while porting to psycopg 2. Import it as follows::
|
||||
|
||||
from psycopg2 import psycopg1 as psycopg
|
||||
"""
|
||||
# psycopg/psycopg1.py - psycopg 1.1.x compatibility module
|
||||
#
|
||||
# Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
# by the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# In addition, as a special exception, the copyright holders give
|
||||
# permission to link this program with the OpenSSL library (or with
|
||||
# modified versions of OpenSSL that use the same license as OpenSSL),
|
||||
# and distribute linked combinations including the two.
|
||||
#
|
||||
# You must obey the GNU Lesser General Public License in all respects for
|
||||
# all of the code used other than OpenSSL.
|
||||
#
|
||||
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
|
||||
# License for more details.
|
||||
|
||||
from psycopg2 import _psycopg as _2psycopg # noqa
|
||||
from psycopg2.extensions import cursor as _2cursor
|
||||
from psycopg2.extensions import connection as _2connection
|
||||
|
||||
from psycopg2 import * # noqa
|
||||
from psycopg2 import extensions as _ext
|
||||
_2connect = connect
|
||||
|
||||
|
||||
def connect(*args, **kwargs):
|
||||
"""connect(dsn, ...) -> new psycopg 1.1.x compatible connection object"""
|
||||
kwargs['connection_factory'] = connection
|
||||
conn = _2connect(*args, **kwargs)
|
||||
conn.set_isolation_level(_ext.ISOLATION_LEVEL_READ_COMMITTED)
|
||||
return conn
|
||||
|
||||
|
||||
class connection(_2connection):
|
||||
"""psycopg 1.1.x connection."""
|
||||
|
||||
def cursor(self):
|
||||
"""cursor() -> new psycopg 1.1.x compatible cursor object"""
|
||||
return _2connection.cursor(self, cursor_factory=cursor)
|
||||
|
||||
def autocommit(self, on_off=1):
|
||||
"""autocommit(on_off=1) -> switch autocommit on (1) or off (0)"""
|
||||
if on_off > 0:
|
||||
self.set_isolation_level(_ext.ISOLATION_LEVEL_AUTOCOMMIT)
|
||||
else:
|
||||
self.set_isolation_level(_ext.ISOLATION_LEVEL_READ_COMMITTED)
|
||||
|
||||
|
||||
class cursor(_2cursor):
|
||||
"""psycopg 1.1.x cursor.
|
||||
|
||||
Note that this cursor implements the exact procedure used by psycopg 1 to
|
||||
build dictionaries out of result rows. The DictCursor in the
|
||||
psycopg.extras modules implements a much better and faster algorithm.
|
||||
"""
|
||||
|
||||
def __build_dict(self, row):
|
||||
res = {}
|
||||
for i in range(len(self.description)):
|
||||
res[self.description[i][0]] = row[i]
|
||||
return res
|
||||
|
||||
def dictfetchone(self):
|
||||
row = _2cursor.fetchone(self)
|
||||
if row:
|
||||
return self.__build_dict(row)
|
||||
else:
|
||||
return row
|
||||
|
||||
def dictfetchmany(self, size):
|
||||
res = []
|
||||
rows = _2cursor.fetchmany(self, size)
|
||||
for row in rows:
|
||||
res.append(self.__build_dict(row))
|
||||
return res
|
||||
|
||||
def dictfetchall(self):
|
||||
res = []
|
||||
rows = _2cursor.fetchall(self)
|
||||
for row in rows:
|
||||
res.append(self.__build_dict(row))
|
||||
return res
|
lib/sql.py | 14

@@ -203,12 +203,12 @@ class SQL(Composable):
         :rtype: `Composed`
 
         The method is similar to the Python `str.format()` method: the string
-        template supports auto-numbered (``{}``, only available from Python
-        2.7), numbered (``{0}``, ``{1}``...), and named placeholders
-        (``{name}``), with positional arguments replacing the numbered
-        placeholders and keywords replacing the named ones. However placeholder
-        modifiers (``{0!r}``, ``{0:<10}``) are not supported. Only
-        `!Composable` objects can be passed to the template.
+        template supports auto-numbered (``{}``), numbered (``{0}``,
+        ``{1}``...), and named placeholders (``{name}``), with positional
+        arguments replacing the numbered placeholders and keywords replacing
+        the named ones. However placeholder modifiers (``{0!r}``, ``{0:<10}``)
+        are not supported. Only `!Composable` objects can be passed to the
+        template.
 
         Example::
 

@@ -276,7 +276,7 @@ class SQL(Composable):
         rv = []
         it = iter(seq)
         try:
-            rv.append(it.next())
+            rv.append(next(it))
         except StopIteration:
             pass
         else:
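The docstring change above reflects that auto-numbered placeholders no longer need a Python 2.7 caveat. A short usage sketch of format(); the table and column names are made up for illustration:

    from psycopg2 import sql

    query = sql.SQL("SELECT {} FROM {} WHERE {} = %s").format(
        sql.SQL(', ').join([sql.Identifier('id'), sql.Identifier('name')]),
        sql.Identifier('users'),
        sql.Identifier('name'))
    # curs.execute(query, ("wulfgar",)) would run the composed statement
    # with the identifiers safely quoted and %s left for the value.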
@@ -75,7 +75,7 @@ class FixedOffsetTimezone(datetime.tzinfo):
 
     def __getinitargs__(self):
         offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60
-        return (offset_mins, self._name)
+        return offset_mins, self._name
 
     def utcoffset(self, dt):
         return self._offset
@ -100,7 +100,7 @@ _pydatetime_string_delta(pydatetimeObject *self)
|
|||
|
||||
char buffer[8];
|
||||
int i;
|
||||
int a = obj->microseconds;
|
||||
int a = PyDateTime_DELTA_GET_MICROSECONDS(obj);
|
||||
|
||||
for (i=0; i < 6 ; i++) {
|
||||
buffer[5-i] = '0' + (a % 10);
|
||||
|
@ -109,7 +109,9 @@ _pydatetime_string_delta(pydatetimeObject *self)
|
|||
buffer[6] = '\0';
|
||||
|
||||
return Bytes_FromFormat("'%d days %d.%s seconds'::interval",
|
||||
obj->days, obj->seconds, buffer);
|
||||
PyDateTime_DELTA_GET_DAYS(obj),
|
||||
PyDateTime_DELTA_GET_SECONDS(obj),
|
||||
buffer);
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
|
|
|
@ -295,4 +295,3 @@ Bytes_Format(PyObject *format, PyObject *args)
|
|||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
|
|
|
@ -67,6 +67,9 @@ const char *srv_state_guc[] = {
|
|||
};
|
||||
|
||||
|
||||
const int SRV_STATE_UNCHANGED = -1;
|
||||
|
||||
|
||||
/* Return a new "string" from a char* from the database.
|
||||
*
|
||||
* On Py2 just get a string, on Py3 decode it in the connection codec.
|
||||
|
@ -1188,8 +1191,10 @@ conn_set_session(connectionObject *self, int autocommit,
|
|||
int rv = -1;
|
||||
PGresult *pgres = NULL;
|
||||
char *error = NULL;
|
||||
int want_autocommit = autocommit == SRV_STATE_UNCHANGED ?
|
||||
self->autocommit : autocommit;
|
||||
|
||||
if (deferrable != self->deferrable && self->server_version < 90100) {
|
||||
if (deferrable != SRV_STATE_UNCHANGED && self->server_version < 90100) {
|
||||
PyErr_SetString(ProgrammingError,
|
||||
"the 'deferrable' setting is only available"
|
||||
" from PostgreSQL 9.1");
|
||||
|
@ -1209,24 +1214,24 @@ conn_set_session(connectionObject *self, int autocommit,
|
|||
Py_BEGIN_ALLOW_THREADS;
|
||||
pthread_mutex_lock(&self->lock);
|
||||
|
||||
if (autocommit) {
|
||||
/* we are in autocommit state, so no BEGIN will be issued:
|
||||
if (want_autocommit) {
|
||||
/* we are or are going in autocommit state, so no BEGIN will be issued:
|
||||
* configure the session with the characteristics requested */
|
||||
if (isolevel != self->isolevel) {
|
||||
if (isolevel != SRV_STATE_UNCHANGED) {
|
||||
if (0 > pq_set_guc_locked(self,
|
||||
"default_transaction_isolation", srv_isolevels[isolevel],
|
||||
&pgres, &error, &_save)) {
|
||||
goto endlock;
|
||||
}
|
||||
}
|
||||
if (readonly != self->readonly) {
|
||||
if (readonly != SRV_STATE_UNCHANGED) {
|
||||
if (0 > pq_set_guc_locked(self,
|
||||
"default_transaction_read_only", srv_state_guc[readonly],
|
||||
&pgres, &error, &_save)) {
|
||||
goto endlock;
|
||||
}
|
||||
}
|
||||
if (deferrable != self->deferrable) {
|
||||
if (deferrable != SRV_STATE_UNCHANGED) {
|
||||
if (0 > pq_set_guc_locked(self,
|
||||
"default_transaction_deferrable", srv_state_guc[deferrable],
|
||||
&pgres, &error, &_save)) {
|
||||
|
@ -1251,7 +1256,7 @@ conn_set_session(connectionObject *self, int autocommit,
|
|||
goto endlock;
|
||||
}
|
||||
}
|
||||
if (self->deferrable != STATE_DEFAULT) {
|
||||
if (self->server_version >= 90100 && self->deferrable != STATE_DEFAULT) {
|
||||
if (0 > pq_set_guc_locked(self,
|
||||
"default_transaction_deferrable", "default",
|
||||
&pgres, &error, &_save)) {
|
||||
|
@ -1260,10 +1265,18 @@ conn_set_session(connectionObject *self, int autocommit,
|
|||
}
|
||||
}
|
||||
|
||||
self->autocommit = autocommit;
|
||||
self->isolevel = isolevel;
|
||||
self->readonly = readonly;
|
||||
self->deferrable = deferrable;
|
||||
if (autocommit != SRV_STATE_UNCHANGED) {
|
||||
self->autocommit = autocommit;
|
||||
}
|
||||
if (isolevel != SRV_STATE_UNCHANGED) {
|
||||
self->isolevel = isolevel;
|
||||
}
|
||||
if (readonly != SRV_STATE_UNCHANGED) {
|
||||
self->readonly = readonly;
|
||||
}
|
||||
if (deferrable != SRV_STATE_UNCHANGED) {
|
||||
self->deferrable = deferrable;
|
||||
}
|
||||
rv = 0;
|
||||
|
||||
endlock:
|
||||
|
|
|
@ -39,6 +39,7 @@
|
|||
extern HIDDEN const char *srv_isolevels[];
|
||||
extern HIDDEN const char *srv_readonly[];
|
||||
extern HIDDEN const char *srv_deferrable[];
|
||||
extern HIDDEN const int SRV_STATE_UNCHANGED;
|
||||
|
||||
/** DBAPI methods **/
|
||||
|
||||
|
@ -561,10 +562,10 @@ psyco_conn_set_session(connectionObject *self, PyObject *args, PyObject *kwargs)
|
|||
PyObject *deferrable = Py_None;
|
||||
PyObject *autocommit = Py_None;
|
||||
|
||||
int c_isolevel = self->isolevel;
|
||||
int c_readonly = self->readonly;
|
||||
int c_deferrable = self->deferrable;
|
||||
int c_autocommit = self->autocommit;
|
||||
int c_isolevel = SRV_STATE_UNCHANGED;
|
||||
int c_readonly = SRV_STATE_UNCHANGED;
|
||||
int c_deferrable = SRV_STATE_UNCHANGED;
|
||||
int c_autocommit = SRV_STATE_UNCHANGED;
|
||||
|
||||
static char *kwlist[] =
|
||||
{"isolation_level", "readonly", "deferrable", "autocommit", NULL};
|
||||
|
@ -637,7 +638,7 @@ psyco_conn_autocommit_set(connectionObject *self, PyObject *pyvalue)
|
|||
if (!_psyco_set_session_check_setter_wrapper(self)) { return -1; }
|
||||
if (-1 == (value = PyObject_IsTrue(pyvalue))) { return -1; }
|
||||
if (0 > conn_set_session(self, value,
|
||||
self->isolevel, self->readonly, self->deferrable)) {
|
||||
SRV_STATE_UNCHANGED, SRV_STATE_UNCHANGED, SRV_STATE_UNCHANGED)) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
@ -668,8 +669,8 @@ psyco_conn_isolation_level_set(connectionObject *self, PyObject *pyvalue)
|
|||
|
||||
if (!_psyco_set_session_check_setter_wrapper(self)) { return -1; }
|
||||
if (0 > (value = _psyco_conn_parse_isolevel(pyvalue))) { return -1; }
|
||||
if (0 > conn_set_session(self, self->autocommit,
|
||||
value, self->readonly, self->deferrable)) {
|
||||
if (0 > conn_set_session(self, SRV_STATE_UNCHANGED,
|
||||
value, SRV_STATE_UNCHANGED, SRV_STATE_UNCHANGED)) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
@ -715,13 +716,13 @@ psyco_conn_set_isolation_level(connectionObject *self, PyObject *args)
|
|||
|
||||
if (level == 0) {
|
||||
if (0 > conn_set_session(self, 1,
|
||||
self->isolevel, self->readonly, self->deferrable)) {
|
||||
SRV_STATE_UNCHANGED, SRV_STATE_UNCHANGED, SRV_STATE_UNCHANGED)) {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (0 > conn_set_session(self, 0,
|
||||
level, self->readonly, self->deferrable)) {
|
||||
level, SRV_STATE_UNCHANGED, SRV_STATE_UNCHANGED)) {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
|
@ -767,8 +768,8 @@ psyco_conn_readonly_set(connectionObject *self, PyObject *pyvalue)
|
|||
|
||||
if (!_psyco_set_session_check_setter_wrapper(self)) { return -1; }
|
||||
if (0 > (value = _psyco_conn_parse_onoff(pyvalue))) { return -1; }
|
||||
if (0 > conn_set_session(self, self->autocommit,
|
||||
self->isolevel, value, self->deferrable)) {
|
||||
if (0 > conn_set_session(self, SRV_STATE_UNCHANGED,
|
||||
SRV_STATE_UNCHANGED, value, SRV_STATE_UNCHANGED)) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
@ -813,8 +814,8 @@ psyco_conn_deferrable_set(connectionObject *self, PyObject *pyvalue)
|
|||
|
||||
if (!_psyco_set_session_check_setter_wrapper(self)) { return -1; }
|
||||
if (0 > (value = _psyco_conn_parse_onoff(pyvalue))) { return -1; }
|
||||
if (0 > conn_set_session(self, self->autocommit,
|
||||
self->isolevel, self->readonly, value)) {
|
||||
if (0 > conn_set_session(self, SRV_STATE_UNCHANGED,
|
||||
SRV_STATE_UNCHANGED, SRV_STATE_UNCHANGED, value)) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
|
|
@ -592,8 +592,6 @@ psyco_curs_mogrify(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
return NULL;
|
||||
}
|
||||
|
||||
EXC_IF_CURS_CLOSED(self);
|
||||
|
||||
return _psyco_curs_mogrify(self, operation, vars);
|
||||
}
|
||||
|
||||
|
|
|
@ -295,5 +295,3 @@ PyTypeObject notifyType = {
|
|||
0, /*tp_alloc*/
|
||||
notify_new, /*tp_new*/
|
||||
};
|
||||
|
||||
|
||||
|
|
|
@ -52,6 +52,10 @@
|
|||
#include "win32_support.h"
|
||||
#endif
|
||||
|
||||
#if defined(__sun) && defined(__SVR4)
|
||||
#include "solaris_support.h"
|
||||
#endif
|
||||
|
||||
extern HIDDEN PyObject *psyco_DescriptionType;
|
||||
extern HIDDEN const char *srv_isolevels[];
|
||||
extern HIDDEN const char *srv_readonly[];
|
||||
|
@ -1948,8 +1952,9 @@ pq_fetch(cursorObject *curs, int no_result)
|
|||
}
|
||||
else {
|
||||
Dprintf("pq_fetch: got tuples, discarding them");
|
||||
/* TODO: is there any case in which PQntuples == PQcmdTuples? */
|
||||
_read_rowcount(curs);
|
||||
CLEARPGRES(curs->pgres);
|
||||
curs->rowcount = -1;
|
||||
ex = 0;
|
||||
}
|
||||
break;
|
||||
|
|
|
@ -72,6 +72,10 @@ HIDDEN PyObject *psyco_null = NULL;
|
|||
/* The type of the cursor.description items */
|
||||
HIDDEN PyObject *psyco_DescriptionType = NULL;
|
||||
|
||||
/* macro trick to stringify a macro expansion */
|
||||
#define xstr(s) str(s)
|
||||
#define str(s) #s
|
||||
|
||||
/** connect module-level function **/
|
||||
#define psyco_connect_doc \
|
||||
"_connect(dsn, [connection_factory], [async]) -- New database connection.\n\n"
|
||||
|
@ -990,7 +994,7 @@ INIT_MODULE(_psycopg)(void)
|
|||
psycopg_debug_enabled = 1;
|
||||
#endif
|
||||
|
||||
Dprintf("initpsycopg: initializing psycopg %s", PSYCOPG_VERSION);
|
||||
Dprintf("initpsycopg: initializing psycopg %s", xstr(PSYCOPG_VERSION));
|
||||
|
||||
/* initialize all the new types and then the module */
|
||||
Py_TYPE(&connectionType) = &PyType_Type;
|
||||
|
@ -1122,7 +1126,7 @@ INIT_MODULE(_psycopg)(void)
|
|||
if (!(psyco_DescriptionType = psyco_make_description_type())) { goto exit; }
|
||||
|
||||
/* set some module's parameters */
|
||||
PyModule_AddStringConstant(module, "__version__", PSYCOPG_VERSION);
|
||||
PyModule_AddStringConstant(module, "__version__", xstr(PSYCOPG_VERSION));
|
||||
PyModule_AddStringConstant(module, "__doc__", "psycopg PostgreSQL driver");
|
||||
PyModule_AddIntConstant(module, "__libpq_version__", PG_VERSION_NUM);
|
||||
PyModule_AddIntMacro(module, REPLICATION_PHYSICAL);
|
||||
|
|
|
@@ -31,8 +31,10 @@
 #include <stringobject.h>
 #endif
 
-#if PY_VERSION_HEX < 0x02060000
-# error "psycopg requires Python >= 2.6"
+#if ((PY_VERSION_HEX < 0x02070000) \
+    || ((PY_VERSION_HEX >= 0x03000000) \
+        && (PY_VERSION_HEX < 0x03040000)) )
+# error "psycopg requires Python 2.7 or 3.4+"
 #endif
 
 /* hash() return size changed around version 3.2a4 on 64bit platforms.  Before
@ -44,14 +46,6 @@ typedef long Py_hash_t;
|
|||
typedef unsigned long Py_uhash_t;
|
||||
#endif
|
||||
|
||||
/* Macros defined in Python 2.6 */
|
||||
#ifndef Py_REFCNT
|
||||
#define Py_REFCNT(ob) (((PyObject*)(ob))->ob_refcnt)
|
||||
#define Py_TYPE(ob) (((PyObject*)(ob))->ob_type)
|
||||
#define Py_SIZE(ob) (((PyVarObject*)(ob))->ob_size)
|
||||
#define PyVarObject_HEAD_INIT(x,n) PyObject_HEAD_INIT(x) n,
|
||||
#endif
|
||||
|
||||
/* FORMAT_CODE_PY_SSIZE_T is for Py_ssize_t: */
|
||||
#define FORMAT_CODE_PY_SSIZE_T "%" PY_FORMAT_SIZE_T "d"
|
||||
|
||||
|
@ -93,6 +87,7 @@ typedef unsigned long Py_uhash_t;
|
|||
#ifndef PyNumber_Int
|
||||
#define PyNumber_Int PyNumber_Long
|
||||
#endif
|
||||
|
||||
#endif /* PY_MAJOR_VERSION > 2 */
|
||||
|
||||
#if PY_MAJOR_VERSION < 3
|
||||
|
@ -110,6 +105,10 @@ typedef unsigned long Py_uhash_t;
|
|||
#define Bytes_ConcatAndDel PyString_ConcatAndDel
|
||||
#define _Bytes_Resize _PyString_Resize
|
||||
|
||||
#define PyDateTime_DELTA_GET_DAYS(o) (((PyDateTime_Delta*)o)->days)
|
||||
#define PyDateTime_DELTA_GET_SECONDS(o) (((PyDateTime_Delta*)o)->seconds)
|
||||
#define PyDateTime_DELTA_GET_MICROSECONDS(o) (((PyDateTime_Delta*)o)->microseconds)
|
||||
|
||||
#else
|
||||
|
||||
#define Bytes_Type PyBytes_Type
|
||||
|
|
57
psycopg/solaris_support.c
Normal file
57
psycopg/solaris_support.c
Normal file
|
@ -0,0 +1,57 @@
|
|||
/* solaris_support.c - emulate functions missing on Solaris
|
||||
*
|
||||
* Copyright (C) 2017 My Karlsson <mk@acc.umu.se>
|
||||
* Copyright (c) 2018, Joyent, Inc.
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
* psycopg2 is free software: you can redistribute it and/or modify it
|
||||
* under the terms of the GNU Lesser General Public License as published
|
||||
* by the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* In addition, as a special exception, the copyright holders give
|
||||
* permission to link this program with the OpenSSL library (or with
|
||||
* modified versions of OpenSSL that use the same license as OpenSSL),
|
||||
* and distribute linked combinations including the two.
|
||||
*
|
||||
* You must obey the GNU Lesser General Public License in all respects for
|
||||
* all of the code used other than OpenSSL.
|
||||
*
|
||||
* psycopg2 is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
|
||||
* License for more details.
|
||||
*/
|
||||
|
||||
#define PSYCOPG_MODULE
|
||||
#include "psycopg/psycopg.h"
|
||||
#include "psycopg/solaris_support.h"
|
||||
|
||||
#if defined(__sun) && defined(__SVR4)
|
||||
/* timeradd is missing on Solaris 10 */
|
||||
#ifndef timeradd
|
||||
void
|
||||
timeradd(struct timeval *a, struct timeval *b, struct timeval *c)
|
||||
{
|
||||
c->tv_sec = a->tv_sec + b->tv_sec;
|
||||
c->tv_usec = a->tv_usec + b->tv_usec;
|
||||
if (c->tv_usec >= 1000000) {
|
||||
c->tv_usec -= 1000000;
|
||||
c->tv_sec += 1;
|
||||
}
|
||||
}
|
||||
|
||||
/* timersub is missing on Solaris */
|
||||
void
|
||||
timersub(struct timeval *a, struct timeval *b, struct timeval *c)
|
||||
{
|
||||
c->tv_sec = a->tv_sec - b->tv_sec;
|
||||
c->tv_usec = a->tv_usec - b->tv_usec;
|
||||
if (c->tv_usec < 0) {
|
||||
c->tv_usec += 1000000;
|
||||
c->tv_sec -= 1;
|
||||
}
|
||||
}
|
||||
#endif /* timeradd */
|
||||
#endif /* defined(__sun) && defined(__SVR4) */
|
40
psycopg/solaris_support.h
Normal file
40
psycopg/solaris_support.h
Normal file
|
@ -0,0 +1,40 @@
|
|||
/* solaris_support.h - definitions for solaris_support.c
|
||||
*
|
||||
* Copyright (C) 2017 My Karlsson <mk@acc.umu.se>
|
||||
* Copyright (c) 2018, Joyent, Inc.
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
* psycopg2 is free software: you can redistribute it and/or modify it
|
||||
* under the terms of the GNU Lesser General Public License as published
|
||||
* by the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* In addition, as a special exception, the copyright holders give
|
||||
* permission to link this program with the OpenSSL library (or with
|
||||
* modified versions of OpenSSL that use the same license as OpenSSL),
|
||||
* and distribute linked combinations including the two.
|
||||
*
|
||||
* You must obey the GNU Lesser General Public License in all respects for
|
||||
* all of the code used other than OpenSSL.
|
||||
*
|
||||
* psycopg2 is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
|
||||
* License for more details.
|
||||
*/
|
||||
#ifndef PSYCOPG_SOLARIS_SUPPORT_H
|
||||
#define PSYCOPG_SOLARIS_SUPPORT_H
|
||||
|
||||
#include "psycopg/config.h"
|
||||
|
||||
#if defined(__sun) && defined(__SVR4)
|
||||
#include <sys/time.h>
|
||||
|
||||
#ifndef timeradd
|
||||
extern HIDDEN void timeradd(struct timeval *a, struct timeval *b, struct timeval *c);
|
||||
extern HIDDEN void timersub(struct timeval *a, struct timeval *b, struct timeval *c);
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#endif /* !defined(PSYCOPG_SOLARIS_SUPPORT_H) */
|
|
@ -312,4 +312,3 @@ psycopg_parse_escape(const char *bufin, Py_ssize_t sizein, Py_ssize_t *sizeout)
|
|||
exit:
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
|
|
@ -15,7 +15,7 @@ static long int typecast_BINARY_types[] = {17, 0};
|
|||
static long int typecast_ROWID_types[] = {26, 0};
|
||||
static long int typecast_LONGINTEGERARRAY_types[] = {1016, 0};
|
||||
static long int typecast_INTEGERARRAY_types[] = {1005, 1006, 1007, 0};
|
||||
static long int typecast_FLOATARRAY_types[] = {1017, 1021, 1022, 0};
|
||||
static long int typecast_FLOATARRAY_types[] = {1021, 1022, 0};
|
||||
static long int typecast_DECIMALARRAY_types[] = {1231, 0};
|
||||
static long int typecast_UNICODEARRAY_types[] = {1002, 1003, 1009, 1014, 1015, 0};
|
||||
static long int typecast_STRINGARRAY_types[] = {1002, 1003, 1009, 1014, 1015, 0};
|
||||
|
@ -69,4 +69,3 @@ static typecastObject_initlist typecast_builtins[] = {
|
|||
{"MACADDRARRAY", typecast_MACADDRARRAY_types, typecast_STRINGARRAY_cast, "STRING"},
|
||||
{NULL, NULL, NULL, NULL}
|
||||
};
|
||||
|
||||
|
|
|
@ -250,4 +250,3 @@ typecast_MXINTERVAL_cast(const char *str, Py_ssize_t len, PyObject *curs)
|
|||
#define typecast_DATETIME_cast typecast_MXDATE_cast
|
||||
#define typecast_DATETIMETZ_cast typecast_MXDATE_cast
|
||||
#endif
|
||||
|
||||
|
|
|
@ -168,11 +168,11 @@ psycopg_ensure_bytes(PyObject *obj)
|
|||
PyObject *rv = NULL;
|
||||
if (!obj) { return NULL; }
|
||||
|
||||
if (PyUnicode_CheckExact(obj)) {
|
||||
if (PyUnicode_Check(obj)) {
|
||||
rv = PyUnicode_AsUTF8String(obj);
|
||||
Py_DECREF(obj);
|
||||
}
|
||||
else if (Bytes_CheckExact(obj)) {
|
||||
else if (Bytes_Check(obj)) {
|
||||
rv = obj;
|
||||
}
|
||||
else {
|
||||
|
@ -282,7 +282,7 @@ exit:
|
|||
|
||||
/* Make a connection string out of a string and a dictionary of arguments.
|
||||
*
|
||||
* Helper to call psycopg2.extensions.make_dns()
|
||||
* Helper to call psycopg2.extensions.make_dsn()
|
||||
*/
|
||||
PyObject *
|
||||
psycopg_make_dsn(PyObject *dsn, PyObject *kwargs)
|
||||
|
|
|
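Editor's note (not part of the diff): the comment fix above points at the real helper name, psycopg2.extensions.make_dsn(). A short, hedged illustration of what the C helper wraps (connection values here are placeholders):

    from psycopg2.extensions import make_dsn

    # Merge a base dsn string with keyword overrides; the result is a
    # normalized "key=value" connection string (key order may vary).
    print(make_dsn("dbname=test", user="postgres", application_name="demo"))
    # e.g. "dbname=test user=postgres application_name=demo"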
@@ -73,7 +73,6 @@
    <None Include="lib\extras.py" />
    <None Include="lib\__init__.py" />
    <None Include="lib\pool.py" />
    <None Include="lib\psycopg1.py" />
    <None Include="lib\tz.py" />
    <None Include="psycopg\adapter_asis.h" />
    <None Include="psycopg\adapter_binary.h" />

@@ -98,8 +97,6 @@
    <None Include="psycopg\typecast.h" />
    <None Include="psycopg\typecast_binary.h" />
    <None Include="psycopg\win32_support.h" />
    <None Include="scripts\buildtypes.py" />
    <None Include="scripts\maketypes.sh" />
    <None Include="ZPsycopgDA\dtml\add.dtml" />
    <None Include="ZPsycopgDA\dtml\browse.dtml" />
    <None Include="ZPsycopgDA\dtml\edit.dtml" />

44  psycopg2.sln

@@ -1,44 +0,0 @@

Microsoft Visual Studio Solution File, Format Version 10.00
# Visual Studio 2008
Project("{2857B73E-F847-4B02-9238-064979017E93}") = "psycopg2", "psycopg2.cproj", "{CFD80D18-3EE5-49ED-992A-E6D433BC7641}"
EndProject
Global
    GlobalSection(SolutionConfigurationPlatforms) = preSolution
        Debug|Any CPU = Debug|Any CPU
        Release|Any CPU = Release|Any CPU
    EndGlobalSection
    GlobalSection(ProjectConfigurationPlatforms) = postSolution
        {CFD80D18-3EE5-49ED-992A-E6D433BC7641}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {CFD80D18-3EE5-49ED-992A-E6D433BC7641}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {CFD80D18-3EE5-49ED-992A-E6D433BC7641}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {CFD80D18-3EE5-49ED-992A-E6D433BC7641}.Release|Any CPU.Build.0 = Release|Any CPU
    EndGlobalSection
    GlobalSection(MonoDevelopProperties) = preSolution
        StartupItem = psycopg2.cproj
        Policies = $0
        $0.TextStylePolicy = $1
        $1.FileWidth = 120
        $1.TabWidth = 4
        $1.inheritsSet = Mono
        $1.inheritsScope = text/plain
        $0.DotNetNamingPolicy = $2
        $2.DirectoryNamespaceAssociation = None
        $2.ResourceNamePolicy = FileName
        $0.StandardHeader = $3
        $3.Text =
        $3.IncludeInNewFiles = False
        $0.TextStylePolicy = $4
        $4.FileWidth = 72
        $4.NoTabsAfterNonTabs = True
        $4.RemoveTrailingWhitespace = True
        $4.inheritsSet = VisualStudio
        $4.inheritsScope = text/plain
        $4.scope = text/x-readme
        $0.TextStylePolicy = $5
        $5.inheritsSet = VisualStudio
        $5.inheritsScope = text/plain
        $5.scope = text/plain
        name = psycopg2
    EndGlobalSection
EndGlobal

@@ -27,5 +27,4 @@ curs = conn.cursor()
#print curs.fetchone()

curs.execute("SELECT %s", ([1,2,None],))
print curs.fetchone()
print(curs.fetchone())
@@ -15,22 +15,21 @@ curs = conn.cursor()

def sleep(curs):
    while not curs.isready():
        print "."
        print(".")
        time.sleep(.1)

#curs.execute("""
# DECLARE zz INSENSITIVE SCROLL CURSOR WITH HOLD FOR
# SELECT now();
# FOR READ ONLY;""", async = 1)
curs.execute("SELECT now() AS foo", async=1);
curs.execute("SELECT now() AS foo", async=1)
sleep(curs)
print curs.fetchall()
print(curs.fetchall())

#curs.execute("""
# FETCH FORWARD 1 FROM zz;""", async = 1)
curs.execute("SELECT now() AS bar", async=1);
print curs.fetchall()
curs.execute("SELECT now() AS bar", async=1)
print(curs.fetchall())

curs.execute("SELECT now() AS bar");
curs.execute("SELECT now() AS bar")
sleep(curs)
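Editor's note (not part of the diff): this sandbox script predates the current asynchronous API (cursor.isready() and the async= execute flag are long gone). A rough modern equivalent, sketched with the documented connection-level polling loop and the async_ keyword accepted by recent psycopg2 releases (database name is a placeholder):

    import select
    import psycopg2
    from psycopg2 import extensions

    def wait(conn):
        # Standard polling loop from the psycopg2 docs: block on the
        # connection's socket until the pending command has finished.
        while True:
            state = conn.poll()
            if state == extensions.POLL_OK:
                break
            elif state == extensions.POLL_WRITE:
                select.select([], [conn.fileno()], [])
            elif state == extensions.POLL_READ:
                select.select([conn.fileno()], [], [])
            else:
                raise psycopg2.OperationalError("poll() returned %s" % state)

    aconn = psycopg2.connect("dbname=test", async_=1)
    wait(aconn)
    acurs = aconn.cursor()
    acurs.execute("SELECT now() AS foo")
    wait(aconn)
    print(acurs.fetchall())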
@@ -17,7 +17,7 @@ def query_worker(dsn):
        break

if len(sys.argv) != 2:
    print 'usage: %s DSN' % sys.argv[0]
    print('usage: %s DSN' % sys.argv[0])
    sys.exit(1)
th = threading.Thread(target=query_worker, args=(sys.argv[1],))
th.setDaemon(True)

@@ -12,4 +12,4 @@ o = psycopg2.connect("dbname=test")
c = o.cursor()
c.execute("SELECT NULL::decimal(10,2)")
n = c.fetchone()[0]
print n, type(n)
print(n, type(n))

@@ -4,15 +4,15 @@ con = psycopg2.connect("dbname=test")

cur = con.cursor()
cur.execute("SELECT %s::regtype::oid", ('bytea', ))
print cur.fetchone()[0]
print(cur.fetchone()[0])
# 17

cur.execute("CREATE DOMAIN thing AS bytea")
cur.execute("SELECT %s::regtype::oid", ('thing', ))
print cur.fetchone()[0]
print(cur.fetchone()[0])
#62148

cur.execute("CREATE TABLE thingrel (thingcol thing)")
cur.execute("SELECT * FROM thingrel")
print cur.description
print(cur.description)
#(('thingcol', 17, None, -1, None, None, None),)
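Editor's note (not part of the diff): once the domain's OID is known (62148 in the sample output above, but it is database-specific), the natural next step is to register a caster for it. A hedged sketch using the public extensions API, with an identity adapter as a placeholder:

    import psycopg2
    import psycopg2.extensions

    conn = psycopg2.connect("dbname=test")
    cur = conn.cursor()

    # Look the OID up at runtime instead of hard-coding it
    cur.execute("SELECT 'thing'::regtype::oid")
    thing_oid = cur.fetchone()[0]

    # new_type(oids, name, adapter): the adapter receives the raw string
    # value (or None) and the cursor, and returns the Python object
    THING = psycopg2.extensions.new_type(
        (thing_oid,), "THING", lambda value, cursor: value)
    psycopg2.extensions.register_type(THING)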
@@ -5,15 +5,14 @@ c = o.cursor()

def sql():
    c.execute("SELECT 1.23 AS foo")
    print 1, c.fetchone()
    print(1, c.fetchone())
    #print c.description
    c.execute("SELECT 1.23::float AS foo")
    print 2, c.fetchone()
    print(2, c.fetchone())
    #print c.description

print "BEFORE"
print("BEFORE")
sql()
import gtk
print "AFTER"
print("AFTER")
sql()

@@ -6,9 +6,8 @@ curs = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

curs.execute("SELECT '2005-2-12'::date AS foo, 'boo!' as bar")
for x in curs.fetchall():
    print type(x), x[0], x[1], x['foo'], x['bar']

    print(type(x), x[0], x[1], x['foo'], x['bar'])

curs.execute("SELECT '2005-2-12'::date AS foo, 'boo!' as bar")
for x in curs:
    print type(x), x[0], x[1], x['foo'], x['bar']

    print(type(x), x[0], x[1], x['foo'], x['bar'])
@@ -14,7 +14,7 @@ two functions:
# leak() will cause increasingly more RAM to be used by the script.
$ python <script_nam> leak

# noleak() does not have the RAM usage problem. The only difference
# noleak() does not have the RAM usage problem. The only difference
# between it and leak() is that 'rows' is created once, before the loop.
$ python <script_name> noleak

@@ -43,7 +43,7 @@ def leak():
                row = {'foo': i}
                rows.append(row)
            count += 1
            print "loop count:", count
            print("loop count:", count)
            cursor.executemany(insert, rows)
            connection.commit()
        except psycopg2.IntegrityError:

@@ -59,7 +59,7 @@ def noleak():
    while 1:
        try:
            count += 1
            print "loop count:", count
            print("loop count:", count)
            cursor.executemany(insert, rows)
            connection.commit()
        except psycopg2.IntegrityError:

@@ -72,12 +72,11 @@ try:
    elif 'noleak' == sys.argv[1]:
        run_function = noleak
    else:
        print usage
        print(usage)
        sys.exit()
except IndexError:
    print usage
    print(usage)
    sys.exit()

# Run leak() or noleak(), whichever was indicated on the command line
run_function()
@@ -18,8 +18,8 @@ class O(object):

o = O('R%', second='S%')

print o[0]
print o['second']
print(o[0])
print(o['second'])


#-------------------------------------------------------------------------------

@@ -28,8 +28,8 @@ import psycopg2 as dbapi

conn = dbapi.connect(database='test')




cursor = conn.cursor()
cursor.execute("""

@@ -40,6 +40,4 @@ cursor.execute("""
""", (o,))

for row in cursor:
    print row


    print(row)
@@ -5,7 +5,7 @@ class Portal(psycopg2.extensions.cursor):
    def __init__(self, name, curs):
        psycopg2.extensions.cursor.__init__(
            self, curs.connection, '"'+name+'"')

CURSOR = psycopg2.extensions.new_type((1790,), "CURSOR", Portal)
psycopg2.extensions.register_type(CURSOR)

@@ -15,10 +15,10 @@ curs = conn.cursor()
curs.execute("SELECT reffunc2()")

portal = curs.fetchone()[0]
print portal.fetchone()
print portal.fetchmany(2)
print(portal.fetchone())
print(portal.fetchmany(2))
portal.scroll(0, 'absolute')
print portal.fetchall()
print(portal.fetchall())

#print curs.rowcount
@@ -3,11 +3,10 @@ class B(object):
        if x: self._o = True
        else: self._o = False
    def __getattribute__(self, attr):
        print "ga called", attr
        print("ga called", attr)
        return object.__getattribute__(self, attr)
    def _sqlquote(self):
        if self._o == True:
        if self._o:
            return 'It is True'
        else:
            return 'It is False'

@@ -8,4 +8,4 @@ for i in range(20000):
    datafile = os.popen('ps -p %s -o rss' % os.getpid())
    line = datafile.readlines(2)[1].strip()
    datafile.close()
    print str(i) + '\t' + line
    print(str(i) + '\t' + line)

@@ -33,7 +33,7 @@ def g():
        line = datafile.readlines(2)[1].strip()
        datafile.close()
        n = 30
        print str(k*n) + '\t' + line
        print(str(k*n) + '\t' + line)
        k += 1

while threading.activeCount()>1:
@@ -10,28 +10,28 @@ conn = psycopg2.connect("dbname=test")
#conn.set_client_encoding("xxx")
curs = conn.cursor()
curs.execute("SELECT '2005-2-12'::date AS foo")
print curs.fetchall()
print(curs.fetchall())
curs.execute("SELECT '10:23:60'::time AS foo")
print curs.fetchall()
print(curs.fetchall())
curs.execute("SELECT '10:23:59.895342'::time AS foo")
print curs.fetchall()
print(curs.fetchall())
curs.execute("SELECT '0:0:12.31423'::time with time zone AS foo")
print curs.fetchall()
print(curs.fetchall())
curs.execute("SELECT '0:0:12+01:30'::time with time zone AS foo")
print curs.fetchall()
print(curs.fetchall())
curs.execute("SELECT '2005-2-12 10:23:59.895342'::timestamp AS foo")
print curs.fetchall()
print(curs.fetchall())
curs.execute("SELECT '2005-2-12 10:23:59.895342'::timestamp with time zone AS foo")
print curs.fetchall()
print(curs.fetchall())

#print curs.fetchmany(2)
#print curs.fetchall()

def sleep(curs):
    while not curs.isready():
        print "."
        print(".")
        time.sleep(.1)

#curs.execute("""
# DECLARE zz INSENSITIVE SCROLL CURSOR WITH HOLD FOR
# SELECT now();

@@ -47,4 +47,3 @@ def sleep(curs):

#curs.execute("SELECT now() AS bar");
#sleep(curs)

@@ -4,6 +4,5 @@ import psycopg2.extras
conn = psycopg2.connect("dbname=test")
curs = conn.cursor()
curs.execute("SELECT true AS foo WHERE 'a' in %s", (("aa", "bb"),))
print curs.fetchall()
print curs.query

print(curs.fetchall())
print(curs.query)
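Editor's note (not part of the diff): a related trick, for reference — cursor.mogrify() returns the same rendered query that curs.query shows after execution, but without sending anything to the server (same test database assumption as in the script above):

    import psycopg2

    conn = psycopg2.connect("dbname=test")
    curs = conn.cursor()
    print(curs.mogrify("SELECT true AS foo WHERE 'a' in %s", (("aa", "bb"),)))
    # e.g. b"SELECT true AS foo WHERE 'a' in ('aa', 'bb')"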
@@ -40,4 +40,3 @@ dbconn.commit()

cursor.close()
dbconn.close()


@@ -52,7 +52,7 @@ signal.signal(signal.SIGHUP, handler)

def worker():
    while 1:
        print "I'm working"
        print("I'm working")
        sleep(1)

eventlet.spawn(worker)

@@ -61,21 +61,21 @@ eventlet.spawn(worker)
# You can unplug the network cable etc. here.
# Kill -HUP will raise an exception in the callback.

print "PID", os.getpid()
print("PID", os.getpid())
conn = psycopg2.connect(DSN)
curs = conn.cursor()
try:
    for i in range(1000):
        curs.execute("select %s, pg_sleep(1)", (i,))
        r = curs.fetchone()
        print "selected", r
        print("selected", r)

except BaseException, e:
    print "got exception:", e.__class__.__name__, e
    print("got exception:", e.__class__.__name__, e)

if conn.closed:
    print "the connection is closed"
    print("the connection is closed")
else:
    conn.rollback()
    curs.execute("select 1")
    print curs.fetchone()
    print(curs.fetchone())
@@ -5,27 +5,27 @@ import signal
import warnings
import psycopg2

print "Testing psycopg2 version %s" % psycopg2.__version__
print("Testing psycopg2 version %s" % psycopg2.__version__)

dbname = os.environ.get('PSYCOPG2_TESTDB', 'psycopg2_test')
conn = psycopg2.connect("dbname=%s" % dbname)
curs = conn.cursor()
curs.isready()

print "Now restart the test postgresql server to drop all connections, press enter when done."
print("Now restart the test postgresql server to drop all connections, press enter when done.")
raw_input()

try:
    curs.isready() # No need to test return value
    curs.isready()
except:
    print "Test passed"
    print("Test passed")
    sys.exit(0)

if curs.isready():
    print "Warning: looks like the connection didn't get killed. This test is probably in-effective"
    print "Test inconclusive"
    print("Warning: looks like the connection didn't get killed. This test is probably in-effective")
    print("Test inconclusive")
    sys.exit(1)

gc.collect() # used to error here
print "Test Passed"
print("Test Passed")

@@ -5,5 +5,4 @@ o = psycopg2.connect("dbname=test")
c = o.cursor()
c.execute("SELECT 1.23::float AS foo")
x = c.fetchone()[0]
print x, type(x)

print(x, type(x))
@@ -42,7 +42,7 @@ cur = conn.cursor()
gc_thread.start()

# Now do lots of "cursor.copy_from" calls:
print "copy_from"
print("copy_from")
for i in range(1000):
    f = StringIO("42\tfoo\n74\tbar\n")
    cur.copy_from(f, 'test', columns=('num', 'data'))

@@ -51,7 +51,7 @@ for i in range(1000):
# python: Modules/gcmodule.c:277: visit_decref: Assertion `gc->gc.gc_refs != 0' failed.

# Also exercise the copy_to code path
print "copy_to"
print("copy_to")
cur.execute("truncate test")
f = StringIO("42\tfoo\n74\tbar\n")
cur.copy_from(f, 'test', columns=('num', 'data'))

@@ -60,7 +60,7 @@ for i in range(1000):
    cur.copy_to(f, 'test', columns=('num', 'data'))

# And copy_expert too
print "copy_expert"
print("copy_expert")
cur.execute("truncate test")
for i in range(1000):
    f = StringIO("42\tfoo\n74\tbar\n")
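Editor's note (not part of the diff): for readers unfamiliar with the third code path exercised above, copy_expert() takes a complete COPY statement plus a file-like object, which is what makes it more flexible than copy_from()/copy_to(). A minimal hedged sketch; the table and column names mirror the script's assumptions:

    from io import StringIO
    import psycopg2

    conn = psycopg2.connect("dbname=test")
    cur = conn.cursor()

    # Load two rows, then dump the table back out
    buf = StringIO("42\tfoo\n74\tbar\n")
    cur.copy_expert("COPY test (num, data) FROM STDIN", buf)

    out = StringIO()
    cur.copy_expert("COPY test (num, data) TO STDOUT", out)
    print(out.getvalue())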
@@ -71,5 +71,3 @@ done = 1

cur.close()
conn.close()


@@ -6,7 +6,7 @@ db = psycopg2.connect('dbname=test')

cursor = db.cursor()

print 'Creating tables and sample data'
print('Creating tables and sample data')

cursor.execute('''
    CREATE TEMPORARY TABLE foo (

@@ -23,22 +23,22 @@ cursor.execute('INSERT INTO bar VALUES (1, 1)')

db.commit()

print 'Deferring constraint and breaking referential integrity'
print('Deferring constraint and breaking referential integrity')
cursor.execute('SET CONSTRAINTS bar_foo_fk DEFERRED')
cursor.execute('UPDATE bar SET foo_id = 42 WHERE id = 1')

print 'Committing (this should fail)'
print('Committing (this should fail)')
try:
    db.commit()
except:
    traceback.print_exc()

print 'Rolling back connection'
print('Rolling back connection')
db.rollback()

print 'Running a trivial query'
print('Running a trivial query')
try:
    cursor.execute('SELECT TRUE')
except:
    traceback.print_exc()
print 'db.closed:', db.closed
print('db.closed:', db.closed)
@@ -1,3 +1,5 @@
from __future__ import print_function

import psycopg2, psycopg2.extensions
import threading
import gc

@@ -20,9 +22,9 @@ class db_user(threading.Thread):
        # the conn2 desctructor will block indefinitely
        # on the completion of the query
        # (and it will not be holding the GIL during that time)
        print >> sys.stderr, "begin conn2 del"
        print("begin conn2 del", file=sys.stderr)
        del cursor, conn2
        print >> sys.stderr, "end conn2 del"
        print("end conn2 del", file=sys.stderr)

def main():
    # lock out a db row

@@ -43,7 +45,7 @@ def main():
    # as it will avoid conn_close()
    for i in range(10):
        if gc.collect():
            print >> sys.stderr, "garbage collection done"
            print("garbage collection done", file=sys.stderr)
            break
        time.sleep(1)

@@ -52,9 +54,9 @@ def main():
    # concurrent thread destructor of conn2 to
    # continue and it will end up trying to free
    # self->dsn a second time.
    print >> sys.stderr, "begin conn1 del"
    print("begin conn1 del", file=sys.stderr)
    del cursor, conn1
    print >> sys.stderr, "end conn1 del"
    print("end conn1 del", file=sys.stderr)


if __name__ == '__main__':
@@ -1,7 +1,7 @@
import psycopg2.extensions

print dir(psycopg2._psycopg)
print psycopg2.extensions.new_type(
    (600,), "POINT", lambda oids, name, fun: None)
print "ciccia ciccia"
print psycopg2._psycopg
print(dir(psycopg2._psycopg))
print(psycopg2.extensions.new_type(
    (600,), "POINT", lambda oids, name, fun: None))
print("ciccia ciccia")
print(psycopg2._psycopg)

@@ -6,4 +6,4 @@ conn = psycopg2.connect("dbname=test")
curs = conn.cursor()
curs.execute("set timezone = 'Asia/Calcutta'")
curs.execute("SELECT now()")
print curs.fetchone()[0]
print(curs.fetchone()[0])
@@ -335,7 +335,7 @@

{
   Debian unstable with libc-i686 suppressions
   Memcheck:Cond
   Memcheck:Cond
   obj:/lib/ld-2.3.5.so
   obj:/lib/ld-2.3.5.so
   obj:/lib/tls/i686/cmov/libc-2.3.5.so

@@ -348,10 +348,10 @@
   fun:_PyImport_GetDynLoadFunc
   fun:_PyImport_LoadDynamicModule
}

{
   Debian unstable with libc-i686 suppressions
   Memcheck:Cond
   Memcheck:Cond
   obj:/lib/ld-2.3.5.so
   obj:/lib/ld-2.3.5.so
   obj:/lib/ld-2.3.5.so

@@ -365,7 +365,7 @@
   fun:_PyImport_GetDynLoadFunc
   fun:_PyImport_LoadDynamicModule
}

{
   Debian unstable with libc-i686 suppressions
   Memcheck:Addr4

@@ -471,7 +471,7 @@
{
   Debian unstable with libc-i686 suppressions
   Memcheck:Cond
   obj:/lib/ld-2.3.5.so
   obj:/lib/ld-2.3.5.so
   obj:/lib/tls/i686/cmov/libc-2.3.5.so
   obj:/lib/ld-2.3.5.so
   fun:_dl_open
@@ -9,7 +9,14 @@ To invalidate the cache, update this file and check it into git.
Currently used modules built in the cache:

OpenSSL
    Version: 1.0.2l
    Version: 1.0.2n

PostgreSQL
    Version: 9.6.3
    Version: 10.1


NOTE: to zap the cache manually you can also use:

    curl -X DELETE -H "Authorization: Bearer $APPVEYOR_TOKEN" -H "Content-Type: application/json" https://ci.appveyor.com/api/projects/psycopg/psycopg2/buildcache

with the token from https://ci.appveyor.com/api-token
Some files were not shown because too many files have changed in this diff.