Mirror of https://github.com/psycopg/psycopg2.git, synced 2025-08-04 04:10:09 +03:00.

Merge master into named parameters branch after 1.5 years.
This commit is contained in commit bc0b1e0195.
.gitignore (vendored): 2 changes

@@ -9,5 +9,7 @@ build/*
 doc/src/_build/*
 doc/html/*
 doc/psycopg2.txt
+scripts/pypi_docs_upload.py
 env
 .tox
+/rel
INSTALL: 105 lines changed (103 removed, 4 added)

@@ -1,103 +1,4 @@
-Compiling and installing psycopg
-********************************
-
-** Important note: if you plan to use psycopg2 in a multithreaded application,
-make sure that your libpq has been compiled with the --with-thread-safety
-option. psycopg2 will work correctly even with a non-thread-safe libpq but
-libpq will leak memory.
-
-psycopg2 uses distutils for its build process, so most of the process is
-executed by the setup.py script. Before building psycopg look at
-setup.cfg file and change any settings to follow your system (or taste);
-then:
-
-    python setup.py build
-
-to build in the local directory; and:
-
-    python setup.py install
-
-to install system-wide.
-
-
-Common errors and build problems
-================================
-
-One of the most common errors is trying to build psycopg without the right
-development headers for PostgreSQL, Python or both. If you get errors, look
-for the following messages and then take the appropriate action:
-
-libpq-fe.h: No such file or directory
-    PostgreSQL headers are not properly installed on your system or are
-    installed in a non default path. First make sure they are installed, then
-    check setup.cfg and make sure pg_config points to a valid pg_config
-    executable. If you don't have a working pg_config try to play with the
-    include_dirs variable (and note that a working pg_config is better.)
-
-
-Running the test suite
-======================
-
-The included Makefile allows to run all the tests included in the
-distribution. Just use:
-
-    make
-    make check
-
-The tests are run against a database called psycopg2_test on unix socket
-and standard port. You can configure a different database to run the test
-by setting the environment variables:
-
-- PSYCOPG2_TESTDB
-- PSYCOPG2_TESTDB_HOST
-- PSYCOPG2_TESTDB_PORT
-- PSYCOPG2_TESTDB_USER
-
-The database should be created before running the tests.
-
-The standard Python unittest is used to run the tests. But if unittest2 is
-found it will be used instead, with the result of having more informations
-about skipped tests.
-
-
-Building the documentation
-==========================
-
-In order to build the documentation included in the distribution, use
-
-    make env
-    make docs
-
-The first command will install all the dependencies (Sphinx, Docutils) in
-an 'env' directory in the project tree. The second command will build both
-the html format (in the 'doc/html' directory) and in plain text
-(doc/psycopg2.txt)
-
-
-Using setuptools and EasyInstall
-================================
-
-If setuptools are installed on your system you can easily create an egg for
-psycopg and install it. Download the source distribution (if you're reading
-this file you probably already have) and then edit setup.cfg to your taste
-and build from the source distribution top-level directory using:
-
-    easy_install .
-
-
-Compiling under Windows with mingw32
-====================================
-
-You can compile psycopg under Windows platform with mingw32
-(http://www.mingw.org/) compiler. MinGW is also shipped with IDEs such as
-Dev-C++ (http://www.bloodshed.net/devcpp.html) and Code::Blocks
-(http://www.codeblocks.org). gcc binaries should be in your PATH.
-
-You need a PostgreSQL with include and library files installed. At least v8.0
-is required.
-
-First you need to create a libpython2X.a as described in
-http://starship.python.net/crew/kernr/mingw32/Notes.html. Then run:
-
-    python setup.py build_ext --compiler=mingw32 install
+Installation instructions are included in the docs.
+
+Please check the 'doc/src/install.rst' file or online at
+<http://initd.org/psycopg/docs/install.html>.
MANIFEST.in

@@ -2,9 +2,10 @@ recursive-include psycopg *.c *.h *.manifest
 recursive-include lib *.py
 recursive-include tests *.py
 recursive-include examples *.py somehackers.jpg whereareyou.jpg
-recursive-include doc README HACKING SUCCESS COPYING.LESSER pep-0249.txt
+recursive-include doc README SUCCESS COPYING.LESSER pep-0249.txt
+recursive-include doc Makefile requirements.txt
 recursive-include doc/src *.rst *.py *.css Makefile
 recursive-include scripts *.py *.sh
 include scripts/maketypes.sh scripts/buildtypes.py
-include AUTHORS README INSTALL LICENSE NEWS
+include AUTHORS README.rst INSTALL LICENSE NEWS
 include PKG-INFO MANIFEST.in MANIFEST setup.py setup.cfg Makefile
Makefile: 30 lines changed

@@ -6,7 +6,7 @@
 #
 # Build the documentation::
 #
-# make env
+# make env  (once)
 # make docs
 #
 # Create a source package::

@@ -20,9 +20,6 @@
 PYTHON := python$(PYTHON_VERSION)
 PYTHON_VERSION ?= $(shell $(PYTHON) -c 'import sys; print ("%d.%d" % sys.version_info[:2])')
 BUILD_DIR = $(shell pwd)/build/lib.$(PYTHON_VERSION)
-ENV_DIR = $(shell pwd)/env/py-$(PYTHON_VERSION)
-ENV_BIN = $(ENV_DIR)/bin
-ENV_LIB = $(ENV_DIR)/lib
 
 SOURCE_C := $(wildcard psycopg/*.c psycopg/*.h)
 SOURCE_PY := $(wildcard lib/*.py)

@@ -46,9 +43,6 @@ endif
 VERSION := $(shell grep PSYCOPG_VERSION setup.py | head -1 | sed -e "s/.*'\(.*\)'/\1/")
 SDIST := dist/psycopg2-$(VERSION).tar.gz
 
-EASY_INSTALL = PYTHONPATH=$(ENV_LIB) $(ENV_BIN)/easy_install-$(PYTHON_VERSION) -d $(ENV_LIB) -s $(ENV_BIN)
-EZ_SETUP = $(ENV_BIN)/ez_setup.py
-
 .PHONY: env check clean
 
 default: package

@@ -68,22 +62,8 @@ docs-zip: doc/docs.zip
 
 sdist: $(SDIST)
 
-# The environment is currently required to build the documentation.
-# It is not clean by 'make clean'
-
-env: easy_install
-	mkdir -p $(ENV_BIN)
-	mkdir -p $(ENV_LIB)
-	$(EASY_INSTALL) docutils
-	$(EASY_INSTALL) sphinx
-
-easy_install: ez_setup
-	PYTHONPATH=$(ENV_LIB) $(PYTHON) $(EZ_SETUP) -d $(ENV_LIB) -s $(ENV_BIN) setuptools
-
-ez_setup:
-	mkdir -p $(ENV_BIN)
-	mkdir -p $(ENV_LIB)
-	wget -O $(EZ_SETUP) http://peak.telecommunity.com/dist/ez_setup.py
+env:
+	$(MAKE) -C doc $@
 
 check:
 	PYTHONPATH=$(BUILD_DIR):$(PYTHONPATH) $(PYTHON) -c "from psycopg2 import tests; tests.unittest.main(defaultTest='tests.test_suite')" --verbose

@@ -122,10 +102,10 @@ MANIFEST: MANIFEST.in $(SOURCE)
 
 # docs depend on the build as it partly use introspection.
 doc/html/genindex.html: $(PLATLIB) $(PURELIB) $(SOURCE_DOC)
-	PYTHONPATH=$(ENV_LIB):$(BUILD_DIR) $(MAKE) SPHINXBUILD=$(ENV_BIN)/sphinx-build -C doc html
+	$(MAKE) -C doc html
 
 doc/psycopg2.txt: $(PLATLIB) $(PURELIB) $(SOURCE_DOC)
-	PYTHONPATH=$(ENV_LIB):$(BUILD_DIR) $(MAKE) SPHINXBUILD=$(ENV_BIN)/sphinx-build -C doc text
+	$(MAKE) -C doc text
 
 doc/docs.zip: doc/html/genindex.html
 	(cd doc/html && zip -r ../docs.zip *)
NEWS: 78 lines changed

@@ -1,15 +1,87 @@
 Current release
 ---------------
 
+What's new in psycopg 2.7
+-------------------------
+
+New features:
+
+- Added `~psycopg2.extensions.parse_dsn()` function (:ticket:`#321`).
+- Added `~psycopg2.__libpq_version__` and
+  `~psycopg2.extensions.libpq_version()` to inspect the version of the
+  ``libpq`` library the module was compiled/loaded with
+  (:tickets:`#35, #323`).
+- The attributes `~connection.notices` and `~connection.notifies` can be
+  customized replacing them with any object exposing an `!append()` method
+  (:ticket:`#326`).
+- Added `~psycopg2.extensions.quote_ident()` function (:ticket:`#359`).
+
+
+What's new in psycopg 2.6.2
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Report the server response status on errors (such as :ticket:`#281`).
+- The `~psycopg2.extras.wait_select` callback allows interrupting a
+  long-running query in an interactive shell using :kbd:`Ctrl-C`
+  (:ticket:`#333`).
+- Raise `!NotSupportedError` on unhandled server response status
+  (:ticket:`#352`).
+- Fixed `!PersistentConnectionPool` on Python 3 (:ticket:`#348`).
+
+
+What's new in psycopg 2.6.1
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Lists consisting of only `None` are escaped correctly (:ticket:`#285`).
+- Fixed deadlock in multithread programs using OpenSSL (:ticket:`#290`).
+- Correctly unlock the connection after error in flush (:ticket:`#294`).
+- Fixed `!MinTimeLoggingCursor.callproc()` (:ticket:`#309`).
+- Added support for MSVC 2015 compiler (:ticket:`#350`).
+
+
 What's new in psycopg 2.6
 -------------------------
 
+New features:
+
+- Added support for large objects larger than 2GB. Many thanks to Blake Rouse
+  and the MAAS Team for the feature development.
+- Python `time` objects with a tzinfo specified and PostgreSQL :sql:`timetz`
+  data are converted into each other (:ticket:`#272`).
+
 Bug fixes:
 
-- Json apapter's `!str()` returns the adapted content instead of the `!repr()`
+- Json adapter's `!str()` returns the adapted content instead of the `!repr()`
   (:ticket:`#191`).
 
 
+What's new in psycopg 2.5.5
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Named cursors used as context manager don't swallow the exception on exit
+  (:ticket:`#262`).
+- `cursor.description` can be pickled (:ticket:`#265`).
+- Propagate read error messages in COPY FROM (:ticket:`#270`).
+- PostgreSQL time 24:00 is converted to Python 00:00 (:ticket:`#278`).
+
+
+What's new in psycopg 2.5.4
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Added :sql:`jsonb` support for PostgreSQL 9.4 (:ticket:`#226`).
+- Fixed segfault if COPY statements are passed to `~cursor.execute()` instead
+  of using the proper methods (:ticket:`#219`).
+- Force conversion of pool arguments to integer to avoid potentially unbounded
+  pools (:ticket:`#220`).
+- Cursors :sql:`WITH HOLD` don't begin a new transaction upon move/fetch/close
+  (:ticket:`#228`).
+- Cursors :sql:`WITH HOLD` can be used in autocommit (:ticket:`#229`).
+- `~cursor.callproc()` doesn't silently ignore an argument without a length.
+- Fixed memory leak with large objects (:ticket:`#256`).
+- Make sure the internal ``_psycopg.so`` module can be imported stand-alone (to
+  allow modules juggling such as the one described in :ticket:`#201`).
+
+
 What's new in psycopg 2.5.3
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^
 

@@ -115,7 +187,7 @@ What's new in psycopg 2.4.6
 - 'register_hstore()', 'register_composite()', 'tpc_recover()' work with
   RealDictConnection and Cursor (:ticket:`#114`).
 - Fixed broken pool for Zope and connections re-init across ZSQL methods
-  in the same request (tickets #123, #125, #142).
+  in the same request (:tickets:`#123, #125, #142`).
 - connect() raises an exception instead of swallowing keyword arguments
   when a connection string is specified as well (:ticket:`#131`).
 - Discard any result produced by 'executemany()' (:ticket:`#133`).

@@ -137,7 +209,7 @@ What's new in psycopg 2.4.5
 - Error and its subclasses are picklable, useful for multiprocessing
   interaction (:ticket:`#90`).
 - Better efficiency and formatting of timezone offset objects thanks
-  to Menno Smits (tickets #94, #95).
+  to Menno Smits (:tickets:`#94, #95`).
 - Fixed 'rownumber' during iteration on cursor subclasses.
   Regression introduced in 2.4.4 (:ticket:`#100`).
 - Added support for 'inet' arrays.
README: deleted (38 lines)

@@ -1,38 +0,0 @@
-psycopg2 - Python-PostgreSQL Database Adapter
-********************************************
-
-psycopg2 is a PostgreSQL database adapter for the Python programming
-language. psycopg2 was written with the aim of being very small and fast,
-and stable as a rock.
-
-psycopg2 is different from the other database adapter because it was
-designed for heavily multi-threaded applications that create and destroy
-lots of cursors and make a conspicuous number of concurrent INSERTs or
-UPDATEs. psycopg2 also provides full asynchronous operations and support
-for coroutine libraries.
-
-psycopg2 can compile and run on Linux, FreeBSD, Solaris, MacOS X and
-Windows architecture. It supports Python versions from 2.4 onwards and
-PostgreSQL versions from 7.4 onwards.
-
-psycopg2 is free software ("free as in freedom" but I like beer too.)
-It is licensed under the GNU Lesser General Public License, version 3 or
-later plus an exception to allow OpenSSL (libpq) linking; see LICENSE for
-more details.
-
-Documentation
--------------
-
-Start by reading the INSTALL file. More information about psycopg2 extensions
-to the DBAPI-2.0 is available in the files located in the doc/ directory.
-Example code can be found in the examples/ directory. If you make any changes
-to the code make sure to run the unit tests located in tests/.
-
-Online documentation can be found at: http://initd.org/psycopg/
-
-If you stumble upon any bugs, please tell us at: http://psycopg.lighthouseapp.com/
-
-Contributors
-------------
-
-For a list of contributors to the project, see the AUTHORS file.
README.rst: new file (46 lines)

@@ -0,0 +1,46 @@
+psycopg2 - Python-PostgreSQL Database Adapter
+=============================================
+
+Psycopg is the most popular PostgreSQL database adapter for the Python
+programming language. Its main features are the complete implementation of
+the Python DB API 2.0 specification and the thread safety (several threads can
+share the same connection). It was designed for heavily multi-threaded
+applications that create and destroy lots of cursors and make a large number
+of concurrent "INSERT"s or "UPDATE"s.
+
+Psycopg 2 is mostly implemented in C as a libpq wrapper, resulting in being
+both efficient and secure. It features client-side and server-side cursors,
+asynchronous communication and notifications, "COPY TO/COPY FROM" support.
+Many Python types are supported out-of-the-box and adapted to matching
+PostgreSQL data types; adaptation can be extended and customized thanks to a
+flexible objects adaptation system.
+
+Psycopg 2 is both Unicode and Python 3 friendly.
+
+
+Documentation
+-------------
+
+Documentation is included in the 'doc' directory and is `available online`__.
+
+.. __: http://initd.org/psycopg/docs/
+
+
+Installation
+------------
+
+If all the dependencies are met (i.e. you have the Python and libpq
+development packages installed in your system) the standard::
+
+    python setup.py build
+    sudo python setup.py install
+
+should work no problem. In case you have any problem check the 'install' and
+the 'faq' documents in the docs or online__.
+
+.. __: http://initd.org/psycopg/docs/install.html
+
+For any other resource (source code repository, bug tracker, mailing list)
+please check the `project homepage`__.
+
+.. __: http://initd.org/psycopg/
doc/HACKING: deleted (43 lines)

@@ -1,43 +0,0 @@
-General information
-*******************
-
-Some help to people wanting to hack on psycopg. First of all, note that
-*every* function in the psycopg module source code is prefixed by one of the
-following words:
-
-  psyco     is used for function directly callable from python (i.e., functions
-            in the psycopg module itself.) the only notable exception is the
-            source code for the module itself, that uses "psyco" even for C-only
-            functions.
-
-  conn      is used for functions related to connection objects.
-
-  curs      is used for functions related to cursor objects.
-
-  typecast  is used for typecasters and utility function related to
-            typecaster creation and registration.
-
-Pythonic definition of types and functions available from python are defined
-in *_type.c files. Internal functions, callable only from C are located in
-*_int.c files and extensions to the DBAPI can be found in the *_ext.c files.
-
-
-Patches
-*******
-
-If you submit a patch, please send a diff generated with the "-u" switch.
-Also note that I don't like that much cosmetic changes (like renaming
-already existing variables) and I will rewrap the patch to 78 columns
-anyway, so it is much better if you do that beforehand.
-
-
-The type system
-***************
-
-Simple types, like integers and strings, are converted to python base types
-(the conversion functions are in typecast_base.c). Complex types are
-converted to ad-hoc types, defined in the typeobj_*.{c,h} files. The
-conversion function are in the other typecast_*.c files. typecast.c defines
-the basic utility functions (available through the psycopg module) used when
-defining new typecasters from C and python.
doc/Makefile: 32 lines changed

@@ -1,23 +1,41 @@
-.PHONY: help clean html text doctest
+.PHONY: env help clean html text doctest
 
 docs: html text
 
 check: doctest
 
-help:
-	cd src && $(MAKE) $@
+# The environment is currently required to build the documentation.
+# It is not clean by 'make clean'
+
+PYTHON := python$(PYTHON_VERSION)
+PYTHON_VERSION ?= $(shell $(PYTHON) -c 'import sys; print ("%d.%d" % sys.version_info[:2])')
+
+SPHOPTS=PYTHONPATH=$$(pwd)/../build/lib.$(PYTHON_VERSION)/ SPHINXBUILD=$$(pwd)/env/bin/sphinx-build
 
 html:
-	cd src && $(MAKE) $@
+	$(MAKE) PYTHON=$(PYTHON) -C .. package
+	$(MAKE) $(SPHOPTS) -C src $@
 	cp -r src/_build/html .
 
 text:
-	cd src && $(MAKE) $@
+	$(MAKE) PYTHON=$(PYTHON) -C .. package
+	$(MAKE) $(SPHOPTS) -C src $@
 	cd src && tools/stitch_text.py index.rst _build/text > ../psycopg2.txt
 
 doctest:
-	cd src && $(MAKE) $@
+	$(MAKE) PYTHON=$(PYTHON) -C .. package
+	$(MAKE) $(SPHOPTS) -C src $@
+
+upload:
+	# this command requires ssh configured to the proper target
+	tar czf - -C html . | ssh psycoweb tar xzvf - -C docs/current
+	# this command requires a .pypirc with the right privileges
+	python src/tools/pypi_docs_upload.py psycopg2 $$(pwd)/html
 
 clean:
-	cd src && $(MAKE) $@
+	$(MAKE) $(SPHOPTS) -C src $@
 	rm -rf html psycopg2.txt
+
+env: requirements.txt
+	virtualenv env
+	./env/bin/pip install -r requirements.txt
doc/README: deleted (42 lines)

@@ -1,42 +0,0 @@
-How to build psycopg documentation
-----------------------------------
-
-- Install Sphinx, maybe in a virtualenv. Tested with Sphinx 0.6.4::
-
-    ~$ virtualenv pd
-    New python executable in pd/bin/python
-    Installing setuptools............done.
-    ~$ cd pd
-    ~/pd$ source bin/activate
-    (pd)~/pd$
-
-- Install Sphinx in the env::
-
-    (pd)~/pd$ easy_install sphinx
-    Searching for sphinx
-    Reading http://pypi.python.org/simple/sphinx/
-    Reading http://sphinx.pocoo.org/
-    Best match: Sphinx 0.6.4
-    ...
-    Finished processing dependencies for sphinx
-
-- Build psycopg2 and ensure the package can be imported (it will be used for
-  reading the version number, autodocs etc.)::
-
-    (pd)~/pd/psycopg2$ python setup.py build
-    (pd)~/pd/psycopg2$ python setup.py install
-    running install
-    ...
-    creating ~/pd/lib/python2.6/site-packages/psycopg2
-    ...
-
-- Move to the ``doc`` dir and run ``make`` from there::
-
-    (pd)~/pd/psycopg2$ cd doc/
-    (pd)~/pd/psycopg2/doc$ make
-    Running Sphinx v0.6.4
-    ...
-
-You should have the rendered documentation in ``./html`` and the text file
-``psycopg2.txt`` now.
doc/README.rst: new file (26 lines)

@@ -0,0 +1,26 @@
+How to build psycopg documentation
+----------------------------------
+
+Building the documentation usually requires building the library too for
+introspection, so you will need the same prerequisites_. The only extra
+prerequisite is virtualenv_: the packages needed to build the docs will be
+installed when building the env.
+
+.. _prerequisites: http://initd.org/psycopg/docs/install.html#install-from-source
+.. _virtualenv: https://virtualenv.pypa.io/en/latest/
+
+Build the env once with::
+
+    make env
+
+Then you can build the documentation with::
+
+    make
+
+Or the single targets::
+
+    make html
+    make text
+
+You should find the rendered documentation in the ``html`` dir and the text
+file ``psycopg2.txt``.
doc/requirements.txt: new file (3 lines)

@@ -0,0 +1,3 @@
+# Packages only needed to build the docs
+Pygments>=1.5
+Sphinx>=1.2,<=1.3
doc/src/advanced.rst

@@ -145,7 +145,9 @@ geometric type:
     ...         self.y = y
 
     >>> def adapt_point(point):
-    ...     return AsIs("'(%s, %s)'" % (adapt(point.x), adapt(point.y)))
+    ...     x = adapt(point.x).getquoted()
+    ...     y = adapt(point.y).getquoted()
+    ...     return AsIs("'(%s, %s)'" % (x, y))
 
     >>> register_adapter(Point, adapt_point)
 
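The hunk above changes the doctest to pass each coordinate through `getquoted()` before interpolating it into the literal. A self-contained sketch of the same pattern, with the imports written out; the `Point` class and the commented table are illustrative, and `.decode()` is added so the snippet also behaves on Python 3, where `getquoted()` returns bytes::

    from psycopg2.extensions import adapt, register_adapter, AsIs

    class Point(object):
        def __init__(self, x, y):
            self.x = x
            self.y = y

    def adapt_point(point):
        # Adapt each coordinate separately, then build the PostgreSQL literal.
        x = adapt(point.x).getquoted().decode('ascii')
        y = adapt(point.y).getquoted().decode('ascii')
        return AsIs("'(%s, %s)'" % (x, y))

    register_adapter(Point, adapt_point)

    # With an open connection `conn`, the adapter is then picked up automatically:
    # cur = conn.cursor()
    # cur.execute("INSERT INTO atable (apoint) VALUES (%s)", (Point(1.23, 4.56),))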
@@ -289,7 +291,7 @@ something to read::
        else:
            conn.poll()
            while conn.notifies:
-                notify = conn.notifies.pop()
+                notify = conn.notifies.pop(0)
                print "Got NOTIFY:", notify.pid, notify.channel, notify.payload
 
 Running the script and executing a command such as :sql:`NOTIFY test, 'hello'`
@@ -310,6 +312,10 @@ received from a previous version server will have the
    Added `~psycopg2.extensions.Notify` object and handling notification
    payload.
 
+.. versionchanged:: 2.7
+    The `~connection.notifies` attribute is writable: it is possible to
+    replace it with any object exposing an `!append()` method. An useful
+    example would be to use a `~collections.deque` object.
+
 .. index::
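A short sketch of the customization introduced above: replacing `connection.notifies` with a bounded `deque` so a busy LISTEN loop cannot grow without limit. It assumes psycopg 2.7 (the release this changelog describes); the DSN and channel name are placeholders::

    import select
    from collections import deque

    import psycopg2

    conn = psycopg2.connect("dbname=test")   # placeholder DSN
    conn.autocommit = True                   # LISTEN/NOTIFY works best outside transactions

    # Keep only the most recent 100 notifications instead of an unbounded list.
    conn.notifies = deque(maxlen=100)

    cur = conn.cursor()
    cur.execute("LISTEN test;")

    while True:
        if select.select([conn], [], [], 5) == ([], [], []):
            print("Timeout")
        else:
            conn.poll()
            while conn.notifies:
                notify = conn.notifies.popleft()
                print("Got NOTIFY:", notify.pid, notify.channel, notify.payload)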
doc/src/conf.py

@@ -42,7 +42,9 @@ master_doc = 'index'
 
 # General information about the project.
 project = u'Psycopg'
-copyright = u'2001-2013, Federico Di Gregorio. Documentation by Daniele Varrazzo'
+from datetime import date
+year = date.today().year
+copyright = u'2001-%s, Federico Di Gregorio, Daniele Varrazzo' % year
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the

@@ -66,7 +68,9 @@ intersphinx_mapping = {
 }
 
 # Pattern to generate links to the bug tracker
-ticket_url = 'http://psycopg.lighthouseapp.com/projects/62710/tickets/%s'
+ticket_url = 'https://github.com/psycopg/psycopg2/issues/%s'
+ticket_remap_until = 25
+ticket_remap_offset = 230
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
doc/src/connection.rst

@@ -351,17 +351,14 @@ The ``connection`` class
    .. method:: set_session(isolation_level=None, readonly=None, deferrable=None, autocommit=None)
 
        Set one or more parameters for the next transactions or statements in
-        the current session. See |SET TRANSACTION|_ for further details.
-
-        .. |SET TRANSACTION| replace:: :sql:`SET TRANSACTION`
-        .. _SET TRANSACTION: http://www.postgresql.org/docs/current/static/sql-set-transaction.html
+        the current session.
 
        :param isolation_level: set the `isolation level`_ for the next
-            transactions/statements. The value can be one of the
-            :ref:`constants <isolation-level-constants>` defined in the
-            `~psycopg2.extensions` module or one of the literal values
-            ``READ UNCOMMITTED``, ``READ COMMITTED``, ``REPEATABLE READ``,
-            ``SERIALIZABLE``.
+            transactions/statements. The value can be one of the literal
+            values ``READ UNCOMMITTED``, ``READ COMMITTED``, ``REPEATABLE
+            READ``, ``SERIALIZABLE`` or the equivalent :ref:`constant
+            <isolation-level-constants>` defined in the `~psycopg2.extensions`
+            module.
        :param readonly: if `!True`, set the connection to read only;
            read/write if `!False`.
        :param deferrable: if `!True`, set the connection to deferrable;

@@ -370,19 +367,14 @@ The ``connection`` class
            PostgreSQL session setting but an alias for setting the
            `autocommit` attribute.
 
-        Parameter passed as `!None` (the default for all) will not be changed.
-        The parameters *isolation_level*, *readonly* and *deferrable* also
-        accept the string ``DEFAULT`` as a value: the effect is to reset the
-        parameter to the server default.
-
        .. _isolation level:
            http://www.postgresql.org/docs/current/static/transaction-iso.html
 
-        The function must be invoked with no transaction in progress. At every
-        function invocation, only the specified parameters are changed.
-
-        The default for the values are defined by the server configuration:
-        see values for |default_transaction_isolation|__,
+        Arguments set to `!None` (the default for all) will not be changed.
+        The parameters *isolation_level*, *readonly* and *deferrable* also
+        accept the string ``DEFAULT`` as a value: the effect is to reset the
+        parameter to the server default. Defaults are defined by the server
+        configuration: see values for |default_transaction_isolation|__,
        |default_transaction_read_only|__, |default_transaction_deferrable|__.
 
        .. |default_transaction_isolation| replace:: :sql:`default_transaction_isolation`

@@ -392,12 +384,20 @@ The ``connection`` class
        .. |default_transaction_deferrable| replace:: :sql:`default_transaction_deferrable`
        .. __: http://www.postgresql.org/docs/current/static/runtime-config-client.html#GUC-DEFAULT-TRANSACTION-DEFERRABLE
 
+        The function must be invoked with no transaction in progress.
+
        .. note::
 
            There is currently no builtin method to read the current value for
            the parameters: use :sql:`SHOW default_transaction_...` to read
            the values from the backend.
 
+        .. seealso:: |SET TRANSACTION|_ for further details about the behaviour
+            of the transaction parameters in the server.
+
+        .. |SET TRANSACTION| replace:: :sql:`SET TRANSACTION`
+        .. _SET TRANSACTION: http://www.postgresql.org/docs/current/static/sql-set-transaction.html
+
        .. versionadded:: 2.4.2
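A minimal sketch of the call documented in the hunk above, showing both the literal isolation level form and the ``DEFAULT`` reset behaviour; the DSN is a placeholder::

    import psycopg2
    import psycopg2.extensions

    conn = psycopg2.connect("dbname=test")   # placeholder DSN

    # set_session() must be called with no transaction in progress.
    conn.set_session(isolation_level='SERIALIZABLE', readonly=True, deferrable=True)

    cur = conn.cursor()
    cur.execute("SELECT count(*) FROM pg_class")
    print(cur.fetchone()[0])
    conn.commit()   # terminate the transaction before changing the session again

    # 'DEFAULT' resets a parameter to the value configured in the server.
    conn.set_session(isolation_level='DEFAULT', readonly='DEFAULT', deferrable='DEFAULT')

    # The constants in psycopg2.extensions are equivalent to the literal strings.
    conn.set_session(psycopg2.extensions.ISOLATION_LEVEL_REPEATABLE_READ)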
@@ -419,8 +419,8 @@ The ``connection`` class
 
        By default, any query execution, including a simple :sql:`SELECT`
        will start a transaction: for long-running programs, if no further
-        action is taken, the session will remain "idle in transaction", a
-        condition non desiderable for several reasons (locks are held by
+        action is taken, the session will remain "idle in transaction", an
+        undesirable condition for several reasons (locks are held by
        the session, tables bloat...). For long lived scripts, either
        ensure to terminate a transaction as soon as possible or use an
        autocommit connection.
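A small sketch of the two options mentioned in the paragraph above for long-lived scripts; the DSN and the ``counters`` table are illustrative::

    import psycopg2

    conn = psycopg2.connect("dbname=test")   # placeholder DSN

    # Option 1: autocommit mode, no transaction is left open between statements.
    conn.autocommit = True
    cur = conn.cursor()
    cur.execute("SELECT now()")
    print(cur.fetchone()[0])

    # Option 2: keep the default transactional behaviour but terminate
    # transactions promptly so the session is not left "idle in transaction".
    conn.autocommit = False
    cur.execute("UPDATE counters SET value = value + 1 WHERE name = %s", ('hits',))
    conn.commit()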
@@ -483,13 +483,21 @@ The ``connection`` class
        ['NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "foo_pkey" for table "foo"\n',
         'NOTICE: CREATE TABLE will create implicit sequence "foo_id_seq" for serial column "foo.id"\n']
 
+        .. versionchanged:: 2.7
+            The `!notices` attribute is writable: the user may replace it
+            with any Python object exposing an `!append()` method. If
+            appending raises an exception the notice is silently
+            dropped.
+
        To avoid a leak in case excessive notices are generated, only the last
-        50 messages are kept.
+        50 messages are kept. This check is only in place if the `!notices`
+        attribute is a list: if any other object is used it will be up to the
+        user to guard from leakage.
 
        You can configure what messages to receive using `PostgreSQL logging
        configuration parameters`__ such as ``log_statement``,
        ``client_min_messages``, ``log_min_duration_statement`` etc.
 
        .. __: http://www.postgresql.org/docs/current/static/runtime-config-logging.html
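A sketch of the customization documented above: routing server notices straight to the ``logging`` module through a tiny object that only exposes ``append()``. It assumes psycopg 2.7 (where `!notices` becomes writable); the DSN and table name are placeholders::

    import logging

    import psycopg2

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("pg.notices")

    class LogNotices(object):
        """Minimal append()-only sink for connection.notices."""
        def append(self, message):
            logger.info(message.strip())

    conn = psycopg2.connect("dbname=test")   # placeholder DSN
    conn.notices = LogNotices()              # notices no longer accumulate on the connection

    cur = conn.cursor()
    cur.execute("DROP TABLE IF EXISTS no_such_table")   # emits a NOTICE, now logged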
@@ -506,6 +514,12 @@ The ``connection`` class
        the payload was not accessible. To keep backward compatibility,
        `!Notify` objects can still be accessed as 2 items tuples.
 
+        .. versionchanged:: 2.7
+            The `!notifies` attribute is writable: the user may replace it
+            with any Python object exposing an `!append()` method. If
+            appending raises an exception the notification is silently
+            dropped.
+
    .. attribute:: cursor_factory
doc/src/cursor.rst

@@ -529,6 +529,13 @@ The ``cursor`` class
        >>> cur.fetchall()
        [(6, 42, 'foo'), (7, 74, 'bar')]
 
+        .. note:: the name of the table is not quoted: if the table name
+            contains uppercase letters or special characters it must be quoted
+            with double quotes::
+
+                cur.copy_from(f, '"TABLE"')
+
+
        .. versionchanged:: 2.0.6
            added the *columns* parameter.
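A sketch of the quoting rule added in the note above, loading a tab-separated buffer into a mixed-case table; the DSN, table and column names are illustrative::

    import io

    import psycopg2

    conn = psycopg2.connect("dbname=test")   # placeholder DSN
    cur = conn.cursor()

    data = io.StringIO(u"1\tfoo\n2\tbar\n")

    # "MixedCase" contains uppercase letters, so it must be double-quoted
    # explicitly; copy_from() does not quote the table name for us.
    cur.copy_from(data, '"MixedCase"', columns=('id', 'label'))
    conn.commit()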
@@ -558,6 +565,12 @@ The ``cursor`` class
            2|\N|dada
            ...
 
+        .. note:: the name of the table is not quoted: if the table name
+            contains uppercase letters or special characters it must be quoted
+            with double quotes::
+
+                cur.copy_to(f, '"TABLE"')
+
        .. versionchanged:: 2.0.6
            added the *columns* parameter.
doc/src/errorcodes.rst

@@ -49,8 +49,8 @@ An example of the available constants defined in the module:
    >>> errorcodes.UNDEFINED_TABLE
    '42P01'
 
-Constants representing all the error values documented by PostgreSQL versions
-between 8.1 and 9.2 are included in the module.
+Constants representing all the error values defined by PostgreSQL versions
+between 8.1 and 9.4 are included in the module.
 
 .. autofunction:: lookup(code)
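A short sketch of how the constants and `lookup()` are typically used together when handling a database error; the DSN is a placeholder and the query is deliberately wrong to trigger ``UNDEFINED_TABLE``::

    import psycopg2
    from psycopg2 import errorcodes

    conn = psycopg2.connect("dbname=test")   # placeholder DSN
    cur = conn.cursor()

    try:
        cur.execute("SELECT * FROM table_that_does_not_exist")
    except psycopg2.Error as e:
        # e.pgcode holds the SQLSTATE; lookup() maps it back to the constant name.
        print(e.pgcode)                      # '42P01'
        print(errorcodes.lookup(e.pgcode))   # 'UNDEFINED_TABLE'
        if e.pgcode == errorcodes.UNDEFINED_TABLE:
            conn.rollback()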
doc/src/extensions.rst

@@ -12,6 +12,17 @@
 The module contains a few objects and function extending the minimum set of
 functionalities defined by the |DBAPI|_.
 
+.. function:: parse_dsn(dsn)
+
+    Parse connection string into a dictionary of keywords and values.
+
+    Uses libpq's ``PQconninfoParse`` to parse the string according to
+    accepted format(s) and check for supported keywords.
+
+    Example::
+
+        >>> psycopg2.extensions.parse_dsn('dbname=test user=postgres password=secret')
+        {'password': 'secret', 'user': 'postgres', 'dbname': 'test'}
+
 .. class:: connection(dsn, async=False)
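Besides inspection, the dictionary returned by the new `parse_dsn()` can be tweaked and fed back to `connect()` as keyword arguments. A sketch, assuming psycopg 2.7 (the release adding the function); the DSN and the overridden database name are illustrative::

    import psycopg2
    from psycopg2.extensions import parse_dsn

    dsn = "dbname=test user=postgres password=secret host=db.example.com"

    params = parse_dsn(dsn)            # {'dbname': 'test', 'user': 'postgres', ...}
    params['dbname'] = 'test_shadow'   # point the same credentials at another database

    conn = psycopg2.connect(**params)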
@@ -40,18 +51,20 @@ functionalities defined by the |DBAPI|_.
 
    The class can be subclassed: see the `connection.lobject()` to know
    how to specify a `!lobject` subclass.
 
    .. versionadded:: 2.0.8
 
    .. attribute:: oid
 
        Database OID of the object.
 
    .. attribute:: mode
 
        The mode the database was open. See `connection.lobject()` for a
        description of the available modes.
 
    .. method:: read(bytes=-1)
 
        Read a chunk of data from the current file position. If -1 (default)

@@ -64,6 +77,7 @@ functionalities defined by the |DBAPI|_.
        .. versionchanged:: 2.4
            added Unicode support.
 
+
    .. method:: write(str)
 
        Write a string to the large object. Return the number of bytes

@@ -73,42 +87,60 @@ functionalities defined by the |DBAPI|_.
        .. versionchanged:: 2.4
            added Unicode support.
 
    .. method:: export(file_name)
 
        Export the large object content to the file system.
 
        The method uses the efficient |lo_export|_ libpq function.
 
        .. |lo_export| replace:: `!lo_export()`
        .. _lo_export: http://www.postgresql.org/docs/current/static/lo-interfaces.html#LO-EXPORT
 
    .. method:: seek(offset, whence=0)
 
        Set the lobject current position.
 
+        .. versionchanged:: 2.6.0
+            added support for *offset* > 2GB.
+
    .. method:: tell()
 
        Return the lobject current position.
 
-    .. method:: truncate(len=0)
-
        .. versionadded:: 2.2.0
 
+        .. versionchanged:: 2.6.0
+            added support for return value > 2GB.
+
+    .. method:: truncate(len=0)
+
        Truncate the lobject to the given size.
 
-        The method will only be available if Psycopg has been built against libpq
-        from PostgreSQL 8.3 or later and can only be used with PostgreSQL servers
-        running these versions. It uses the |lo_truncate|_ libpq function.
+        The method will only be available if Psycopg has been built against
+        libpq from PostgreSQL 8.3 or later and can only be used with
+        PostgreSQL servers running these versions. It uses the |lo_truncate|_
+        libpq function.
 
        .. |lo_truncate| replace:: `!lo_truncate()`
        .. _lo_truncate: http://www.postgresql.org/docs/current/static/lo-interfaces.html#LO-TRUNCATE
 
-        .. warning::
+        .. versionadded:: 2.2.0
 
-            If Psycopg is built with |lo_truncate| support (i.e. if the
-            :program:`pg_config` used during setup is version >= 8.3), but at
-            runtime an older libpq is found, Psycopg will fail to import. See
-            :ref:`the lo_truncate FAQ <faq-lo_truncate>` about the problem.
+        .. versionchanged:: 2.6.0
+            added support for *len* > 2GB.
+
+        .. warning::
+
+            If Psycopg is built with |lo_truncate| support or with the 64 bits API
+            support (resp. from PostgreSQL versions 8.3 and 9.3) but at runtime an
+            older version of the dynamic library is found, the ``psycopg2`` module
+            will fail to import. See :ref:`the lo_truncate FAQ <faq-lo_truncate>`
+            about the problem.
 
    .. method:: close()
@@ -176,6 +208,31 @@ functionalities defined by the |DBAPI|_.
 
    .. versionadded:: 2.2.0
 
+.. function:: libpq_version()
+
+    Return the version number of the ``libpq`` dynamic library loaded as an
+    integer, in the same format of `~connection.server_version`.
+
+    Raise `~psycopg2.NotSupportedError` if the ``psycopg2`` module was
+    compiled with a ``libpq`` version lesser than 9.1 (which can be detected
+    by the `~psycopg2.__libpq_version__` constant).
+
+    .. seealso:: libpq docs for `PQlibVersion()`__.
+
+    .. __: http://www.postgresql.org/docs/current/static/libpq-misc.html#LIBPQ-PQLIBVERSION
+
+.. function:: quote_ident(str, scope)
+
+    Return quoted identifier according to PostgreSQL quoting rules.
+
+    The *scope* must be a `connection` or a `cursor`, the underlying
+    connection encoding is used for any necessary character conversion.
+
+    Requires libpq >= 9.0.
+
+    .. seealso:: libpq docs for `PQescapeIdentifier()`__
+
+    .. __: http://www.postgresql.org/docs/current/static/libpq-exec.html#LIBPQ-PQESCAPEIDENTIFIER
+
 .. _sql-adaptation-objects:
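A sketch combining the two functions documented above: checking the runtime libpq and then using `quote_ident()` to build a query with a dynamic column name. Both functions assume psycopg 2.7 (the release this changelog describes); the DSN, table and column names are illustrative::

    import psycopg2
    from psycopg2.extensions import libpq_version, quote_ident

    conn = psycopg2.connect("dbname=test")   # placeholder DSN
    cur = conn.cursor()

    print(psycopg2.__libpq_version__)   # libpq version used at compile time
    print(libpq_version())              # libpq version loaded at runtime (needs libpq >= 9.1)

    # Identifiers cannot be passed as query parameters: quote them explicitly.
    column = 'User Name'                # arbitrary, possibly hostile, identifier
    query = "SELECT %s FROM people" % quote_ident(column, cur)
    cur.execute(query)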
@@ -189,7 +246,7 @@ deal with Python objects adaptation:
 
 .. function:: adapt(obj)
 
-    Return the SQL representation of *obj* as a string. Raise a
+    Return the SQL representation of *obj* as an `ISQLQuote`. Raise a
    `~psycopg2.ProgrammingError` if how to adapt the object is unknown.
    In order to allow new objects to be adapted, register a new adapter for it
    using the `register_adapter()` function.

@@ -203,7 +260,7 @@ deal with Python objects adaptation:
    Register a new adapter for the objects of class *class*.
 
    *adapter* should be a function taking a single argument (the object
-    to adapt) and returning an object conforming the `ISQLQuote`
+    to adapt) and returning an object conforming to the `ISQLQuote`
    protocol (e.g. exposing a `!getquoted()` method). The `AsIs` is
    often useful for this task.
doc/src/extras.rst

@@ -160,23 +160,27 @@ JSON_ adaptation
 ^^^^^^^^^^^^^^^^
 
 .. versionadded:: 2.5
+.. versionchanged:: 2.5.4
+    added |jsonb| support. In previous versions |jsonb| values are returned
+    as strings. See :ref:`the FAQ <faq-jsonb-adapt>` for a workaround.
 
-Psycopg can adapt Python objects to and from the PostgreSQL |pgjson|_ type.
-With PostgreSQL 9.2 adaptation is available out-of-the-box. To use JSON data
-with previous database versions (either with the `9.1 json extension`__, but
-even if you want to convert text fields to JSON) you can use
-`register_json()`.
+Psycopg can adapt Python objects to and from the PostgreSQL |pgjson|_ and
+|jsonb| types. With PostgreSQL 9.2 and following versions adaptation is
+available out-of-the-box. To use JSON data with previous database versions
+(either with the `9.1 json extension`__, but even if you want to convert text
+fields to JSON) you can use the `register_json()` function.
 
 .. __: http://people.planetpostgresql.org/andrew/index.php?/archives/255-JSON-for-PG-9.2-...-and-now-for-9.1!.html
 
-The Python library used to convert Python objects to JSON depends on the
-language version: with Python 2.6 and following the :py:mod:`json` module from
-the standard library is used; with previous versions the `simplejson`_ module
-is used if available. Note that the last `!simplejson` version supporting
-Python 2.4 is the 2.0.9.
+The Python library used by default to convert Python objects to JSON and to
+parse data from the database depends on the language version: with Python 2.6
+and following the :py:mod:`json` module from the standard library is used;
+with previous versions the `simplejson`_ module is used if available. Note
+that the last `!simplejson` version supporting Python 2.4 is the 2.0.9.
 
 .. _JSON: http://www.json.org/
 .. |pgjson| replace:: :sql:`json`
+.. |jsonb| replace:: :sql:`jsonb`
 .. _pgjson: http://www.postgresql.org/docs/current/static/datatype-json.html
 .. _simplejson: http://pypi.python.org/pypi/simplejson/

@@ -186,8 +190,8 @@ the `Json` adapter::
    curs.execute("insert into mytable (jsondata) values (%s)",
        [Json({'a': 100})])
 
-Reading from the database, |pgjson| values will be automatically converted to
-Python objects.
+Reading from the database, |pgjson| and |jsonb| values will be automatically
+converted to Python objects.
 
 .. note::

@@ -233,9 +237,11 @@ or you can subclass it overriding the `~Json.dumps()` method::
        [MyJson({'a': 100})])
 
 Customizing the conversion from PostgreSQL to Python can be done passing a
-custom `!loads()` function to `register_json()` (or `register_default_json()`
-for PostgreSQL 9.2). For example, if you want to convert the float values
-from :sql:`json` into :py:class:`~decimal.Decimal` you can use::
+custom `!loads()` function to `register_json()`. For the builtin data types
+(|pgjson| from PostgreSQL 9.2, |jsonb| from PostgreSQL 9.4) use
+`register_default_json()` and `register_default_jsonb()`. For example, if you
+want to convert the float values from :sql:`json` into
+:py:class:`~decimal.Decimal` you can use::
 
    loads = lambda x: json.loads(x, parse_float=Decimal)
    psycopg2.extras.register_json(conn, loads=loads)
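A sketch of the Decimal customization described in the last hunk, written out with its imports so it runs as-is against a connection; the DSN is a placeholder and the literal :sql:`json` value stands in for a table column::

    import json
    from decimal import Decimal

    import psycopg2
    import psycopg2.extras

    conn = psycopg2.connect("dbname=test")   # placeholder DSN

    # Parse json floats as Decimal instead of float on this connection.
    loads = lambda x: json.loads(x, parse_float=Decimal)
    psycopg2.extras.register_json(conn, loads=loads)

    cur = conn.cursor()
    cur.execute("""SELECT '{"price": 9.99}'::json""")
    doc = cur.fetchone()[0]
    print(doc['price'], type(doc['price']))   # 9.99 <class 'decimal.Decimal'>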
@@ -248,8 +254,15 @@ from :sql:`json` into :py:class:`~decimal.Decimal` you can use::
 
 .. autofunction:: register_json
 
+    .. versionchanged:: 2.5.4
+        added the *name* parameter to enable :sql:`jsonb` support.
+
 .. autofunction:: register_default_json
 
+.. autofunction:: register_default_jsonb
+
+    .. versionadded:: 2.5.4
+
 
 .. index::
@@ -598,3 +611,6 @@ Coroutine support
 
 .. autofunction:: wait_select(conn)
 
+    .. versionchanged:: 2.6.2
+        allow to cancel a query using :kbd:`Ctrl-C`, see
+        :ref:`the FAQ <faq-interrupt-query>` for an example.
@ -137,6 +137,20 @@ Psycopg automatically converts PostgreSQL :sql:`json` data into Python objects.
|
||||||
See :ref:`adapt-json` for further details.
|
See :ref:`adapt-json` for further details.
|
||||||
|
|
||||||
|
|
||||||
|
.. _faq-jsonb-adapt:
|
||||||
|
.. cssclass:: faq
|
||||||
|
|
||||||
|
Psycopg converts :sql:`json` values into Python objects but :sql:`jsonb` values are returned as strings. Can :sql:`jsonb` be converted automatically?
|
||||||
|
Automatic conversion of :sql:`jsonb` values is supported from Psycopg
|
||||||
|
release 2.5.4. For previous versions you can register the :sql:`json`
|
||||||
|
typecaster on the :sql:`jsonb` oids (which are known and not suppsed to
|
||||||
|
change in future PostgreSQL versions)::
|
||||||
|
|
||||||
|
psycopg2.extras.register_json(oid=3802, array_oid=3807, globally=True)
|
||||||
|
|
||||||
|
See :ref:`adapt-json` for further details.
|
||||||
|
|
||||||
|
|
||||||
.. _faq-bytea-9.0:
|
.. _faq-bytea-9.0:
|
||||||
.. cssclass:: faq
|
.. cssclass:: faq
|
||||||
|
|
||||||
|
@@ -209,6 +223,37 @@ What are the advantages or disadvantages of using named cursors?
    little memory on the client and to skip or discard parts of the result set.
 
 
+.. _faq-interrupt-query:
+.. cssclass:: faq
+
+How do I interrupt a long-running query in an interactive shell?
+    Normally the interactive shell becomes unresponsive to :kbd:`Ctrl-C` when
+    running a query. Using a connection in green mode allows Python to
+    receive and handle the interrupt, although it may leave the connection
+    broken, if the async callback doesn't handle the `!KeyboardInterrupt`
+    correctly.
+
+    Starting from psycopg 2.6.2, the `~psycopg2.extras.wait_select` callback
+    can handle a :kbd:`Ctrl-C` correctly. For previous versions, you can use
+    `this implementation`__.
+
+    .. __: http://initd.org/psycopg/articles/2014/07/20/cancelling-postgresql-statements-python/
+
+    .. code-block:: pycon
+
+        >>> psycopg2.extensions.set_wait_callback(psycopg2.extras.wait_select)
+        >>> cnn = psycopg2.connect('')
+        >>> cur = cnn.cursor()
+        >>> cur.execute("select pg_sleep(10)")
+        ^C
+        Traceback (most recent call last):
+          File "<stdin>", line 1, in <module>
+        QueryCanceledError: canceling statement due to user request
+
+        >>> cnn.rollback()
+        >>> # You can use the connection and cursor again from here
+
+
 .. _faq-compile:
 
 Problems compiling and deploying psycopg2
@@ -234,13 +279,20 @@ I can't compile `!psycopg2`: the compiler says *error: libpq-fe.h: No such file
 .. cssclass:: faq
 
 `!psycopg2` raises `!ImportError` with message *_psycopg.so: undefined symbol: lo_truncate* when imported.
-    This means that Psycopg has been compiled with |lo_truncate|_ support,
-    which means that the libpq used at compile time was version >= 8.3, but at
-    runtime an older libpq library is found. You can use::
+    This means that Psycopg was compiled with |lo_truncate|_ support (*i.e.*
+    the libpq used at compile time was version >= 8.3) but at runtime an older
+    libpq dynamic library is found.
+
+    Fast-forward several years, if the message reports *undefined symbol:
+    lo_truncate64* it means that Psycopg was built with large objects 64 bits
+    API support (*i.e.* the libpq used at compile time was at least 9.3) but
+    at runtime an older libpq dynamic library is found.
+
+    You can use::
 
        $ ldd /path/to/packages/psycopg2/_psycopg.so | grep libpq
 
-    to find what is the version used at runtime.
+    to find what is the libpq dynamic library used at runtime.
 
    You can avoid the problem by using the same version of the
    :program:`pg_config` at install time and the libpq at runtime.
@@ -43,9 +43,9 @@ Psycopg 2 is both Unicode and Python 3 friendly.
     cursor
     advanced
     extensions
+    extras
     tz
     pool
-    extras
     errorcodes
     faq
     news
@@ -14,9 +14,12 @@ mature as the C implementation yet.

 The current `!psycopg2` implementation supports:

+..
+    NOTE: keep consistent with setup.py and the /features/ page.
+
 - Python 2 versions from 2.5 to 2.7
-- Python 3 versions from 3.1 to 3.3
-- PostgreSQL versions from 7.4 to 9.2
+- Python 3 versions from 3.1 to 3.4
+- PostgreSQL versions from 7.4 to 9.4

 .. _PostgreSQL: http://www.postgresql.org/
 .. _Python: http://www.python.org/
@@ -202,6 +205,33 @@ supported.

+.. index::
+    single: tests
+
+.. _test-suite:
+
+Running the test suite
+^^^^^^^^^^^^^^^^^^^^^^
+
+The included ``Makefile`` allows to run all the tests included in the
+distribution. Just run::
+
+    make
+    make check
+
+The tests run against a database called ``psycopg2_test`` on UNIX socket and
+the standard port. You can configure a different database to run the test by
+setting the environment variables:
+
+- :envvar:`PSYCOPG2_TESTDB`
+- :envvar:`PSYCOPG2_TESTDB_HOST`
+- :envvar:`PSYCOPG2_TESTDB_PORT`
+- :envvar:`PSYCOPG2_TESTDB_USER`
+
+The database should already exist before running the tests.
+
+
 .. index::
     single: debug
     single: PSYCOPG_DEBUG

@@ -222,13 +252,13 @@ order to create a debug package:

 - :ref:`Compile and install <source-package>` the package.

-- Set the :envvar:`PSYCOPG_DEBUG` variable::
+- Set the :envvar:`PSYCOPG_DEBUG` environment variable::

     $ export PSYCOPG_DEBUG=1

 - Run your program (making sure that the `!psycopg2` package imported is the
   one you just compiled and not e.g. the system one): you will have a copious
-  stream of informations printed on stdout.
+  stream of informations printed on stderr.

 .. __: http://initd.org/psycopg/download/
@@ -78,6 +78,7 @@ The module interface respects the standard defined in the |DBAPI|_.

     .. seealso::

+        - `~psycopg2.extensions.parse_dsn`
         - libpq `connection string syntax`__
         - libpq supported `connection parameters`__
         - libpq supported `environment variables`__

@@ -91,7 +92,6 @@ The module interface respects the standard defined in the |DBAPI|_.

     The parameters *connection_factory* and *async* are Psycopg extensions
     to the |DBAPI|.

 .. data:: apilevel

     String constant stating the supported DB API level. For `psycopg2` is

@@ -109,9 +109,16 @@ The module interface respects the standard defined in the |DBAPI|_.
     by the interface. For `psycopg2` is ``pyformat``. See also
     :ref:`query-parameters`.

+.. data:: __libpq_version__
+
+    Integer constant reporting the version of the ``libpq`` library this
+    ``psycopg2`` module was compiled with (in the same format of
+    `~connection.server_version`). If this value is greater or equal than
+    ``90100`` then you may query the version of the actually loaded library
+    using the `~psycopg2.extensions.libpq_version()` function.
+
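As a hedged illustration of the two version numbers described above (using only
the attributes documented in this hunk)::

    import psycopg2
    import psycopg2.extensions

    compiled = psycopg2.__libpq_version__      # libpq found at build time
    print("built against libpq %d" % compiled)

    if compiled >= 90100:
        # PQlibVersion() is only available from libpq 9.1 onwards
        loaded = psycopg2.extensions.libpq_version()
        print("runtime libpq is %d" % loaded)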
 .. index::
     single: Exceptions; DB API

 .. _dbapi-exceptions:

@@ -122,12 +129,12 @@ Exceptions

 In compliance with the |DBAPI|_, the module makes informations about errors
 available through the following exceptions:

 .. exception:: Warning

     Exception raised for important warnings like data truncations while
     inserting, etc. It is a subclass of the Python `~exceptions.StandardError`.

 .. exception:: Error

     Exception that is the base class of all other error exceptions. You can
     use this to catch all errors with one single `!except` statement. Warnings

@@ -150,7 +157,7 @@ available through the following exceptions:

         >>> try:
         ...     cur.execute("SELECT * FROM barf")
-        ... except Exception, e:
+        ... except psycopg2.Error as e:
         ...     pass

         >>> e.pgcode

@@ -159,6 +166,7 @@ available through the following exceptions:
         ERROR: relation "barf" does not exist
         LINE 1: SELECT * FROM barf
                               ^

     .. attribute:: cursor

         The cursor the exception was raised from; `None` if not applicable.

@@ -170,7 +178,7 @@ available through the following exceptions:

         >>> try:
         ...     cur.execute("SELECT * FROM barf")
-        ... except Exception, e:
+        ... except psycopg2.Error, e:
         ...     pass

         >>> e.diag.severity

@@ -195,41 +203,41 @@ available through the following exceptions:

     Exception raised for errors that are related to the database. It is a
     subclass of `Error`.

 .. exception:: DataError

     Exception raised for errors that are due to problems with the processed
     data like division by zero, numeric value out of range, etc. It is a
     subclass of `DatabaseError`.

 .. exception:: OperationalError

     Exception raised for errors that are related to the database's operation
     and not necessarily under the control of the programmer, e.g. an
     unexpected disconnect occurs, the data source name is not found, a
     transaction could not be processed, a memory allocation error occurred
     during processing, etc. It is a subclass of `DatabaseError`.

 .. exception:: IntegrityError

     Exception raised when the relational integrity of the database is
     affected, e.g. a foreign key check fails. It is a subclass of
     `DatabaseError`.

 .. exception:: InternalError

     Exception raised when the database encounters an internal error, e.g. the
     cursor is not valid anymore, the transaction is out of sync, etc. It is a
     subclass of `DatabaseError`.

 .. exception:: ProgrammingError

     Exception raised for programming errors, e.g. table not found or already
     exists, syntax error in the SQL statement, wrong number of parameters
     specified, etc. It is a subclass of `DatabaseError`.

 .. exception:: NotSupportedError

     Exception raised in case a method or database API was used which is not
     supported by the database, e.g. requesting a `!rollback()` on a
     connection that does not support transaction or has transactions turned
@@ -3,37 +3,57 @@
 ticket role
 ~~~~~~~~~~~

-An interpreted text role to link docs to lighthouse issues.
+An interpreted text role to link docs to tickets issues.

 :copyright: Copyright 2013 by Daniele Varrazzo.
 """

+import re
 from docutils import nodes, utils
 from docutils.parsers.rst import roles


 def ticket_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
-    try:
-        num = int(text.replace('#', ''))
-    except ValueError:
-        msg = inliner.reporter.error(
-            "ticket number must be... a number, got '%s'" % text)
-        prb = inliner.problematic(rawtext, rawtext, msg)
-        return [prb], [msg]
-
-    url_pattern = inliner.document.settings.env.app.config.ticket_url
-    if url_pattern is None:
+    cfg = inliner.document.settings.env.app.config
+    if cfg.ticket_url is None:
         msg = inliner.reporter.warning(
             "ticket not configured: please configure ticket_url in conf.py")
         prb = inliner.problematic(rawtext, rawtext, msg)
         return [prb], [msg]

-    url = url_pattern % num
-    roles.set_classes(options)
-    node = nodes.reference(rawtext, 'ticket ' + utils.unescape(text),
-        refuri=url, **options)
-    return [node], []
+    rv = [nodes.Text(name + ' ')]
+    tokens = re.findall(r'(#?\d+)|([^\d#]+)', text)
+    for ticket, noise in tokens:
+        if ticket:
+            num = int(ticket.replace('#', ''))
+
+            # Push numbers of the older tickets ahead.
+            # We moved the tickets from a different tracker to GitHub and the
+            # latter already had a few ticket numbers taken (as merge
+            # requests).
+            remap_until = cfg.ticket_remap_until
+            remap_offset = cfg.ticket_remap_offset
+            if remap_until and remap_offset:
+                if num <= remap_until:
+                    num += remap_offset
+
+            url = cfg.ticket_url % num
+            roles.set_classes(options)
+            node = nodes.reference(ticket, utils.unescape(ticket),
+                refuri=url, **options)
+
+            rv.append(node)
+
+        else:
+            assert noise
+            rv.append(nodes.Text(noise))
+
+    return rv, []


 def setup(app):
     app.add_config_value('ticket_url', None, 'env')
+    app.add_config_value('ticket_remap_until', None, 'env')
+    app.add_config_value('ticket_remap_offset', None, 'env')
     app.add_role('ticket', ticket_role)
+    app.add_role('tickets', ticket_role)
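A sketch of how a Sphinx ``conf.py`` might wire up the role above; the URL and
remap values are hypothetical placeholders, not the project's actual settings::

    # conf.py (illustrative values only)
    extensions = ['ticket_role']

    # %s is replaced with the (possibly remapped) ticket number
    ticket_url = 'https://github.com/psycopg/psycopg2/issues/%s'

    # tickets up to this number came from the old tracker and get shifted
    ticket_remap_until = 25
    ticket_remap_offset = 30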
166
doc/src/tools/pypi_docs_upload.py
Executable file

@@ -0,0 +1,166 @@
# -*- coding: utf-8 -*-
"""
Standalone script to upload a project docs on PyPI

Hacked together from the following distutils extension, available from
https://bitbucket.org/jezdez/sphinx-pypi-upload/overview (ver. 0.2.1)

    sphinx_pypi_upload
    ~~~~~~~~~~~~~~~~~~

    setuptools command for uploading Sphinx documentation to PyPI

    :author: Jannis Leidel
    :contact: jannis@leidel.info
    :copyright: Copyright 2009, Jannis Leidel.
    :license: BSD, see LICENSE for details.
"""

import os
import sys
import socket
import zipfile
import httplib
import base64
import urlparse
import tempfile
import cStringIO as StringIO
from ConfigParser import ConfigParser

from distutils import log
from distutils.command.upload import upload
from distutils.errors import DistutilsOptionError


class UploadDoc(object):
    """Distutils command to upload Sphinx documentation."""

    def __init__(self, name, upload_dir, repository=None):
        self.name = name
        self.upload_dir = upload_dir

        p = ConfigParser()
        p.read(os.path.expanduser('~/.pypirc'))
        self.username = p.get('pypi', 'username')
        self.password = p.get('pypi', 'password')

        self.show_response = False
        self.repository = repository or upload.DEFAULT_REPOSITORY

    def create_zipfile(self):
        # name = self.distribution.metadata.get_name()
        name = self.name
        tmp_dir = tempfile.mkdtemp()
        tmp_file = os.path.join(tmp_dir, "%s.zip" % name)
        zip_file = zipfile.ZipFile(tmp_file, "w")
        for root, dirs, files in os.walk(self.upload_dir):
            if not files:
                raise DistutilsOptionError, \
                    "no files found in upload directory '%s'" % self.upload_dir
            for name in files:
                full = os.path.join(root, name)
                relative = root[len(self.upload_dir):].lstrip(os.path.sep)
                dest = os.path.join(relative, name)
                zip_file.write(full, dest)
        zip_file.close()
        return tmp_file

    def upload_file(self, filename):
        content = open(filename, 'rb').read()
        # meta = self.distribution.metadata
        data = {
            ':action': 'doc_upload',
            'name': self.name,  # meta.get_name(),
            'content': (os.path.basename(filename), content),
        }
        # set up the authentication
        auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if type(value) != type([]):
                value = [value]
            for value in value:
                if type(value) is tuple:
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""
                    value = str(value)
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"' % key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting documentation to %s" % (self.repository), log.INFO)

        # build the Request
        # We can't use urllib2 since we need to send the Basic
        # auth right with the first request
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        assert not params and not query and not fragments
        if schema == 'http':
            http = httplib.HTTPConnection(netloc)
        elif schema == 'https':
            http = httplib.HTTPSConnection(netloc)
        else:
            raise AssertionError, "unsupported schema " + schema

        data = ''
        loglevel = log.INFO
        try:
            http.connect()
            http.putrequest("POST", url)
            http.putheader('Content-type',
                           'multipart/form-data; boundary=%s' % boundary)
            http.putheader('Content-length', str(len(body)))
            http.putheader('Authorization', auth)
            http.endheaders()
            http.send(body)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return

        response = http.getresponse()
        if response.status == 200:
            self.announce('Server response (%s): %s' % (response.status, response.reason),
                          log.INFO)
        elif response.status == 301:
            location = response.getheader('Location')
            if location is None:
                location = 'http://packages.python.org/%s/' % self.name  # meta.get_name()
            self.announce('Upload successful. Visit %s' % location,
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (response.status, response.reason),
                          log.ERROR)
        if self.show_response:
            print '-'*75, response.read(), '-'*75

    def run(self):
        zip_file = self.create_zipfile()
        self.upload_file(zip_file)
        os.remove(zip_file)

    def announce(self, msg, *args, **kwargs):
        print msg


if __name__ == '__main__':
    if len(sys.argv) != 3:
        print >>sys.stderr, "usage: %s PROJECT UPLOAD_DIR" % sys.argv[0]
        sys.exit(2)

    project, upload_dir = sys.argv[1:]
    up = UploadDoc(project, upload_dir=upload_dir)
    up.run()
@@ -145,13 +145,15 @@ query:

 The problem with the query parameters
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-The SQL representation for many data types is often not the same of the Python
-string representation. The classic example is with single quotes in
-strings: SQL uses them as string constants bounds and requires them to be
-escaped, whereas in Python single quotes can be left unescaped in strings
-bounded by double quotes. For this reason a naïve approach to the composition
-of query strings, e.g. using string concatenation, is a recipe for terrible
-problems::
+The SQL representation of many data types is often different from their Python
+string representation. The typical example is with single quotes in strings:
+in SQL single quotes are used as string literal delimiters, so the ones
+appearing inside the string itself must be escaped, whereas in Python single
+quotes can be left unescaped if the string is delimited by double quotes.
+
+Because of the difference, sometimes subtle, between the data types
+representations, a naïve approach to query strings composition, such as using
+Python strings concatenation, is a recipe for *terrible* problems::

     >>> SQL = "INSERT INTO authors (name) VALUES ('%s');" # NEVER DO THIS
     >>> data = ("O'Reilly", )

@@ -160,13 +162,13 @@ problems::
     LINE 1: INSERT INTO authors (name) VALUES ('O'Reilly')
                                                    ^

-If the variable containing the data to be sent to the database comes from an
-untrusted source (e.g. a form published on a web site) an attacker could
+If the variables containing the data to send to the database come from an
+untrusted source (such as a form published on a web site) an attacker could
 easily craft a malformed string, either gaining access to unauthorized data or
 performing destructive operations on the database. This form of attack is
 called `SQL injection`_ and is known to be one of the most widespread forms of
-attack to servers. Before continuing, please print `this page`__ as a memo and
-hang it onto your desk.
+attack to database servers. Before continuing, please print `this page`__ as a
+memo and hang it onto your desk.

 .. _SQL injection: http://en.wikipedia.org/wiki/SQL_injection
 .. __: http://xkcd.com/327/
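For contrast with the concatenation anti-pattern above, the safe form passes
the values separately and lets the driver do the quoting (standard psycopg2
usage, assuming an open cursor ``cur``)::

    >>> SQL = "INSERT INTO authors (name) VALUES (%s);"  # note: no quotes
    >>> data = ("O'Reilly", )
    >>> cur.execute(SQL, data)  # the adapter escapes the single quote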
@@ -243,7 +245,8 @@ types:
 +--------------------+-------------------------+--------------------------+
 | `!date`            | :sql:`date`             | :ref:`adapt-date`        |
 +--------------------+-------------------------+                          |
-| `!time`            | :sql:`time`             |                          |
+| `!time`            | | :sql:`time`           |                          |
+|                    | | :sql:`timetz`         |                          |
 +--------------------+-------------------------+                          |
 | `!datetime`        | | :sql:`timestamp`      |                          |
 |                    | | :sql:`timestamptz`    |                          |
@@ -480,7 +483,7 @@ Date/Time objects adaptation

 Python builtin `~datetime.datetime`, `~datetime.date`,
 `~datetime.time`, `~datetime.timedelta` are converted into PostgreSQL's
-:sql:`timestamp[tz]`, :sql:`date`, :sql:`time`, :sql:`interval` data types.
+:sql:`timestamp[tz]`, :sql:`date`, :sql:`time[tz]`, :sql:`interval` data types.
 Time zones are supported too. The Egenix `mx.DateTime`_ objects are adapted
 the same way::
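A small sketch of the behaviour the :sql:`time[tz]` change refers to, shown
through `cursor.mogrify()`; it assumes an open cursor ``cur``, and the exact
literals are indicative::

    import datetime
    import psycopg2.tz

    naive = datetime.time(13, 30)
    aware = datetime.time(13, 30,
        tzinfo=psycopg2.tz.FixedOffsetTimezone(offset=60))

    print(cur.mogrify("SELECT %s, %s", (naive, aware)))
    # naive times are rendered as ...::time, tz-aware ones as ...::timetz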
@@ -676,7 +679,7 @@ older versions).

 By default even a simple :sql:`SELECT` will start a transaction: in
 long-running programs, if no further action is taken, the session will
-remain "idle in transaction", a condition non desiderable for several
+remain "idle in transaction", an undesirable condition for several
 reasons (locks are held by the session, tables bloat...). For long lived
 scripts, either make sure to terminate a transaction as soon as possible or
 use an autocommit connection.
@@ -702,13 +705,28 @@ managers* and can be used with the ``with`` statement::

 When a connection exits the ``with`` block, if no exception has been raised by
 the block, the transaction is committed. In case of exception the transaction
-is rolled back. In no case the connection is closed: a connection can be used
-in more than a ``with`` statement and each ``with`` block is effectively
-wrapped in a transaction.
+is rolled back.

 When a cursor exits the ``with`` block it is closed, releasing any resource
 eventually associated with it. The state of the transaction is not affected.

+Note that, unlike file objects or other resources, exiting the connection's
+``with`` block *doesn't close the connection* but only the transaction
+associated with it: a connection can be used in more than a ``with`` statement
+and each ``with`` block is effectively wrapped in a separate transaction::
+
+    conn = psycopg2.connect(DSN)
+
+    with conn:
+        with conn.cursor() as curs:
+            curs.execute(SQL1)
+
+    with conn:
+        with conn.cursor() as curs:
+            curs.execute(SQL2)
+
+    conn.close()


 .. index::
@@ -897,6 +915,20 @@ using the |lo_import|_ and |lo_export|_ libpq functions.
 .. |lo_export| replace:: `!lo_export()`
 .. _lo_export: http://www.postgresql.org/docs/current/static/lo-interfaces.html#LO-EXPORT

+.. versionchanged:: 2.6
+    added support for large objects greater than 2GB. Note that the support is
+    enabled only if all the following conditions are verified:
+
+    - the Python build is 64 bits;
+    - the extension was built against at least libpq 9.3;
+    - the server version is at least PostgreSQL 9.3
+      (`~connection.server_version` must be >= ``90300``).
+
+    If Psycopg was built with 64 bits large objects support (i.e. the first
+    two conditions above are verified), the `psycopg2.__version__` constant
+    will contain the ``lo64`` flag. If any of the conditions is not met
+    several `!lobject` methods will fail if the arguments exceed 2GB.
+
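An illustrative check for the ``lo64`` flag mentioned above::

    import psycopg2

    # e.g. '2.6 (dt dec pq3 ext lo64)' on a build with 64-bit large objects
    print(psycopg2.__version__)

    if 'lo64' not in psycopg2.__version__:
        print("large objects over 2GB will not be usable with this build")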
 .. index::
@@ -57,7 +57,7 @@ from psycopg2._psycopg import IntegrityError, InterfaceError, InternalError
 from psycopg2._psycopg import NotSupportedError, OperationalError

 from psycopg2._psycopg import _connect, apilevel, threadsafety, paramstyle
-from psycopg2._psycopg import __version__
+from psycopg2._psycopg import __version__, __libpq_version__

 from psycopg2 import tz
46
lib/_json.py

@@ -47,6 +47,10 @@ else:
 JSON_OID = 114
 JSONARRAY_OID = 199

+# oids from PostgreSQL 9.4
+JSONB_OID = 3802
+JSONBARRAY_OID = 3807
+
 class Json(object):
     """
     An `~psycopg2.extensions.ISQLQuote` wrapper to adapt a Python object to

@@ -98,11 +102,11 @@ class Json(object):
     else:
         def __str__(self):
             # getquoted is binary in Py3
-            return self.getquoted().decode('ascii', errors='replace')
+            return self.getquoted().decode('ascii', 'replace')


 def register_json(conn_or_curs=None, globally=False, loads=None,
-        oid=None, array_oid=None):
+        oid=None, array_oid=None, name='json'):
     """Create and register typecasters converting :sql:`json` type to Python objects.

     :param conn_or_curs: a connection or cursor used to find the :sql:`json`

@@ -118,17 +122,19 @@ def register_json(conn_or_curs=None, globally=False, loads=None,
         queried on *conn_or_curs*
     :param array_oid: the OID of the :sql:`json[]` array type if known;
         if not, it will be queried on *conn_or_curs*
+    :param name: the name of the data type to look for in *conn_or_curs*

     The connection or cursor passed to the function will be used to query the
-    database and look for the OID of the :sql:`json` type. No query is
-    performed if *oid* and *array_oid* are provided. Raise
-    `~psycopg2.ProgrammingError` if the type is not found.
+    database and look for the OID of the :sql:`json` type (or an alternative
+    type if *name* is provided). No query is performed if *oid* and *array_oid*
+    are provided. Raise `~psycopg2.ProgrammingError` if the type is not found.

     """
     if oid is None:
-        oid, array_oid = _get_json_oids(conn_or_curs)
+        oid, array_oid = _get_json_oids(conn_or_curs, name)

-    JSON, JSONARRAY = _create_json_typecasters(oid, array_oid, loads)
+    JSON, JSONARRAY = _create_json_typecasters(
+        oid, array_oid, loads=loads, name=name.upper())

     register_type(JSON, not globally and conn_or_curs or None)

@@ -149,7 +155,19 @@ def register_default_json(conn_or_curs=None, globally=False, loads=None):
     return register_json(conn_or_curs=conn_or_curs, globally=globally,
         loads=loads, oid=JSON_OID, array_oid=JSONARRAY_OID)

-def _create_json_typecasters(oid, array_oid, loads=None):
+def register_default_jsonb(conn_or_curs=None, globally=False, loads=None):
+    """
+    Create and register :sql:`jsonb` typecasters for PostgreSQL 9.4 and following.
+
+    As in `register_default_json()`, the function allows to register a
+    customized *loads* function for the :sql:`jsonb` type at its known oid for
+    PostgreSQL 9.4 and following versions. All the parameters have the same
+    meaning of `register_json()`.
+    """
+    return register_json(conn_or_curs=conn_or_curs, globally=globally,
+        loads=loads, oid=JSONB_OID, array_oid=JSONBARRAY_OID, name='jsonb')
+
+def _create_json_typecasters(oid, array_oid, loads=None, name='JSON'):
     """Create typecasters for json data type."""
     if loads is None:
         if json is None:

@@ -162,15 +180,15 @@ def _create_json_typecasters(oid, array_oid, loads=None):
             return None
         return loads(s)

-    JSON = new_type((oid, ), 'JSON', typecast_json)
+    JSON = new_type((oid, ), name, typecast_json)
     if array_oid is not None:
-        JSONARRAY = new_array_type((array_oid, ), "JSONARRAY", JSON)
+        JSONARRAY = new_array_type((array_oid, ), "%sARRAY" % name, JSON)
     else:
         JSONARRAY = None

     return JSON, JSONARRAY

-def _get_json_oids(conn_or_curs):
+def _get_json_oids(conn_or_curs, name='json'):
     # lazy imports
     from psycopg2.extensions import STATUS_IN_TRANSACTION
     from psycopg2.extras import _solve_conn_curs

@@ -185,8 +203,8 @@ def _get_json_oids(conn_or_curs):

     # get the oid for the hstore
     curs.execute(
-        "SELECT t.oid, %s FROM pg_type t WHERE t.typname = 'json';"
-        % typarray)
+        "SELECT t.oid, %s FROM pg_type t WHERE t.typname = %%s;"
+        % typarray, (name,))
     r = curs.fetchone()

     # revert the status of the connection as before the command

@@ -194,7 +212,7 @@ def _get_json_oids(conn_or_curs):
         conn.rollback()

     if not r:
-        raise conn.ProgrammingError("json data type not found")
+        raise conn.ProgrammingError("%s data type not found" % name)

     return r
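A brief usage sketch of the new *name* parameter and of
`register_default_jsonb()` (assuming a PostgreSQL 9.4 server; the DSN is a
placeholder)::

    import psycopg2
    import psycopg2.extras

    conn = psycopg2.connect("dbname=psycopg2_test")

    # look the jsonb oids up on the connection instead of hard-coding them
    psycopg2.extras.register_json(conn, name='jsonb')

    # or, with the 9.4 oids known in advance, skip the query entirely:
    # psycopg2.extras.register_default_jsonb(conn)

    cur = conn.cursor()
    cur.execute("""SELECT '{"b": null}'::jsonb""")
    print(cur.fetchone()[0])    # {'b': None}
    conn.close()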
@@ -56,9 +56,9 @@ try:
 except ImportError:
     pass

-from psycopg2._psycopg import adapt, adapters, encodings, connection, cursor, lobject, Xid
+from psycopg2._psycopg import adapt, adapters, encodings, connection, cursor, lobject, Xid, libpq_version, parse_dsn, quote_ident
 from psycopg2._psycopg import string_types, binary_types, new_type, new_array_type, register_type
-from psycopg2._psycopg import ISQLQuote, Notify, Diagnostics
+from psycopg2._psycopg import ISQLQuote, Notify, Diagnostics, Column

 from psycopg2._psycopg import QueryCanceledError, TransactionRollbackError

@@ -152,20 +152,22 @@ class NoneAdapter(object):


 # Create default json typecasters for PostgreSQL 9.2 oids
-from psycopg2._json import register_default_json
+from psycopg2._json import register_default_json, register_default_jsonb

 try:
     JSON, JSONARRAY = register_default_json()
+    JSONB, JSONBARRAY = register_default_jsonb()
 except ImportError:
     pass

-del register_default_json
+del register_default_json, register_default_jsonb


 # Create default Range typecasters
 from psycopg2._range import Range
 del Range


 # Add the "cleaned" version of the encodings to the key.
 # When the encoding is set its name is cleaned up from - and _ and turned
 # uppercase, so an encoding not respecting these rules wouldn't be found in the
@@ -434,7 +434,7 @@ class MinTimeLoggingCursor(LoggingCursor):

     def callproc(self, procname, vars=None):
         self.timestamp = _time.time()
-        return LoggingCursor.execute(self, procname, vars)
+        return LoggingCursor.callproc(self, procname, vars)


 # a dbtype and adapter for Python UUID type
@@ -575,15 +575,20 @@ def wait_select(conn):
     from psycopg2.extensions import POLL_OK, POLL_READ, POLL_WRITE

     while 1:
-        state = conn.poll()
-        if state == POLL_OK:
-            break
-        elif state == POLL_READ:
-            select.select([conn.fileno()], [], [])
-        elif state == POLL_WRITE:
-            select.select([], [conn.fileno()], [])
-        else:
-            raise conn.OperationalError("bad state from poll: %s" % state)
+        try:
+            state = conn.poll()
+            if state == POLL_OK:
+                break
+            elif state == POLL_READ:
+                select.select([conn.fileno()], [], [])
+            elif state == POLL_WRITE:
+                select.select([], [conn.fileno()], [])
+            else:
+                raise conn.OperationalError("bad state from poll: %s" % state)
+        except KeyboardInterrupt:
+            conn.cancel()
+            # the loop will be broken by a server error
+            continue


 def _solve_conn_curs(conn_or_curs):
@@ -965,7 +970,8 @@ def register_composite(name, conn_or_curs, globally=False, factory=None):


 # expose the json adaptation stuff into the module
-from psycopg2._json import json, Json, register_json, register_default_json
+from psycopg2._json import json, Json, register_json
+from psycopg2._json import register_default_json, register_default_jsonb


 # Expose range-related objects
10
lib/pool.py

@@ -42,8 +42,8 @@ class AbstractConnectionPool(object):
         with given parameters. The connection pool will support a maximum of
         about 'maxconn' connections.
         """
-        self.minconn = minconn
-        self.maxconn = maxconn
+        self.minconn = int(minconn)
+        self.maxconn = int(maxconn)
         self.closed = False

         self._args = args

@@ -86,7 +86,7 @@ class AbstractConnectionPool(object):
                 return conn
             else:
                 if len(self._used) == self.maxconn:
-                    raise PoolError("connection pool exausted")
+                    raise PoolError("connection pool exhausted")
                 return self._connect(key)

     def _putconn(self, conn, key=None, close=False):

@@ -204,8 +204,8 @@ class PersistentConnectionPool(AbstractConnectionPool):

         # we we'll need the thread module, to determine thread ids, so we
        # import it here and copy it in an instance variable
-        import thread
-        self.__thread = thread
+        import thread as _thread  # work around for 2to3 bug - see ticket #348
+        self.__thread = _thread

     def getconn(self):
         """Generate thread id and return a connection."""
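For orientation, a minimal sketch of how the pool classes touched above are
typically used; `SimpleConnectionPool` is the non-threaded subclass shipped in
`psycopg2.pool`, and the DSN is a placeholder::

    from psycopg2 import pool

    # minconn/maxconn are now coerced with int(), so values read from a
    # config file as strings keep working
    p = pool.SimpleConnectionPool(1, 5, "dbname=psycopg2_test")

    conn = p.getconn()
    try:
        with conn.cursor() as cur:
            cur.execute("SELECT 1")
            print(cur.fetchone())
    finally:
        p.putconn(conn)

    p.closeall()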
@@ -149,12 +149,6 @@ asis_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
     return type->tp_alloc(type, 0);
 }

-static PyObject *
-asis_repr(asisObject *self)
-{
-    return PyString_FromFormat("<psycopg2._psycopg.AsIs object at %p>", self);
-}
-
 /* object type */

@@ -163,14 +157,14 @@ asis_repr(asisObject *self)

 PyTypeObject asisType = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "psycopg2._psycopg.AsIs",
+    "psycopg2.extensions.AsIs",
     sizeof(asisObject), 0,
     asis_dealloc, /*tp_dealloc*/
     0,          /*tp_print*/
     0,          /*tp_getattr*/
     0,          /*tp_setattr*/
     0,          /*tp_compare*/
-    (reprfunc)asis_repr, /*tp_repr*/
+    0,          /*tp_repr*/
     0,          /*tp_as_number*/
     0,          /*tp_as_sequence*/
     0,          /*tp_as_mapping*/

@@ -200,17 +194,3 @@ PyTypeObject asisType = {
     0, /*tp_alloc*/
     asis_new, /*tp_new*/
 };
-
-
-/** module-level functions **/
-
-PyObject *
-psyco_AsIs(PyObject *module, PyObject *args)
-{
-    PyObject *obj;
-
-    if (!PyArg_ParseTuple(args, "O", &obj))
-        return NULL;
-
-    return PyObject_CallFunctionObjArgs((PyObject *)&asisType, obj, NULL);
-}
@@ -40,12 +40,6 @@ typedef struct {

 } asisObject;

-/* functions exported to psycopgmodule.c */
-
-HIDDEN PyObject *psyco_AsIs(PyObject *module, PyObject *args);
-#define psyco_AsIs_doc \
-    "AsIs(obj) -> new AsIs wrapper object"
-
 #ifdef __cplusplus
 }
 #endif
@@ -39,7 +39,7 @@ static unsigned char *
 binary_escape(unsigned char *from, size_t from_length,
                size_t *to_length, PGconn *conn)
 {
-#if PG_VERSION_HEX >= 0x080104
+#if PG_VERSION_NUM >= 80104
     if (conn)
         return PQescapeByteaConn(conn, from, from_length, to_length);
     else

@@ -254,11 +254,6 @@ binary_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
     return type->tp_alloc(type, 0);
 }

-static PyObject *
-binary_repr(binaryObject *self)
-{
-    return PyString_FromFormat("<psycopg2._psycopg.Binary object at %p>", self);
-}
-
 /* object type */

@@ -267,14 +262,14 @@ binary_repr(binaryObject *self)

 PyTypeObject binaryType = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "psycopg2._psycopg.Binary",
+    "psycopg2.extensions.Binary",
     sizeof(binaryObject), 0,
     binary_dealloc, /*tp_dealloc*/
     0,          /*tp_print*/
     0,          /*tp_getattr*/
     0,          /*tp_setattr*/
     0,          /*tp_compare*/
-    (reprfunc)binary_repr, /*tp_repr*/
+    0,          /*tp_repr*/
     0,          /*tp_as_number*/
     0,          /*tp_as_sequence*/
     0,          /*tp_as_mapping*/

@@ -304,17 +299,3 @@ PyTypeObject binaryType = {
     0, /*tp_alloc*/
     binary_new, /*tp_new*/
 };
-
-
-/** module-level functions **/
-
-PyObject *
-psyco_Binary(PyObject *module, PyObject *args)
-{
-    PyObject *str;
-
-    if (!PyArg_ParseTuple(args, "O", &str))
-        return NULL;
-
-    return PyObject_CallFunctionObjArgs((PyObject *)&binaryType, str, NULL);
-}
@@ -40,13 +40,6 @@ typedef struct {
     PyObject *conn;
 } binaryObject;

-/* functions exported to psycopgmodule.c */
-
-HIDDEN PyObject *psyco_Binary(PyObject *module, PyObject *args);
-#define psyco_Binary_doc \
-    "Binary(buffer) -> new binary object\n\n" \
-    "Build an object capable to hold a binary string value."
-
 #ifdef __cplusplus
 }
 #endif
@@ -35,9 +35,6 @@
 #include <string.h>


-extern HIDDEN PyObject *pyPsycopgTzModule;
-extern HIDDEN PyObject *pyPsycopgTzLOCAL;
-
 int
 psyco_adapter_datetime_init(void)
 {

@@ -65,7 +62,10 @@ _pydatetime_string_date_time(pydatetimeObject *self)
     char *fmt = NULL;
     switch (self->type) {
     case PSYCO_DATETIME_TIME:
-        fmt = "'%s'::time";
+        tz = PyObject_GetAttrString(self->wrapped, "tzinfo");
+        if (!tz) { goto error; }
+        fmt = (tz == Py_None) ? "'%s'::time" : "'%s'::timetz";
+        Py_DECREF(tz);
         break;
     case PSYCO_DATETIME_DATE:
         fmt = "'%s'::date";

@@ -214,12 +214,6 @@ pydatetime_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
     return type->tp_alloc(type, 0);
 }

-static PyObject *
-pydatetime_repr(pydatetimeObject *self)
-{
-    return PyString_FromFormat("<psycopg2._psycopg.datetime object at %p>",
-        self);
-}
-
 /* object type */

@@ -235,7 +229,7 @@ PyTypeObject pydatetimeType = {
     0,          /*tp_getattr*/
     0,          /*tp_setattr*/
     0,          /*tp_compare*/
-    (reprfunc)pydatetime_repr, /*tp_repr*/
+    0,          /*tp_repr*/
     0,          /*tp_as_number*/
     0,          /*tp_as_sequence*/
     0,          /*tp_as_mapping*/

@@ -392,9 +386,9 @@ psyco_DateFromTicks(PyObject *self, PyObject *args)
             Py_DECREF(args);
         }
     }
     else {
         PyErr_SetString(InterfaceError, "failed localtime call");
     }

     return res;
 }

@@ -420,9 +414,9 @@ psyco_TimeFromTicks(PyObject *self, PyObject *args)
             Py_DECREF(args);
         }
     }
     else {
         PyErr_SetString(InterfaceError, "failed localtime call");
     }

     return res;
 }

@@ -430,6 +424,8 @@ psyco_TimeFromTicks(PyObject *self, PyObject *args)
 PyObject *
 psyco_TimestampFromTicks(PyObject *self, PyObject *args)
 {
+    PyObject *m = NULL;
+    PyObject *tz = NULL;
     PyObject *res = NULL;
     struct tm tm;
     time_t t;

@@ -438,18 +434,25 @@ psyco_TimestampFromTicks(PyObject *self, PyObject *args)
     if (!PyArg_ParseTuple(args, "d", &ticks))
         return NULL;

+    /* get psycopg2.tz.LOCAL from pythonland */
+    if (!(m = PyImport_ImportModule("psycopg2.tz"))) { goto exit; }
+    if (!(tz = PyObject_GetAttrString(m, "LOCAL"))) { goto exit; }
+
     t = (time_t)floor(ticks);
     ticks -= (double)t;
-    if (localtime_r(&t, &tm)) {
-        res = _psyco_Timestamp(
-            tm.tm_year + 1900, tm.tm_mon + 1, tm.tm_mday,
-            tm.tm_hour, tm.tm_min, (double)tm.tm_sec + ticks,
-            pyPsycopgTzLOCAL);
-    }
-    else {
-        PyErr_SetString(InterfaceError, "failed localtime call");
-    }
+    if (!localtime_r(&t, &tm)) {
+        PyErr_SetString(InterfaceError, "failed localtime call");
+        goto exit;
+    }
+
+    res = _psyco_Timestamp(
+        tm.tm_year + 1900, tm.tm_mon + 1, tm.tm_mday,
+        tm.tm_hour, tm.tm_min, (double)tm.tm_sec + ticks,
+        tz);
+
+exit:
+    Py_DECREF(tz);
+    Py_XDECREF(m);
     return res;
 }
@@ -39,6 +39,14 @@ list_quote(listObject *self)
     /* adapt the list by calling adapt() recursively and then wrapping
        everything into "ARRAY[]" */
     PyObject *tmp = NULL, *str = NULL, *joined = NULL, *res = NULL;
+
+    /* list consisting of only NULL don't work with the ARRAY[] construct
+     * so we use the {NULL,...} syntax. Note however that list of lists where
+     * some element is a list of only null still fails: for that we should use
+     * the '{...}' syntax uniformly but we cannot do it in the current
+     * infrastructure. TODO in psycopg3 */
+    int all_nulls = 1;
+
     Py_ssize_t i, len;

     len = PyList_GET_SIZE(self->wrapped);

@@ -60,6 +68,7 @@ list_quote(listObject *self)
             quoted = microprotocol_getquoted(wrapped,
                 (connectionObject*)self->connection);
             if (quoted == NULL) goto error;
+            all_nulls = 0;
         }

         /* here we don't loose a refcnt: SET_ITEM does not change the

@@ -74,7 +83,12 @@ list_quote(listObject *self)
     joined = PyObject_CallMethod(str, "join", "(O)", tmp);
     if (joined == NULL) goto error;

-    res = Bytes_FromFormat("ARRAY[%s]", Bytes_AsString(joined));
+    /* PG doesn't like ARRAY[NULL..] */
+    if (!all_nulls) {
+        res = Bytes_FromFormat("ARRAY[%s]", Bytes_AsString(joined));
+    } else {
+        res = Bytes_FromFormat("'{%s}'", Bytes_AsString(joined));
+    }

  error:
     Py_XDECREF(tmp);
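A Python-level sketch of what the ``all_nulls`` branch aims at; it assumes an
open cursor ``cur``, and the exact byte strings are indicative rather than
verbatim output::

    # a list with at least one non-NULL element still uses ARRAY[]...
    print(cur.mogrify("SELECT %s", ([1, None],)))     # e.g. SELECT ARRAY[1, NULL]

    # ...while a list of only None values is emitted with the '{...}' literal,
    # because PostgreSQL cannot infer an element type for ARRAY[NULL, NULL]
    print(cur.mogrify("SELECT %s", ([None, None],)))  # e.g. SELECT '{NULL,NULL}'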
@@ -215,11 +229,6 @@ list_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
     return type->tp_alloc(type, 0);
 }

-static PyObject *
-list_repr(listObject *self)
-{
-    return PyString_FromFormat("<psycopg2._psycopg.List object at %p>", self);
-}
-
 /* object type */

@@ -235,7 +244,7 @@ PyTypeObject listType = {
     0,          /*tp_getattr*/
     0,          /*tp_setattr*/
     0,          /*tp_compare*/
-    (reprfunc)list_repr, /*tp_repr*/
+    0,          /*tp_repr*/
     0,          /*tp_as_number*/
     0,          /*tp_as_sequence*/
     0,          /*tp_as_mapping*/

@@ -265,17 +274,3 @@ PyTypeObject listType = {
     0, /*tp_alloc*/
     list_new, /*tp_new*/
 };
-
-
-/** module-level functions **/
-
-PyObject *
-psyco_List(PyObject *module, PyObject *args)
-{
-    PyObject *str;
-
-    if (!PyArg_ParseTuple(args, "O", &str))
-        return NULL;
-
-    return PyObject_CallFunctionObjArgs((PyObject *)&listType, "O", str, NULL);
-}
@@ -39,10 +39,6 @@ typedef struct {
     PyObject *connection;
 } listObject;

-HIDDEN PyObject *psyco_List(PyObject *module, PyObject *args);
-#define psyco_List_doc \
-    "List(list, enc) -> new quoted list"
-
 #ifdef __cplusplus
 }
 #endif
@ -205,12 +205,6 @@ mxdatetime_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
|
||||||
return type->tp_alloc(type, 0);
|
return type->tp_alloc(type, 0);
|
||||||
}
|
}
|
||||||
|
|
||||||
static PyObject *
|
|
||||||
mxdatetime_repr(mxdatetimeObject *self)
|
|
||||||
{
|
|
||||||
return PyString_FromFormat("<psycopg2._psycopg.MxDateTime object at %p>",
|
|
||||||
self);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* object type */
|
/* object type */
|
||||||
|
|
||||||
|
@ -226,7 +220,7 @@ PyTypeObject mxdatetimeType = {
|
||||||
0, /*tp_getattr*/
|
0, /*tp_getattr*/
|
||||||
0, /*tp_setattr*/
|
0, /*tp_setattr*/
|
||||||
0, /*tp_compare*/
|
0, /*tp_compare*/
|
||||||
(reprfunc)mxdatetime_repr, /*tp_repr*/
|
0, /*tp_repr*/
|
||||||
0, /*tp_as_number*/
|
0, /*tp_as_number*/
|
||||||
0, /*tp_as_sequence*/
|
0, /*tp_as_sequence*/
|
||||||
0, /*tp_as_mapping*/
|
0, /*tp_as_mapping*/
|
||||||
|
|
|
@@ -37,21 +37,12 @@
 static PyObject *
 pboolean_getquoted(pbooleanObject *self, PyObject *args)
 {
-#ifdef PSYCOPG_NEW_BOOLEAN
     if (PyObject_IsTrue(self->wrapped)) {
         return Bytes_FromString("true");
     }
     else {
         return Bytes_FromString("false");
     }
-#else
-    if (PyObject_IsTrue(self->wrapped)) {
-        return Bytes_FromString("'t'");
-    }
-    else {
-        return Bytes_FromString("'f'");
-    }
-#endif
 }

 static PyObject *

@@ -146,13 +137,6 @@ pboolean_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
     return type->tp_alloc(type, 0);
 }

-static PyObject *
-pboolean_repr(pbooleanObject *self)
-{
-    return PyString_FromFormat("<psycopg2._psycopg.Boolean object at %p>",
-                               self);
-}
-

 /* object type */

@@ -161,14 +145,14 @@ pboolean_repr(pbooleanObject *self)

 PyTypeObject pbooleanType = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "psycopg2._psycopg.Boolean",
+    "psycopg2.extensions.Boolean",
     sizeof(pbooleanObject), 0,
     pboolean_dealloc, /*tp_dealloc*/
     0,          /*tp_print*/
     0,          /*tp_getattr*/
     0,          /*tp_setattr*/
     0,          /*tp_compare*/
-    (reprfunc)pboolean_repr, /*tp_repr*/
+    0,          /*tp_repr*/
     0,          /*tp_as_number*/
     0,          /*tp_as_sequence*/
     0,          /*tp_as_mapping*/

@@ -198,17 +182,3 @@ PyTypeObject pbooleanType = {
     0,          /*tp_alloc*/
     pboolean_new, /*tp_new*/
 };
-
-
-/** module-level functions **/
-
-PyObject *
-psyco_Boolean(PyObject *module, PyObject *args)
-{
-    PyObject *obj;
-
-    if (!PyArg_ParseTuple(args, "O", &obj))
-        return NULL;
-
-    return PyObject_CallFunctionObjArgs((PyObject *)&pbooleanType, obj, NULL);
-}

@@ -40,12 +40,6 @@ typedef struct {

 } pbooleanObject;

-/* functions exported to psycopgmodule.c */
-
-HIDDEN PyObject *psyco_Boolean(PyObject *module, PyObject *args);
-#define psyco_Boolean_doc \
-    "Boolean(obj) -> new boolean value"
-
 #ifdef __cplusplus
 }
 #endif
@@ -202,13 +202,6 @@ pdecimal_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
     return type->tp_alloc(type, 0);
 }

-static PyObject *
-pdecimal_repr(pdecimalObject *self)
-{
-    return PyString_FromFormat("<psycopg2._psycopg.Decimal object at %p>",
-                               self);
-}
-

 /* object type */

@@ -224,7 +217,7 @@ PyTypeObject pdecimalType = {
     0,          /*tp_getattr*/
     0,          /*tp_setattr*/
     0,          /*tp_compare*/
-    (reprfunc)pdecimal_repr, /*tp_repr*/
+    0,          /*tp_repr*/
     0,          /*tp_as_number*/
     0,          /*tp_as_sequence*/
     0,          /*tp_as_mapping*/

@@ -254,17 +247,3 @@ PyTypeObject pdecimalType = {
     0,          /*tp_alloc*/
     pdecimal_new, /*tp_new*/
 };
-
-
-/** module-level functions **/
-
-PyObject *
-psyco_Decimal(PyObject *module, PyObject *args)
-{
-    PyObject *obj;
-
-    if (!PyArg_ParseTuple(args, "O", &obj))
-        return NULL;
-
-    return PyObject_CallFunctionObjArgs((PyObject *)&pdecimalType, obj, NULL);
-}

@@ -40,12 +40,6 @@ typedef struct {

 } pdecimalObject;

-/* functions exported to psycopgmodule.c */
-
-HIDDEN PyObject *psyco_Decimal(PyObject *module, PyObject *args);
-#define psyco_Decimal_doc \
-    "Decimal(obj) -> new decimal.Decimal value"
-
 #ifdef __cplusplus
 }
 #endif
@@ -175,13 +175,6 @@ pfloat_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
     return type->tp_alloc(type, 0);
 }

-static PyObject *
-pfloat_repr(pfloatObject *self)
-{
-    return PyString_FromFormat("<psycopg2._psycopg.Float object at %p>",
-                               self);
-}
-

 /* object type */

@@ -190,14 +183,14 @@ pfloat_repr(pfloatObject *self)

 PyTypeObject pfloatType = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "psycopg2._psycopg.Float",
+    "psycopg2.extensions.Float",
     sizeof(pfloatObject), 0,
     pfloat_dealloc, /*tp_dealloc*/
     0,          /*tp_print*/
     0,          /*tp_getattr*/
     0,          /*tp_setattr*/
     0,          /*tp_compare*/
-    (reprfunc)pfloat_repr, /*tp_repr*/
+    0,          /*tp_repr*/
     0,          /*tp_as_number*/
     0,          /*tp_as_sequence*/
     0,          /*tp_as_mapping*/

@@ -227,17 +220,3 @@ PyTypeObject pfloatType = {
     0,          /*tp_alloc*/
     pfloat_new, /*tp_new*/
 };
-
-
-/** module-level functions **/
-
-PyObject *
-psyco_Float(PyObject *module, PyObject *args)
-{
-    PyObject *obj;
-
-    if (!PyArg_ParseTuple(args, "O", &obj))
-        return NULL;
-
-    return PyObject_CallFunctionObjArgs((PyObject *)&pfloatType, obj, NULL);
-}

@@ -40,12 +40,6 @@ typedef struct {

 } pfloatObject;

-/* functions exported to psycopgmodule.c */
-
-HIDDEN PyObject *psyco_Float(PyObject *module, PyObject *args);
-#define psyco_Float_doc \
-    "Float(obj) -> new float value"
-
 #ifdef __cplusplus
 }
 #endif
@@ -161,13 +161,6 @@ pint_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
     return type->tp_alloc(type, 0);
 }

-static PyObject *
-pint_repr(pintObject *self)
-{
-    return PyString_FromFormat("<psycopg2._psycopg.Int object at %p>",
-                               self);
-}
-

 /* object type */

@@ -176,14 +169,14 @@ pint_repr(pintObject *self)

 PyTypeObject pintType = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "psycopg2._psycopg.Int",
+    "psycopg2.extensions.Int",
     sizeof(pintObject), 0,
     pint_dealloc, /*tp_dealloc*/
     0,          /*tp_print*/
     0,          /*tp_getattr*/
     0,          /*tp_setattr*/
     0,          /*tp_compare*/
-    (reprfunc)pint_repr, /*tp_repr*/
+    0,          /*tp_repr*/
     0,          /*tp_as_number*/
     0,          /*tp_as_sequence*/
     0,          /*tp_as_mapping*/

@@ -213,17 +206,3 @@ PyTypeObject pintType = {
     0,          /*tp_alloc*/
     pint_new, /*tp_new*/
 };
-
-
-/** module-level functions **/
-
-PyObject *
-psyco_Int(PyObject *module, PyObject *args)
-{
-    PyObject *obj;
-
-    if (!PyArg_ParseTuple(args, "O", &obj))
-        return NULL;
-
-    return PyObject_CallFunctionObjArgs((PyObject *)&pintType, obj, NULL);
-}

@@ -40,12 +40,6 @@ typedef struct {

 } pintObject;

-/* functions exported to psycopgmodule.c */
-
-HIDDEN PyObject *psyco_Int(PyObject *module, PyObject *args);
-#define psyco_Int_doc \
-    "Int(obj) -> new int value"
-
 #ifdef __cplusplus
 }
 #endif
@@ -242,12 +242,6 @@ qstring_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
     return type->tp_alloc(type, 0);
 }

-static PyObject *
-qstring_repr(qstringObject *self)
-{
-    return PyString_FromFormat("<psycopg2._psycopg.QuotedString object at %p>",
-                               self);
-}

 /* object type */

@@ -256,14 +250,14 @@ qstring_repr(qstringObject *self)

 PyTypeObject qstringType = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "psycopg2._psycopg.QuotedString",
+    "psycopg2.extensions.QuotedString",
     sizeof(qstringObject), 0,
     qstring_dealloc, /*tp_dealloc*/
     0,          /*tp_print*/
     0,          /*tp_getattr*/
     0,          /*tp_setattr*/
     0,          /*tp_compare*/
-    (reprfunc)qstring_repr, /*tp_repr*/
+    0,          /*tp_repr*/
     0,          /*tp_as_number*/
     0,          /*tp_as_sequence*/
     0,          /*tp_as_mapping*/

@@ -293,17 +287,3 @@ PyTypeObject qstringType = {
     0,          /*tp_alloc*/
     qstring_new, /*tp_new*/
 };
-
-
-/** module-level functions **/
-
-PyObject *
-psyco_QuotedString(PyObject *module, PyObject *args)
-{
-    PyObject *str;
-
-    if (!PyArg_ParseTuple(args, "O", &str))
-        return NULL;
-
-    return PyObject_CallFunctionObjArgs((PyObject *)&qstringType, str, NULL);
-}

@@ -41,12 +41,6 @@ typedef struct {
     connectionObject *conn;
 } qstringObject;

-/* functions exported to psycopgmodule.c */
-
-HIDDEN PyObject *psyco_QuotedString(PyObject *module, PyObject *args);
-#define psyco_QuotedString_doc \
-    "QuotedString(str, enc) -> new quoted string"
-
 #ifdef __cplusplus
 }
 #endif
@@ -129,32 +129,32 @@ static int pthread_mutex_init(pthread_mutex_t *mutex, void* fake)
 /* remove the inline keyword, since it doesn't work unless C++ file */
 #define inline

-/* Hmmm, MSVC doesn't have a isnan/isinf function, but has _isnan function */
+/* Hmmm, MSVC <2015 doesn't have a isnan/isinf function, but has _isnan function */
 #if defined (_MSC_VER)
+#if !defined(isnan)
 #define isnan(x) (_isnan(x))
 /* The following line was hacked together from simliar code by Bjorn Reese
  * in libxml2 code */
 #define isinf(x) ((_fpclass(x) == _FPCLASS_PINF) ? 1 \
     : ((_fpclass(x) == _FPCLASS_NINF) ? -1 : 0))
+#endif
 #define strcasecmp(x, y) lstrcmpi(x, y)
 #endif
 #endif

+/* what's this, we have no round function either? */
 #if (defined(__FreeBSD__) && __FreeBSD_version < 503000) \
     || (defined(_WIN32) && !defined(__GNUC__)) \
     || (defined(sun) || defined(__sun__)) \
     && (defined(__SunOS_5_8) || defined(__SunOS_5_9))
-/* what's this, we have no round function either? */
+/* round has been added in the standard library with MSVC 2015 */
+#if _MSC_VER < 1900
 static double round(double num)
 {
     return (num >= 0) ? floor(num + 0.5) : ceil(num - 0.5);
 }
 #endif

-/* postgresql < 7.4 does not have PQfreemem */
-#ifndef HAVE_PQFREEMEM
-#define PQfreemem free
 #endif

 /* resolve missing isinf() function for Solaris */
@@ -71,7 +71,7 @@ extern HIDDEN PyTypeObject connectionType;

 struct connectionObject_notice {
     struct connectionObject_notice *next;
-    const char *message;
+    char *message;
 };

 /* the typedef is forward-declared in psycopg.h */

@@ -106,8 +106,8 @@ struct connectionObject {

     /* notice processing */
     PyObject *notice_list;
-    PyObject *notice_filter;
     struct connectionObject_notice *notice_pending;
+    struct connectionObject_notice *last_notice;

     /* notifies */
     PyObject *notifies;
@@ -87,13 +87,20 @@ conn_notice_callback(void *args, const char *message)
         /* Discard the notice in case of failed allocation. */
         return;
     }
+    notice->next = NULL;
     notice->message = strdup(message);
     if (NULL == notice->message) {
         free(notice);
         return;
     }
-    notice->next = self->notice_pending;
-    self->notice_pending = notice;
+    if (NULL == self->last_notice) {
+        self->notice_pending = self->last_notice = notice;
+    }
+    else {
+        self->last_notice->next = notice;
+        self->last_notice = notice;
+    }
 }

 /* Expose the notices received as Python objects.
@@ -104,44 +111,60 @@ void
 conn_notice_process(connectionObject *self)
 {
     struct connectionObject_notice *notice;
-    Py_ssize_t nnotices;
+    PyObject *msg = NULL;
+    PyObject *tmp = NULL;
+    static PyObject *append;

     if (NULL == self->notice_pending) {
         return;
     }

-    notice = self->notice_pending;
-    nnotices = PyList_GET_SIZE(self->notice_list);
+    if (!append) {
+        if (!(append = Text_FromUTF8("append"))) {
+            goto error;
+        }
+    }

+    notice = self->notice_pending;
     while (notice != NULL) {
-        PyObject *msg;
-        msg = conn_text_from_chars(self, notice->message);
         Dprintf("conn_notice_process: %s", notice->message);

-        /* Respect the order in which notices were produced,
-           because in notice_list they are reversed (see ticket #9) */
-        if (msg) {
-            PyList_Insert(self->notice_list, nnotices, msg);
-            Py_DECREF(msg);
-        }
-        else {
-            /* We don't really have a way to report errors, so gulp it.
-             * The function should only fail for out of memory, so we are
-             * likely going to die anyway. */
-            PyErr_Clear();
+        if (!(msg = conn_text_from_chars(self, notice->message))) { goto error; }
+        if (!(tmp = PyObject_CallMethodObjArgs(
+                self->notice_list, append, msg, NULL))) {
+            goto error;
         }

+        Py_DECREF(tmp); tmp = NULL;
+        Py_DECREF(msg); msg = NULL;
+
         notice = notice->next;
     }

     /* Remove the oldest item if the queue is getting too long. */
-    nnotices = PyList_GET_SIZE(self->notice_list);
-    if (nnotices > CONN_NOTICES_LIMIT) {
-        PySequence_DelSlice(self->notice_list,
-            0, nnotices - CONN_NOTICES_LIMIT);
+    if (PyList_Check(self->notice_list)) {
+        Py_ssize_t nnotices;
+        nnotices = PyList_GET_SIZE(self->notice_list);
+        if (nnotices > CONN_NOTICES_LIMIT) {
+            if (-1 == PySequence_DelSlice(self->notice_list,
+                    0, nnotices - CONN_NOTICES_LIMIT)) {
+                PyErr_Clear();
+            }
+        }
     }

     conn_notice_clean(self);
+    return;
+
+error:
+    Py_XDECREF(tmp);
+    Py_XDECREF(msg);
+    conn_notice_clean(self);
+
+    /* TODO: the caller doesn't expects errors from us */
+    PyErr_Clear();
 }

 void
@@ -154,11 +177,11 @@ conn_notice_clean(connectionObject *self)
     while (notice != NULL) {
         tmp = notice;
         notice = notice->next;
-        free((void*)tmp->message);
+        free(tmp->message);
         free(tmp);
     }

-    self->notice_pending = NULL;
+    self->last_notice = self->notice_pending = NULL;
 }
@@ -173,6 +196,15 @@ conn_notifies_process(connectionObject *self)
     PGnotify *pgn = NULL;
     PyObject *notify = NULL;
     PyObject *pid = NULL, *channel = NULL, *payload = NULL;
+    PyObject *tmp = NULL;
+
+    static PyObject *append;
+
+    if (!append) {
+        if (!(append = Text_FromUTF8("append"))) {
+            goto error;
+        }
+    }

     while ((pgn = PQnotifies(self->pgconn)) != NULL) {

@@ -192,7 +224,11 @@ conn_notifies_process(connectionObject *self)
         Py_DECREF(channel); channel = NULL;
         Py_DECREF(payload); payload = NULL;

-        PyList_Append(self->notifies, (PyObject *)notify);
+        if (!(tmp = PyObject_CallMethodObjArgs(
+                self->notifies, append, notify, NULL))) {
+            goto error;
+        }
+        Py_DECREF(tmp); tmp = NULL;

         Py_DECREF(notify); notify = NULL;
         PQfreemem(pgn); pgn = NULL;

@@ -201,6 +237,7 @@ conn_notifies_process(connectionObject *self)

 error:
     if (pgn) { PQfreemem(pgn); }
+    Py_XDECREF(tmp);
     Py_XDECREF(notify);
     Py_XDECREF(pid);
     Py_XDECREF(channel);
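
The notifies list whose append is reworked above is the one drained by client
code after LISTEN/NOTIFY; a minimal sketch of that flow, with DSN and channel
name as placeholders:

    import select
    import psycopg2
    import psycopg2.extensions

    conn = psycopg2.connect("dbname=test")      # placeholder DSN
    conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
    cur = conn.cursor()
    cur.execute("LISTEN my_channel;")

    # Wait up to 5 seconds for the socket to become readable, then drain any
    # pending notifications collected by conn_notifies_process().
    if select.select([conn], [], [], 5) != ([], [], []):
        conn.poll()
        while conn.notifies:
            notify = conn.notifies.pop(0)
            print(notify.pid, notify.channel, notify.payload)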
@@ -103,7 +103,7 @@ psyco_conn_cursor(connectionObject *self, PyObject *args, PyObject *kwargs)

     if (PyObject_IsInstance(obj, (PyObject *)&cursorType) == 0) {
         PyErr_SetString(PyExc_TypeError,
-            "cursor factory must be subclass of psycopg2._psycopg.cursor");
+            "cursor factory must be subclass of psycopg2.extensions.cursor");
         goto exit;
     }

@@ -442,9 +442,6 @@ exit:
 }


-#ifdef PSYCOPG_EXTENSIONS
-
-
 /* parse a python object into one of the possible isolation level values */

 extern const IsolationLevel conn_isolevels[];

@@ -787,7 +784,7 @@ psyco_conn_lobject(connectionObject *self, PyObject *args, PyObject *keywds)
     if (obj == NULL) return NULL;
     if (PyObject_IsInstance(obj, (PyObject *)&lobjectType) == 0) {
         PyErr_SetString(PyExc_TypeError,
-            "lobject factory must be subclass of psycopg2._psycopg.lobject");
+            "lobject factory must be subclass of psycopg2.extensions.lobject");
         Py_DECREF(obj);
         return NULL;
     }

@@ -843,6 +840,10 @@ psyco_conn_get_exception(PyObject *self, void *closure)
     return exception;
 }

+
+#define psyco_conn_poll_doc \
+"poll() -> int -- Advance the connection or query process without blocking."
+
 static PyObject *
 psyco_conn_poll(connectionObject *self)
 {

@@ -860,8 +861,6 @@ psyco_conn_poll(connectionObject *self)
 }


-/* extension: fileno - return the file descriptor of the connection */
-
 #define psyco_conn_fileno_doc \
 "fileno() -> int -- Return file descriptor associated to database connection."

@@ -878,8 +877,6 @@ psyco_conn_fileno(connectionObject *self)
 }


-/* extension: isexecuting - check for asynchronous operations */
-
 #define psyco_conn_isexecuting_doc \
 "isexecuting() -> bool -- Return True if the connection is " \
 "executing an asynchronous operation."

@@ -911,8 +908,6 @@ psyco_conn_isexecuting(connectionObject *self)
 }


-/* extension: cancel - cancel the current operation */
-
 #define psyco_conn_cancel_doc \
 "cancel() -- cancel the current operation"

@@ -941,8 +936,6 @@ psyco_conn_cancel(connectionObject *self)
     Py_RETURN_NONE;
 }

-#endif /* PSYCOPG_EXTENSIONS */
-

 /** the connection object **/
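
The poll() docstring added above belongs to the asynchronous-connection
machinery; a sketch of the wait loop it implies, with the DSN as a placeholder
and the async flag spelled as it was at the time of this commit (passed via a
dict so the snippet also parses on modern Python, where async is a keyword):

    import select
    import psycopg2
    from psycopg2 import extensions

    def wait(conn):
        # Advance the connection until libpq reports it is ready.
        while True:
            state = conn.poll()
            if state == extensions.POLL_OK:
                break
            elif state == extensions.POLL_WRITE:
                select.select([], [conn.fileno()], [])
            elif state == extensions.POLL_READ:
                select.select([conn.fileno()], [], [])
            else:
                raise psycopg2.OperationalError("poll() returned %s" % state)

    aconn = psycopg2.connect("dbname=test", **{"async": 1})   # placeholder DSN
    wait(aconn)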
@@ -974,7 +967,6 @@ static struct PyMethodDef connectionObject_methods[] = {
      METH_NOARGS, psyco_conn_enter_doc},
     {"__exit__", (PyCFunction)psyco_conn_exit,
      METH_VARARGS, psyco_conn_exit_doc},
-#ifdef PSYCOPG_EXTENSIONS
     {"set_session", (PyCFunction)psyco_conn_set_session,
      METH_VARARGS|METH_KEYWORDS, psyco_conn_set_session_doc},
     {"set_isolation_level", (PyCFunction)psyco_conn_set_isolation_level,

@@ -992,27 +984,25 @@ static struct PyMethodDef connectionObject_methods[] = {
     {"reset", (PyCFunction)psyco_conn_reset,
      METH_NOARGS, psyco_conn_reset_doc},
     {"poll", (PyCFunction)psyco_conn_poll,
-     METH_NOARGS, psyco_conn_lobject_doc},
+     METH_NOARGS, psyco_conn_poll_doc},
     {"fileno", (PyCFunction)psyco_conn_fileno,
      METH_NOARGS, psyco_conn_fileno_doc},
     {"isexecuting", (PyCFunction)psyco_conn_isexecuting,
      METH_NOARGS, psyco_conn_isexecuting_doc},
     {"cancel", (PyCFunction)psyco_conn_cancel,
      METH_NOARGS, psyco_conn_cancel_doc},
-#endif
     {NULL}
 };

 /* object member list */

 static struct PyMemberDef connectionObject_members[] = {
-#ifdef PSYCOPG_EXTENSIONS
     {"closed", T_LONG, offsetof(connectionObject, closed), READONLY,
         "True if the connection is closed."},
     {"encoding", T_STRING, offsetof(connectionObject, encoding), READONLY,
         "The current client encoding."},
-    {"notices", T_OBJECT, offsetof(connectionObject, notice_list), READONLY},
-    {"notifies", T_OBJECT, offsetof(connectionObject, notifies), READONLY},
+    {"notices", T_OBJECT, offsetof(connectionObject, notice_list), 0},
+    {"notifies", T_OBJECT, offsetof(connectionObject, notifies), 0},
     {"dsn", T_STRING, offsetof(connectionObject, dsn), READONLY,
         "The current connection string."},
     {"async", T_LONG, offsetof(connectionObject, async), READONLY,

@@ -1032,7 +1022,6 @@ static struct PyMemberDef connectionObject_members[] = {
     {"server_version", T_INT,
         offsetof(connectionObject, server_version), READONLY,
         "Server version."},
-#endif
     {NULL}
 };

@@ -1040,7 +1029,6 @@ static struct PyMemberDef connectionObject_members[] = {
     { #exc, psyco_conn_get_exception, NULL, exc ## _doc, &exc }

 static struct PyGetSetDef connectionObject_getsets[] = {
-    /* DBAPI-2.0 extensions (exception objects) */
     EXCEPTION_GETTER(Error),
     EXCEPTION_GETTER(Warning),
     EXCEPTION_GETTER(InterfaceError),

@@ -1051,7 +1039,6 @@ static struct PyGetSetDef connectionObject_getsets[] = {
     EXCEPTION_GETTER(IntegrityError),
     EXCEPTION_GETTER(DataError),
     EXCEPTION_GETTER(NotSupportedError),
-#ifdef PSYCOPG_EXTENSIONS
     { "autocommit",
         (getter)psyco_conn_autocommit_get,
         (setter)psyco_conn_autocommit_set,

@@ -1060,7 +1047,6 @@ static struct PyGetSetDef connectionObject_getsets[] = {
         (getter)psyco_conn_isolation_level_get,
         (setter)NULL,
         "The current isolation level." },
-#endif
     {NULL}
 };
 #undef EXCEPTION_GETTER
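
Dropping READONLY from the notices and notifies members above is what lets
client code replace the default lists with another object exposing append(),
now that the C side appends through that method; a small sketch of the
intended use (the DSN is a placeholder):

    from collections import deque
    import psycopg2

    conn = psycopg2.connect("dbname=test")      # placeholder DSN

    # Keep only the most recent 50 server notices instead of an unbounded list.
    conn.notices = deque(maxlen=50)

    cur = conn.cursor()
    cur.execute("SET client_min_messages TO notice")
    cur.execute("DO $$ BEGIN RAISE NOTICE 'hello'; END $$")
    print(list(conn.notices))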
@@ -1119,7 +1105,6 @@ connection_clear(connectionObject *self)
     Py_CLEAR(self->tpc_xid);
     Py_CLEAR(self->async_cursor);
     Py_CLEAR(self->notice_list);
-    Py_CLEAR(self->notice_filter);
     Py_CLEAR(self->notifies);
     Py_CLEAR(self->string_types);
     Py_CLEAR(self->binary_types);

@@ -1195,7 +1180,6 @@ connection_traverse(connectionObject *self, visitproc visit, void *arg)
     Py_VISIT((PyObject *)(self->tpc_xid));
     Py_VISIT(self->async_cursor);
     Py_VISIT(self->notice_list);
-    Py_VISIT(self->notice_filter);
     Py_VISIT(self->notifies);
     Py_VISIT(self->string_types);
     Py_VISIT(self->binary_types);

@@ -1214,7 +1198,7 @@ connection_traverse(connectionObject *self, visitproc visit, void *arg)

 PyTypeObject connectionType = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "psycopg2._psycopg.connection",
+    "psycopg2.extensions.connection",
     sizeof(connectionObject), 0,
     connection_dealloc, /*tp_dealloc*/
     0,          /*tp_print*/
@@ -39,9 +39,6 @@
 #include <stdlib.h>


-extern PyObject *pyPsycopgTzFixedOffsetTimezone;
-
-
 /** DBAPI methods **/

 /* close method - close the cursor */

@@ -60,10 +57,24 @@ psyco_curs_close(cursorObject *self)

     if (self->name != NULL) {
         char buffer[128];
+        PGTransactionStatusType status;

-        EXC_IF_NO_MARK(self);
-        PyOS_snprintf(buffer, 127, "CLOSE \"%s\"", self->name);
-        if (pq_execute(self, buffer, 0, 0) == -1) return NULL;
+        if (self->conn) {
+            status = PQtransactionStatus(self->conn->pgconn);
+        }
+        else {
+            status = PQTRANS_UNKNOWN;
+        }
+
+        if (!(status == PQTRANS_UNKNOWN || status == PQTRANS_INERROR)) {
+            EXC_IF_NO_MARK(self);
+            PyOS_snprintf(buffer, 127, "CLOSE \"%s\"", self->name);
+            if (pq_execute(self, buffer, 0, 0, 1) == -1) return NULL;
+        }
+        else {
+            Dprintf("skipping named curs close because tx status %d",
+                (int)status);
+        }
     }

     self->closed = 1;
@@ -444,7 +455,7 @@ _psyco_curs_execute(cursorObject *self,

     /* At this point, the SQL statement must be str, not unicode */

-    tmp = pq_execute(self, Bytes_AS_STRING(self->query), async, no_result);
+    tmp = pq_execute(self, Bytes_AS_STRING(self->query), async, no_result, 0);
     Dprintf("psyco_curs_execute: res = %d, pgres = %p", tmp, self->pgres);
     if (tmp < 0) { goto exit; }

@@ -478,7 +489,7 @@ psyco_curs_execute(cursorObject *self, PyObject *args, PyObject *kwargs)
             "can't call .execute() on named cursors more than once");
         return NULL;
     }
-    if (self->conn->autocommit) {
+    if (self->conn->autocommit && !self->withhold) {
         psyco_set_error(ProgrammingError, self,
             "can't use a named cursor outside of transactions");
         return NULL;
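
The relaxed autocommit check above is what permits a WITH HOLD named cursor on
an autocommit connection; a short sketch of the Python-level feature, with
cursor name and DSN as placeholders:

    import psycopg2

    conn = psycopg2.connect("dbname=test")      # placeholder DSN
    conn.autocommit = True

    # A named (server-side) cursor normally needs a transaction, but declaring
    # it WITH HOLD keeps its result set available outside one.
    cur = conn.cursor(name="big_scan", withhold=True)
    cur.execute("SELECT generate_series(1, 1000000)")
    for row in cur:
        pass
    cur.close()        # withhold cursors must be closed explicitly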
@@ -559,7 +570,6 @@ psyco_curs_executemany(cursorObject *self, PyObject *args, PyObject *kwargs)
 }


-#ifdef PSYCOPG_EXTENSIONS
 #define psyco_curs_mogrify_doc \
 "mogrify(query, vars=None) -> str -- Return query after vars binding."

@@ -622,7 +632,6 @@ psyco_curs_mogrify(cursorObject *self, PyObject *args, PyObject *kwargs)

     return _psyco_curs_mogrify(self, operation, vars);
 }
-#endif


 /* cast method - convert an oid/string into a Python object */

@@ -766,7 +775,7 @@ psyco_curs_fetchone(cursorObject *self)
         EXC_IF_ASYNC_IN_PROGRESS(self, fetchone);
         EXC_IF_TPC_PREPARED(self->conn, fetchone);
         PyOS_snprintf(buffer, 127, "FETCH FORWARD 1 FROM \"%s\"", self->name);
-        if (pq_execute(self, buffer, 0, 0) == -1) return NULL;
+        if (pq_execute(self, buffer, 0, 0, self->withhold) == -1) return NULL;
         if (_psyco_curs_prefetch(self) < 0) return NULL;
     }

@@ -816,7 +825,7 @@ psyco_curs_next_named(cursorObject *self)

         PyOS_snprintf(buffer, 127, "FETCH FORWARD %ld FROM \"%s\"",
             self->itersize, self->name);
-        if (pq_execute(self, buffer, 0, 0) == -1) return NULL;
+        if (pq_execute(self, buffer, 0, 0, self->withhold) == -1) return NULL;
         if (_psyco_curs_prefetch(self) < 0) return NULL;
     }

@@ -885,7 +894,7 @@ psyco_curs_fetchmany(cursorObject *self, PyObject *args, PyObject *kwords)
         EXC_IF_TPC_PREPARED(self->conn, fetchone);
         PyOS_snprintf(buffer, 127, "FETCH FORWARD %d FROM \"%s\"",
             (int)size, self->name);
-        if (pq_execute(self, buffer, 0, 0) == -1) { goto exit; }
+        if (pq_execute(self, buffer, 0, 0, self->withhold) == -1) { goto exit; }
         if (_psyco_curs_prefetch(self) < 0) { goto exit; }
     }

@@ -960,7 +969,7 @@ psyco_curs_fetchall(cursorObject *self)
         EXC_IF_ASYNC_IN_PROGRESS(self, fetchall);
         EXC_IF_TPC_PREPARED(self->conn, fetchall);
         PyOS_snprintf(buffer, 127, "FETCH FORWARD ALL FROM \"%s\"", self->name);
-        if (pq_execute(self, buffer, 0, 0) == -1) { goto exit; }
+        if (pq_execute(self, buffer, 0, 0, self->withhold) == -1) { goto exit; }
         if (_psyco_curs_prefetch(self) < 0) { goto exit; }
     }
@@ -1017,7 +1026,7 @@ _escape_identifier(PGconn *pgconn, const char *str, size_t length)
 {
     char *rv = NULL;

-#if PG_VERSION_HEX >= 0x090000
+#if PG_VERSION_NUM >= 90000
     rv = PQescapeIdentifier(pgconn, str, length);
     if (!rv) {
         char *msg;

@@ -1029,7 +1038,7 @@ _escape_identifier(PGconn *pgconn, const char *str, size_t length)
     }
 #else
     PyErr_Format(PyExc_NotImplementedError,
-        "named parameters require psycopg2 compiled against libpq 9.0+");
+        "named parameters require psycopg2 compiled against libpq 9.0");
 #endif

     return rv;

@@ -1068,9 +1077,7 @@ psyco_curs_callproc(cursorObject *self, PyObject *args)
     }

     if (parameters != Py_None) {
-        if (-1 == (nparameters = PyObject_Length(parameters))) {
-            goto exit;
-        }
+        if (-1 == (nparameters = PyObject_Length(parameters))) { goto exit; }
     }

     using_dict = nparameters > 0 && PyDict_Check(parameters);
@@ -1288,7 +1295,7 @@ psyco_curs_scroll(cursorObject *self, PyObject *args, PyObject *kwargs)
         else {
             PyOS_snprintf(buffer, 127, "MOVE %d FROM \"%s\"", value, self->name);
         }
-        if (pq_execute(self, buffer, 0, 0) == -1) return NULL;
+        if (pq_execute(self, buffer, 0, 0, self->withhold) == -1) return NULL;
         if (_psyco_curs_prefetch(self) < 0) return NULL;
     }

@@ -1332,8 +1339,6 @@ exit:
 }


-#ifdef PSYCOPG_EXTENSIONS
-
 /* Return a newly allocated buffer containing the list of columns to be
  * copied. On error return NULL and set an exception.
  */
@@ -1501,7 +1506,7 @@ psyco_curs_copy_from(cursorObject *self, PyObject *args, PyObject *kwargs)
     Py_INCREF(file);
     self->copyfile = file;

-    if (pq_execute(self, query, 0, 0) >= 0) {
+    if (pq_execute(self, query, 0, 0, 0) >= 0) {
         res = Py_None;
         Py_INCREF(Py_None);
     }

@@ -1595,7 +1600,7 @@ psyco_curs_copy_to(cursorObject *self, PyObject *args, PyObject *kwargs)
     Py_INCREF(file);
     self->copyfile = file;

-    if (pq_execute(self, query, 0, 0) >= 0) {
+    if (pq_execute(self, query, 0, 0, 0) >= 0) {
         res = Py_None;
         Py_INCREF(Py_None);
     }

@@ -1649,7 +1654,7 @@ psyco_curs_copy_expert(cursorObject *self, PyObject *args, PyObject *kwargs)
     if (sql == NULL) { goto exit; }

     /* This validation of file is rather weak, in that it doesn't enforce the
-       assocation between "COPY FROM" -> "read" and "COPY TO" -> "write".
+       association between "COPY FROM" -> "read" and "COPY TO" -> "write".
        However, the error handling in _pq_copy_[in|out] must be able to handle
        the case where the attempt to call file.read|write fails, so no harm
        done. */
@@ -1669,7 +1674,7 @@ psyco_curs_copy_expert(cursorObject *self, PyObject *args, PyObject *kwargs)
     self->copyfile = file;

     /* At this point, the SQL statement must be str, not unicode */
-    if (pq_execute(self, Bytes_AS_STRING(sql), 0, 0) >= 0) {
+    if (pq_execute(self, Bytes_AS_STRING(sql), 0, 0, 0) >= 0) {
         res = Py_None;
         Py_INCREF(res);
     }

@@ -1779,8 +1784,6 @@ psyco_curs_scrollable_set(cursorObject *self, PyObject *pyvalue)
     return 0;
 }

-#endif
-

 /** the cursor object **/
@@ -1848,7 +1851,6 @@ static struct PyMethodDef cursorObject_methods[] = {
     {"__exit__", (PyCFunction)psyco_curs_exit,
      METH_VARARGS, psyco_curs_exit_doc},
     /* psycopg extensions */
-#ifdef PSYCOPG_EXTENSIONS
     {"cast", (PyCFunction)psyco_curs_cast,
      METH_VARARGS, psyco_curs_cast_doc},
     {"mogrify", (PyCFunction)psyco_curs_mogrify,

@@ -1859,7 +1861,6 @@ static struct PyMethodDef cursorObject_methods[] = {
      METH_VARARGS|METH_KEYWORDS, psyco_curs_copy_to_doc},
     {"copy_expert", (PyCFunction)psyco_curs_copy_expert,
      METH_VARARGS|METH_KEYWORDS, psyco_curs_copy_expert_doc},
-#endif
     {NULL}
 };

@@ -1885,7 +1886,6 @@ static struct PyMemberDef cursorObject_members[] = {
         "The current row position."},
     {"connection", T_OBJECT, OFFSETOF(conn), READONLY,
         "The connection where the cursor comes from."},
-#ifdef PSYCOPG_EXTENSIONS
     {"name", T_STRING, OFFSETOF(name), READONLY},
     {"statusmessage", T_OBJECT, OFFSETOF(pgstatus), READONLY,
         "The return message of the last command."},

@@ -1896,13 +1896,11 @@ static struct PyMemberDef cursorObject_members[] = {
     {"typecaster", T_OBJECT, OFFSETOF(caster), READONLY},
     {"string_types", T_OBJECT, OFFSETOF(string_types), 0},
     {"binary_types", T_OBJECT, OFFSETOF(binary_types), 0},
-#endif
     {NULL}
 };

 /* object calculated member list */
 static struct PyGetSetDef cursorObject_getsets[] = {
-#ifdef PSYCOPG_EXTENSIONS
     { "closed", (getter)psyco_curs_get_closed, NULL,
       psyco_curs_closed_doc, NULL },
     { "withhold",

@@ -1913,7 +1911,6 @@ static struct PyGetSetDef cursorObject_getsets[] = {
       (getter)psyco_curs_scrollable_get,
       (setter)psyco_curs_scrollable_set,
       psyco_curs_scrollable_doc, NULL },
-#endif
     {NULL}
 };
|
@ -1935,7 +1932,7 @@ cursor_setup(cursorObject *self, connectionObject *conn, const char *name)
|
||||||
if (PyObject_IsInstance((PyObject*)conn,
|
if (PyObject_IsInstance((PyObject*)conn,
|
||||||
(PyObject *)&connectionType) == 0) {
|
(PyObject *)&connectionType) == 0) {
|
||||||
PyErr_SetString(PyExc_TypeError,
|
PyErr_SetString(PyExc_TypeError,
|
||||||
"argument 1 must be subclass of psycopg2._psycopg.connection");
|
"argument 1 must be subclass of psycopg2.extensions.connection");
|
||||||
return -1;
|
return -1;
|
||||||
} */
|
} */
|
||||||
Py_INCREF(conn);
|
Py_INCREF(conn);
|
||||||
|
@ -1952,8 +1949,17 @@ cursor_setup(cursorObject *self, connectionObject *conn, const char *name)
|
||||||
self->tuple_factory = Py_None;
|
self->tuple_factory = Py_None;
|
||||||
|
|
||||||
/* default tzinfo factory */
|
/* default tzinfo factory */
|
||||||
Py_INCREF(pyPsycopgTzFixedOffsetTimezone);
|
{
|
||||||
self->tzinfo_factory = pyPsycopgTzFixedOffsetTimezone;
|
PyObject *m = NULL;
|
||||||
|
if ((m = PyImport_ImportModule("psycopg2.tz"))) {
|
||||||
|
self->tzinfo_factory = PyObject_GetAttrString(
|
||||||
|
m, "FixedOffsetTimezone");
|
||||||
|
Py_DECREF(m);
|
||||||
|
}
|
||||||
|
if (!self->tzinfo_factory) {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
Dprintf("cursor_setup: good cursor object at %p, refcnt = "
|
Dprintf("cursor_setup: good cursor object at %p, refcnt = "
|
||||||
FORMAT_CODE_PY_SSIZE_T,
|
FORMAT_CODE_PY_SSIZE_T,
|
||||||
|
@ -2076,7 +2082,7 @@ cursor_traverse(cursorObject *self, visitproc visit, void *arg)
|
||||||
|
|
||||||
PyTypeObject cursorType = {
|
PyTypeObject cursorType = {
|
||||||
PyVarObject_HEAD_INIT(NULL, 0)
|
PyVarObject_HEAD_INIT(NULL, 0)
|
||||||
"psycopg2._psycopg.cursor",
|
"psycopg2.extensions.cursor",
|
||||||
sizeof(cursorObject), 0,
|
sizeof(cursorObject), 0,
|
||||||
cursor_dealloc, /*tp_dealloc*/
|
cursor_dealloc, /*tp_dealloc*/
|
||||||
0, /*tp_print*/
|
0, /*tp_print*/
|
||||||
|
|
|
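
The tzinfo factory now resolved at cursor creation above is also exposed as a
writable cursor attribute; a brief sketch of how it is normally used (the DSN
is a placeholder):

    import psycopg2
    import psycopg2.tz

    conn = psycopg2.connect("dbname=test")      # placeholder DSN
    cur = conn.cursor()

    # Factory used to build tzinfo objects for "timestamp with time zone"
    # values; FixedOffsetTimezone is the default looked up in psycopg2.tz.
    cur.tzinfo_factory = psycopg2.tz.FixedOffsetTimezone
    cur.execute("SELECT now()")
    print(cur.fetchone()[0].tzinfo)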
@@ -158,7 +158,7 @@ static const char diagnosticsType_doc[] =

 PyTypeObject diagnosticsType = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "psycopg2._psycopg.Diagnostics",
+    "psycopg2.extensions.Diagnostics",
     sizeof(diagnosticsObject), 0,
     (destructor)diagnostics_dealloc, /*tp_dealloc*/
     0,          /*tp_print*/

@@ -80,11 +80,7 @@ psyco_get_wait_callback(PyObject *self, PyObject *obj)
 int
 psyco_green()
 {
-#ifdef PSYCOPG_EXTENSIONS
     return (NULL != wait_callback);
-#else
-    return 0;
-#endif
 }

 /* Return the wait callback if available.
@@ -60,8 +60,8 @@ RAISES_NEG HIDDEN int lobject_export(lobjectObject *self, const char *filename);
 RAISES_NEG HIDDEN Py_ssize_t lobject_read(lobjectObject *self, char *buf, size_t len);
 RAISES_NEG HIDDEN Py_ssize_t lobject_write(lobjectObject *self, const char *buf,
                                 size_t len);
-RAISES_NEG HIDDEN int lobject_seek(lobjectObject *self, int pos, int whence);
-RAISES_NEG HIDDEN int lobject_tell(lobjectObject *self);
+RAISES_NEG HIDDEN long lobject_seek(lobjectObject *self, long pos, int whence);
+RAISES_NEG HIDDEN long lobject_tell(lobjectObject *self);
 RAISES_NEG HIDDEN int lobject_truncate(lobjectObject *self, size_t len);
 RAISES_NEG HIDDEN int lobject_close(lobjectObject *self);
@@ -32,8 +32,6 @@

 #include <string.h>

-#ifdef PSYCOPG_EXTENSIONS
-
 static void
 collect_error(connectionObject *conn, char **error)
 {

@@ -378,21 +376,29 @@ lobject_read(lobjectObject *self, char *buf, size_t len)

 /* lobject_seek - move the current position in the lo */

-RAISES_NEG int
-lobject_seek(lobjectObject *self, int pos, int whence)
+RAISES_NEG long
+lobject_seek(lobjectObject *self, long pos, int whence)
 {
     PGresult *pgres = NULL;
     char *error = NULL;
-    int where;
+    long where;

-    Dprintf("lobject_seek: fd = %d, pos = %d, whence = %d",
+    Dprintf("lobject_seek: fd = %d, pos = %ld, whence = %d",
             self->fd, pos, whence);

     Py_BEGIN_ALLOW_THREADS;
     pthread_mutex_lock(&(self->conn->lock));

-    where = lo_lseek(self->conn->pgconn, self->fd, pos, whence);
-    Dprintf("lobject_seek: where = %d", where);
+#ifdef HAVE_LO64
+    if (self->conn->server_version < 90300) {
+        where = (long)lo_lseek(self->conn->pgconn, self->fd, (int)pos, whence);
+    } else {
+        where = lo_lseek64(self->conn->pgconn, self->fd, pos, whence);
+    }
+#else
+    where = (long)lo_lseek(self->conn->pgconn, self->fd, (int)pos, whence);
+#endif
+    Dprintf("lobject_seek: where = %ld", where);
     if (where < 0)
         collect_error(self->conn, &error);

@@ -406,20 +412,28 @@ lobject_seek(lobjectObject *self, int pos, int whence)

 /* lobject_tell - tell the current position in the lo */

-RAISES_NEG int
+RAISES_NEG long
 lobject_tell(lobjectObject *self)
 {
     PGresult *pgres = NULL;
     char *error = NULL;
-    int where;
+    long where;

     Dprintf("lobject_tell: fd = %d", self->fd);

     Py_BEGIN_ALLOW_THREADS;
     pthread_mutex_lock(&(self->conn->lock));

-    where = lo_tell(self->conn->pgconn, self->fd);
-    Dprintf("lobject_tell: where = %d", where);
+#ifdef HAVE_LO64
+    if (self->conn->server_version < 90300) {
+        where = (long)lo_tell(self->conn->pgconn, self->fd);
+    } else {
+        where = lo_tell64(self->conn->pgconn, self->fd);
+    }
+#else
+    where = (long)lo_tell(self->conn->pgconn, self->fd);
+#endif
+    Dprintf("lobject_tell: where = %ld", where);
     if (where < 0)
         collect_error(self->conn, &error);

@@ -460,7 +474,7 @@ lobject_export(lobjectObject *self, const char *filename)
     return retvalue;
 }

-#if PG_VERSION_HEX >= 0x080300
+#if PG_VERSION_NUM >= 80300

 RAISES_NEG int
 lobject_truncate(lobjectObject *self, size_t len)

@@ -475,7 +489,15 @@ lobject_truncate(lobjectObject *self, size_t len)
     Py_BEGIN_ALLOW_THREADS;
     pthread_mutex_lock(&(self->conn->lock));

+#ifdef HAVE_LO64
+    if (self->conn->server_version < 90300) {
+        retvalue = lo_truncate(self->conn->pgconn, self->fd, len);
+    } else {
+        retvalue = lo_truncate64(self->conn->pgconn, self->fd, len);
+    }
+#else
     retvalue = lo_truncate(self->conn->pgconn, self->fd, len);
+#endif
     Dprintf("lobject_truncate: result = %d", retvalue);
     if (retvalue < 0)
         collect_error(self->conn, &error);

@@ -489,7 +511,4 @@ lobject_truncate(lobjectObject *self, size_t len)

 }

-#endif /* PG_VERSION_HEX >= 0x080300 */
+#endif /* PG_VERSION_NUM >= 80300 */

-#endif
-
@@ -35,8 +35,6 @@
 #include <string.h>


-#ifdef PSYCOPG_EXTENSIONS
-
 /** public methods **/

 /* close method - close the lobject */

@@ -52,7 +50,7 @@ psyco_lobj_close(lobjectObject *self, PyObject *args)
        opened large objects */
     if (!lobject_is_closed(self)
         && !self->conn->autocommit
         && self->conn->mark == self->mark)
     {
         Dprintf("psyco_lobj_close: closing lobject at %p", self);
         if (lobject_close(self) < 0)

@@ -123,7 +121,7 @@ static PyObject *
 psyco_lobj_read(lobjectObject *self, PyObject *args)
 {
     PyObject *res;
-    int where, end;
+    long where, end;
     Py_ssize_t size = -1;
     char *buffer;

@@ -167,20 +165,39 @@ psyco_lobj_read(lobjectObject *self, PyObject *args)
 static PyObject *
 psyco_lobj_seek(lobjectObject *self, PyObject *args)
 {
-    int offset, whence=0;
-    int pos=0;
+    long offset, pos=0;
+    int whence=0;

-    if (!PyArg_ParseTuple(args, "i|i", &offset, &whence))
+    if (!PyArg_ParseTuple(args, "l|i", &offset, &whence))
         return NULL;

     EXC_IF_LOBJ_CLOSED(self);
     EXC_IF_LOBJ_LEVEL0(self);
     EXC_IF_LOBJ_UNMARKED(self);

-    if ((pos = lobject_seek(self, offset, whence)) < 0)
-        return NULL;
+#ifdef HAVE_LO64
+    if ((offset < INT_MIN || offset > INT_MAX)
+            && self->conn->server_version < 90300) {
+        PyErr_Format(NotSupportedError,
+            "offset out of range (%ld): server version %d "
+            "does not support the lobject 64 API",
+            offset, self->conn->server_version);
+        return NULL;
+    }
+#else
+    if (offset < INT_MIN || offset > INT_MAX) {
+        PyErr_Format(InterfaceError,
+            "offset out of range (%ld): this psycopg version was not built "
+            "with lobject 64 API support",
+            offset);
+        return NULL;
+    }
+#endif

-    return PyInt_FromLong((long)pos);
+    if ((pos = lobject_seek(self, offset, whence)) < 0)
+        return NULL;
+
+    return PyLong_FromLong(pos);
 }

 /* tell method - tell current position in the lobject */
@ -191,16 +208,16 @@ psyco_lobj_seek(lobjectObject *self, PyObject *args)
|
||||||
static PyObject *
|
static PyObject *
|
||||||
psyco_lobj_tell(lobjectObject *self, PyObject *args)
|
psyco_lobj_tell(lobjectObject *self, PyObject *args)
|
||||||
{
|
{
|
||||||
int pos;
|
long pos;
|
||||||
|
|
||||||
EXC_IF_LOBJ_CLOSED(self);
|
EXC_IF_LOBJ_CLOSED(self);
|
||||||
EXC_IF_LOBJ_LEVEL0(self);
|
EXC_IF_LOBJ_LEVEL0(self);
|
||||||
EXC_IF_LOBJ_UNMARKED(self);
|
EXC_IF_LOBJ_UNMARKED(self);
|
||||||
|
|
||||||
if ((pos = lobject_tell(self)) < 0)
|
if ((pos = lobject_tell(self)) < 0)
|
||||||
return NULL;
|
return NULL;
|
||||||
|
|
||||||
return PyInt_FromLong((long)pos);
|
return PyLong_FromLong(pos);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* unlink method - unlink (destroy) the lobject */
|
/* unlink method - unlink (destroy) the lobject */
|
||||||
|
@ -249,7 +266,7 @@ psyco_lobj_get_closed(lobjectObject *self, void *closure)
|
||||||
return closed;
|
return closed;
|
||||||
}
|
}
|
||||||
|
|
||||||
#if PG_VERSION_HEX >= 0x080300
|
#if PG_VERSION_NUM >= 80300
|
||||||
|
|
||||||
#define psyco_lobj_truncate_doc \
|
#define psyco_lobj_truncate_doc \
|
||||||
"truncate(len=0) -- Truncate large object to given size."
|
"truncate(len=0) -- Truncate large object to given size."
|
||||||
|
@ -257,23 +274,39 @@ psyco_lobj_get_closed(lobjectObject *self, void *closure)
|
||||||
static PyObject *
|
static PyObject *
|
||||||
psyco_lobj_truncate(lobjectObject *self, PyObject *args)
|
psyco_lobj_truncate(lobjectObject *self, PyObject *args)
|
||||||
{
|
{
|
||||||
int len = 0;
|
long len = 0;
|
||||||
|
|
||||||
if (!PyArg_ParseTuple(args, "|i", &len))
|
if (!PyArg_ParseTuple(args, "|l", &len))
|
||||||
return NULL;
|
return NULL;
|
||||||
|
|
||||||
EXC_IF_LOBJ_CLOSED(self);
|
EXC_IF_LOBJ_CLOSED(self);
|
||||||
EXC_IF_LOBJ_LEVEL0(self);
|
EXC_IF_LOBJ_LEVEL0(self);
|
||||||
EXC_IF_LOBJ_UNMARKED(self);
|
EXC_IF_LOBJ_UNMARKED(self);
|
||||||
|
|
||||||
|
#ifdef HAVE_LO64
|
||||||
|
if (len > INT_MAX && self->conn->server_version < 90300) {
|
||||||
|
PyErr_Format(NotSupportedError,
|
||||||
|
"len out of range (%ld): server version %d "
|
||||||
|
"does not support the lobject 64 API",
|
||||||
|
len, self->conn->server_version);
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
#else
|
||||||
|
if (len > INT_MAX) {
|
||||||
|
PyErr_Format(InterfaceError,
|
||||||
|
"len out of range (%ld): this psycopg version was not built "
|
||||||
|
"with lobject 64 API support",
|
||||||
|
len);
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
if (lobject_truncate(self, len) < 0)
|
if (lobject_truncate(self, len) < 0)
|
||||||
return NULL;
|
return NULL;
|
||||||
|
|
||||||
Py_RETURN_NONE;
|
Py_RETURN_NONE;
|
||||||
}
|
}
|
||||||
|
|
||||||
#endif /* PG_VERSION_HEX >= 0x080300 */
|
|
||||||
|
|
||||||
|
|
||||||
/** the lobject object **/
|
/** the lobject object **/
|
||||||
|
|
||||||
|
@ -294,10 +327,10 @@ static struct PyMethodDef lobjectObject_methods[] = {
|
||||||
METH_NOARGS, psyco_lobj_unlink_doc},
|
METH_NOARGS, psyco_lobj_unlink_doc},
|
||||||
{"export",(PyCFunction)psyco_lobj_export,
|
{"export",(PyCFunction)psyco_lobj_export,
|
||||||
METH_VARARGS, psyco_lobj_export_doc},
|
METH_VARARGS, psyco_lobj_export_doc},
|
||||||
#if PG_VERSION_HEX >= 0x080300
|
#if PG_VERSION_NUM >= 80300
|
||||||
{"truncate",(PyCFunction)psyco_lobj_truncate,
|
{"truncate",(PyCFunction)psyco_lobj_truncate,
|
||||||
METH_VARARGS, psyco_lobj_truncate_doc},
|
METH_VARARGS, psyco_lobj_truncate_doc},
|
||||||
#endif /* PG_VERSION_HEX >= 0x080300 */
|
#endif /* PG_VERSION_NUM >= 80300 */
|
||||||
{NULL}
|
{NULL}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -333,11 +366,10 @@ lobject_setup(lobjectObject *self, connectionObject *conn,
|
||||||
return -1;
|
return -1;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Py_INCREF((PyObject*)conn);
|
||||||
self->conn = conn;
|
self->conn = conn;
|
||||||
self->mark = conn->mark;
|
self->mark = conn->mark;
|
||||||
|
|
||||||
Py_INCREF((PyObject*)self->conn);
|
|
||||||
|
|
||||||
self->fd = -1;
|
self->fd = -1;
|
||||||
self->oid = InvalidOid;
|
self->oid = InvalidOid;
|
||||||
|
|
||||||
|
@ -358,8 +390,8 @@ lobject_dealloc(PyObject* obj)
|
||||||
if (self->conn && self->fd != -1) {
|
if (self->conn && self->fd != -1) {
|
||||||
if (lobject_close(self) < 0)
|
if (lobject_close(self) < 0)
|
||||||
PyErr_Print();
|
PyErr_Print();
|
||||||
Py_XDECREF((PyObject*)self->conn);
|
|
||||||
}
|
}
|
||||||
|
Py_CLEAR(self->conn);
|
||||||
PyMem_Free(self->smode);
|
PyMem_Free(self->smode);
|
||||||
|
|
||||||
Dprintf("lobject_dealloc: deleted lobject object at %p, refcnt = "
|
Dprintf("lobject_dealloc: deleted lobject object at %p, refcnt = "
|
||||||
|
@ -406,7 +438,7 @@ lobject_repr(lobjectObject *self)
|
||||||
|
|
||||||
PyTypeObject lobjectType = {
|
PyTypeObject lobjectType = {
|
||||||
PyVarObject_HEAD_INIT(NULL, 0)
|
PyVarObject_HEAD_INIT(NULL, 0)
|
||||||
"psycopg2._psycopg.lobject",
|
"psycopg2.extensions.lobject",
|
||||||
sizeof(lobjectObject), 0,
|
sizeof(lobjectObject), 0,
|
||||||
lobject_dealloc, /*tp_dealloc*/
|
lobject_dealloc, /*tp_dealloc*/
|
||||||
0, /*tp_print*/
|
0, /*tp_print*/
|
||||||
|
|
|
@@ -142,7 +142,7 @@ isqlquote_new(PyTypeObject *type, PyObject *args, PyObject *kwds)

 PyTypeObject isqlquoteType = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "psycopg2._psycopg.ISQLQuote",
+    "psycopg2.extensions.ISQLQuote",
     sizeof(isqlquoteObject), 0,
     isqlquote_dealloc, /*tp_dealloc*/
     0,          /*tp_print*/
@@ -113,11 +113,7 @@ exception_from_sqlstate(const char *sqlstate)
     case '4':
         switch (sqlstate[1]) {
         case '0': /* Class 40 - Transaction Rollback */
-#ifdef PSYCOPG_EXTENSIONS
             return TransactionRollbackError;
-#else
-            return OperationalError;
-#endif
         case '2': /* Class 42 - Syntax Error or Access Rule Violation */
         case '4': /* Class 44 - WITH CHECK OPTION Violation */
             return ProgrammingError;
@@ -129,11 +125,9 @@ exception_from_sqlstate(const char *sqlstate)
            Class 55 - Object Not In Prerequisite State
            Class 57 - Operator Intervention
            Class 58 - System Error (errors external to PostgreSQL itself) */
-#ifdef PSYCOPG_EXTENSIONS
         if (!strcmp(sqlstate, "57014"))
             return QueryCanceledError;
         else
-#endif
             return OperationalError;
     case 'F': /* Class F0 - Configuration File Error */
         return InternalError;
@@ -196,8 +190,10 @@ pq_raise(connectionObject *conn, cursorObject *curs, PGresult **pgres)
       raise and a meaningful message is better than an empty one.
       Note: it can happen without it being our error: see ticket #82 */
    if (err == NULL || err[0] == '\0') {
-        PyErr_SetString(DatabaseError,
-            "error with no message from the libpq");
+        PyErr_Format(DatabaseError,
+            "error with status %s and no message from the libpq",
+            PQresStatus(pgres == NULL ?
+                PQstatus(conn->pgconn) : PQresultStatus(*pgres)));
        return;
    }

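With the PSYCOPG_EXTENSIONS guards removed above, QueryCanceledError and TransactionRollbackError are always mapped from their SQLSTATE classes, so applications can catch them unconditionally. A minimal sketch (the open cursor, connection and query are assumptions):

    from psycopg2.extensions import QueryCanceledError, TransactionRollbackError

    try:
        cur.execute("SELECT pg_sleep(3600)")
    except QueryCanceledError:
        conn.rollback()    # statement_timeout expired or conn.cancel() was called
    except TransactionRollbackError:
        conn.rollback()    # deadlock or serialization failure: safe to retry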
|
@ -893,7 +889,7 @@ pq_flush(connectionObject *conn)
|
||||||
*/
|
*/
|
||||||
|
|
||||||
RAISES_NEG int
|
RAISES_NEG int
|
||||||
pq_execute(cursorObject *curs, const char *query, int async, int no_result)
|
pq_execute(cursorObject *curs, const char *query, int async, int no_result, int no_begin)
|
||||||
{
|
{
|
||||||
PGresult *pgres = NULL;
|
PGresult *pgres = NULL;
|
||||||
char *error = NULL;
|
char *error = NULL;
|
||||||
|
@ -916,7 +912,7 @@ pq_execute(cursorObject *curs, const char *query, int async, int no_result)
|
||||||
Py_BEGIN_ALLOW_THREADS;
|
Py_BEGIN_ALLOW_THREADS;
|
||||||
pthread_mutex_lock(&(curs->conn->lock));
|
pthread_mutex_lock(&(curs->conn->lock));
|
||||||
|
|
||||||
if (pq_begin_locked(curs->conn, &pgres, &error, &_save) < 0) {
|
if (!no_begin && pq_begin_locked(curs->conn, &pgres, &error, &_save) < 0) {
|
||||||
pthread_mutex_unlock(&(curs->conn->lock));
|
pthread_mutex_unlock(&(curs->conn->lock));
|
||||||
Py_BLOCK_THREADS;
|
Py_BLOCK_THREADS;
|
||||||
pq_complete_error(curs->conn, &pgres, &error);
|
pq_complete_error(curs->conn, &pgres, &error);
|
||||||
|
@ -986,6 +982,10 @@ pq_execute(cursorObject *curs, const char *query, int async, int no_result)
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
/* there was an error */
|
/* there was an error */
|
||||||
|
pthread_mutex_unlock(&(curs->conn->lock));
|
||||||
|
Py_BLOCK_THREADS;
|
||||||
|
PyErr_SetString(OperationalError,
|
||||||
|
PQerrorMessage(curs->conn->pgconn));
|
||||||
return -1;
|
return -1;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -1288,6 +1288,13 @@ _pq_copy_in_v3(cursorObject *curs)
     Py_ssize_t length = 0;
     int res, error = 0;

+    if (!curs->copyfile) {
+        PyErr_SetString(ProgrammingError,
+            "can't execute COPY FROM: use the copy_from() method instead");
+        error = 1;
+        goto exit;
+    }
+
     if (!(func = PyObject_GetAttrString(curs->copyfile, "read"))) {
         Dprintf("_pq_copy_in_v3: can't get o.read");
         error = 1;
@@ -1369,9 +1376,27 @@ _pq_copy_in_v3(cursorObject *curs)
         res = PQputCopyEnd(curs->conn->pgconn, NULL);
     else if (error == 2)
         res = PQputCopyEnd(curs->conn->pgconn, "error in PQputCopyData() call");
-    else
-        /* XXX would be nice to propagate the exception */
-        res = PQputCopyEnd(curs->conn->pgconn, "error in .read() call");
+    else {
+        char buf[1024];
+        strcpy(buf, "error in .read() call");
+        if (PyErr_Occurred()) {
+            PyObject *t, *ex, *tb;
+            PyErr_Fetch(&t, &ex, &tb);
+            if (ex) {
+                PyObject *str;
+                str = PyObject_Str(ex);
+                str = psycopg_ensure_bytes(str);
+                if (str) {
+                    PyOS_snprintf(buf, sizeof(buf),
+                        "error in .read() call: %s %s",
+                        ((PyTypeObject *)t)->tp_name, Bytes_AsString(str));
+                    Py_DECREF(str);
+                }
+            }
+            PyErr_Restore(t, ex, tb);
+        }
+        res = PQputCopyEnd(curs->conn->pgconn, buf);
+    }

     CLEARPGRES(curs->pgres);

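The new else branch above copies the pending Python exception raised by the file object's read() into the message handed to PQputCopyEnd(), so the aborted COPY carries a hint about the real cause. A hedged illustration with a deliberately broken file object (cursor and table name are assumptions):

    class BrokenFile(object):
        def read(self, size=-1):
            raise ValueError("boom")     # simulated failure inside .read()

    cur.copy_from(BrokenFile(), 'target_table')
    # the ValueError still propagates to the caller, and the COPY is now
    # terminated with a message along the lines of
    # "error in .read() call: ValueError boom"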
@@ -1411,7 +1436,8 @@ exit:
 static int
 _pq_copy_out_v3(cursorObject *curs)
 {
-    PyObject *tmp = NULL, *func;
+    PyObject *tmp = NULL;
+    PyObject *func = NULL;
     PyObject *obj = NULL;
     int ret = -1;
     int is_text;
@@ -1419,6 +1445,12 @@ _pq_copy_out_v3(cursorObject *curs)
     char *buffer;
     Py_ssize_t len;

+    if (!curs->copyfile) {
+        PyErr_SetString(ProgrammingError,
+            "can't execute COPY TO: use the copy_to() method instead");
+        goto exit;
+    }
+
     if (!(func = PyObject_GetAttrString(curs->copyfile, "write"))) {
         Dprintf("_pq_copy_out_v3: can't get o.write");
         goto exit;
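The copyfile check added above turns a previously unguarded situation (running a COPY TO statement through execute() with no target file registered) into an explicit ProgrammingError. The supported path is the copy_to()/copy_expert() methods; a small hedged sketch (table and file names are hypothetical):

    # cur.execute("COPY tbl TO STDOUT")       # now rejected with ProgrammingError
    with open('table_dump.txt', 'w') as f:
        cur.copy_to(f, 'tbl')                # the supported way to stream the data out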
@@ -1565,11 +1597,26 @@ pq_fetch(cursorObject *curs, int no_result)
         ex = -1;
         break;

-    default:
-        Dprintf("pq_fetch: uh-oh, something FAILED: pgconn = %p", curs->conn);
+    case PGRES_BAD_RESPONSE:
+    case PGRES_NONFATAL_ERROR:
+    case PGRES_FATAL_ERROR:
+        Dprintf("pq_fetch: uh-oh, something FAILED: status = %d pgconn = %p",
+                pgstatus, curs->conn);
         pq_raise(curs->conn, curs, NULL);
         ex = -1;
         break;

+    default:
+        /* PGRES_COPY_BOTH, PGRES_SINGLE_TUPLE, future statuses */
+        Dprintf("pq_fetch: got unsupported result: status = %d pgconn = %p",
+                pgstatus, curs->conn);
+        PyErr_Format(NotSupportedError,
+            "got server response with unsupported status %s",
+            PQresStatus(curs->pgres == NULL ?
+                PQstatus(curs->conn->pgconn) : PQresultStatus(curs->pgres)));
+        CLEARPGRES(curs->pgres);
+        ex = -1;
+        break;
     }

    /* error checking, close the connection if necessary (some critical errors
@@ -36,7 +36,7 @@
 HIDDEN PGresult *pq_get_last_result(connectionObject *conn);
 RAISES_NEG HIDDEN int pq_fetch(cursorObject *curs, int no_result);
 RAISES_NEG HIDDEN int pq_execute(cursorObject *curs, const char *query,
-                                 int async, int no_result);
+                                 int async, int no_result, int no_begin);
 HIDDEN int pq_send_query(connectionObject *conn, const char *query);
 HIDDEN int pq_begin_locked(connectionObject *conn, PGresult **pgres,
                            char **error, PyThreadState **tstate);
@@ -63,9 +63,7 @@ HIDDEN psyco_errors_set_RETURN psyco_errors_set psyco_errors_set_PROTO;
 extern HIDDEN PyObject *Error, *Warning, *InterfaceError, *DatabaseError,
     *InternalError, *OperationalError, *ProgrammingError,
     *IntegrityError, *DataError, *NotSupportedError;
-#ifdef PSYCOPG_EXTENSIONS
 extern HIDDEN PyObject *QueryCanceledError, *TransactionRollbackError;
-#endif

 /* python versions and compatibility stuff */
 #ifndef PyMODINIT_FUNC
@@ -164,13 +162,11 @@ STEALS(1) HIDDEN PyObject * psycopg_ensure_text(PyObject *obj);
 #define NotSupportedError_doc \
 "A method or database API was used which is not supported by the database."

-#ifdef PSYCOPG_EXTENSIONS
 #define QueryCanceledError_doc \
 "Error related to SQL query cancellation."

 #define TransactionRollbackError_doc \
 "Error causing transaction rollback (deadlocks, serialization failures, etc)."
-#endif

 #ifdef __cplusplus
 }
@@ -58,11 +58,6 @@
 #include "psycopg/adapter_datetime.h"
 HIDDEN PyObject *pyDateTimeModuleP = NULL;

-/* pointers to the psycopg.tz classes */
-HIDDEN PyObject *pyPsycopgTzModule = NULL;
-HIDDEN PyObject *pyPsycopgTzLOCAL = NULL;
-HIDDEN PyObject *pyPsycopgTzFixedOffsetTimezone = NULL;
-
 HIDDEN PyObject *psycoEncodings = NULL;

 #ifdef PSYCOPG_DEBUG
@@ -117,6 +112,115 @@ psyco_connect(PyObject *self, PyObject *args, PyObject *keywds)
     return conn;
 }

+#define psyco_parse_dsn_doc "parse_dsn(dsn) -> dict"
+
+static PyObject *
+psyco_parse_dsn(PyObject *self, PyObject *args, PyObject *kwargs)
+{
+    char *err = NULL;
+    PQconninfoOption *options = NULL, *o;
+    PyObject *dict = NULL, *res = NULL, *dsn;
+
+    static char *kwlist[] = {"dsn", NULL};
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O", kwlist, &dsn)) {
+        return NULL;
+    }
+
+    Py_INCREF(dsn); /* for ensure_bytes */
+    if (!(dsn = psycopg_ensure_bytes(dsn))) { goto exit; }
+
+    options = PQconninfoParse(Bytes_AS_STRING(dsn), &err);
+    if (options == NULL) {
+        if (err != NULL) {
+            PyErr_Format(ProgrammingError, "error parsing the dsn: %s", err);
+            PQfreemem(err);
+        } else {
+            PyErr_SetString(OperationalError, "PQconninfoParse() failed");
+        }
+        goto exit;
+    }
+
+    if (!(dict = PyDict_New())) { goto exit; }
+    for (o = options; o->keyword != NULL; o++) {
+        if (o->val != NULL) {
+            PyObject *value;
+            if (!(value = Text_FromUTF8(o->val))) { goto exit; }
+            if (PyDict_SetItemString(dict, o->keyword, value) != 0) {
+                Py_DECREF(value);
+                goto exit;
+            }
+            Py_DECREF(value);
+        }
+    }
+
+    /* success */
+    res = dict;
+    dict = NULL;
+
+exit:
+    PQconninfoFree(options); /* safe on null */
+    Py_XDECREF(dict);
+    Py_XDECREF(dsn);
+
+    return res;
+}
+
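psyco_parse_dsn() above is a thin wrapper over PQconninfoParse(); from Python it is reachable as psycopg2.extensions.parse_dsn, as the test added at the end of this changeset shows. A quick sketch of the expected behaviour (the second example assumes a libpq recent enough to accept connection URIs):

    from psycopg2.extensions import parse_dsn

    parse_dsn("dbname=test user=tester password=secret")
    # -> {'dbname': 'test', 'user': 'tester', 'password': 'secret'}

    parse_dsn("postgresql://tester@localhost:5433/test")
    # -> {'user': 'tester', 'host': 'localhost', 'port': '5433', 'dbname': 'test'}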
+#define psyco_quote_ident_doc \
+"quote_ident(str, conn_or_curs) -> str -- wrapper around PQescapeIdentifier\n\n" \
+":Parameters:\n" \
+"  * `str`: A bytes or unicode object\n" \
+"  * `conn_or_curs`: A connection or cursor, required"
+
+static PyObject *
+psyco_quote_ident(PyObject *self, PyObject *args, PyObject *kwargs)
+{
+#if PG_VERSION_NUM >= 90000
+    PyObject *ident = NULL, *obj = NULL, *result = NULL;
+    connectionObject *conn;
+    const char *str;
+    char *quoted = NULL;
+
+    static char *kwlist[] = {"ident", "scope", NULL};
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "OO", kwlist, &ident, &obj)) {
+        return NULL;
+    }
+
+    if (PyObject_TypeCheck(obj, &cursorType)) {
+        conn = ((cursorObject*)obj)->conn;
+    }
+    else if (PyObject_TypeCheck(obj, &connectionType)) {
+        conn = (connectionObject*)obj;
+    }
+    else {
+        PyErr_SetString(PyExc_TypeError,
+            "argument 2 must be a connection or a cursor");
+        return NULL;
+    }
+
+    Py_INCREF(ident); /* for ensure_bytes */
+    if (!(ident = psycopg_ensure_bytes(ident))) { goto exit; }
+
+    str = Bytes_AS_STRING(ident);
+
+    quoted = PQescapeIdentifier(conn->pgconn, str, strlen(str));
+    if (!quoted) {
+        PyErr_NoMemory();
+        goto exit;
+    }
+    result = conn_text_from_chars(conn, quoted);
+
+exit:
+    PQfreemem(quoted);
+    Py_XDECREF(ident);
+
+    return result;
+#else
+    PyErr_SetString(NotSupportedError, "PQescapeIdentifier not available in libpq < 9.0");
+    return NULL;
+#endif
+}
+
|
/** type registration **/
|
||||||
#define psyco_register_type_doc \
|
#define psyco_register_type_doc \
|
||||||
"register_type(obj, conn_or_curs) -> None -- register obj with psycopg type system\n\n" \
|
"register_type(obj, conn_or_curs) -> None -- register obj with psycopg type system\n\n" \
|
||||||
|
@ -181,6 +285,29 @@ psyco_register_type(PyObject *self, PyObject *args)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/* Make sure libcrypto thread callbacks are set up. */
|
||||||
|
static void
|
||||||
|
psyco_libcrypto_threads_init(void)
|
||||||
|
{
|
||||||
|
PyObject *m;
|
||||||
|
|
||||||
|
/* importing the ssl module sets up Python's libcrypto callbacks */
|
||||||
|
if ((m = PyImport_ImportModule("ssl"))) {
|
||||||
|
/* disable libcrypto setup in libpq, so it won't stomp on the callbacks
|
||||||
|
that have already been set up */
|
||||||
|
#if PG_VERSION_NUM >= 80400
|
||||||
|
PQinitOpenSSL(1, 0);
|
||||||
|
#endif
|
||||||
|
Py_DECREF(m);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
/* might mean that Python has been compiled without OpenSSL support,
|
||||||
|
fall back to relying on libpq's libcrypto locking */
|
||||||
|
PyErr_Clear();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/* Initialize the default adapters map
|
/* Initialize the default adapters map
|
||||||
*
|
*
|
||||||
* Return 0 on success, else -1 and set an exception.
|
* Return 0 on success, else -1 and set an exception.
|
||||||
|
@ -285,6 +412,19 @@ exit:
|
||||||
return rv;
|
return rv;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#define psyco_libpq_version_doc "Query actual libpq version loaded."
|
||||||
|
|
||||||
|
static PyObject*
|
||||||
|
psyco_libpq_version(PyObject *self)
|
||||||
|
{
|
||||||
|
#if PG_VERSION_NUM >= 90100
|
||||||
|
return PyInt_FromLong(PQlibVersion());
|
||||||
|
#else
|
||||||
|
PyErr_SetString(NotSupportedError, "version discovery is not supported in libpq < 9.1");
|
||||||
|
return NULL;
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
|
||||||
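psyco_libpq_version() reports the libpq actually loaded at runtime (via PQlibVersion(), available from libpq 9.1), while the __libpq_version__ module constant added further down records the version psycopg2 was compiled against. Assuming both end up re-exported at package level like the rest of the module's symbols, a quick check looks like:

    import psycopg2
    from psycopg2.extensions import libpq_version

    print(psycopg2.__libpq_version__)   # e.g. 90303: headers used at build time
    print(libpq_version())              # library picked up when the module was imported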
/* psyco_encodings_fill
|
/* psyco_encodings_fill
|
||||||
|
|
||||||
Fill the module's postgresql<->python encoding table */
|
Fill the module's postgresql<->python encoding table */
|
||||||
|
@ -398,9 +538,7 @@ exit:
|
||||||
PyObject *Error, *Warning, *InterfaceError, *DatabaseError,
|
PyObject *Error, *Warning, *InterfaceError, *DatabaseError,
|
||||||
*InternalError, *OperationalError, *ProgrammingError,
|
*InternalError, *OperationalError, *ProgrammingError,
|
||||||
*IntegrityError, *DataError, *NotSupportedError;
|
*IntegrityError, *DataError, *NotSupportedError;
|
||||||
#ifdef PSYCOPG_EXTENSIONS
|
|
||||||
PyObject *QueryCanceledError, *TransactionRollbackError;
|
PyObject *QueryCanceledError, *TransactionRollbackError;
|
||||||
#endif
|
|
||||||
|
|
||||||
/* mapping between exception names and their PyObject */
|
/* mapping between exception names and their PyObject */
|
||||||
static struct {
|
static struct {
|
||||||
|
@ -423,13 +561,11 @@ static struct {
|
||||||
{ "psycopg2.DataError", &DataError, &DatabaseError, DataError_doc },
|
{ "psycopg2.DataError", &DataError, &DatabaseError, DataError_doc },
|
||||||
{ "psycopg2.NotSupportedError", &NotSupportedError, &DatabaseError,
|
{ "psycopg2.NotSupportedError", &NotSupportedError, &DatabaseError,
|
||||||
NotSupportedError_doc },
|
NotSupportedError_doc },
|
||||||
#ifdef PSYCOPG_EXTENSIONS
|
|
||||||
{ "psycopg2.extensions.QueryCanceledError", &QueryCanceledError,
|
{ "psycopg2.extensions.QueryCanceledError", &QueryCanceledError,
|
||||||
&OperationalError, QueryCanceledError_doc },
|
&OperationalError, QueryCanceledError_doc },
|
||||||
{ "psycopg2.extensions.TransactionRollbackError",
|
{ "psycopg2.extensions.TransactionRollbackError",
|
||||||
&TransactionRollbackError, &OperationalError,
|
&TransactionRollbackError, &OperationalError,
|
||||||
TransactionRollbackError_doc },
|
TransactionRollbackError_doc },
|
||||||
#endif
|
|
||||||
{NULL} /* Sentinel */
|
{NULL} /* Sentinel */
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -630,8 +766,10 @@ psyco_GetDecimalType(void)
|
||||||
static PyObject *
|
static PyObject *
|
||||||
psyco_make_description_type(void)
|
psyco_make_description_type(void)
|
||||||
{
|
{
|
||||||
PyObject *nt = NULL;
|
|
||||||
PyObject *coll = NULL;
|
PyObject *coll = NULL;
|
||||||
|
PyObject *nt = NULL;
|
||||||
|
PyTypeObject *t = NULL;
|
||||||
|
PyObject *s = NULL;
|
||||||
PyObject *rv = NULL;
|
PyObject *rv = NULL;
|
||||||
|
|
||||||
/* Try to import collections.namedtuple */
|
/* Try to import collections.namedtuple */
|
||||||
|
@ -645,12 +783,26 @@ psyco_make_description_type(void)
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Build the namedtuple */
|
/* Build the namedtuple */
|
||||||
rv = PyObject_CallFunction(nt, "ss", "Column",
|
if(!(t = (PyTypeObject *)PyObject_CallFunction(nt, "ss", "Column",
|
||||||
"name type_code display_size internal_size precision scale null_ok");
|
"name type_code display_size internal_size precision scale null_ok"))) {
|
||||||
|
goto exit;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Export the tuple on the extensions module
|
||||||
|
* Required to guarantee picklability on Py > 3.3 (see Python issue 21374)
|
||||||
|
* for previous Py version the module is psycopg2 anyway but for consistency
|
||||||
|
* we'd rather expose it from the extensions module. */
|
||||||
|
if (!(s = Text_FromUTF8("psycopg2.extensions"))) { goto exit; }
|
||||||
|
if (0 > PyDict_SetItemString(t->tp_dict, "__module__", s)) { goto exit; }
|
||||||
|
|
||||||
|
rv = (PyObject *)t;
|
||||||
|
t = NULL;
|
||||||
|
|
||||||
exit:
|
exit:
|
||||||
Py_XDECREF(coll);
|
Py_XDECREF(coll);
|
||||||
Py_XDECREF(nt);
|
Py_XDECREF(nt);
|
||||||
|
Py_XDECREF((PyObject *)t);
|
||||||
|
Py_XDECREF(s);
|
||||||
|
|
||||||
return rv;
|
return rv;
|
||||||
|
|
||||||
|
@ -668,6 +820,10 @@ error:
|
||||||
static PyMethodDef psycopgMethods[] = {
|
static PyMethodDef psycopgMethods[] = {
|
||||||
{"_connect", (PyCFunction)psyco_connect,
|
{"_connect", (PyCFunction)psyco_connect,
|
||||||
METH_VARARGS|METH_KEYWORDS, psyco_connect_doc},
|
METH_VARARGS|METH_KEYWORDS, psyco_connect_doc},
|
||||||
|
{"parse_dsn", (PyCFunction)psyco_parse_dsn,
|
||||||
|
METH_VARARGS|METH_KEYWORDS, psyco_parse_dsn_doc},
|
||||||
|
{"quote_ident", (PyCFunction)psyco_quote_ident,
|
||||||
|
METH_VARARGS|METH_KEYWORDS, psyco_quote_ident_doc},
|
||||||
{"adapt", (PyCFunction)psyco_microprotocols_adapt,
|
{"adapt", (PyCFunction)psyco_microprotocols_adapt,
|
||||||
METH_VARARGS, psyco_microprotocols_adapt_doc},
|
METH_VARARGS, psyco_microprotocols_adapt_doc},
|
||||||
|
|
||||||
|
@ -677,21 +833,9 @@ static PyMethodDef psycopgMethods[] = {
|
||||||
METH_VARARGS|METH_KEYWORDS, typecast_from_python_doc},
|
METH_VARARGS|METH_KEYWORDS, typecast_from_python_doc},
|
||||||
{"new_array_type", (PyCFunction)typecast_array_from_python,
|
{"new_array_type", (PyCFunction)typecast_array_from_python,
|
||||||
METH_VARARGS|METH_KEYWORDS, typecast_array_from_python_doc},
|
METH_VARARGS|METH_KEYWORDS, typecast_array_from_python_doc},
|
||||||
|
{"libpq_version", (PyCFunction)psyco_libpq_version,
|
||||||
|
METH_NOARGS, psyco_libpq_version_doc},
|
||||||
|
|
||||||
{"AsIs", (PyCFunction)psyco_AsIs,
|
|
||||||
METH_VARARGS, psyco_AsIs_doc},
|
|
||||||
{"QuotedString", (PyCFunction)psyco_QuotedString,
|
|
||||||
METH_VARARGS, psyco_QuotedString_doc},
|
|
||||||
{"Boolean", (PyCFunction)psyco_Boolean,
|
|
||||||
METH_VARARGS, psyco_Boolean_doc},
|
|
||||||
{"Int", (PyCFunction)psyco_Int,
|
|
||||||
METH_VARARGS, psyco_Int_doc},
|
|
||||||
{"Float", (PyCFunction)psyco_Float,
|
|
||||||
METH_VARARGS, psyco_Float_doc},
|
|
||||||
{"Decimal", (PyCFunction)psyco_Decimal,
|
|
||||||
METH_VARARGS, psyco_Decimal_doc},
|
|
||||||
{"Binary", (PyCFunction)psyco_Binary,
|
|
||||||
METH_VARARGS, psyco_Binary_doc},
|
|
||||||
{"Date", (PyCFunction)psyco_Date,
|
{"Date", (PyCFunction)psyco_Date,
|
||||||
METH_VARARGS, psyco_Date_doc},
|
METH_VARARGS, psyco_Date_doc},
|
||||||
{"Time", (PyCFunction)psyco_Time,
|
{"Time", (PyCFunction)psyco_Time,
|
||||||
|
@ -704,8 +848,6 @@ static PyMethodDef psycopgMethods[] = {
|
||||||
METH_VARARGS, psyco_TimeFromTicks_doc},
|
METH_VARARGS, psyco_TimeFromTicks_doc},
|
||||||
{"TimestampFromTicks", (PyCFunction)psyco_TimestampFromTicks,
|
{"TimestampFromTicks", (PyCFunction)psyco_TimestampFromTicks,
|
||||||
METH_VARARGS, psyco_TimestampFromTicks_doc},
|
METH_VARARGS, psyco_TimestampFromTicks_doc},
|
||||||
{"List", (PyCFunction)psyco_List,
|
|
||||||
METH_VARARGS, psyco_List_doc},
|
|
||||||
|
|
||||||
{"DateFromPy", (PyCFunction)psyco_DateFromPy,
|
{"DateFromPy", (PyCFunction)psyco_DateFromPy,
|
||||||
METH_VARARGS, psyco_DateFromPy_doc},
|
METH_VARARGS, psyco_DateFromPy_doc},
|
||||||
|
@ -728,12 +870,10 @@ static PyMethodDef psycopgMethods[] = {
|
||||||
METH_VARARGS, psyco_IntervalFromMx_doc},
|
METH_VARARGS, psyco_IntervalFromMx_doc},
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#ifdef PSYCOPG_EXTENSIONS
|
|
||||||
{"set_wait_callback", (PyCFunction)psyco_set_wait_callback,
|
{"set_wait_callback", (PyCFunction)psyco_set_wait_callback,
|
||||||
METH_O, psyco_set_wait_callback_doc},
|
METH_O, psyco_set_wait_callback_doc},
|
||||||
{"get_wait_callback", (PyCFunction)psyco_get_wait_callback,
|
{"get_wait_callback", (PyCFunction)psyco_get_wait_callback,
|
||||||
METH_NOARGS, psyco_get_wait_callback_doc},
|
METH_NOARGS, psyco_get_wait_callback_doc},
|
||||||
#endif
|
|
||||||
|
|
||||||
{NULL, NULL, 0, NULL} /* Sentinel */
|
{NULL, NULL, 0, NULL} /* Sentinel */
|
||||||
};
|
};
|
||||||
|
@ -822,10 +962,11 @@ INIT_MODULE(_psycopg)(void)
|
||||||
Py_TYPE(&diagnosticsType) = &PyType_Type;
|
Py_TYPE(&diagnosticsType) = &PyType_Type;
|
||||||
if (PyType_Ready(&diagnosticsType) == -1) goto exit;
|
if (PyType_Ready(&diagnosticsType) == -1) goto exit;
|
||||||
|
|
||||||
#ifdef PSYCOPG_EXTENSIONS
|
|
||||||
Py_TYPE(&lobjectType) = &PyType_Type;
|
Py_TYPE(&lobjectType) = &PyType_Type;
|
||||||
if (PyType_Ready(&lobjectType) == -1) goto exit;
|
if (PyType_Ready(&lobjectType) == -1) goto exit;
|
||||||
#endif
|
|
||||||
|
/* initialize libcrypto threading callbacks */
|
||||||
|
psyco_libcrypto_threads_init();
|
||||||
|
|
||||||
/* import mx.DateTime module, if necessary */
|
/* import mx.DateTime module, if necessary */
|
||||||
#ifdef HAVE_MXDATETIME
|
#ifdef HAVE_MXDATETIME
|
||||||
|
@ -859,18 +1000,6 @@ INIT_MODULE(_psycopg)(void)
|
||||||
Py_TYPE(&pydatetimeType) = &PyType_Type;
|
Py_TYPE(&pydatetimeType) = &PyType_Type;
|
||||||
if (PyType_Ready(&pydatetimeType) == -1) goto exit;
|
if (PyType_Ready(&pydatetimeType) == -1) goto exit;
|
||||||
|
|
||||||
/* import psycopg2.tz anyway (TODO: replace with C-level module?) */
|
|
||||||
pyPsycopgTzModule = PyImport_ImportModule("psycopg2.tz");
|
|
||||||
if (pyPsycopgTzModule == NULL) {
|
|
||||||
Dprintf("initpsycopg: can't import psycopg2.tz module");
|
|
||||||
PyErr_SetString(PyExc_ImportError, "can't import psycopg2.tz module");
|
|
||||||
goto exit;
|
|
||||||
}
|
|
||||||
pyPsycopgTzLOCAL =
|
|
||||||
PyObject_GetAttrString(pyPsycopgTzModule, "LOCAL");
|
|
||||||
pyPsycopgTzFixedOffsetTimezone =
|
|
||||||
PyObject_GetAttrString(pyPsycopgTzModule, "FixedOffsetTimezone");
|
|
||||||
|
|
||||||
/* initialize the module and grab module's dictionary */
|
/* initialize the module and grab module's dictionary */
|
||||||
#if PY_MAJOR_VERSION < 3
|
#if PY_MAJOR_VERSION < 3
|
||||||
module = Py_InitModule("_psycopg", psycopgMethods);
|
module = Py_InitModule("_psycopg", psycopgMethods);
|
||||||
|
@ -901,6 +1030,7 @@ INIT_MODULE(_psycopg)(void)
|
||||||
/* set some module's parameters */
|
/* set some module's parameters */
|
||||||
PyModule_AddStringConstant(module, "__version__", PSYCOPG_VERSION);
|
PyModule_AddStringConstant(module, "__version__", PSYCOPG_VERSION);
|
||||||
PyModule_AddStringConstant(module, "__doc__", "psycopg PostgreSQL driver");
|
PyModule_AddStringConstant(module, "__doc__", "psycopg PostgreSQL driver");
|
||||||
|
PyModule_AddIntConstant(module, "__libpq_version__", PG_VERSION_NUM);
|
||||||
PyModule_AddObject(module, "apilevel", Text_FromUTF8(APILEVEL));
|
PyModule_AddObject(module, "apilevel", Text_FromUTF8(APILEVEL));
|
||||||
PyModule_AddObject(module, "threadsafety", PyInt_FromLong(THREADSAFETY));
|
PyModule_AddObject(module, "threadsafety", PyInt_FromLong(THREADSAFETY));
|
||||||
PyModule_AddObject(module, "paramstyle", Text_FromUTF8(PARAMSTYLE));
|
PyModule_AddObject(module, "paramstyle", Text_FromUTF8(PARAMSTYLE));
|
||||||
|
@ -912,9 +1042,16 @@ INIT_MODULE(_psycopg)(void)
|
||||||
PyModule_AddObject(module, "Notify", (PyObject*)¬ifyType);
|
PyModule_AddObject(module, "Notify", (PyObject*)¬ifyType);
|
||||||
PyModule_AddObject(module, "Xid", (PyObject*)&xidType);
|
PyModule_AddObject(module, "Xid", (PyObject*)&xidType);
|
||||||
PyModule_AddObject(module, "Diagnostics", (PyObject*)&diagnosticsType);
|
PyModule_AddObject(module, "Diagnostics", (PyObject*)&diagnosticsType);
|
||||||
#ifdef PSYCOPG_EXTENSIONS
|
PyModule_AddObject(module, "AsIs", (PyObject*)&asisType);
|
||||||
|
PyModule_AddObject(module, "Binary", (PyObject*)&binaryType);
|
||||||
|
PyModule_AddObject(module, "Boolean", (PyObject*)&pbooleanType);
|
||||||
|
PyModule_AddObject(module, "Decimal", (PyObject*)&pdecimalType);
|
||||||
|
PyModule_AddObject(module, "Int", (PyObject*)&pintType);
|
||||||
|
PyModule_AddObject(module, "Float", (PyObject*)&pfloatType);
|
||||||
|
PyModule_AddObject(module, "List", (PyObject*)&listType);
|
||||||
|
PyModule_AddObject(module, "QuotedString", (PyObject*)&qstringType);
|
||||||
PyModule_AddObject(module, "lobject", (PyObject*)&lobjectType);
|
PyModule_AddObject(module, "lobject", (PyObject*)&lobjectType);
|
||||||
#endif
|
PyModule_AddObject(module, "Column", psyco_DescriptionType);
|
||||||
|
|
||||||
/* encodings dictionary in module dictionary */
|
/* encodings dictionary in module dictionary */
|
||||||
PyModule_AddObject(module, "encodings", psycoEncodings);
|
PyModule_AddObject(module, "encodings", psycoEncodings);
|
||||||
|
|
|
@@ -164,6 +164,9 @@ typecast_parse_time(const char* s, const char** t, Py_ssize_t* len,
         while (usd++ < 6) *us *= 10;
     }

+    /* 24:00:00 -> 00:00:00 (ticket #278) */
+    if (*hh == 24) { *hh = 0; }
+
     return cz;
 }

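PostgreSQL accepts '24:00' as a time value but Python's datetime.time does not, so the cast above folds it to midnight instead of failing (ticket #278). Roughly, assuming an open cursor:

    cur.execute("SELECT '24:00'::time")
    cur.fetchone()[0]     # -> datetime.time(0, 0) instead of a ValueError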
@@ -62,7 +62,7 @@ psycopg_escape_string(connectionObject *conn, const char *from, Py_ssize_t len,
     }

     {
-#if PG_VERSION_HEX >= 0x080104
+#if PG_VERSION_NUM >= 80104
         int err;
         if (conn && conn->pgconn)
             ql = PQescapeStringConn(conn->pgconn, to+eq+1, from, len, &err);
@@ -87,7 +87,7 @@ psycopg_escape_string(connectionObject *conn, const char *from, Py_ssize_t len,
     return to;
 }

-/* Escape a string to build a valid PostgreSQL identifier
+/* Escape a string to build a valid PostgreSQL identifier.
  *
  * Allocate a new buffer on the Python heap containing the new string.
  * 'len' is optional: if 0 the length is calculated.
@@ -96,7 +96,7 @@ psycopg_escape_string(connectionObject *conn, const char *from, Py_ssize_t len,
  *
  * WARNING: this function is not so safe to allow untrusted input: it does no
  * check for multibyte chars. Such a function should be built on
- * PQescapeIndentifier, which is only available from PostgreSQL 9.0.
+ * PQescapeIdentifier, which is only available from PostgreSQL 9.0.
  */
 char *
 psycopg_escape_identifier_easy(const char *from, Py_ssize_t len)
@@ -44,10 +44,9 @@
     <None Include="INSTALL" />
     <None Include="LICENSE" />
     <None Include="MANIFEST.in" />
-    <None Include="README" />
+    <None Include="README.rst" />
     <None Include="setup.cfg" />
     <None Include="setup.py" />
-    <None Include="doc\HACKING" />
     <None Include="doc\SUCCESS" />
     <None Include="examples\binary.py" />
     <None Include="examples\copy_from.py" />
@ -16,6 +16,7 @@ The script can be run at a new PostgreSQL release to refresh the module.
|
||||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
|
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
|
||||||
# License for more details.
|
# License for more details.
|
||||||
|
|
||||||
|
import re
|
||||||
import sys
|
import sys
|
||||||
import urllib2
|
import urllib2
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
|
@ -30,8 +31,9 @@ def main():
|
||||||
filename = sys.argv[1]
|
filename = sys.argv[1]
|
||||||
|
|
||||||
file_start = read_base_file(filename)
|
file_start = read_base_file(filename)
|
||||||
|
# If you add a version to the list fix the docs (errorcodes.rst, err.rst)
|
||||||
classes, errors = fetch_errors(
|
classes, errors = fetch_errors(
|
||||||
['8.1', '8.2', '8.3', '8.4', '9.0', '9.1', '9.2'])
|
['8.1', '8.2', '8.3', '8.4', '9.0', '9.1', '9.2', '9.3', '9.4'])
|
||||||
|
|
||||||
f = open(filename, "w")
|
f = open(filename, "w")
|
||||||
for line in file_start:
|
for line in file_start:
|
||||||
|
@ -48,7 +50,41 @@ def read_base_file(filename):
|
||||||
|
|
||||||
raise ValueError("can't find the separator. Is this the right file?")
|
raise ValueError("can't find the separator. Is this the right file?")
|
||||||
|
|
||||||
def parse_errors(url):
|
def parse_errors_txt(url):
|
||||||
|
classes = {}
|
||||||
|
errors = defaultdict(dict)
|
||||||
|
|
||||||
|
page = urllib2.urlopen(url)
|
||||||
|
for line in page:
|
||||||
|
# Strip comments and skip blanks
|
||||||
|
line = line.split('#')[0].strip()
|
||||||
|
if not line:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Parse a section
|
||||||
|
m = re.match(r"Section: (Class (..) - .+)", line)
|
||||||
|
if m:
|
||||||
|
label, class_ = m.groups()
|
||||||
|
classes[class_] = label
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Parse an error
|
||||||
|
m = re.match(r"(.....)\s+(?:E|W|S)\s+ERRCODE_(\S+)(?:\s+(\S+))?$", line)
|
||||||
|
if m:
|
||||||
|
errcode, macro, spec = m.groups()
|
||||||
|
# skip errcodes without specs as they are not publically visible
|
||||||
|
if not spec:
|
||||||
|
continue
|
||||||
|
errlabel = spec.upper()
|
||||||
|
errors[class_][errcode] = errlabel
|
||||||
|
continue
|
||||||
|
|
||||||
|
# We don't expect anything else
|
||||||
|
raise ValueError("unexpected line:\n%s" % line)
|
||||||
|
|
||||||
|
return classes, errors
|
||||||
|
|
||||||
|
def parse_errors_sgml(url):
|
||||||
page = BS(urllib2.urlopen(url))
|
page = BS(urllib2.urlopen(url))
|
||||||
table = page('table')[1]('tbody')[0]
|
table = page('table')[1]('tbody')[0]
|
||||||
|
|
||||||
|
@ -87,14 +123,25 @@ def parse_errors(url):
|
||||||
|
|
||||||
return classes, errors
|
return classes, errors
|
||||||
|
|
||||||
errors_url="http://www.postgresql.org/docs/%s/static/errcodes-appendix.html"
|
errors_sgml_url = \
|
||||||
|
"http://www.postgresql.org/docs/%s/static/errcodes-appendix.html"
|
||||||
|
|
||||||
|
errors_txt_url = \
|
||||||
|
"http://git.postgresql.org/gitweb/?p=postgresql.git;a=blob_plain;" \
|
||||||
|
"f=src/backend/utils/errcodes.txt;hb=REL%s_STABLE"
|
||||||
|
|
||||||
def fetch_errors(versions):
|
def fetch_errors(versions):
|
||||||
classes = {}
|
classes = {}
|
||||||
errors = defaultdict(dict)
|
errors = defaultdict(dict)
|
||||||
|
|
||||||
for version in versions:
|
for version in versions:
|
||||||
c1, e1 = parse_errors(errors_url % version)
|
print >> sys.stderr, version
|
||||||
|
tver = tuple(map(int, version.split('.')))
|
||||||
|
if tver < (9, 1):
|
||||||
|
c1, e1 = parse_errors_sgml(errors_sgml_url % version)
|
||||||
|
else:
|
||||||
|
c1, e1 = parse_errors_txt(
|
||||||
|
errors_txt_url % version.replace('.', '_'))
|
||||||
classes.update(c1)
|
classes.update(c1)
|
||||||
for c, cerrs in e1.iteritems():
|
for c, cerrs in e1.iteritems():
|
||||||
errors[c].update(cerrs)
|
errors[c].update(cerrs)
|
||||||
|
|
15
scripts/upload-docs.sh
Executable file
15
scripts/upload-docs.sh
Executable file
|
@ -0,0 +1,15 @@
|
||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||||
|
DOCDIR="$DIR/../doc"
|
||||||
|
|
||||||
|
# this command requires ssh configured to the proper target
|
||||||
|
tar czf - -C "$DOCDIR/html" . | ssh psycoweb tar xzvf - -C docs/current
|
||||||
|
|
||||||
|
# download the script to upload the docs to PyPI
|
||||||
|
test -e "$DIR/pypi_docs_upload.py" \
|
||||||
|
|| wget -O "$DIR/pypi_docs_upload.py" \
|
||||||
|
https://gist.githubusercontent.com/dvarrazzo/dac46237070d69dbc075/raw
|
||||||
|
|
||||||
|
# this command requires a ~/.pypirc with the right privileges
|
||||||
|
python "$DIR/pypi_docs_upload.py" psycopg2 "$DOCDIR/html"
|
54
scripts/upload-release.sh
Executable file
54
scripts/upload-release.sh
Executable file
|
@ -0,0 +1,54 @@
|
||||||
|
#!/bin/bash
|
||||||
|
# Script to create a psycopg release
|
||||||
|
#
|
||||||
|
# You must create a release tag before running the script, e.g. 2_5_4.
|
||||||
|
# The script will check out in a clear environment, build the sdist package,
|
||||||
|
# unpack and test it, then upload on PyPI and on the psycopg website.
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
REPO_URL=git@github.com:psycopg/psycopg2.git
|
||||||
|
|
||||||
|
VER=$(grep ^PSYCOPG_VERSION setup.py | cut -d "'" -f 2)
|
||||||
|
|
||||||
|
# avoid releasing a testing version
|
||||||
|
echo "$VER" | grep -qE '^[0-9]+\.[0-9]+(\.[0-9]+)?$' \
|
||||||
|
|| (echo "bad release: $VER" >&2 && exit 1)
|
||||||
|
|
||||||
|
# Check out in a clean environment
|
||||||
|
rm -rf rel
|
||||||
|
mkdir rel
|
||||||
|
cd rel
|
||||||
|
git clone $REPO_URL psycopg
|
||||||
|
cd psycopg
|
||||||
|
TAG=${VER//./_}
|
||||||
|
git checkout -b $TAG $TAG
|
||||||
|
make sdist
|
||||||
|
|
||||||
|
# Test the sdist just created
|
||||||
|
cd dist
|
||||||
|
tar xzvf psycopg2-$VER.tar.gz
|
||||||
|
cd psycopg2-$VER
|
||||||
|
make
|
||||||
|
make check
|
||||||
|
cd ../../
|
||||||
|
|
||||||
|
read -p "if you are not fine with the above stop me now..."
|
||||||
|
|
||||||
|
# upload to pypi and to the website
|
||||||
|
|
||||||
|
python setup.py sdist --formats=gztar upload -s
|
||||||
|
|
||||||
|
DASHVER=${VER//./-}
|
||||||
|
DASHVER=${DASHVER:0:3}
|
||||||
|
|
||||||
|
# Requires ssh configuration for 'psycoweb'
|
||||||
|
scp dist/psycopg2-${VER}.tar.gz psycoweb:tarballs/PSYCOPG-${DASHVER}/
|
||||||
|
ssh psycoweb ln -sfv PSYCOPG-${DASHVER}/psycopg2-${VER}.tar.gz \
|
||||||
|
tarballs/psycopg2-latest.tar.gz
|
||||||
|
|
||||||
|
scp dist/psycopg2-${VER}.tar.gz.asc psycoweb:tarballs/PSYCOPG-${DASHVER}/
|
||||||
|
ssh psycoweb ln -sfv PSYCOPG-${DASHVER}/psycopg2-${VER}.tar.gz.asc \
|
||||||
|
tarballs/psycopg2-latest.tar.gz.asc
|
||||||
|
|
||||||
|
echo "great, now write release notes and an email!"
|
|
@@ -1,11 +1,9 @@
 [build_ext]
-define=PSYCOPG_EXTENSIONS,PSYCOPG_NEW_BOOLEAN,HAVE_PQFREEMEM
+define=

-# PSYCOPG_EXTENSIONS enables extensions to PEP-249 (you really want this)
 # PSYCOPG_DISPLAY_SIZE enable display size calculation (a little slower)
 # HAVE_PQFREEMEM should be defined on PostgreSQL >= 7.4
 # PSYCOPG_DEBUG can be added to enable verbose debug information
-# PSYCOPG_NEW_BOOLEAN to format booleans as true/false vs 't'/'f'

 # "pg_config" is required to locate PostgreSQL headers and libraries needed to
 # build psycopg2. If pg_config is not in the path or is installed under a
53
setup.py
53
setup.py
|
@@ -25,6 +25,8 @@ UPDATEs. psycopg2 also provide full asynchronous operations and support
 for coroutine libraries.
 """

+# note: if you are changing the list of supported Python version please fix
+# the docs in install.rst and the /features/ page on the website.
 classifiers = """\
 Development Status :: 5 - Production/Stable
 Intended Audience :: Developers
@@ -84,7 +86,7 @@ except ImportError:
 # Take a look at http://www.python.org/dev/peps/pep-0386/
 # for a consistent versioning pattern.

-PSYCOPG_VERSION = '2.6.dev0'
+PSYCOPG_VERSION = '2.7.dev0'

 version_flags   = ['dt', 'dec']

@@ -405,14 +407,32 @@ class psycopg_build_ext(build_ext):
             pgmajor, pgminor, pgpatch = m.group(1, 2, 3)
             if pgpatch is None or not pgpatch.isdigit():
                 pgpatch = 0
+            pgmajor = int(pgmajor)
+            pgminor = int(pgminor)
+            pgpatch = int(pgpatch)
         else:
             sys.stderr.write(
                 "Error: could not determine PostgreSQL version from '%s'"
                 % pgversion)
             sys.exit(1)

-        define_macros.append(("PG_VERSION_HEX", "0x%02X%02X%02X" %
-                              (int(pgmajor), int(pgminor), int(pgpatch))))
+        define_macros.append(("PG_VERSION_NUM", "%d%02d%02d" %
+                              (pgmajor, pgminor, pgpatch)))
+
+        # enable lo64 if libpq >= 9.3 and Python 64 bits
+        if (pgmajor, pgminor) >= (9, 3) and is_py_64():
+            define_macros.append(("HAVE_LO64", "1"))
+
+            # Inject the flag in the version string already packed up
+            # because we didn't know the version before.
+            # With distutils everything is complicated.
+            for i, t in enumerate(define_macros):
+                if t[0] == 'PSYCOPG_VERSION':
+                    n = t[1].find(')')
+                    if n > 0:
+                        define_macros[i] = (
+                            t[0], t[1][:n] + ' lo64' + t[1][n:])

     except Warning:
         w = sys.exc_info()[1]  # work around py 2/3 different syntax
         sys.stderr.write("Error: %s\n" % w)
@@ -421,6 +441,13 @@ class psycopg_build_ext(build_ext):
         if hasattr(self, "finalize_" + sys.platform):
             getattr(self, "finalize_" + sys.platform)()

+
+def is_py_64():
+    # sys.maxint not available since Py 3.1;
+    # sys.maxsize not available before Py 2.6;
+    # this is portable at least between Py 2.4 and 3.4.
+    import struct
+    return struct.calcsize("P") > 4

 # let's start with macro definitions (the ones not already in setup.cfg)
 define_macros = []
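When HAVE_LO64 is enabled, the 'lo64' flag is also spliced into the version string packed by setup.py, which gives a quick way to tell whether an installed build carries 64-bit large object support (the exact formatting shown is an assumption):

    import psycopg2
    print(psycopg2.__version__)            # e.g. "2.7.dev0 (dt dec pq3 ext lo64)"
    has_lo64 = 'lo64' in psycopg2.__version__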
@ -509,10 +536,7 @@ you probably need to install its companion -dev or -devel package."""
|
||||||
|
|
||||||
# generate a nice version string to avoid confusion when users report bugs
|
# generate a nice version string to avoid confusion when users report bugs
|
||||||
version_flags.append('pq3') # no more a choice
|
version_flags.append('pq3') # no more a choice
|
||||||
|
version_flags.append('ext') # no more a choice
|
||||||
for have in parser.get('build_ext', 'define').split(','):
|
|
||||||
if have == 'PSYCOPG_EXTENSIONS':
|
|
||||||
version_flags.append('ext')
|
|
||||||
|
|
||||||
if version_flags:
|
if version_flags:
|
||||||
PSYCOPG_VERSION_EX = PSYCOPG_VERSION + " (%s)" % ' '.join(version_flags)
|
PSYCOPG_VERSION_EX = PSYCOPG_VERSION + " (%s)" % ' '.join(version_flags)
|
||||||
|
@ -539,7 +563,8 @@ else:
|
||||||
# when called e.g. with "pip -e git+url'. This results in declarations
|
# when called e.g. with "pip -e git+url'. This results in declarations
|
||||||
# duplicate on the commandline, which I hope is not a problem.
|
# duplicate on the commandline, which I hope is not a problem.
|
||||||
for define in parser.get('build_ext', 'define').split(','):
|
for define in parser.get('build_ext', 'define').split(','):
|
||||||
define_macros.append((define, '1'))
|
if define:
|
||||||
|
define_macros.append((define, '1'))
|
||||||
|
|
||||||
# build the extension
|
# build the extension
|
||||||
|
|
||||||
|
@ -560,6 +585,14 @@ download_url = (
|
||||||
"http://initd.org/psycopg/tarballs/PSYCOPG-%s/psycopg2-%s.tar.gz"
|
"http://initd.org/psycopg/tarballs/PSYCOPG-%s/psycopg2-%s.tar.gz"
|
||||||
% ('-'.join(PSYCOPG_VERSION.split('.')[:2]), PSYCOPG_VERSION))
|
% ('-'.join(PSYCOPG_VERSION.split('.')[:2]), PSYCOPG_VERSION))
|
||||||
|
|
||||||
|
try:
|
||||||
|
f = open("README.rst")
|
||||||
|
readme = f.read()
|
||||||
|
f.close()
|
||||||
|
except:
|
||||||
|
print("failed to read readme: ignoring...")
|
||||||
|
readme = __doc__
|
||||||
|
|
||||||
setup(name="psycopg2",
|
setup(name="psycopg2",
|
||||||
version=PSYCOPG_VERSION,
|
version=PSYCOPG_VERSION,
|
||||||
maintainer="Federico Di Gregorio",
|
maintainer="Federico Di Gregorio",
|
||||||
|
@ -570,8 +603,8 @@ setup(name="psycopg2",
|
||||||
download_url=download_url,
|
download_url=download_url,
|
||||||
license="LGPL with exceptions or ZPL",
|
license="LGPL with exceptions or ZPL",
|
||||||
platforms=["any"],
|
platforms=["any"],
|
||||||
description=__doc__.split("\n")[0],
|
description=readme.split("\n")[0],
|
||||||
long_description="\n".join(__doc__.split("\n")[2:]),
|
long_description="\n".join(readme.split("\n")[2:]).lstrip(),
|
||||||
classifiers=[x for x in classifiers.split("\n") if x],
|
classifiers=[x for x in classifiers.split("\n") if x],
|
||||||
data_files=data_files,
|
data_files=data_files,
|
||||||
package_dir={'psycopg2': 'lib', 'psycopg2.tests': 'tests'},
|
package_dir={'psycopg2': 'lib', 'psycopg2.tests': 'tests'},
|
||||||
|
|
|
@@ -62,7 +62,7 @@ import sys
 # - Reversed the polarity of buggy test in test_description
 # - Test exception hierarchy correctly
 # - self.populate is now self._populate(), so if a driver stub
-#   overrides self.ddl1 this change propogates
+#   overrides self.ddl1 this change propagates
 # - VARCHAR columns now have a width, which will hopefully make the
 #   DDL even more portible (this will be reversed if it causes more problems)
 # - cursor.rowcount being checked after various execute and fetchXXX methods
@@ -804,7 +804,7 @@ class DatabaseAPI20Test(unittest.TestCase):
         con.close()

     def test_setoutputsize(self):
-        # Real test for setoutputsize is driver dependant
+        # Real test for setoutputsize is driver dependent
         raise NotImplementedError('Driver needed to override this test')

     def test_None(self):
@@ -23,16 +23,18 @@
 # License for more details.

 import os
+import sys
 import time
 import threading
 from operator import attrgetter
+from StringIO import StringIO

 import psycopg2
 import psycopg2.errorcodes
 import psycopg2.extensions

 from testutils import unittest, decorate_all_tests, skip_if_no_superuser
-from testutils import skip_before_postgres, skip_after_postgres
+from testutils import skip_before_postgres, skip_after_postgres, skip_before_libpq
 from testutils import ConnectingTestCase, skip_if_tpc_disabled
 from testutils import skip_if_windows
 from testconfig import dsn, dbname
@ -127,11 +129,45 @@ class ConnectionTests(ConnectingTestCase):
|
||||||
cur.execute(sql)
|
cur.execute(sql)
|
||||||
|
|
||||||
self.assertEqual(50, len(conn.notices))
|
self.assertEqual(50, len(conn.notices))
|
||||||
self.assert_('table50' in conn.notices[0], conn.notices[0])
|
|
||||||
self.assert_('table51' in conn.notices[1], conn.notices[1])
|
|
||||||
self.assert_('table98' in conn.notices[-2], conn.notices[-2])
|
|
||||||
self.assert_('table99' in conn.notices[-1], conn.notices[-1])
|
self.assert_('table99' in conn.notices[-1], conn.notices[-1])
|
||||||
|
|
||||||
|
def test_notices_deque(self):
|
||||||
|
from collections import deque
|
||||||
|
|
||||||
|
conn = self.conn
|
||||||
|
self.conn.notices = deque()
|
||||||
|
cur = conn.cursor()
|
||||||
|
if self.conn.server_version >= 90300:
|
||||||
|
cur.execute("set client_min_messages=debug1")
|
||||||
|
|
||||||
|
cur.execute("create temp table table1 (id serial); create temp table table2 (id serial);")
|
||||||
|
cur.execute("create temp table table3 (id serial); create temp table table4 (id serial);")
|
||||||
|
self.assertEqual(len(conn.notices), 4)
|
||||||
|
self.assert_('table1' in conn.notices.popleft())
|
||||||
|
self.assert_('table2' in conn.notices.popleft())
|
||||||
|
self.assert_('table3' in conn.notices.popleft())
|
||||||
|
self.assert_('table4' in conn.notices.popleft())
|
||||||
|
self.assertEqual(len(conn.notices), 0)
|
||||||
|
|
||||||
|
# not limited, but no error
|
||||||
|
for i in range(0, 100, 10):
|
||||||
|
sql = " ".join(["create temp table table2_%d (id serial);" % j for j in range(i, i+10)])
|
||||||
|
cur.execute(sql)
|
||||||
|
|
||||||
|
self.assertEqual(len([n for n in conn.notices if 'CREATE TABLE' in n]),
|
||||||
|
100)
|
||||||
|
|
||||||
|
def test_notices_noappend(self):
|
||||||
|
conn = self.conn
|
||||||
|
self.conn.notices = None # will make an error swallowes ok
|
||||||
|
cur = conn.cursor()
|
||||||
|
if self.conn.server_version >= 90300:
|
||||||
|
cur.execute("set client_min_messages=debug1")
|
||||||
|
|
||||||
|
cur.execute("create temp table table1 (id serial);")
|
||||||
|
|
||||||
|
self.assertEqual(self.conn.notices, None)
|
||||||
|
|
||||||
def test_server_version(self):
|
def test_server_version(self):
|
||||||
self.assert_(self.conn.server_version)
|
self.assert_(self.conn.server_version)
|
||||||
|
|
||||||
|
@ -274,6 +310,78 @@ class ConnectionTests(ConnectingTestCase):
|
||||||
self.assert_('foobar' not in c.dsn, "password was not obscured")
|
self.assert_('foobar' not in c.dsn, "password was not obscured")
|
||||||
|
|
||||||
|
|
||||||
|
class ParseDsnTestCase(ConnectingTestCase):
|
||||||
|
def test_parse_dsn(self):
|
||||||
|
from psycopg2 import ProgrammingError
|
||||||
|
from psycopg2.extensions import parse_dsn
|
||||||
|
|
||||||
|
self.assertEqual(parse_dsn('dbname=test user=tester password=secret'),
|
||||||
|
dict(user='tester', password='secret', dbname='test'),
|
||||||
|
"simple DSN parsed")
|
||||||
|
|
||||||
|
self.assertRaises(ProgrammingError, parse_dsn,
|
||||||
|
"dbname=test 2 user=tester password=secret")
|
||||||
|
|
||||||
|
self.assertEqual(parse_dsn("dbname='test 2' user=tester password=secret"),
|
||||||
|
dict(user='tester', password='secret', dbname='test 2'),
|
||||||
|
"DSN with quoting parsed")
|
||||||
|
|
||||||
|
# Can't really use assertRaisesRegexp() here since we need to
|
||||||
|
# make sure that secret is *not* exposed in the error messgage
|
||||||
|
# (and it also requires python >= 2.7).
|
||||||
|
raised = False
|
||||||
|
try:
|
||||||
|
# unterminated quote after dbname:
|
||||||
|
parse_dsn("dbname='test 2 user=tester password=secret")
|
||||||
|
except ProgrammingError, e:
|
||||||
|
raised = True
|
||||||
|
self.assertTrue(str(e).find('secret') < 0,
|
||||||
|
"DSN was not exposed in error message")
|
||||||
|
except e:
|
||||||
|
self.fail("unexpected error condition: " + repr(e))
|
||||||
|
self.assertTrue(raised, "ProgrammingError raised due to invalid DSN")
|
||||||
|
|
||||||
|
@skip_before_libpq(9, 2)
|
||||||
|
def test_parse_dsn_uri(self):
|
||||||
|
from psycopg2.extensions import parse_dsn
|
||||||
|
|
||||||
|
self.assertEqual(parse_dsn('postgresql://tester:secret@/test'),
|
||||||
|
dict(user='tester', password='secret', dbname='test'),
|
||||||
|
"valid URI dsn parsed")
|
||||||
|
|
||||||
|
raised = False
|
||||||
|
try:
|
||||||
|
# extra '=' after port value
|
||||||
|
parse_dsn(dsn='postgresql://tester:secret@/test?port=1111=x')
|
||||||
|
except psycopg2.ProgrammingError, e:
|
||||||
|
raised = True
|
||||||
|
self.assertTrue(str(e).find('secret') < 0,
|
||||||
|
"URI was not exposed in error message")
|
||||||
|
except e:
|
||||||
|
self.fail("unexpected error condition: " + repr(e))
|
||||||
|
self.assertTrue(raised, "ProgrammingError raised due to invalid URI")
|
||||||
|
|
||||||
|
def test_unicode_value(self):
|
||||||
|
from psycopg2.extensions import parse_dsn
|
||||||
|
snowman = u"\u2603"
|
||||||
|
d = parse_dsn('dbname=' + snowman)
|
||||||
|
if sys.version_info[0] < 3:
|
||||||
|
self.assertEqual(d['dbname'], snowman.encode('utf8'))
|
||||||
|
else:
|
||||||
|
self.assertEqual(d['dbname'], snowman)
|
||||||
|
|
||||||
|
def test_unicode_key(self):
|
||||||
|
from psycopg2.extensions import parse_dsn
|
||||||
|
snowman = u"\u2603"
|
||||||
|
self.assertRaises(psycopg2.ProgrammingError, parse_dsn,
|
||||||
|
snowman + '=' + snowman)
|
||||||
|
|
||||||
|
def test_bad_param(self):
|
||||||
|
from psycopg2.extensions import parse_dsn
|
||||||
|
self.assertRaises(TypeError, parse_dsn, None)
|
||||||
|
self.assertRaises(TypeError, parse_dsn, 42)
|
||||||
|
|
||||||
|
|
||||||
class IsolationLevelsTestCase(ConnectingTestCase):
|
class IsolationLevelsTestCase(ConnectingTestCase):
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
|
@ -1070,6 +1178,17 @@ class AutocommitTests(ConnectingTestCase):
|
||||||
self.assertEqual(cur.fetchone()[0], 'on')
|
self.assertEqual(cur.fetchone()[0], 'on')
|
||||||
|
|
||||||
|
|
||||||
|
class ReplicationTest(ConnectingTestCase):
|
||||||
|
@skip_before_postgres(9, 0)
|
||||||
|
def test_replication_not_supported(self):
|
||||||
|
conn = self.repl_connect()
|
||||||
|
if conn is None: return
|
||||||
|
cur = conn.cursor()
|
||||||
|
f = StringIO()
|
||||||
|
self.assertRaises(psycopg2.NotSupportedError,
|
||||||
|
cur.copy_expert, "START_REPLICATION 0/0", f)
|
||||||
|
|
||||||
|
|
||||||
def test_suite():
|
def test_suite():
|
||||||
return unittest.TestLoader().loadTestsFromName(__name__)
|
return unittest.TestLoader().loadTestsFromName(__name__)
|
||||||
|
|
||||||
|
|
|
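The ParseDsnTestCase and notices tests above exercise two user-visible features: psycopg2.extensions.parse_dsn(), which splits a connection string or URI into a dict without connecting, and the fact that connection.notices may be replaced by any object with an append() method (or by None to drop notices). A minimal usage sketch, assuming a reachable database behind the hypothetical DSN "dbname=test":

    from collections import deque

    import psycopg2
    from psycopg2.extensions import parse_dsn

    # Inspect a DSN or URI without opening a connection.
    params = parse_dsn("dbname=test user=tester password=secret")
    # -> {'dbname': 'test', 'user': 'tester', 'password': 'secret'}

    # Bound the notice buffer instead of letting the default list grow without limit.
    conn = psycopg2.connect("dbname=test")   # hypothetical DSN
    conn.notices = deque(maxlen=50)          # psycopg only calls append(); None disables collection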
@@ -28,10 +28,13 @@ from testutils import unittest, ConnectingTestCase, decorate_all_tests
 from testutils import skip_if_no_iobase, skip_before_postgres
 from cStringIO import StringIO
 from itertools import cycle, izip
+from subprocess import Popen, PIPE

 import psycopg2
 import psycopg2.extensions
-from testutils import skip_copy_if_green
+from testutils import skip_copy_if_green, script_to_py3
+from testconfig import dsn


 if sys.version_info[0] < 3:
     _base = object

@@ -301,6 +304,69 @@ class CopyTests(ConnectingTestCase):
             curs.copy_from, StringIO('aaa\nbbb\nccc\n'), 'tcopy')
         self.assertEqual(curs.rowcount, -1)

+    def test_copy_from_segfault(self):
+        # issue #219
+        script = ("""\
+import psycopg2
+conn = psycopg2.connect(%(dsn)r)
+curs = conn.cursor()
+curs.execute("create table copy_segf (id int)")
+try:
+    curs.execute("copy copy_segf from stdin")
+except psycopg2.ProgrammingError:
+    pass
+conn.close()
+""" % { 'dsn': dsn,})
+
+        proc = Popen([sys.executable, '-c', script_to_py3(script)])
+        proc.communicate()
+        self.assertEqual(0, proc.returncode)
+
+    def test_copy_to_segfault(self):
+        # issue #219
+        script = ("""\
+import psycopg2
+conn = psycopg2.connect(%(dsn)r)
+curs = conn.cursor()
+curs.execute("create table copy_segf (id int)")
+try:
+    curs.execute("copy copy_segf to stdout")
+except psycopg2.ProgrammingError:
+    pass
+conn.close()
+""" % { 'dsn': dsn,})
+
+        proc = Popen([sys.executable, '-c', script_to_py3(script)], stdout=PIPE)
+        proc.communicate()
+        self.assertEqual(0, proc.returncode)
+
+    def test_copy_from_propagate_error(self):
+        class BrokenRead(_base):
+            def read(self, size):
+                return 1/0
+
+            def readline(self):
+                return 1/0
+
+        curs = self.conn.cursor()
+        # It seems we cannot do this, but now at least we propagate the error
+        # self.assertRaises(ZeroDivisionError,
+        #     curs.copy_from, BrokenRead(), "tcopy")
+        try:
+            curs.copy_from(BrokenRead(), "tcopy")
+        except Exception, e:
+            self.assert_('ZeroDivisionError' in str(e))
+
+    def test_copy_to_propagate_error(self):
+        class BrokenWrite(_base):
+            def write(self, data):
+                return 1/0
+
+        curs = self.conn.cursor()
+        curs.execute("insert into tcopy values (10, 'hi')")
+        self.assertRaises(ZeroDivisionError,
+            curs.copy_to, BrokenWrite(), "tcopy")
+
+
 decorate_all_tests(CopyTests, skip_copy_if_green)
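The new copy tests check that COPY statements issued through execute() fail cleanly instead of crashing the interpreter (issue #219), and that exceptions raised inside the file-like object's read()/write() methods are reported rather than swallowed. For reference, a minimal copy_from()/copy_to() sketch in the same Python 2 style as the test suite (DSN and table are hypothetical):

    from cStringIO import StringIO
    import psycopg2

    conn = psycopg2.connect("dbname=test")   # hypothetical DSN
    curs = conn.cursor()
    curs.execute("create temp table tcopy (id int, data text)")

    # copy_from() pulls rows through read()/readline() of any file-like object
    curs.copy_from(StringIO("1\thello\n2\tworld\n"), "tcopy")

    # copy_to() pushes rows through write(); errors raised there now propagate
    out = StringIO()
    curs.copy_to(out, "tcopy")
    print(out.getvalue())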
@@ -23,6 +23,7 @@
 # License for more details.

 import time
+import pickle
 import psycopg2
 import psycopg2.extensions
 from psycopg2.extensions import b

@@ -176,10 +177,7 @@ class CursorTests(ConnectingTestCase):
         curs.execute("select data from invname order by data")
         self.assertEqual(curs.fetchall(), [(10,), (20,), (30,)])

-    def test_withhold(self):
-        self.assertRaises(psycopg2.ProgrammingError, self.conn.cursor,
-                          withhold=True)
-
+    def _create_withhold_table(self):
         curs = self.conn.cursor()
         try:
             curs.execute("drop table withhold")

@@ -190,6 +188,11 @@ class CursorTests(ConnectingTestCase):
             curs.execute("insert into withhold values (%s)", (i,))
         curs.close()

+    def test_withhold(self):
+        self.assertRaises(psycopg2.ProgrammingError, self.conn.cursor,
+                          withhold=True)
+
+        self._create_withhold_table()
         curs = self.conn.cursor("W")
         self.assertEqual(curs.withhold, False);
         curs.withhold = True

@@ -209,6 +212,52 @@ class CursorTests(ConnectingTestCase):
         curs.execute("drop table withhold")
         self.conn.commit()

+    def test_withhold_no_begin(self):
+        self._create_withhold_table()
+        curs = self.conn.cursor("w", withhold=True)
+        curs.execute("select data from withhold order by data")
+        self.assertEqual(curs.fetchone(), (10,))
+        self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_BEGIN)
+        self.assertEqual(self.conn.get_transaction_status(),
+            psycopg2.extensions.TRANSACTION_STATUS_INTRANS)
+
+        self.conn.commit()
+        self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY)
+        self.assertEqual(self.conn.get_transaction_status(),
+            psycopg2.extensions.TRANSACTION_STATUS_IDLE)
+
+        self.assertEqual(curs.fetchone(), (20,))
+        self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY)
+        self.assertEqual(self.conn.get_transaction_status(),
+            psycopg2.extensions.TRANSACTION_STATUS_IDLE)
+
+        curs.close()
+        self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY)
+        self.assertEqual(self.conn.get_transaction_status(),
+            psycopg2.extensions.TRANSACTION_STATUS_IDLE)
+
+    def test_withhold_autocommit(self):
+        self._create_withhold_table()
+        self.conn.commit()
+        self.conn.autocommit = True
+        curs = self.conn.cursor("w", withhold=True)
+        curs.execute("select data from withhold order by data")
+
+        self.assertEqual(curs.fetchone(), (10,))
+        self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY)
+        self.assertEqual(self.conn.get_transaction_status(),
+            psycopg2.extensions.TRANSACTION_STATUS_IDLE)
+
+        self.conn.commit()
+        self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY)
+        self.assertEqual(self.conn.get_transaction_status(),
+            psycopg2.extensions.TRANSACTION_STATUS_IDLE)
+
+        curs.close()
+        self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY)
+        self.assertEqual(self.conn.get_transaction_status(),
+            psycopg2.extensions.TRANSACTION_STATUS_IDLE)
+
     def test_scrollable(self):
         self.assertRaises(psycopg2.ProgrammingError, self.conn.cursor,
                           scrollable=True)

@@ -352,6 +401,16 @@ class CursorTests(ConnectingTestCase):
         self.assertEqual(c.precision, None)
         self.assertEqual(c.scale, None)

+    def test_pickle_description(self):
+        curs = self.conn.cursor()
+        curs.execute('SELECT 1 AS foo')
+        description = curs.description
+
+        pickled = pickle.dumps(description, pickle.HIGHEST_PROTOCOL)
+        unpickled = pickle.loads(pickled)
+
+        self.assertEqual(description, unpickled)
+
     @skip_before_postgres(8, 0)
     def test_named_cursor_stealing(self):
         # you can use a named cursor to iterate on a refcursor created

@@ -427,6 +486,10 @@ class CursorTests(ConnectingTestCase):
         self.assertRaises(psycopg2.InterfaceError, cur.executemany,
             'select 1', [])

+    def test_callproc_badparam(self):
+        cur = self.conn.cursor()
+        self.assertRaises(TypeError, cur.callproc, 'lower', 42)
+
     # It would be inappropriate to test callproc's named parameters in the
     # DBAPI2.0 test section because they are a psycopg2 extension.
     @skip_before_postgres(9, 0)

@@ -465,10 +528,6 @@ class CursorTests(ConnectingTestCase):
         self.assertRaises(exception, cur.callproc, procname, parameter_sequence)
         self.conn.rollback()

-    def test_callproc_badparam(self):
-        cur = self.conn.cursor()
-        self.assertRaises(TypeError, cur.callproc, 'lower', 42)
-

 def test_suite():
     return unittest.TestLoader().loadTestsFromName(__name__)
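The withhold tests above rely on a psycopg2 extension: a named (server-side) cursor created with withhold=True is declared WITH HOLD, so its result set stays readable after the enclosing transaction is committed. A minimal sketch, assuming a hypothetical "dbname=test" database:

    import psycopg2

    conn = psycopg2.connect("dbname=test")            # hypothetical DSN
    curs = conn.cursor("report", withhold=True)       # withhold requires a named cursor
    curs.execute("select generate_series(1, 1000)")

    first = curs.fetchone()
    conn.commit()                # without withhold=True this would invalidate the cursor
    rest = curs.fetchmany(10)    # still readable after the commit
    curs.close()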
@@ -25,7 +25,7 @@
 import math
 import psycopg2
 from psycopg2.tz import FixedOffsetTimezone, ZERO
-from testutils import unittest, ConnectingTestCase
+from testutils import unittest, ConnectingTestCase, skip_before_postgres

 class CommonDatetimeTestsMixin:

@@ -287,7 +287,17 @@ class DatetimeTests(ConnectingTestCase, CommonDatetimeTestsMixin):

     def test_type_roundtrip_time(self):
         from datetime import time
-        self._test_type_roundtrip(time(10,20,30))
+        tm = self._test_type_roundtrip(time(10,20,30))
+        self.assertEqual(None, tm.tzinfo)
+
+    def test_type_roundtrip_timetz(self):
+        from datetime import time
+        import psycopg2.tz
+        tz = psycopg2.tz.FixedOffsetTimezone(8*60)
+        tm1 = time(10,20,30, tzinfo=tz)
+        tm2 = self._test_type_roundtrip(tm1)
+        self.assertNotEqual(None, tm2.tzinfo)
+        self.assertEqual(tm1, tm2)

     def test_type_roundtrip_interval(self):
         from datetime import timedelta

@@ -309,6 +319,19 @@ class DatetimeTests(ConnectingTestCase, CommonDatetimeTestsMixin):
         from datetime import timedelta
         self._test_type_roundtrip_array(timedelta(seconds=30))

+    @skip_before_postgres(8, 1)
+    def test_time_24(self):
+        from datetime import time
+
+        t = self.execute("select '24:00'::time;")
+        self.assertEqual(t, time(0, 0))
+
+        t = self.execute("select '24:00+05'::timetz;")
+        self.assertEqual(t, time(0, 0, tzinfo=FixedOffsetTimezone(300)))
+
+        t = self.execute("select '24:00+05:30'::timetz;")
+        self.assertEqual(t, time(0, 0, tzinfo=FixedOffsetTimezone(330)))
+
 # Only run the datetime tests if psycopg was compiled with support.
 if not hasattr(psycopg2.extensions, 'PYDATETIME'):
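The timetz round-trip above uses psycopg2.tz.FixedOffsetTimezone, whose offset argument is expressed in minutes. A short sketch of the same round trip outside the test harness (the DSN is hypothetical):

    from datetime import time

    import psycopg2
    from psycopg2.tz import FixedOffsetTimezone

    tz = FixedOffsetTimezone(offset=330)        # +05:30; the offset is in minutes
    t = time(10, 20, 30, tzinfo=tz)

    conn = psycopg2.connect("dbname=test")      # hypothetical DSN
    curs = conn.cursor()
    curs.execute("select %s::timetz", (t,))
    assert curs.fetchone()[0] == t              # tzinfo is preserved on the way back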
@@ -440,6 +440,68 @@ decorate_all_tests(LargeObjectTruncateTests,
     skip_if_no_lo, skip_lo_if_green, skip_if_no_truncate)


+def _has_lo64(conn):
+    """Return (bool, msg) about the lo64 support"""
+    if conn.server_version < 90300:
+        return (False, "server version %s doesn't support the lo64 API"
+            % conn.server_version)
+
+    if 'lo64' not in psycopg2.__version__:
+        return (False, "this psycopg build doesn't support the lo64 API")
+
+    return (True, "this server and build support the lo64 API")
+
+def skip_if_no_lo64(f):
+    @wraps(f)
+    def skip_if_no_lo64_(self):
+        lo64, msg = _has_lo64(self.conn)
+        if not lo64: return self.skipTest(msg)
+        else: return f(self)
+
+    return skip_if_no_lo64_
+
+class LargeObject64Tests(LargeObjectTestCase):
+    def test_seek_tell_truncate_greater_than_2gb(self):
+        lo = self.conn.lobject()
+
+        length = (1 << 31) + (1 << 30)  # 2gb + 1gb = 3gb
+        lo.truncate(length)
+
+        self.assertEqual(lo.seek(length, 0), length)
+        self.assertEqual(lo.tell(), length)
+
+decorate_all_tests(LargeObject64Tests,
+    skip_if_no_lo, skip_lo_if_green, skip_if_no_truncate, skip_if_no_lo64)
+
+
+def skip_if_lo64(f):
+    @wraps(f)
+    def skip_if_lo64_(self):
+        lo64, msg = _has_lo64(self.conn)
+        if lo64: return self.skipTest(msg)
+        else: return f(self)
+
+    return skip_if_lo64_
+
+class LargeObjectNot64Tests(LargeObjectTestCase):
+    def test_seek_larger_than_2gb(self):
+        lo = self.conn.lobject()
+        offset = 1 << 32  # 4gb
+        self.assertRaises(
+            (OverflowError, psycopg2.InterfaceError, psycopg2.NotSupportedError),
+            lo.seek, offset, 0)
+
+    def test_truncate_larger_than_2gb(self):
+        lo = self.conn.lobject()
+        length = 1 << 32  # 4gb
+        self.assertRaises(
+            (OverflowError, psycopg2.InterfaceError, psycopg2.NotSupportedError),
+            lo.truncate, length)
+
+decorate_all_tests(LargeObjectNot64Tests,
+    skip_if_no_lo, skip_lo_if_green, skip_if_no_truncate, skip_if_lo64)
+
+
 def test_suite():
     return unittest.TestLoader().loadTestsFromName(__name__)
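The lo64 tests above only matter for large objects past the 2GB mark; the basic lobject API they build on looks like the sketch below (a hedged illustration, not part of the diff; large objects must be used inside a transaction):

    import psycopg2

    conn = psycopg2.connect("dbname=test")   # hypothetical DSN
    lo = conn.lobject()                      # create a new large object
    lo.write("some data")
    lo.seek(0)
    data = lo.read()
    lo.truncate(4)                           # with the lo64 API, sizes beyond 2GB are accepted
    lo.close()
    conn.commit()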
@@ -22,8 +22,12 @@
 # FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public
 # License for more details.

+import os
+import sys
+from subprocess import Popen
+
 from testutils import unittest, skip_before_python, skip_before_postgres
-from testutils import ConnectingTestCase, skip_copy_if_green
+from testutils import ConnectingTestCase, skip_copy_if_green, script_to_py3

 import psycopg2

@@ -295,6 +299,36 @@ class ExceptionsTestCase(ConnectingTestCase):
         self.assert_(e1.cursor is None)


+class TestExtensionModule(unittest.TestCase):
+    def test_import_internal(self):
+        # check that the internal package can be imported "naked"
+        # we may break this property if there is a compelling reason to do so,
+        # however having it allows for some import juggling such as the one
+        # required in ticket #201.
+        pkgdir = os.path.dirname(psycopg2.__file__)
+        pardir = os.path.dirname(pkgdir)
+        self.assert_(pardir in sys.path)
+        script = ("""
+import sys
+sys.path.remove(%r)
+sys.path.insert(0, %r)
+import _psycopg
+""" % (pardir, pkgdir))
+
+        proc = Popen([sys.executable, '-c', script_to_py3(script)])
+        proc.communicate()
+        self.assertEqual(0, proc.returncode)
+
+
+class TestVersionDiscovery(unittest.TestCase):
+    def test_libpq_version(self):
+        self.assertTrue(type(psycopg2.__libpq_version__) is int)
+        try:
+            self.assertTrue(type(psycopg2.extensions.libpq_version()) is int)
+        except NotSupportedError:
+            self.assertTrue(psycopg2.__libpq_version__ < 90100)
+
+
 def test_suite():
     return unittest.TestLoader().loadTestsFromName(__name__)
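TestVersionDiscovery above covers the two ways psycopg2 reports the libpq version: __libpq_version__ is the version the module was built against, while psycopg2.extensions.libpq_version() asks the loaded library at runtime and raises NotSupportedError when the build used libpq older than 9.1. A small sketch:

    import psycopg2
    import psycopg2.extensions

    print(psycopg2.__version__)          # version string with build flags (illustrative)
    print(psycopg2.__libpq_version__)    # build-time libpq as an int, e.g. 90303 for 9.3.3

    try:
        print(psycopg2.extensions.libpq_version())   # runtime libpq, needs libpq >= 9.1
    except psycopg2.NotSupportedError:
        pass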
@@ -155,6 +155,27 @@ conn.close()
         self.assertEqual('foo', notify.channel)
         self.assertEqual('Hello, world!', notify.payload)

+    def test_notify_deque(self):
+        from collections import deque
+        self.autocommit(self.conn)
+        self.conn.notifies = deque()
+        self.listen('foo')
+        self.notify('foo').communicate()
+        time.sleep(0.5)
+        self.conn.poll()
+        notify = self.conn.notifies.popleft()
+        self.assert_(isinstance(notify, psycopg2.extensions.Notify))
+        self.assertEqual(len(self.conn.notifies), 0)
+
+    def test_notify_noappend(self):
+        self.autocommit(self.conn)
+        self.conn.notifies = None
+        self.listen('foo')
+        self.notify('foo').communicate()
+        time.sleep(0.5)
+        self.conn.poll()
+        self.assertEqual(self.conn.notifies, None)
+
     def test_notify_init(self):
         n = psycopg2.extensions.Notify(10, 'foo')
         self.assertEqual(10, n.pid)

@@ -192,6 +213,7 @@ conn.close()
         self.assertNotEqual(hash(Notify(10, 'foo', 'bar')),
                             hash(Notify(10, 'foo')))

+
 def test_suite():
     return unittest.TestLoader().loadTestsFromName(__name__)
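As with notices, conn.notifies can be swapped for a deque (or set to None to discard notifications). A hedged LISTEN/NOTIFY polling sketch, assuming a hypothetical "dbname=test" database:

    import select
    from collections import deque

    import psycopg2

    conn = psycopg2.connect("dbname=test")   # hypothetical DSN
    conn.autocommit = True
    conn.notifies = deque()                  # any object with append()/popleft() works

    curs = conn.cursor()
    curs.execute("LISTEN foo;")

    select.select([conn], [], [], 5)         # wait up to 5 seconds for activity
    conn.poll()
    while conn.notifies:
        notify = conn.notifies.popleft()
        print(notify.pid, notify.channel, notify.payload)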
@@ -23,7 +23,7 @@
 # License for more details.

 import sys
-from testutils import unittest, ConnectingTestCase
+from testutils import unittest, ConnectingTestCase, skip_before_libpq

 import psycopg2
 import psycopg2.extensions

@@ -165,6 +165,24 @@ class TestQuotedString(ConnectingTestCase):
         self.assertEqual(q.encoding, 'utf_8')


+class TestQuotedIdentifier(ConnectingTestCase):
+    @skip_before_libpq(9, 0)
+    def test_identifier(self):
+        from psycopg2.extensions import quote_ident
+        self.assertEqual(quote_ident('blah-blah', self.conn), '"blah-blah"')
+        self.assertEqual(quote_ident('quote"inside', self.conn), '"quote""inside"')
+
+    @skip_before_libpq(9, 0)
+    def test_unicode_ident(self):
+        from psycopg2.extensions import quote_ident
+        snowman = u"\u2603"
+        quoted = '"' + snowman + '"'
+        if sys.version_info[0] < 3:
+            self.assertEqual(quote_ident(snowman, self.conn), quoted.encode('utf8'))
+        else:
+            self.assertEqual(quote_ident(snowman, self.conn), quoted)
+
+
 def test_suite():
     return unittest.TestLoader().loadTestsFromName(__name__)
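quote_ident(), tested above, escapes an arbitrary string for use as an SQL identifier; it needs an open connection (or cursor) as scope and libpq 9.0 or later, as the skip decorators indicate. A short sketch with a hypothetical table name:

    import psycopg2
    from psycopg2.extensions import quote_ident

    conn = psycopg2.connect("dbname=test")   # hypothetical DSN
    curs = conn.cursor()

    table = 'user "data"'                    # untrusted identifier
    curs.execute("create table %s (id int)" % quote_ident(table, conn))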
@@ -192,6 +192,40 @@ class TypesBasicTests(ConnectingTestCase):
         self.assertRaises(psycopg2.DataError,
             psycopg2.extensions.STRINGARRAY, b(s), curs)

+    def testArrayOfNulls(self):
+        curs = self.conn.cursor()
+        curs.execute("""
+            create table na (
+              texta text[],
+              inta int[],
+              boola boolean[],
+
+              textaa text[][],
+              intaa int[][],
+              boolaa boolean[][]
+            )""")
+
+        curs.execute("insert into na (texta) values (%s)", ([None],))
+        curs.execute("insert into na (texta) values (%s)", (['a', None],))
+        curs.execute("insert into na (texta) values (%s)", ([None, None],))
+        curs.execute("insert into na (inta) values (%s)", ([None],))
+        curs.execute("insert into na (inta) values (%s)", ([42, None],))
+        curs.execute("insert into na (inta) values (%s)", ([None, None],))
+        curs.execute("insert into na (boola) values (%s)", ([None],))
+        curs.execute("insert into na (boola) values (%s)", ([True, None],))
+        curs.execute("insert into na (boola) values (%s)", ([None, None],))
+
+        # TODO: array of array of nulls are not supported yet
+        # curs.execute("insert into na (textaa) values (%s)", ([[None]],))
+        curs.execute("insert into na (textaa) values (%s)", ([['a', None]],))
+        # curs.execute("insert into na (textaa) values (%s)", ([[None, None]],))
+        # curs.execute("insert into na (intaa) values (%s)", ([[None]],))
+        curs.execute("insert into na (intaa) values (%s)", ([[42, None]],))
+        # curs.execute("insert into na (intaa) values (%s)", ([[None, None]],))
+        # curs.execute("insert into na (boolaa) values (%s)", ([[None]],))
+        curs.execute("insert into na (boolaa) values (%s)", ([[True, None]],))
+        # curs.execute("insert into na (boolaa) values (%s)", ([[None, None]],))
+
     @testutils.skip_from_python(3)
     def testTypeRoundtripBuffer(self):
         o1 = buffer("".join(map(chr, range(256))))
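testArrayOfNulls exercises list adaptation when some elements are None: Python lists map to PostgreSQL arrays and None elements become NULLs (arrays of arrays containing only NULLs are still unsupported, as the commented lines note). A minimal sketch outside the test harness:

    import psycopg2

    conn = psycopg2.connect("dbname=test")   # hypothetical DSN
    curs = conn.cursor()
    curs.execute("create temp table na (texta text[], inta int[])")

    # Lists are adapted to arrays; None elements are sent as NULL.
    curs.execute("insert into na (texta, inta) values (%s, %s)",
                 (['a', None], [42, None]))
    curs.execute("select texta, inta from na")
    print(curs.fetchone())    # (['a', None], [42, None])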
@@ -27,6 +27,7 @@ from testutils import py3_raises_typeerror

 import psycopg2
 import psycopg2.extras
+import psycopg2.extensions as ext
 from psycopg2.extensions import b


@@ -111,9 +112,9 @@ class TypesExtrasTests(ConnectingTestCase):
     def test_adapt_fail(self):
         class Foo(object): pass
         self.assertRaises(psycopg2.ProgrammingError,
-            psycopg2.extensions.adapt, Foo(), psycopg2.extensions.ISQLQuote, None)
+            psycopg2.extensions.adapt, Foo(), ext.ISQLQuote, None)
         try:
-            psycopg2.extensions.adapt(Foo(), psycopg2.extensions.ISQLQuote, None)
+            psycopg2.extensions.adapt(Foo(), ext.ISQLQuote, None)
         except psycopg2.ProgrammingError, err:
             self.failUnless(str(err) == "can't adapt type 'Foo'")

@@ -460,7 +461,6 @@ class AdaptTypeTestCase(ConnectingTestCase):

     def test_none_fast_path(self):
         # the None adapter is not actually invoked in regular adaptation
-        ext = psycopg2.extensions

         class WonkyAdapter(object):
             def __init__(self, obj): pass

@@ -923,7 +923,7 @@ class JsonTestCase(ConnectingTestCase):
             self.assertEqual(curs.mogrify("%s", (obj,)),
                 b("""'{"a": 123}'"""))
         finally:
-            del psycopg2.extensions.adapters[dict, psycopg2.extensions.ISQLQuote]
+            del psycopg2.extensions.adapters[dict, ext.ISQLQuote]


     def test_type_not_available(self):

@@ -1069,6 +1069,97 @@ class JsonTestCase(ConnectingTestCase):
         self.assert_(s.endswith("'"))


+def skip_if_no_jsonb_type(f):
+    return skip_before_postgres(9, 4)(f)
+
+class JsonbTestCase(ConnectingTestCase):
+    @staticmethod
+    def myloads(s):
+        import json
+        rv = json.loads(s)
+        rv['test'] = 1
+        return rv
+
+    def test_default_cast(self):
+        curs = self.conn.cursor()
+
+        curs.execute("""select '{"a": 100.0, "b": null}'::jsonb""")
+        self.assertEqual(curs.fetchone()[0], {'a': 100.0, 'b': None})
+
+        curs.execute("""select array['{"a": 100.0, "b": null}']::jsonb[]""")
+        self.assertEqual(curs.fetchone()[0], [{'a': 100.0, 'b': None}])
+
+    def test_register_on_connection(self):
+        psycopg2.extras.register_json(self.conn, loads=self.myloads, name='jsonb')
+        curs = self.conn.cursor()
+        curs.execute("""select '{"a": 100.0, "b": null}'::jsonb""")
+        self.assertEqual(curs.fetchone()[0], {'a': 100.0, 'b': None, 'test': 1})
+
+    def test_register_on_cursor(self):
+        curs = self.conn.cursor()
+        psycopg2.extras.register_json(curs, loads=self.myloads, name='jsonb')
+        curs.execute("""select '{"a": 100.0, "b": null}'::jsonb""")
+        self.assertEqual(curs.fetchone()[0], {'a': 100.0, 'b': None, 'test': 1})
+
+    def test_register_globally(self):
+        old = psycopg2.extensions.string_types.get(3802)
+        olda = psycopg2.extensions.string_types.get(3807)
+        try:
+            new, newa = psycopg2.extras.register_json(self.conn,
+                loads=self.myloads, globally=True, name='jsonb')
+            curs = self.conn.cursor()
+            curs.execute("""select '{"a": 100.0, "b": null}'::jsonb""")
+            self.assertEqual(curs.fetchone()[0], {'a': 100.0, 'b': None, 'test': 1})
+        finally:
+            psycopg2.extensions.string_types.pop(new.values[0])
+            psycopg2.extensions.string_types.pop(newa.values[0])
+            if old:
+                psycopg2.extensions.register_type(old)
+            if olda:
+                psycopg2.extensions.register_type(olda)
+
+    def test_loads(self):
+        json = psycopg2.extras.json
+        loads = lambda x: json.loads(x, parse_float=Decimal)
+        psycopg2.extras.register_json(self.conn, loads=loads, name='jsonb')
+        curs = self.conn.cursor()
+        curs.execute("""select '{"a": 100.0, "b": null}'::jsonb""")
+        data = curs.fetchone()[0]
+        self.assert_(isinstance(data['a'], Decimal))
+        self.assertEqual(data['a'], Decimal('100.0'))
+        # sure we are not manling json too?
+        curs.execute("""select '{"a": 100.0, "b": null}'::json""")
+        data = curs.fetchone()[0]
+        self.assert_(isinstance(data['a'], float))
+        self.assertEqual(data['a'], 100.0)
+
+    def test_register_default(self):
+        curs = self.conn.cursor()
+
+        loads = lambda x: psycopg2.extras.json.loads(x, parse_float=Decimal)
+        psycopg2.extras.register_default_jsonb(curs, loads=loads)
+
+        curs.execute("""select '{"a": 100.0, "b": null}'::jsonb""")
+        data = curs.fetchone()[0]
+        self.assert_(isinstance(data['a'], Decimal))
+        self.assertEqual(data['a'], Decimal('100.0'))
+
+        curs.execute("""select array['{"a": 100.0, "b": null}']::jsonb[]""")
+        data = curs.fetchone()[0]
+        self.assert_(isinstance(data[0]['a'], Decimal))
+        self.assertEqual(data[0]['a'], Decimal('100.0'))
+
+    def test_null(self):
+        curs = self.conn.cursor()
+        curs.execute("""select NULL::jsonb""")
+        self.assertEqual(curs.fetchone()[0], None)
+        curs.execute("""select NULL::jsonb[]""")
+        self.assertEqual(curs.fetchone()[0], None)
+
+decorate_all_tests(JsonbTestCase, skip_if_no_json_module)
+decorate_all_tests(JsonbTestCase, skip_if_no_jsonb_type)
+
+
 class RangeTestCase(unittest.TestCase):
     def test_noparam(self):
         from psycopg2.extras import Range

@@ -1541,6 +1632,9 @@ class RangeCasterTestCase(ConnectingTestCase):
         self.assert_(not r1.lower_inc)
         self.assert_(r1.upper_inc)

+        # clear the adapters to allow precise count by scripts/refcounter.py
+        del ext.adapters[rc.range, ext.ISQLQuote]
+
     def test_range_escaping(self):
         from psycopg2.extras import register_range
         cur = self.conn.cursor()

@@ -1592,6 +1686,9 @@ class RangeCasterTestCase(ConnectingTestCase):
             self.assertEqual(ranges[i].lower_inf, r.lower_inf)
             self.assertEqual(ranges[i].upper_inf, r.upper_inf)

+        # clear the adapters to allow precise count by scripts/refcounter.py
+        del ext.adapters[TextRange, ext.ISQLQuote]
+
     def test_range_not_found(self):
         from psycopg2.extras import register_range
         cur = self.conn.cursor()

@@ -1625,6 +1722,10 @@ class RangeCasterTestCase(ConnectingTestCase):
             register_range, 'rs.r1', 'FailRange', cur)
         cur.execute("rollback to savepoint x;")

+        # clear the adapters to allow precise count by scripts/refcounter.py
+        for r in [ra1, ra2, rars2, rars3]:
+            del ext.adapters[r.range, ext.ISQLQuote]
+
 decorate_all_tests(RangeCasterTestCase, skip_if_no_range)
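JsonbTestCase shows that register_json() now accepts name='jsonb' and that register_default_jsonb() installs a custom loads() for the built-in jsonb cast (the type needs PostgreSQL 9.4 or later, hence the skip decorator). A hedged sketch, assuming a 9.4+ server at a hypothetical DSN:

    from decimal import Decimal

    import psycopg2
    import psycopg2.extras

    conn = psycopg2.connect("dbname=test")   # hypothetical DSN
    curs = conn.cursor()

    # Parse jsonb values with a custom loads(), here mapping floats to Decimal.
    loads = lambda x: psycopg2.extras.json.loads(x, parse_float=Decimal)
    psycopg2.extras.register_json(conn, loads=loads, name='jsonb')

    curs.execute("""select '{"a": 100.0, "b": null}'::jsonb""")
    print(curs.fetchone()[0])   # {'a': Decimal('100.0'), 'b': None}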
@@ -200,6 +200,19 @@ class WithCursorTestCase(WithTestCase):
         self.assert_(curs.closed)
         self.assert_(closes)

+    def test_exception_swallow(self):
+        # bug #262: __exit__ calls cur.close() that hides the exception
+        # with another error.
+        try:
+            with self.conn as conn:
+                with conn.cursor('named') as cur:
+                    cur.execute("select 1/0")
+                    cur.fetchone()
+        except psycopg2.DataError, e:
+            self.assertEqual(e.pgcode, '22012')
+        else:
+            self.fail("where is my exception?")
+

 def test_suite():
     return unittest.TestLoader().loadTestsFromName(__name__)
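test_exception_swallow pins down the context-manager semantics: the connection's __exit__ commits on success and rolls back on error (without closing the connection), the cursor's __exit__ closes the cursor, and the original exception must not be masked by that close(). A minimal sketch (DSN hypothetical):

    import psycopg2

    conn = psycopg2.connect("dbname=test")   # hypothetical DSN

    try:
        with conn:                        # commit on success, rollback on error
            with conn.cursor() as curs:   # cursor is closed on block exit
                curs.execute("select 1/0")
    except psycopg2.DataError as e:
        print(e.pgcode)                   # '22012', division_by_zero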
@@ -7,6 +7,8 @@ dbhost = os.environ.get('PSYCOPG2_TESTDB_HOST', None)
 dbport = os.environ.get('PSYCOPG2_TESTDB_PORT', None)
 dbuser = os.environ.get('PSYCOPG2_TESTDB_USER', None)
 dbpass = os.environ.get('PSYCOPG2_TESTDB_PASSWORD', None)
+repl_dsn = os.environ.get('PSYCOPG2_TEST_REPL_DSN',
+                          "dbname=psycopg2_test replication=1")

 # Check if we want to test psycopg's green path.
 green = os.environ.get('PSYCOPG2_TEST_GREEN', None)
@@ -28,7 +28,7 @@ import os
 import platform
 import sys
 from functools import wraps
-from testconfig import dsn
+from testconfig import dsn, repl_dsn

 try:
     import unittest2

@@ -65,7 +65,8 @@ else:

     unittest.TestCase.skipTest = skipTest

-# Silence warnings caused by the stubborness of the Python unittest maintainers
+# Silence warnings caused by the stubbornness of the Python unittest
+# maintainers
 # http://bugs.python.org/issue9424
 if not hasattr(unittest.TestCase, 'assert_') \
         or unittest.TestCase.assert_ is not unittest.TestCase.assertTrue:

@@ -102,11 +103,35 @@ class ConnectingTestCase(unittest.TestCase):
                 "%s (did you remember calling ConnectingTestCase.setUp()?)"
                 % e)

+        if 'dsn' in kwargs:
+            conninfo = kwargs.pop('dsn')
+        else:
+            conninfo = dsn
         import psycopg2
-        conn = psycopg2.connect(dsn, **kwargs)
+        conn = psycopg2.connect(conninfo, **kwargs)
         self._conns.append(conn)
         return conn

+    def repl_connect(self, **kwargs):
+        """Return a connection set up for replication
+
+        The connection is on "PSYCOPG2_TEST_REPL_DSN" unless overridden by
+        a *dsn* kwarg.
+
+        Should raise a skip test if not available, but guard for None on
+        old Python versions.
+        """
+        if 'dsn' not in kwargs:
+            kwargs['dsn'] = repl_dsn
+        import psycopg2
+        try:
+            conn = self.connect(**kwargs)
+        except psycopg2.OperationalError, e:
+            return self.skipTest("replication db not configured: %s" % e)
+
+        conn.autocommit = True
+        return conn
+
     def _get_conn(self):
         if not hasattr(self, '_the_conn'):
             self._the_conn = self.connect()

@@ -235,6 +260,43 @@ def skip_after_postgres(*ver):
         return skip_after_postgres__
     return skip_after_postgres_

+def libpq_version():
+    import psycopg2
+    v = psycopg2.__libpq_version__
+    if v >= 90100:
+        v = psycopg2.extensions.libpq_version()
+    return v
+
+def skip_before_libpq(*ver):
+    """Skip a test if libpq we're linked to is older than a certain version."""
+    ver = ver + (0,) * (3 - len(ver))
+    def skip_before_libpq_(f):
+        @wraps(f)
+        def skip_before_libpq__(self):
+            v = libpq_version()
+            if v < int("%d%02d%02d" % ver):
+                return self.skipTest("skipped because libpq %d" % v)
+            else:
+                return f(self)
+
+        return skip_before_libpq__
+    return skip_before_libpq_
+
+def skip_after_libpq(*ver):
+    """Skip a test if libpq we're linked to is newer than a certain version."""
+    ver = ver + (0,) * (3 - len(ver))
+    def skip_after_libpq_(f):
+        @wraps(f)
+        def skip_after_libpq__(self):
+            v = libpq_version()
+            if v >= int("%d%02d%02d" % ver):
+                return self.skipTest("skipped because libpq %s" % v)
+            else:
+                return f(self)
+
+        return skip_after_libpq__
+    return skip_after_libpq_
+
 def skip_before_python(*ver):
     """Skip a test on Python before a certain version."""
     def skip_before_python_(f):

@@ -350,4 +412,3 @@ class py3_raises_typeerror(object):
         if sys.version_info[0] >= 3:
             assert type is TypeError
         return True
-
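The repl_connect() helper added above reads its connection string from the PSYCOPG2_TEST_REPL_DSN environment variable (default "dbname=psycopg2_test replication=1" from testconfig.py) and skips the test when that database is not reachable. Outside the test suite the same pattern looks roughly like this sketch:

    import os

    import psycopg2

    repl_dsn = os.environ.get('PSYCOPG2_TEST_REPL_DSN',
                              "dbname=psycopg2_test replication=1")
    try:
        conn = psycopg2.connect(repl_dsn)
        conn.autocommit = True
    except psycopg2.OperationalError as e:
        print("replication db not configured: %s" % e)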