Compare commits

...

128 Commits

Author SHA1 Message Date
Daniele Varrazzo
00cafbe85b Version changed for release 2.5.4 2014-08-30 18:26:21 +01:00
Daniele Varrazzo
75bce3e39a Release notes cleanup before releasing 2.5.4
Also tweaked tickets remapping offset.
2014-08-30 18:23:24 +01:00
Daniele Varrazzo
7d312e2da9 Fixed excessively strict notices test
Failing with PG 9.4 because it generates other debug messages during the
test run (rehashing catalog cache).
2014-08-30 14:30:20 +01:00
Daniele Varrazzo
11c9fcb9d4 Merge branch 'drop-lighthouse' into maint_2_5 2014-08-28 17:08:17 +01:00
Daniele Varrazzo
a676b7d08f Migrate more tickets and further away
Starting the import operation now...
2014-08-28 13:21:33 +01:00
Daniele Varrazzo
0715771845 Dropped creation of errcodes with missing spec field
On further inspection these names are just aliases for values already
defined: we don't need the duplication.
2014-08-28 02:05:54 +01:00
Daniele Varrazzo
380f7ee4fd Clear adapters in test to maintain reference count 2014-08-26 03:56:09 +01:00
Daniele Varrazzo
8cf30509ad Fixed memory leak with large objects
Deallocating closed large objects failed to decrement the connection
refcount. The fact that the lobject is closed doesn't matter for the refcount.
Issue detected by the always useful scripts/refcounter.py

With an extra bit of unrequested whitespace love.
2014-08-26 03:56:07 +01:00
Daniele Varrazzo
3dad8344c0 Added test to verify _psycopg can be imported 2014-08-26 03:55:47 +01:00
Daniele Varrazzo
b12f6a9135 Don't import psycopg2.tz into the C extension
This makes it possible to import _psycopg directly, after adding the
package directory to the pythonpath. This enables hacks such as:

    sys.path.insert(0, '/path/to/psycopg2')
    import _psycopg
    sys.modules['psycopg2._psycopg'] = _psycopg
    sys.path.pop(0)

which can work around e.g. the problem of #201: freeze cannot freeze
psycopg2 because it's just not designed to deal with C extensions. At
least now the frozen application can hack the pythonpath and work around
the limitation by importing _psycopg as above and then doing the rest of
the imports normally.

Keeping long-lived references to python objects is bad anyway: the
tz module couldn't be reloaded before.
2014-08-26 03:26:59 +01:00
Daniele Varrazzo
284677ae0a Dropped content from the INSTALL file
The docs have it right.
2014-08-24 23:24:58 +01:00
Daniele Varrazzo
af7789b40c Added info about running the test suite in the docs
It was in the INSTALL file, which is quite out of date now.
Also fixed a couple of other things.
2014-08-24 23:24:52 +01:00
Daniele Varrazzo
72a121c78f Self-updating copyright year in docs 2014-08-24 23:11:53 +01:00
Daniele Varrazzo
f751c27208 Document that we know the 9.4 errorcodes 2014-08-24 22:11:40 +01:00
Daniele Varrazzo
3775a9d95d Fixed doc example about Error attributes
Catch the Error, not everything!

Also, whitespaces.
2014-08-24 22:08:50 +01:00
Daniele Varrazzo
2231578922 Added plural version of the tickets role 2014-08-24 02:39:09 +01:00
Daniele Varrazzo
baf1ad251e Point tickets to GitHub instead of Lighthouse
Take into account the numbers reshuffling: Lighthouse bugs with a number
matching a GitHub merge request are shifted ahead.
2014-08-24 02:37:01 +01:00
Daniele Varrazzo
cafae16072 Merge branch 'withhold-transactions' into maint_2_5 2014-08-21 05:47:17 +01:00
Daniele Varrazzo
c114aff435 Document WITH HOLD corrections. 2014-08-21 05:46:57 +01:00
Alexey Borzenkov
e08be44db7 Allow using named with hold cursors in autocommit 2014-08-21 05:15:01 +01:00
Daniele Varrazzo
75a0b2ffe2 Added test to verify withhold cursors work in autocommit 2014-08-21 05:14:20 +01:00
Alexey Borzenkov
d131b2b22b No implicit transaction on named cursor close
Also, don't start an implicit transaction when fetching with a
named WITH HOLD cursor, since it already returns results
from a previously committed transaction.
2014-08-21 05:05:54 +01:00
Daniele Varrazzo
18d67a73d5 Added test to verify withhold transaction behaviour
A withhold cursor can read its data when the transaction is closed, so
it shouldn't start a new one upon movement/close.
2014-08-21 05:05:54 +01:00
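A minimal sketch of the behaviour the two commits above describe, assuming psycopg2 2.5.4 and a reachable database (the DSN and query are hypothetical):

    import psycopg2

    conn = psycopg2.connect("dbname=test")     # hypothetical DSN
    conn.autocommit = True
    cur = conn.cursor("mycur", withhold=True)  # named WITH HOLD cursor
    cur.execute("SELECT generate_series(1, 100)")
    cur.fetchmany(10)   # fetching doesn't open an implicit transaction...
    cur.close()         # ...and neither does closing the cursor
    conn.close()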
Daniele Varrazzo
0e44198a8f Fixed segfault in List function
This function is never called: it segfaults 100%. To be removed.
2014-08-15 02:11:43 +01:00
Daniele Varrazzo
756c6f1838 Scrape PostgreSQL 9.4 error codes table too
No new error code found as of 9.4 beta2.
2014-08-13 02:45:45 +01:00
Daniele Varrazzo
d232f25a68 Convert pool arguments to int
Failing to do so may cause dangerous misbehaviours such as an unbounded
pool (because of lame comparison operators in Python 2).

Fix ticket #220.
2014-08-13 02:37:00 +01:00
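The Python 2 pitfall mentioned above, plus the effect of the fix, in a hedged sketch (the connection parameters are hypothetical):

    # In Python 2 a string always compares greater than an integer, so a
    # maxconn passed as '10' would never bound the pool:
    #   >>> '10' > 100
    #   True
    from psycopg2 import pool

    # minconn/maxconn are now coerced with int(), so string values behave
    p = pool.SimpleConnectionPool("1", "10", dsn="dbname=test")  # hypothetical DSN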
Daniele Varrazzo
f9518e42b9 Document PostgreSQL 9.4 as supported
Actually there is a test failing in the test suite with PG 9.4beta2, but
it's probably because the default logging level is set to DEBUG. Will wait
for the final release to check whether the test needs to be fixed. All other
tests pass with no problem.
2014-08-13 02:07:33 +01:00
Daniele Varrazzo
9acc1ba882 Merge branch 'jsonb' into maint_2_5 2014-08-13 01:59:15 +01:00
Daniele Varrazzo
774c1ff151 Habemus jsonb 2014-08-13 01:58:28 +01:00
Daniele Varrazzo
0f48a5e8b8 Added jsonb docs 2014-08-13 01:52:55 +01:00
Daniele Varrazzo
14b8c411be Add register_default_jsonb() and register the type 2014-08-13 01:52:55 +01:00
Daniele Varrazzo
cd965fe431 Added name param to register_json() 2014-08-13 00:45:10 +01:00
Daniele Varrazzo
ba1409d419 Added wishful test suite for jsonb type 2014-08-13 00:29:58 +01:00
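A hedged usage sketch of the jsonb support added by the commits above (the DSN is hypothetical):

    import psycopg2
    import psycopg2.extras

    conn = psycopg2.connect("dbname=test")   # hypothetical DSN
    # jsonb values are converted to Python objects out of the box in 2.5.4;
    # a custom loads() can still be registered explicitly:
    psycopg2.extras.register_default_jsonb(conn, loads=lambda s: s)
    # register_json() also gained a 'name' parameter, e.g.:
    # psycopg2.extras.register_json(conn, name='jsonb')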
Daniele Varrazzo
f434d87ef1 Fix supported Py/PG versions in docs 2014-08-04 22:39:41 +01:00
Daniele Varrazzo
0b06ebbf40 Parse errcodes for PG 9.3 too
There is no new errcode defined so no need to release this as a change,
only change the docs.
2014-07-31 13:35:52 +01:00
Daniele Varrazzo
f82d9d4db3 Add a few missing errcodes
They are used in the code but not defined in the SGML docs so the script
failed to parse them.
2014-07-31 13:35:52 +01:00
Daniele Varrazzo
b79fcaaa1a Parse the error codes from the text file if available
The text file was added in PG 9.1. It contains a few errors not available
in the SGML.
2014-07-31 13:35:17 +01:00
Daniele Varrazzo
17e9e7d327 Merge branch 'bug-219' into maint_2_5 2014-06-06 21:32:48 +02:00
Daniele Varrazzo
fd0ba0b2d3 Fixed segfault if COPY statements are executed
Close ticket #219
2014-06-06 21:32:08 +02:00
Daniele Varrazzo
5ebf6744ef Don't silently ignore the cursor.callproc argument without a length 2014-06-05 02:46:41 +02:00
Daniele Varrazzo
d066537efe Added test with objects without length as callproc param 2014-06-05 02:37:06 +02:00
Daniele Varrazzo
acde0a66c1 Bump version number to next dev release 2014-05-20 18:27:34 +01:00
Daniele Varrazzo
02baaae975 Merge branch 'license_cleanup' into maint_2_5 2014-05-20 18:24:20 +01:00
Chris Mildebrandt
8b85c60b23 cleanup remaining GPL license text 2014-05-20 18:23:17 +01:00
Chris Mildebrandt
bb8f6068c9 cleanup remaining GPL license text 2014-05-20 18:23:17 +01:00
Daniele Varrazzo
f6229a3d52 Dropped GPL license from source
It doesn't apply: leaving the LGPL only.
2014-05-20 18:23:17 +01:00
Jason Erickson
6933b3bece Skip test_cleanup_on_badconn_close on Windows
The Windows server version of PostgreSQL uses a function called pgkill in the
file kill.c in place of the UNIX kill function.  This pgkill function
simulates some of the SIGHUP like commands by passing signals through a named
pipe.  Because it is passing the signal through a pipe, the server doesn't get
the kill signal immediately and therefore fails the test on
test_connection.ConnectionTests.test_cleanup_on_badconn_close.
Ideally, the test should check whether the server is running on Windows,
not whether psycopg is.
2014-05-18 20:58:50 -06:00
Jason Erickson
9bee072085 Added select.select timeout on AsyncTests.wait
On Windows, the select.select() hangs/waits forever on the
test_async_connection_error_message() test.  Adding a 10 second timeout
allows the tests to continue.
2014-05-18 20:58:23 -06:00
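The wait loop in question follows the select()-based polling pattern documented for asynchronous connections; a sketch with the timeout added (the 10-second value mirrors the commit, the rest is illustrative):

    import select
    import psycopg2
    import psycopg2.extensions

    def wait(conn, timeout=10):
        # Poll the connection, waiting on its socket with a timeout so a
        # broken connection cannot hang the caller forever (as on Windows).
        while True:
            state = conn.poll()
            if state == psycopg2.extensions.POLL_OK:
                break
            elif state == psycopg2.extensions.POLL_WRITE:
                select.select([], [conn.fileno()], [], timeout)
            elif state == psycopg2.extensions.POLL_READ:
                select.select([conn.fileno()], [], [], timeout)
            else:
                raise psycopg2.OperationalError("poll() returned %s" % state)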
Daniele Varrazzo
40c1eea15d Bump up version for release 2.5.3 2014-05-13 16:20:39 +01:00
Daniele Varrazzo
12d114c731 Python 3.4 declared supported 2014-05-13 14:36:41 +01:00
Daniele Varrazzo
3b7f57b6d8 Parse the rowcount as long 2014-05-13 14:18:46 +01:00
Daniele Varrazzo
06f64fbe70 Skip rowcount on copy test on postgres < 8.2
It looks like the server doesn't send the message.
2014-05-13 14:18:40 +01:00
Daniele Varrazzo
e9335b08f8 Copy operations correctly set the cursor.rowcount attribute
Fixes ticket #180.
2014-05-05 23:52:41 +01:00
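A short sketch of the fixed behaviour (the DSN and table are hypothetical):

    import io
    import psycopg2

    conn = psycopg2.connect("dbname=test")      # hypothetical DSN
    cur = conn.cursor()
    buf = io.StringIO("1\tfoo\n2\tbar\n")
    cur.copy_from(buf, "mytable")               # hypothetical table
    print(cur.rowcount)   # now reflects the number of rows copied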
Daniele Varrazzo
26e7f1d71d set_session() params documented in a more Python-friendly way 2014-05-05 23:06:32 +01:00
Daniele Varrazzo
5a660b80f5 Fixed explicit connection.cursor(cursor_factory=None)
Fixes issue #210.
2014-04-30 17:56:09 +01:00
Daniele Varrazzo
e3ecae75a0 Merge branch 'connection-closed' into maint_2_5
NEWS for released 2.5.3 reordered.
2014-04-05 16:10:32 +01:00
Daniele Varrazzo
75b98b561b Don't specify 0 or 1 in closed docs
There's also 2, which means broken, but I prefer to leave that as an
implementation detail.
2014-04-05 15:54:31 +01:00
Daniele Varrazzo
12c2fafa86 Close the connection if discovered bad on poll() 2014-04-05 15:52:41 +01:00
Daniele Varrazzo
e5ab0b3987 Check the connection is really bad on exception before closing it
We end up here without a pgres sometimes (e.g. from lobject errors)
2014-04-05 15:51:39 +01:00
Daniele Varrazzo
f0c38f0b37 Fixed attempt of closing an already closed lobject on dealloc
This results in a "null without exception set" in the current state, which
is caused by the connection being unexpectedly closed anyway.
2014-04-05 15:51:33 +01:00
Daniele Varrazzo
221d0d66de Don't set an exception without GIL when closing lobjects with a bad conn
We ended up in this branch only because of an excessively aggressive
closing of the transaction, which I'm now going to fix.
2014-04-05 15:51:25 +01:00
Daniele Varrazzo
a201307185 Avoid printing the typecast name in debug statement
The way we were doing it is unsafe and causes assertion failures on Py3.

Fixes ticket #188
2014-04-04 19:26:35 +01:00
Daniele Varrazzo
cefef286a6 Fixed use of Python object with refcount 0 in cursor_init
Caused segfault running test_cursor.CursorTests.test_invalid_name under
Python 3.4 built in debug mode.
2014-04-04 19:26:34 +01:00
Daniele Varrazzo
df7018a5d4 Work around pip issue #1630 breaking 'pip -e git+url'
https://github.com/pypa/pip/issues/1630

Fixes ticket #18 (opened in 2010!)
2014-04-04 17:57:35 +01:00
Daniele Varrazzo
ba71c05860 Clarify in the docs that mogrify() and query always return bytes 2014-04-03 02:46:13 +01:00
Daniele Varrazzo
25ae646dcf Fixed bug number typo in NEWS file 2014-04-03 02:45:22 +01:00
Daniele Varrazzo
2245b56dc1 Fixed overflow opening a lobject with an oid not fitting in a signed int
Fixes :ticket:`203`.
2014-04-03 02:35:46 +01:00
Daniele Varrazzo
a516517a23 Dropped unneeded pass in test 2014-04-03 02:31:55 +01:00
Daniele Varrazzo
1d786d0380 Make explicit that closed connections have undefined status 2014-04-03 02:07:57 +01:00
Daniele Varrazzo
65fbe9159a Fixed dsn and closed attributes in failing connection subclasses.
From ticket #192 discussion.
2014-04-03 01:41:19 +01:00
Erwin de Keijzer
e2bb7ff8da Fixed DNS typo in examples 2014-03-25 14:29:36 +00:00
Daniele Varrazzo
d1e1243ba8 Close a connection if PQexec returned NULL
This happens for Socket connections, not for TCP ones, where a result
containing an error is returned and correctly handled by pq_raise().

Closes ticket #196 but not #192: poll() still doesn't mark the
connection as closed.
2014-03-06 18:17:32 +00:00
Daniele Varrazzo
f597c36f42 Allow get_transaction_status on closed connections
It's a local operation and the libpq function has a NULL guard.
2014-03-06 18:02:08 +00:00
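A small sketch of the change (DSN hypothetical); with the NULL guard in libpq the call on a closed connection is expected to report an unknown status instead of raising:

    import psycopg2
    from psycopg2 import extensions

    conn = psycopg2.connect("dbname=test")    # hypothetical DSN
    conn.close()
    status = conn.get_transaction_status()    # no longer refused when closed
    print(status == extensions.TRANSACTION_STATUS_UNKNOWN)   # expected: True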
Daniele Varrazzo
d6da4ed09f Don't segfault on uninitialized cursor
It can happen with bad cursor subclasses not calling super's init. Raise
an exception instead of segfaulting.

Closes #195
2014-02-26 19:42:29 +00:00
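An illustration of the kind of broken subclass the fix protects against (DSN hypothetical); using it now raises an exception instead of crashing the interpreter:

    import psycopg2
    import psycopg2.extensions

    class BrokenCursor(psycopg2.extensions.cursor):
        def __init__(self, *args, **kwargs):
            pass   # forgot to call super().__init__(*args, **kwargs)

    conn = psycopg2.connect("dbname=test")           # hypothetical DSN
    cur = conn.cursor(cursor_factory=BrokenCursor)
    cur.execute("SELECT 1")   # raises instead of segfaulting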
Daniele Varrazzo
69b2fa282c Use the do-while 0 idiom for cursor guards macro 2014-02-26 19:42:28 +00:00
Daniele Varrazzo
a13c72cf32 Merge branch 'ticket-194' into maint_2_5 2014-02-24 15:19:02 +00:00
Daniele Varrazzo
e840e00278 Bug #194 confirmed fixed: newsfile updated. 2014-02-24 15:15:54 +00:00
Daniele Varrazzo
98ea06d8b4 Set the connection async before polling for connection
It should fix ticket #194
2014-02-24 12:12:16 +00:00
Daniele Varrazzo
a8ef13620a Set the connection async earlier in green mode
The moment it is called shouldn't have really changed, but it's more
explicit when it happens. Previously it was sort of obfuscated behind a
roundtrip through the green callback and poll.
2014-02-24 11:59:07 +00:00
Daniele Varrazzo
211e949741 Merge branch 'range_sort' into maint_2_5 2014-02-22 23:08:39 +00:00
Daniele Varrazzo
0e86fc164f Mention Range order in the news file 2014-02-22 23:06:03 +00:00
Daniele Varrazzo
d43e23ddc6 Docs wordsmithing about Range order 2014-02-22 23:06:03 +00:00
Daniele Varrazzo
6c27cdd20e Hardcode the list of attributes to be used in comparison
Comparing Range subclasses may lead to surprises.
2014-02-22 23:06:02 +00:00
Daniele Varrazzo
c10c1186a5 Added implementation for Range gt and ge operators
Using a common implementation for all the operators. Note that lt is the
one used by sort, so it's nice that it's the fastest.
2014-02-22 23:06:02 +00:00
Chris Withers
5624ad4ec5 documentation changes now that Range objects can be ordered 2014-02-22 23:06:02 +00:00
Chris Withers
1487800b6d New implementation of Range sorting that works for Python 2.5 to 3.3, at least. 2014-02-22 23:06:02 +00:00
Chris Withers
ca1845477d Coding style changes. 2014-02-22 23:06:02 +00:00
Chris Withers
f739576f0a Provide a stable and consistent sort order for Range objects.
This matches postgres server-side behaviour and helps client applications that need to sort based on the primary key of tables where the primary key is or contains a range.
2014-02-22 23:06:02 +00:00
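A hedged example of the ordering these commits add, using the NumericRange subclass from psycopg2.extras:

    from psycopg2.extras import NumericRange

    ranges = [NumericRange(10, 20), NumericRange(1, 5), NumericRange(empty=True)]
    ranges.sort()    # Range objects now have a stable, consistent order
    print(ranges)    # lt is the comparison sort() relies on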
Daniele Varrazzo
434fbb02b1 Fixed error message when range parsing fails 2014-02-19 01:03:01 +00:00
Daniele Varrazzo
9ac01d060d Fixed debug build on Windows
Patches on master by James Emerton, from github pull request #8.
2014-01-14 21:56:36 +00:00
Daniele Varrazzo
9e8923b884 Added doc notes about how to avoid JSON parsing
Added a FAQ entry too, as it has bitten more than one user
(see tickets #172, #190).
2014-01-14 18:30:41 +00:00
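A short sketch of how JSON parsing can be skipped, along the lines of what the new note describes: register a pass-through loads() so json values come back as plain strings:

    import psycopg2.extras

    # Return json column values untouched instead of decoding them
    psycopg2.extras.register_default_json(loads=lambda x: x)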
Daniele Varrazzo
3f6497d587 Bump to next dev release 2014-01-14 18:27:46 +00:00
Daniele Varrazzo
4f1e4a03d1 Bump version for release 2.5.2 2014-01-07 12:00:28 +00:00
Daniele Varrazzo
8aaa4eabca Fixed dealloc of lobject->smode
I put it by mistake into the self->conn checked block in the
previous commit.
2013-11-27 12:59:14 +00:00
Daniele Varrazzo
7e9e11ee27 Check connection type in lobject init
Fixes ticket #187.
2013-11-27 12:42:57 +00:00
Daniele Varrazzo
0442fd924f Typo fixes and improvements to new_array_type() docs 2013-10-21 15:26:57 +01:00
Daniele Varrazzo
58c53025d1 Use bytestrings in infinite dates adapter example
Returning strings would result in unclear tracebacks on Py3 (see ticket #163).
2013-10-21 14:35:30 +01:00
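The adapter example referenced here has roughly this shape (a sketch, not the verbatim doc text): map date.max/date.min to the PostgreSQL 'infinity' values and return bytes so Python 3 tracebacks stay readable:

    import datetime
    import psycopg2
    from psycopg2.extensions import register_adapter

    class InfDateAdapter:
        def __init__(self, wrapped):
            self.wrapped = wrapped

        def getquoted(self):
            # Return bytestrings: str here leads to unclear tracebacks on Py3
            if self.wrapped == datetime.date.max:
                return b"'infinity'::date"
            elif self.wrapped == datetime.date.min:
                return b"'-infinity'::date"
            return psycopg2.extensions.DateFromPy(self.wrapped).getquoted()

    register_adapter(datetime.date, InfDateAdapter)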
Daniele Varrazzo
8d8cc38590 Really test named cursors in test_scroll_named() 2013-10-16 19:09:51 +01:00
Daniele Varrazzo
429ebfc764 Document that named cursors don't raise an exception going out-of-bound
See issue #174 for the details.
2013-10-16 19:08:45 +01:00
Daniele Varrazzo
865b36e005 Document that using %, (, ) in the placeholder name is not supported 2013-10-16 15:36:43 +01:00
Daniele Varrazzo
09cf64dda4 Meaningful connection errors report a meaningful message
Fixes issue #173.
2013-10-16 15:28:16 +01:00
Daniele Varrazzo
0e08fbb20b Fixed pickling of exceptions with no pgerror/pgcode set
Fixes ticket #170.
2013-07-19 16:05:48 +01:00
Daniele Varrazzo
96248d0f09 Fixed titles in newsfile
The first title at the same level as the main releases helps to get a
more regular ToC on the docs main page.
2013-07-19 15:54:35 +01:00
Daniele Varrazzo
bf843fc5f0 Bump to next dev release number 2013-07-19 15:21:16 +01:00
Daniele Varrazzo
68d5d070fe Bump up version for release 2.5.1 2013-06-23 02:01:35 +01:00
Daniele Varrazzo
9b2f4c7d77 Fixed manifest to avoid spurious messages during install
For a while we had been getting::

  no previously-included directories found matching 'doc/src/_build'

at install time. Also exclude a couple of Sphinx build files from the
sdist package.
2013-06-23 02:01:35 +01:00
Daniele Varrazzo
468951de25 Fixed doc index entry for infinite dates 2013-06-23 01:24:56 +01:00
Daniele Varrazzo
889b1d826e Untrack the connection before closing to avoid possible double-free
From Gangadharan S.A. Fixes issue #166.
2013-06-20 16:35:01 +01:00
Daniele Varrazzo
9f4b5b37a3 Added doc example to convert date.max to infinity
See issue #163.
2013-06-18 15:01:04 +01:00
Daniele Varrazzo
eb36e75b89 Fixed comparison of Range with non-range objects
Fixes ticket #164.

Patch from Chris Withers on master.
2013-06-18 14:16:06 +01:00
Daniele Varrazzo
8fd228dd28 Added regression test for infinity parsing in dates
...and a couple of typos.
2013-05-11 02:24:25 +01:00
Daniele Varrazzo
1e94018473 Added test to verify copysize handling in copy_expert
Not an original psycopg2 bug but present in pure Python implementations,
e.g. ctypes issue #25 and cffi issue #5.

https://github.com/mvantellingen/psycopg2-ctypes/issues/25
https://github.com/chtd/psycopg2cffi/pull/5
2013-05-11 02:24:24 +01:00
Daniele Varrazzo
cc4cabebf0 Skip tests on python implementations without getrefcount()
PyPy is one of these.
2013-05-11 02:24:22 +01:00
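A sketch of the kind of guard this adds to the test suite (the decorator name is illustrative):

    import sys
    import unittest

    # PyPy has no sys.getrefcount(), so refcount-based tests must be skipped
    skip_if_no_getrefcount = unittest.skipUnless(
        hasattr(sys, 'getrefcount'), "no sys.getrefcount()")

    class LeakTests(unittest.TestCase):
        @skip_if_no_getrefcount
        def test_no_leak(self):
            pass   # would compare sys.getrefcount() before/after an operation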
Daniele Varrazzo
3a13599a99 A few docs fixes to the usage page 2013-05-01 22:40:39 +01:00
Daniele Varrazzo
c862554fdc Dropped a lot of words from the front matter
Replaced with links to the types adaptation table and the extension docs.
2013-04-26 10:33:06 +01:00
Piotr Kasprzyk
5f320e52f4 Fix multiple misspellings 2013-04-26 10:32:21 +01:00
Daniele Varrazzo
0fc1e3a8c7 Merge branch 'classifiers' into maint_2_5 2013-04-21 21:46:46 +01:00
Daniele Varrazzo
361522cde8 Added explicit Python 2 and Python 3 classifiers
Not sure, but I suspect the "python3" page on PyPI is only based
on the Language :: Python :: 3 classifier, not the more specific ones
2013-04-21 21:46:13 +01:00
Saul Shanabrook
ad5af45ba6 Explicit Python Version Support
Used supported versions from http://initd.org/psycopg/docs/install.html
2013-04-21 21:46:08 +01:00
Daniele Varrazzo
b8fbe599ac Make sure to return a bytes string from numeric range adapter 2013-04-21 20:30:03 +01:00
Daniele Varrazzo
9a1dac6125 Merge branch 'solaris-round' into maint_2_5 2013-04-21 18:43:27 +01:00
Daniele Varrazzo
244a58e5c7 Mention Solaris round fix in NEWS file 2013-04-21 18:43:08 +01:00
Daniele Varrazzo
35086c9ef0 Long ifdef reformatted for readability 2013-04-21 18:43:08 +01:00
Daniel Enman
e93357ba17 Remove extra || 2013-04-21 18:43:08 +01:00
Daniel Enman
ec4aa95554 Solaris 10+, and illumos distros have round() 2013-04-21 18:43:08 +01:00
Daniele Varrazzo
29e96179f2 The UUID adapter returns bytes instead of str in Python 3
Also added __conform__ method to the adapter.
2013-04-07 22:20:38 +01:00
Daniele Varrazzo
5d3a5c242e Dropped stale entries from MANIFEST.in 2013-04-07 20:05:11 +01:00
Daniele Varrazzo
979c4fc1a6 Bump to next maint release 2013-04-07 18:29:56 +01:00
72 changed files with 1333 additions and 3039 deletions

INSTALL (105 changed lines)

@@ -1,103 +1,4 @@
Compiling and installing psycopg
********************************
** Important note: if you plan to use psycopg2 in a multithreaded application,
make sure that your libpq has been compiled with the --with-thread-safety
option. psycopg2 will work correctly even with a non-thread-safe libpq but
libpq will leak memory.
psycopg2 uses distutils for its build process, so most of the process is
executed by the setup.py script. Before building psycopg look at
setup.cfg file and change any settings to follow your system (or taste);
then:
python setup.py build
to build in the local directory; and:
python setup.py install
to install system-wide.
Common errors and build problems
================================
One of the most common errors is trying to build psycopg without the right
development headers for PostgreSQL, Python or both. If you get errors, look
for the following messages and then take the appropriate action:
libpq-fe.h: No such file or directory
PostgreSQL headers are not properly installed on your system or are
installed in a non default path. First make sure they are installed, then
check setup.cfg and make sure pg_config points to a valid pg_config
executable. If you don't have a working pg_config try to play with the
include_dirs variable (and note that a working pg_config is better.)
Running the test suite
======================
The included Makefile allows running all the tests included in the
distribution. Just use:
make
make check
The tests are run against a database called psycopg2_test on unix socket
and standard port. You can configure a different database to run the test
by setting the environment variables:
- PSYCOPG2_TESTDB
- PSYCOPG2_TESTDB_HOST
- PSYCOPG2_TESTDB_PORT
- PSYCOPG2_TESTDB_USER
The database should be created before running the tests.
The standard Python unittest is used to run the tests. But if unittest2 is
found it will be used instead, with the result of having more information
about skipped tests.
Building the documentation
==========================
In order to build the documentation included in the distribution, use
make env
make docs
The first command will install all the dependencies (Sphinx, Docutils) in
an 'env' directory in the project tree. The second command will build the
documentation both in HTML format (in the 'doc/html' directory) and in
plain text (doc/psycopg2.txt).
Using setuptools and EasyInstall
================================
If setuptools is installed on your system you can easily create an egg for
psycopg and install it. Download the source distribution (if you're reading
this file you probably already have) and then edit setup.cfg to your taste
and build from the source distribution top-level directory using:
easy_install .
Compiling under Windows with mingw32
====================================
You can compile psycopg on the Windows platform with the mingw32
(http://www.mingw.org/) compiler. MinGW is also shipped with IDEs such as
Dev-C++ (http://www.bloodshed.net/devcpp.html) and Code::Blocks
(http://www.codeblocks.org). gcc binaries should be in your PATH.
You need PostgreSQL with include and library files installed. At least v8.0
is required.
First you need to create a libpython2X.a as described in
http://starship.python.net/crew/kernr/mingw32/Notes.html. Then run:
python setup.py build_ext --compiler=mingw32 install
Installation instructions are included in the docs.
Please check the 'doc/src/install.rst' file or online at
<http://initd.org/psycopg/docs/install.html>.

MANIFEST.in

@@ -1,16 +1,12 @@
recursive-include psycopg *.c *.h *.manifest
recursive-include lib *.py
recursive-include tests *.py
recursive-include ZPsycopgDA *.py *.gif *.dtml
recursive-include psycopg2da *
recursive-include examples *.py somehackers.jpg whereareyou.jpg
recursive-include debian *
recursive-include doc README HACKING SUCCESS COPYING* ChangeLog-1.x pep-0249.txt
recursive-include doc *.txt *.html *.css *.js Makefile
recursive-include doc/src *.rst *.py *.css Makefile
recursive-include doc/html *
prune doc/src/_build
include doc/Makefile doc/README doc/HACKING doc/SUCCESS doc/COPYING.LESSER doc/*.txt
include doc/src/Makefile doc/src/conf.py doc/src/*.rst doc/src/_static/*
recursive-include doc/src/tools *.py
include doc/html/*.html doc/html/*.js doc/html/_sources/*.txt doc/html/_static/*
recursive-include scripts *.py *.sh
include scripts/maketypes.sh scripts/buildtypes.py
include AUTHORS README INSTALL LICENSE NEWS ChangeLog
include AUTHORS README INSTALL LICENSE NEWS
include PKG-INFO MANIFEST.in MANIFEST setup.py setup.cfg Makefile

NEWS (88 changed lines)

@@ -1,3 +1,75 @@
Current release
---------------
What's new in psycopg 2.5.4
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Added :sql:`jsonb` support for PostgreSQL 9.4 (:ticket:`#226`).
- Fixed segfault if COPY statements are passed to `~cursor.execute()` instead
of using the proper methods (:ticket:`#219`).
- Force conversion of pool arguments to integer to avoid potentially unbounded
pools (:ticket:`#220`).
- Cursors :sql:`WITH HOLD` don't begin a new transaction upon move/fetch/close
(:ticket:`#228`).
- Cursors :sql:`WITH HOLD` can be used in autocommit (:ticket:`#229`).
- `~cursor.callproc()` doesn't silently ignore an argument without a length.
- Fixed memory leak with large objects (:ticket:`#256`).
- Make sure the internal ``_psycopg.so`` module can be imported stand-alone (to
allow modules juggling such as the one described in :ticket:`#201`).
What's new in psycopg 2.5.3
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Work around `pip issue #1630 <https://github.com/pypa/pip/issues/1630>`__
making installation via ``pip -e git+url`` impossible (:ticket:`#18`).
- Copy operations correctly set the `cursor.rowcount` attribute
(:ticket:`#180`).
- It is now possible to call `get_transaction_status()` on closed connections.
- Fixed unsafe access to object names causing assertion failures in
Python 3 debug builds (:ticket:`#188`).
- Mark the connection closed if found broken on `poll()` (from :ticket:`#192`
discussion)
- Fixed handling of dsn and closed attributes in connection subclasses
failing to connect (from :ticket:`#192` discussion).
- Added arbitrary but stable order to `Range` objects, thanks to
Chris Withers (:ticket:`#193`).
- Avoid blocking async connections on connect (:ticket:`#194`). Thanks to
Adam Petrovich for the bug report and diagnosis.
- Don't segfault using poorly defined cursor subclasses which forgot to call
the superclass init (:ticket:`#195`).
- Mark the connection closed when a Socket connection is broken, as it
happens for TCP connections instead (:ticket:`#196`).
- Fixed overflow opening a lobject with an oid not fitting in a signed int
(:ticket:`#203`).
- Fixed handling of explicit default ``cursor_factory=None`` in
`connection.cursor()` (:ticket:`#210`).
- Fixed possible segfault in named cursors creation.
- Fixed debug build on Windows, thanks to James Emerton.
What's new in psycopg 2.5.2
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Fixed segfault pickling the exception raised on connection error
(:ticket:`#170`).
- Meaningful connection errors report a meaningful message, thanks to
Alexey Borzenkov (:ticket:`#173`).
- Manually creating `lobject` with the wrong parameter doesn't segfault
(:ticket:`#187`).
What's new in psycopg 2.5.1
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Fixed build on Solaris 10 and 11 where the round() function is already
declared (:ticket:`#146`).
- Fixed comparison of `Range` with non-range objects (:ticket:`#164`).
Thanks to Chris Withers for the patch.
- Fixed double-free on connection dealloc (:ticket:`#166`). Thanks to
Gangadharan S.A. for the report and fix suggestion.
What's new in psycopg 2.5
-------------------------
@@ -51,7 +123,7 @@ What's new in psycopg 2.4.6
- 'register_hstore()', 'register_composite()', 'tpc_recover()' work with
RealDictConnection and Cursor (:ticket:`#114`).
- Fixed broken pool for Zope and connections re-init across ZSQL methods
in the same request (tickets #123, #125, #142).
in the same request (:tickets:`#123, #125, #142`).
- connect() raises an exception instead of swallowing keyword arguments
when a connection string is specified as well (:ticket:`#131`).
- Discard any result produced by 'executemany()' (:ticket:`#133`).
@@ -73,7 +145,7 @@ What's new in psycopg 2.4.5
- Error and its subclasses are picklable, useful for multiprocessing
interaction (:ticket:`#90`).
- Better efficiency and formatting of timezone offset objects thanks
to Menno Smits (tickets #94, #95).
to Menno Smits (:tickets:`#94, #95`).
- Fixed 'rownumber' during iteration on cursor subclasses.
Regression introduced in 2.4.4 (:ticket:`#100`).
- Added support for 'inet' arrays.
@@ -181,7 +253,7 @@ New features and changes:
ISO885916, LATIN10, SHIFT_JIS_2004.
- Dropped repeated dictionary lookups with unicode query/parameters.
- Improvements to the named cusors:
- Improvements to the named cursors:
- More efficient iteration on named cursors, fetching 'itersize'
records at time from the backend.
@@ -244,7 +316,7 @@ Main new features:
- `dict` to `hstore` adapter and `hstore` to `dict` typecaster, using both
9.0 and pre-9.0 syntax.
- Two-phase commit protocol support as per DBAPI specification.
- Support for payload in notifications received from the backed.
- Support for payload in notifications received from the backend.
- `namedtuple`-returning cursor.
- Query execution cancel.
@@ -284,7 +356,7 @@ Bux fixes:
The old register_tstz_w_secs() function is deprecated and will raise a
warning if called.
- Exceptions raised by the column iterator are propagated.
- Exceptions raised by executemany() interators are propagated.
- Exceptions raised by executemany() iterators are propagated.
What's new in psycopg 2.2.1
@@ -401,7 +473,7 @@ New features:
Bug fixes:
- Fixed exeception in setup.py.
- Fixed exception in setup.py.
- More robust detection of PostgreSQL development versions.
- Fixed exception in RealDictCursor, introduced in 2.0.10.
@@ -783,7 +855,7 @@ What's new in psycopg 1.99.11
* changed 'tuple_factory' cursor attribute name to 'row_factory'.
* the .cursor attribute is gone and connections and cursors are propely
* the .cursor attribute is gone and connections and cursors are properly
gc-managed.
* fixes to the async core.
@@ -832,7 +904,7 @@ What's new in psycopg 1.99.8
* now cursors support .fileno() and .isready() methods, to be used in
select() calls.
* .copy_from() and .copy_in() methods are back in (still using the old
protocol, will be updated to use new one in next releasae.)
protocol, will be updated to use new one in next release.)
* fixed memory corruption bug reported on win32 platform.
What's new in psycopg 1.99.7


@@ -1,676 +0,0 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.

File diff suppressed because it is too large.

View File

@ -23,7 +23,7 @@ Date: 23 Oct 2001 09:53:11 +0600
We use psycopg and psycopg zope adapter since fisrt public
release (it seems version 0.4). Now it works on 3 our sites and in intranet
applications. We had few problems, but all problems were quckly
applications. We had few problems, but all problems were quickly
solved. The strong side of psycopg is that it's code is well organized
and easy to understand. When I found a problem with non-ISO datestyle in first
version of psycopg, it took for me 15 or 20 minutes to learn code and

View File

@ -255,7 +255,7 @@ Cursor Objects
display_size, internal_size, precision, scale,
null_ok). The first two items (name and type_code) are
mandatory, the other five are optional and must be set to
None if meaningfull values are not provided.
None if meaningful values are not provided.
This attribute will be None for operations that
do not return rows or if the cursor has not had an

View File

@ -42,7 +42,9 @@ master_doc = 'index'
# General information about the project.
project = u'Psycopg'
copyright = u'2001-2013, Federico Di Gregorio. Documentation by Daniele Varrazzo'
from datetime import date
year = date.today().year
copyright = u'2001-%s, Federico Di Gregorio, Daniele Varrazzo' % year
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@ -66,7 +68,9 @@ intersphinx_mapping = {
}
# Pattern to generate links to the bug tracker
ticket_url = 'http://psycopg.lighthouseapp.com/projects/62710/tickets/%s'
ticket_url = 'https://github.com/psycopg/psycopg2/issues/%s'
ticket_remap_until = 25
ticket_remap_offset = 230
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
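As a rough sketch of how this remapping is meant to work (mirroring the logic in
the ticket role further below; the numbers are the ones configured above)::

    # Old Lighthouse tickets up to ticket_remap_until are shifted by
    # ticket_remap_offset to find the corresponding GitHub issue.
    TICKET_REMAP_UNTIL = 25
    TICKET_REMAP_OFFSET = 230

    def github_issue(num):
        # illustrative helper, not part of psycopg2
        if num <= TICKET_REMAP_UNTIL:
            num += TICKET_REMAP_OFFSET
        return num

    print(github_issue(12))    # -> 242 (remapped Lighthouse ticket)
    print(github_issue(120))   # -> 120 (already a GitHub issue number)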

View File

@ -295,8 +295,8 @@ The ``connection`` class
.. attribute:: closed
Read-only attribute reporting whether the database connection is open
(0) or closed (1).
Read-only integer attribute: 0 if the connection is open, nonzero if
it is closed or broken.
.. method:: cancel
@ -348,7 +348,7 @@ The ``connection`` class
pair: Transaction; Autocommit
pair: Transaction; Isolation level
.. method:: set_session([isolation_level,] [readonly,] [deferrable,] [autocommit])
.. method:: set_session(isolation_level=None, readonly=None, deferrable=None, autocommit=None)
Set one or more parameters for the next transactions or statements in
the current session. See |SET TRANSACTION|_ for further details.
@ -370,6 +370,7 @@ The ``connection`` class
PostgreSQL session setting but an alias for setting the
`autocommit` attribute.
Parameter passed as `!None` (the default for all) will not be changed.
The parameters *isolation_level*, *readonly* and *deferrable* also
accept the string ``DEFAULT`` as a value: the effect is to reset the
parameter to the server default.
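For instance, a session could be switched to read-only serializable transactions
and later restored to the server defaults (a usage sketch assuming an open
connection ``conn``)::

    conn.set_session(isolation_level='SERIALIZABLE', readonly=True)
    # ... run the read-only transactions ...
    conn.set_session(isolation_level='DEFAULT', readonly='DEFAULT')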
@ -613,6 +614,8 @@ The ``connection`` class
`psycopg2.extensions`: see :ref:`connection-status-constants`
for the available values.
The status is undefined for `closed` connectons.
.. method:: lobject([oid [, mode [, new_oid [, new_file [, lobject_factory]]]]])
@ -678,7 +681,7 @@ The ``connection`` class
Return one of the constants defined in :ref:`poll-constants`. If it
returns `~psycopg2.extensions.POLL_OK` then the connection has been
estabilished or the query results are available on the client.
established or the query results are available on the client.
Otherwise wait until the file descriptor returned by `fileno()` is
ready to read or to write, as explained in :ref:`async-support`.
`poll()` should be also used by the function installed by

View File

@ -215,6 +215,8 @@ The ``cursor`` class
exactly the one that would be sent to the database running the
`~cursor.execute()` method or similar.
The returned string is always a bytes string.
>>> cur.mogrify("INSERT INTO test (num, data) VALUES (%s, %s)", (42, 'bar'))
"INSERT INTO test (num, data) VALUES (42, E'bar')"
@ -332,10 +334,6 @@ The ``cursor`` class
`~psycopg2.ProgrammingError` is raised and the cursor position is
not changed.
The method can be used both for client-side cursors and
:ref:`server-side cursors <server-side-cursors>`. Server-side cursors
can usually scroll backwards only if declared `~cursor.scrollable`.
.. note::
According to the |DBAPI|_, the exception raised for a cursor out
@ -347,6 +345,13 @@ The ``cursor`` class
except (ProgrammingError, IndexError), exc:
deal_with_it(exc)
The method can be used both for client-side cursors and
:ref:`server-side cursors <server-side-cursors>`. Server-side cursors
can usually scroll backwards only if declared `~cursor.scrollable`.
Moving out-of-bound in a server-side cursor doesn't result in an
exception, if the backend doesn't raise any (Postgres doesn't tell us
in a reliable way if we went out of bound).
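A small illustration of the difference (table and values are made up)::

    cur = conn.cursor()
    cur.execute("SELECT generate_series(1, 10)")
    cur.scroll(5)                  # client-side: position moves 5 rows forward
    print(cur.fetchone())          # (6,)
    # cur.scroll(100) would raise ProgrammingError or IndexError here

    named = conn.cursor('mycursor')    # server-side cursor
    named.execute("SELECT generate_series(1, 10)")
    named.scroll(100)              # out-of-bound: no exception from the backend
    print(named.fetchone())        # None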
.. attribute:: arraysize
@ -424,11 +429,11 @@ The ``cursor`` class
.. attribute:: query
Read-only attribute containing the body of the last query sent to the
backend (including bound arguments). `!None` if no query has been
executed yet:
backend (including bound arguments) as bytes string. `!None` if no
query has been executed yet:
>>> cur.execute("INSERT INTO test (num, data) VALUES (%s, %s)", (42, 'bar'))
>>> cur.query
>>> cur.query
"INSERT INTO test (num, data) VALUES (42, E'bar')"
.. extension::

View File

@ -49,8 +49,8 @@ An example of the available constants defined in the module:
>>> errorcodes.UNDEFINED_TABLE
'42P01'
Constants representing all the error values documented by PostgreSQL versions
between 8.1 and 9.2 are included in the module.
Constants representing all the error values defined by PostgreSQL versions
between 8.1 and 9.4 are included in the module.
.. autofunction:: lookup(code)

View File

@ -25,7 +25,7 @@ functionalities defined by the |DBAPI|_.
.. class:: cursor(conn, name=None)
It is the class usually returnded by the `connection.cursor()`
It is the class usually returned by the `connection.cursor()`
method. It is exposed by the `extensions` module in order to allow
subclassing to extend its behaviour: the subclass should be passed to the
`!cursor()` method using the `cursor_factory` parameter. See
@ -352,8 +352,8 @@ details.
`register_type()` to be used.
:param oids: tuple of OIDs of the PostgreSQL type to convert. It should
probably be the oid of the array type (e.g. the ``typarray`` field in
the ``pg_type`` table.
probably contain the oid of the array type (e.g. the ``typarray``
field in the ``pg_type`` table).
:param name: the name of the new type adapter.
:param base_caster: a Psycopg typecaster, e.g. created using the
`new_type()` function. The caster should be able to parse a single
@ -366,11 +366,12 @@ details.
.. note::
The function can be used to create a generic array typecaster,
returning a list of strings: just use the `~psycopg2.STRING` as base
typecaster. For instance, if you want to receive from the database an
array of :sql:`macaddr`, each address represented by string, you can
use::
returning a list of strings: just use `psycopg2.STRING` as base
typecaster. For instance, if you want to receive an array of
:sql:`macaddr` from the database, each address represented by string,
you can use::
# select typarray from pg_type where typname = 'macaddr' -> 1040
psycopg2.extensions.register_type(
psycopg2.extensions.new_array_type(
(1040,), 'MACADDR[]', psycopg2.STRING))
@ -427,7 +428,7 @@ The module exports a few exceptions in addition to the :ref:`standard ones
(subclasses `~psycopg2.OperationalError`)
Error causing transaction rollback (deadlocks, serialisation failures,
Error causing transaction rollback (deadlocks, serialization failures,
etc). It can be trapped specifically to detect a deadlock.
.. versionadded:: 2.0.7
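For example, a transaction hitting a deadlock or serialization failure can be
retried by trapping this exception (a minimal sketch, assuming ``conn`` and a
``counters`` table)::

    import psycopg2.extensions

    for attempt in range(3):
        try:
            cur = conn.cursor()
            cur.execute("UPDATE counters SET value = value + 1 WHERE id = %s", (1,))
            conn.commit()
            break
        except psycopg2.extensions.TransactionRollbackError:
            conn.rollback()    # deadlock or serialization failure: try again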
@ -515,7 +516,7 @@ set to one of the following constants:
:sql:`SERIALIZABLE` isolation level. This is the strictest transactions
isolation level, equivalent to having the transactions executed serially
rather than concurrently. However applications using this level must be
prepared to retry reansactions due to serialization failures.
prepared to retry transactions due to serialization failures.
Starting from PostgreSQL 9.1, this mode monitors for conditions which
could make execution of a concurrent set of serializable transactions

View File

@ -41,7 +41,7 @@ If you want to use a `!connection` subclass you can pass it as the
Dictionary-like cursor
^^^^^^^^^^^^^^^^^^^^^^
The dict cursors allow to access to the retrieved records using an iterface
The dict cursors allow to access to the retrieved records using an interface
similar to the Python dictionaries instead of the tuples.
>>> dict_cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
@ -160,23 +160,27 @@ JSON_ adaptation
^^^^^^^^^^^^^^^^
.. versionadded:: 2.5
.. versionchanged:: 2.5.4
added |jsonb| support. In previous versions |jsonb| values are returned
as strings. See :ref:`the FAQ <faq-jsonb-adapt>` for a workaround.
Psycopg can adapt Python objects to and from the PostgreSQL |pgjson|_ type.
With PostgreSQL 9.2 adaptation is available out-of-the-box. To use JSON data
with previous database versions (either with the `9.1 json extension`__, but
even if you want to convert text fields to JSON) you can use
`register_json()`.
Psycopg can adapt Python objects to and from the PostgreSQL |pgjson|_ and
|jsonb| types. With PostgreSQL 9.2 and following versions adaptation is
available out-of-the-box. To use JSON data with previous database versions
(either with the `9.1 json extension`__, but even if you want to convert text
fields to JSON) you can use the `register_json()` function.
.. __: http://people.planetpostgresql.org/andrew/index.php?/archives/255-JSON-for-PG-9.2-...-and-now-for-9.1!.html
The Python library used to convert Python objects to JSON depends on the
language version: with Python 2.6 and following the :py:mod:`json` module from
the standard library is used; with previous versions the `simplejson`_ module
is used if available. Note that the last `!simplejson` version supporting
Python 2.4 is the 2.0.9.
The Python library used by default to convert Python objects to JSON and to
parse data from the database depends on the language version: with Python 2.6
and following the :py:mod:`json` module from the standard library is used;
with previous versions the `simplejson`_ module is used if available. Note
that the last `!simplejson` version supporting Python 2.4 is the 2.0.9.
.. _JSON: http://www.json.org/
.. |pgjson| replace:: :sql:`json`
.. |jsonb| replace:: :sql:`jsonb`
.. _pgjson: http://www.postgresql.org/docs/current/static/datatype-json.html
.. _simplejson: http://pypi.python.org/pypi/simplejson/
@ -186,8 +190,22 @@ the `Json` adapter::
curs.execute("insert into mytable (jsondata) values (%s)",
[Json({'a': 100})])
Reading from the database, |pgjson| values will be automatically converted to
Python objects.
Reading from the database, |pgjson| and |jsonb| values will be automatically
converted to Python objects.
.. note::
If you are using the PostgreSQL :sql:`json` data type but you want to read
it as a string in Python instead of having it parsed, you can either cast
the column to :sql:`text` in the query (it is an efficient operation that
doesn't involve a copy)::
cur.execute("select jsondata::text from mytable")
or you can register a no-op `!loads()` function with
`register_default_json()`::
psycopg2.extras.register_default_json(loads=lambda x: x)
.. note::
@ -204,7 +222,7 @@ Python objects.
effects.
If you want to customize the adaptation from Python to PostgreSQL you can
either provide a custom `!dumps()` function to `!Json`::
either provide a custom `!dumps()` function to `Json`::
curs.execute("insert into mytable (jsondata) values (%s)",
[Json({'a': 100}, dumps=simplejson.dumps)])
@ -219,9 +237,11 @@ or you can subclass it overriding the `~Json.dumps()` method::
[MyJson({'a': 100})])
Customizing the conversion from PostgreSQL to Python can be done passing a
custom `!loads()` function to `register_json()` (or `register_default_json()`
for PostgreSQL 9.2). For example, if you want to convert the float values
from :sql:`json` into :py:class:`~decimal.Decimal` you can use::
custom `!loads()` function to `register_json()`. For the builtin data types
(|pgjson| from PostgreSQL 9.2, |jsonb| from PostgreSQL 9.4) use
`register_default_json()` and `register_default_jsonb()`. For example, if you
want to convert the float values from :sql:`json` into
:py:class:`~decimal.Decimal` you can use::
loads = lambda x: json.loads(x, parse_float=Decimal)
psycopg2.extras.register_json(conn, loads=loads)
@ -234,8 +254,15 @@ from :sql:`json` into :py:class:`~decimal.Decimal` you can use::
.. autofunction:: register_json
.. versionchanged:: 2.5.4
added the *name* parameter to enable :sql:`jsonb` support.
.. autofunction:: register_default_json
.. autofunction:: register_default_jsonb
.. versionadded:: 2.5.4
.. index::
@ -423,8 +450,16 @@ user-defined |range| types can be adapted using `register_range()`.
`!Range` objects are immutable, hashable, and support the ``in`` operator
(checking if an element is within the range). They can be tested for
equivalence but not for ordering. Empty ranges evaluate to `!False` in
boolean context, nonempty evaluate to `!True`.
equivalence. Empty ranges evaluate to `!False` in boolean context,
nonempty evaluate to `!True`.
.. versionchanged:: 2.5.3
`!Range` objects can be sorted although, as on the server-side, this
ordering is not particularly meaningful. It is only meant to be used
by programs assuming objects using `!Range` as primary key can be
sorted on them. In previous versions comparing `!Range`\s raises
`!TypeError`.
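A quick sketch of the new behaviour (values are illustrative)::

    >>> from psycopg2.extras import NumericRange
    >>> sorted([NumericRange(10, 20), NumericRange(0, 5), NumericRange(0, 15)])
    [NumericRange(0, 5, '[)'), NumericRange(0, 15, '[)'), NumericRange(10, 20, '[)')]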
Although it is possible to instantiate `!Range` objects, the class doesn't
have an adapter registered, so you cannot normally pass these instances as
@ -453,6 +488,17 @@ automatically casted into instances of these classes.
.. autoclass:: DateTimeRange
.. autoclass:: DateTimeTZRange
.. note::
Python lacks a representation for :sql:`infinity` date so Psycopg converts
the value to `date.max` and such. When written into the database these
dates will assume their literal value (e.g. :sql:`9999-12-31` instead of
:sql:`infinity`). Check :ref:`infinite-dates-handling` for an example of
an alternative adapter to map `date.max` to :sql:`infinity`. An
alternative dates adapter will be used automatically by the `DateRange`
adapter and so on.
Custom |range| types (created with |CREATE TYPE|_ :sql:`... AS RANGE`) can be
adapted to a custom `Range` subclass:

View File

@ -121,10 +121,36 @@ Psycopg converts :sql:`decimal`\/\ :sql:`numeric` database types into Python `!D
psycopg2.extensions.register_type(DEC2FLOAT)
See :ref:`type-casting-from-sql-to-python` to read the relevant
documentation. If you find `!psycopg2.extensions.DECIMAL` not avalable, use
documentation. If you find `!psycopg2.extensions.DECIMAL` not available, use
`!psycopg2._psycopg.DECIMAL` instead.
.. _faq-json-adapt:
.. cssclass:: faq
Psycopg automatically converts PostgreSQL :sql:`json` data into Python objects. How can I receive strings instead?
The easiest way to avoid JSON parsing is to register a no-op function with
`~psycopg2.extras.register_default_json()`::
psycopg2.extras.register_default_json(loads=lambda x: x)
See :ref:`adapt-json` for further details.
.. _faq-jsonb-adapt:
.. cssclass:: faq
Psycopg converts :sql:`json` values into Python objects but :sql:`jsonb` values are returned as strings. Can :sql:`jsonb` be converted automatically?
Automatic conversion of :sql:`jsonb` values is supported from Psycopg
release 2.5.4. For previous versions you can register the :sql:`json`
typecaster on the :sql:`jsonb` oids (which are known and not supposed to
change in future PostgreSQL versions)::
psycopg2.extras.register_json(oid=3802, array_oid=3807, globally=True)
See :ref:`adapt-json` for further details.
.. _faq-bytea-9.0:
.. cssclass:: faq

View File

@ -15,16 +15,10 @@ Psycopg 2 is mostly implemented in C as a libpq_ wrapper, resulting in being
both efficient and secure. It features client-side and :ref:`server-side
<server-side-cursors>` cursors, :ref:`asynchronous communication
<async-support>` and :ref:`notifications <async-notify>`, |COPY-TO-FROM|__
support, and a flexible :ref:`objects adaptation system
<python-types-adaptation>`. Many basic Python types are supported
out-of-the-box and mapped to matching PostgreSQL data types, such as strings
(both byte strings and Unicode), numbers (ints, longs, floats, decimals),
booleans and date/time objects (both built-in and `mx.DateTime`_), several
types of :ref:`binary objects <adapt-binary>`. Also available are mappings
between lists and PostgreSQL arrays of any supported type, between
:ref:`dictionaries and PostgreSQL hstore <adapt-hstore>`, between
:ref:`tuples/namedtuples and PostgreSQL composite types <adapt-composite>`,
and between Python objects and :ref:`JSON <adapt-json>`.
support. Many Python types are supported out-of-the-box and :ref:`adapted to
matching PostgreSQL data types <python-types-adaptation>`; adaptation can be
extended and customized thanks to a flexible :ref:`objects adaptation system
<adapting-new-types>`.
Psycopg 2 is both Unicode and Python 3 friendly.

View File

@ -14,9 +14,12 @@ mature as the C implementation yet.
The current `!psycopg2` implementation supports:
..
NOTE: keep consistent with setup.py and the /features/ page.
- Python 2 versions from 2.5 to 2.7
- Python 3 versions from 3.1 to 3.3
- PostgreSQL versions from 7.4 to 9.2
- Python 3 versions from 3.1 to 3.4
- PostgreSQL versions from 7.4 to 9.4
.. _PostgreSQL: http://www.postgresql.org/
.. _Python: http://www.python.org/
@ -202,6 +205,33 @@ supported.
.. index::
single: tests
.. _test-suite:
Running the test suite
^^^^^^^^^^^^^^^^^^^^^^
The included ``Makefile`` allows running all the tests included in the
distribution. Just run::
make
make check
The tests run against a database called ``psycopg2_test`` on the UNIX socket and
the standard port. You can run the tests against a different database by
setting the following environment variables (see the example below):
- :envvar:`PSYCOPG2_TESTDB`
- :envvar:`PSYCOPG2_TESTDB_HOST`
- :envvar:`PSYCOPG2_TESTDB_PORT`
- :envvar:`PSYCOPG2_TESTDB_USER`
The database should already exist before running the tests.
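For example, to run the tests against a database on a different host and port
(the values below are purely illustrative)::

    $ export PSYCOPG2_TESTDB=psycopg2_test
    $ export PSYCOPG2_TESTDB_HOST=dbhost.example.org
    $ export PSYCOPG2_TESTDB_PORT=5433
    $ export PSYCOPG2_TESTDB_USER=postgres
    $ make check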
.. index::
single: debug
single: PSYCOPG_DEBUG
@ -222,13 +252,13 @@ order to create a debug package:
- :ref:`Compile and install <source-package>` the package.
- Set the :envvar:`PSYCOPG_DEBUG` variable::
- Set the :envvar:`PSYCOPG_DEBUG` environment variable::
$ export PSYCOPG_DEBUG=1
- Run your program (making sure that the `!psycopg2` package imported is the
one you just compiled and not e.g. the system one): you will have a copious
stream of informations printed on stdout.
stream of informations printed on stderr.
.. __: http://initd.org/psycopg/download/

View File

@ -111,7 +111,7 @@ The module interface respects the standard defined in the |DBAPI|_.
.. index::
.. index::
single: Exceptions; DB API
.. _dbapi-exceptions:
@ -122,12 +122,12 @@ Exceptions
In compliance with the |DBAPI|_, the module makes informations about errors
available through the following exceptions:
.. exception:: Warning
.. exception:: Warning
Exception raised for important warnings like data truncations while
inserting, etc. It is a subclass of the Python `~exceptions.StandardError`.
.. exception:: Error
.. exception:: Error
Exception that is the base class of all other error exceptions. You can
use this to catch all errors with one single `!except` statement. Warnings
@ -150,7 +150,7 @@ available through the following exceptions:
>>> try:
... cur.execute("SELECT * FROM barf")
... except Exception, e:
... except psycopg2.Error as e:
... pass
>>> e.pgcode
@ -159,6 +159,7 @@ available through the following exceptions:
ERROR: relation "barf" does not exist
LINE 1: SELECT * FROM barf
^
.. attribute:: cursor
The cursor the exception was raised from; `None` if not applicable.
@ -170,7 +171,7 @@ available through the following exceptions:
>>> try:
... cur.execute("SELECT * FROM barf")
... except Exception, e:
... except psycopg2.Error, e:
... pass
>>> e.diag.severity
@ -195,41 +196,41 @@ available through the following exceptions:
Exception raised for errors that are related to the database. It is a
subclass of `Error`.
.. exception:: DataError
Exception raised for errors that are due to problems with the processed
data like division by zero, numeric value out of range, etc. It is a
subclass of `DatabaseError`.
.. exception:: OperationalError
Exception raised for errors that are related to the database's operation
and not necessarily under the control of the programmer, e.g. an
unexpected disconnect occurs, the data source name is not found, a
transaction could not be processed, a memory allocation error occurred
during processing, etc. It is a subclass of `DatabaseError`.
.. exception:: IntegrityError
.. exception:: IntegrityError
Exception raised when the relational integrity of the database is
affected, e.g. a foreign key check fails. It is a subclass of
`DatabaseError`.
.. exception:: InternalError
.. exception:: InternalError
Exception raised when the database encounters an internal error, e.g. the
cursor is not valid anymore, the transaction is out of sync, etc. It is a
subclass of `DatabaseError`.
.. exception:: ProgrammingError
Exception raised for programming errors, e.g. table not found or already
exists, syntax error in the SQL statement, wrong number of parameters
specified, etc. It is a subclass of `DatabaseError`.
.. exception:: NotSupportedError
Exception raised in case a method or database API was used which is not
supported by the database, e.g. requesting a `!rollback()` on a
connection that does not support transaction or has transactions turned

View File

@ -3,37 +3,57 @@
ticket role
~~~~~~~~~~~
An interpreted text role to link docs to lighthouse issues.
An interpreted text role to link docs to tickets issues.
:copyright: Copyright 2013 by Daniele Varrazzo.
"""
import re
from docutils import nodes, utils
from docutils.parsers.rst import roles
def ticket_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
try:
num = int(text.replace('#', ''))
except ValueError:
msg = inliner.reporter.error(
"ticket number must be... a number, got '%s'" % text)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
url_pattern = inliner.document.settings.env.app.config.ticket_url
if url_pattern is None:
cfg = inliner.document.settings.env.app.config
if cfg.ticket_url is None:
msg = inliner.reporter.warning(
"ticket not configured: please configure ticket_url in conf.py")
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
url = url_pattern % num
roles.set_classes(options)
node = nodes.reference(rawtext, 'ticket ' + utils.unescape(text),
refuri=url, **options)
return [node], []
rv = [nodes.Text(name + ' ')]
tokens = re.findall(r'(#?\d+)|([^\d#]+)', text)
for ticket, noise in tokens:
if ticket:
num = int(ticket.replace('#', ''))
# Push numbers of the older tickets ahead.
# We moved the tickets from a different tracker to GitHub and the
# latter already had a few ticket numbers taken (as merge
# requests).
remap_until = cfg.ticket_remap_until
remap_offset = cfg.ticket_remap_offset
if remap_until and remap_offset:
if num <= remap_until:
num += remap_offset
url = cfg.ticket_url % num
roles.set_classes(options)
node = nodes.reference(ticket, utils.unescape(ticket),
refuri=url, **options)
rv.append(node)
else:
assert noise
rv.append(nodes.Text(noise))
return rv, []
def setup(app):
app.add_config_value('ticket_url', None, 'env')
app.add_config_value('ticket_remap_until', None, 'env')
app.add_config_value('ticket_remap_offset', None, 'env')
app.add_role('ticket', ticket_role)
app.add_role('tickets', ticket_role)

View File

@ -100,7 +100,8 @@ many placeholders can use the same values::
... {'int': 10, 'str': "O'Reilly", 'date': datetime.date(2005, 11, 18)})
When parameters are used, in order to include a literal ``%`` in the query you
can use the ``%%`` string.
can use the ``%%`` string. Using characters ``%``, ``(``, ``)`` in the
argument names is not supported.
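For instance (an illustrative query)::

    >>> cur.execute("SELECT %(num)s AS value, '100%%' AS label", {'num': 10})
    >>> cur.fetchone()
    (10, '100%')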
While the mechanism resembles regular Python strings manipulation, there are a
few subtle differences you should care about when passing parameters to a
@ -298,8 +299,8 @@ proper SQL literals::
Numbers adaptation
^^^^^^^^^^^^^^^^^^
Numeric objects: `int`, `long`, `float`, `~decimal.Decimal` are converted in
the PostgreSQL numerical representation::
Python numeric objects `int`, `long`, `float`, `~decimal.Decimal` are
converted into a PostgreSQL numerical representation::
>>> cur.mogrify("SELECT %s, %s, %s, %s;", (10, 10L, 10.0, Decimal("10.00")))
'SELECT 10, 10, 10.0, 10.00;'
@ -311,7 +312,7 @@ converted into `!Decimal`.
.. note::
Sometimes you may prefer to receive :sql:`numeric` data as `!float`
insted, for performance reason or ease of manipulation: you can configure
instead, for performance reason or ease of manipulation: you can configure
an adapter to :ref:`cast PostgreSQL numeric to Python float <faq-float>`.
This of course may imply a loss of precision.
@ -422,7 +423,7 @@ the connection or globally: see the function
Binary adaptation
^^^^^^^^^^^^^^^^^
Binary types: Python types representing binary objects are converted into
Python types representing binary objects are converted into
PostgreSQL binary string syntax, suitable for :sql:`bytea` fields. Such
types are `buffer` (only available in Python 2), `memoryview` (available
from Python 2.7), `bytearray` (available from Python 2.6) and `bytes`
@ -477,7 +478,7 @@ or `!memoryview` (in Python 3).
Date/Time objects adaptation
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Date and time objects: builtin `~datetime.datetime`, `~datetime.date`,
Python builtin `~datetime.datetime`, `~datetime.date`,
`~datetime.time`, `~datetime.timedelta` are converted into PostgreSQL's
:sql:`timestamp[tz]`, :sql:`date`, :sql:`time`, :sql:`interval` data types.
Time zones are supported too. The Egenix `mx.DateTime`_ objects are adapted
@ -496,6 +497,7 @@ the same way::
.. seealso:: `PostgreSQL date/time types
<http://www.postgresql.org/docs/current/static/datatype-datetime.html>`__
.. index::
single: Time Zones
@ -530,6 +532,40 @@ rounded to the nearest minute, with an error of up to 30 seconds.
versions use `psycopg2.extras.register_tstz_w_secs()`.
.. index::
double: Date objects; Infinite
.. _infinite-dates-handling:
Infinite dates handling
'''''''''''''''''''''''
PostgreSQL can store the representation of an "infinite" date, timestamp, or
interval. Infinite dates are not available to Python, so these objects are
mapped to `!date.max`, `!datetime.max`, `!interval.max`. Unfortunately the
mapping cannot be bidirectional so these dates will be stored back into the
database with their values, such as :sql:`9999-12-31`.
It is possible to create an alternative adapter for dates and other objects
to map `date.max` to :sql:`infinity`, for instance::
class InfDateAdapter:
def __init__(self, wrapped):
self.wrapped = wrapped
def getquoted(self):
if self.wrapped == datetime.date.max:
return b"'infinity'::date"
elif self.wrapped == datetime.date.min:
return b"'-infinity'::date"
else:
return psycopg2.extensions.DateFromPy(self.wrapped).getquoted()
psycopg2.extensions.register_adapter(datetime.date, InfDateAdapter)
Of course it will not be possible to write the value of `date.max` in the
database anymore: :sql:`infinity` will be stored instead.
.. _adapt-list:
Lists adaptation
@ -560,7 +596,7 @@ Python lists are converted into PostgreSQL :sql:`ARRAY`\ s::
.. note::
Reading back from PostgreSQL, arrays are converted to lists of Python
objects as expected, but only if the items are of a known known type.
objects as expected, but only if the items are of a known type.
Arrays of unknown types are returned as represented by the database (e.g.
``{a,b,c}``). If you want to convert the items into Python objects you can
easily create a typecaster for :ref:`array of unknown types
@ -576,7 +612,7 @@ Tuples adaptation
double: Tuple; Adaptation
single: IN operator
Python tuples are converted in a syntax suitable for the SQL :sql:`IN`
Python tuples are converted into a syntax suitable for the SQL :sql:`IN`
operator and to represent a composite type::
>>> cur.mogrify("SELECT %s IN %s;", (10, (10, 20, 30)))

View File

@ -24,7 +24,7 @@ import psycopg2
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dns:", DSN
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding
@ -79,7 +79,7 @@ for row in curs.fetchall():
print "done"
print " python type of image data is", type(row[0])
# this rollback is required because we can't drop a table with a binary cusor
# this rollback is required because we can't drop a table with a binary cursor
# declared and still open
conn.rollback()

View File

@ -3,15 +3,15 @@
# Copyright (C) 2002 Tom Jenkins <tjenkins@devis.com>
# Copyright (C) 2005 Federico Di Gregorio <fog@initd.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2, or (at your option) any later
# version.
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTIBILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
@ -27,7 +27,7 @@ import psycopg2
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dns:", DSN
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding

View File

@ -3,15 +3,15 @@
# Copyright (C) 2002 Tom Jenkins <tjenkins@devis.com>
# Copyright (C) 2005 Federico Di Gregorio <fog@initd.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2, or (at your option) any later
# version.
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTIBILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
@ -27,7 +27,7 @@ import psycopg2
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dns:", DSN
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding

View File

@ -28,7 +28,7 @@ from psycopg2.extensions import adapt
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dns:", DSN
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
curs = conn.cursor()

View File

@ -1,4 +1,4 @@
# encoding.py - show to change client enkoding (and test it works)
# encoding.py - show to change client encoding (and test it works)
# -*- encoding: utf8 -*-
#
# Copyright (C) 2004-2010 Federico Di Gregorio <fog@debian.org>
@ -26,7 +26,7 @@ import psycopg2.extensions
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dns:", DSN
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Initial encoding for this connection is", conn.encoding

View File

@ -24,7 +24,7 @@ import psycopg2
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dns:", DSN
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding

View File

@ -23,7 +23,7 @@ import sys, psycopg2
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dns:", DSN
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
curs = conn.cursor()

View File

@ -2,15 +2,15 @@
#
# Copyright (C) 2001-2006 Federico Di Gregorio <fog@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2, or (at your option) any later
# version.
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTIBILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
## put in DSN your DSN string
@ -24,7 +24,7 @@ import psycopg2
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dns:", DSN
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding

View File

@ -24,7 +24,7 @@ import sys, psycopg2
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dns:", DSN
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding

View File

@ -26,7 +26,7 @@ from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dns:", DSN
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding

View File

@ -30,7 +30,7 @@ import psycopg2
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dns:", DSN
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding

View File

@ -45,7 +45,7 @@ if len(sys.argv) > 1:
if len(sys.argv) > 2:
MODE = int(sys.argv[2])
print "Opening connection using dns:", DSN
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
curs = conn.cursor()

View File

@ -29,7 +29,7 @@ import psycopg2.extensions
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dns:", DSN
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Encoding for this connection is", conn.encoding

View File

@ -28,7 +28,7 @@ from psycopg2.tz import ZERO, LOCAL, FixedOffsetTimezone
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dns:", DSN
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
curs = conn.cursor()

View File

@ -33,7 +33,7 @@ import psycopg2.extras
if len(sys.argv) > 1:
DSN = sys.argv[1]
print "Opening connection using dns:", DSN
print "Opening connection using dsn:", DSN
conn = psycopg2.connect(DSN)
print "Initial encoding for this connection is", conn.encoding

View File

@ -47,6 +47,10 @@ else:
JSON_OID = 114
JSONARRAY_OID = 199
# oids from PostgreSQL 9.4
JSONB_OID = 3802
JSONBARRAY_OID = 3807
class Json(object):
"""
An `~psycopg2.extensions.ISQLQuote` wrapper to adapt a Python object to
@ -94,7 +98,7 @@ class Json(object):
def register_json(conn_or_curs=None, globally=False, loads=None,
oid=None, array_oid=None):
oid=None, array_oid=None, name='json'):
"""Create and register typecasters converting :sql:`json` type to Python objects.
:param conn_or_curs: a connection or cursor used to find the :sql:`json`
@ -110,17 +114,19 @@ def register_json(conn_or_curs=None, globally=False, loads=None,
queried on *conn_or_curs*
:param array_oid: the OID of the :sql:`json[]` array type if known;
if not, it will be queried on *conn_or_curs*
:param name: the name of the data type to look for in *conn_or_curs*
The connection or cursor passed to the function will be used to query the
database and look for the OID of the :sql:`json` type. No query is
performed if *oid* and *array_oid* are provided. Raise
`~psycopg2.ProgrammingError` if the type is not found.
database and look for the OID of the :sql:`json` type (or an alternative
type if *name* is provided). No query is performed if *oid* and *array_oid*
are provided. Raise `~psycopg2.ProgrammingError` if the type is not found.
"""
if oid is None:
oid, array_oid = _get_json_oids(conn_or_curs)
oid, array_oid = _get_json_oids(conn_or_curs, name)
JSON, JSONARRAY = _create_json_typecasters(oid, array_oid, loads)
JSON, JSONARRAY = _create_json_typecasters(
oid, array_oid, loads=loads, name=name.upper())
register_type(JSON, not globally and conn_or_curs or None)
@ -141,7 +147,19 @@ def register_default_json(conn_or_curs=None, globally=False, loads=None):
return register_json(conn_or_curs=conn_or_curs, globally=globally,
loads=loads, oid=JSON_OID, array_oid=JSONARRAY_OID)
def _create_json_typecasters(oid, array_oid, loads=None):
def register_default_jsonb(conn_or_curs=None, globally=False, loads=None):
"""
Create and register :sql:`jsonb` typecasters for PostgreSQL 9.4 and following.
As in `register_default_json()`, the function allows registering a
customized *loads* function for the :sql:`jsonb` type at its known oid for
PostgreSQL 9.4 and following versions. All the parameters have the same
meaning as in `register_json()`.
"""
return register_json(conn_or_curs=conn_or_curs, globally=globally,
loads=loads, oid=JSONB_OID, array_oid=JSONBARRAY_OID, name='jsonb')
def _create_json_typecasters(oid, array_oid, loads=None, name='JSON'):
"""Create typecasters for json data type."""
if loads is None:
if json is None:
@ -154,15 +172,15 @@ def _create_json_typecasters(oid, array_oid, loads=None):
return None
return loads(s)
JSON = new_type((oid, ), 'JSON', typecast_json)
JSON = new_type((oid, ), name, typecast_json)
if array_oid is not None:
JSONARRAY = new_array_type((array_oid, ), "JSONARRAY", JSON)
JSONARRAY = new_array_type((array_oid, ), "%sARRAY" % name, JSON)
else:
JSONARRAY = None
return JSON, JSONARRAY
def _get_json_oids(conn_or_curs):
def _get_json_oids(conn_or_curs, name='json'):
# lazy imports
from psycopg2.extensions import STATUS_IN_TRANSACTION
from psycopg2.extras import _solve_conn_curs
@ -177,8 +195,8 @@ def _get_json_oids(conn_or_curs):
# get the oid for the hstore
curs.execute(
"SELECT t.oid, %s FROM pg_type t WHERE t.typname = 'json';"
% typarray)
"SELECT t.oid, %s FROM pg_type t WHERE t.typname = %%s;"
% typarray, (name,))
r = curs.fetchone()
# revert the status of the connection as before the command
@ -186,7 +204,7 @@ def _get_json_oids(conn_or_curs):
conn.rollback()
if not r:
raise conn.ProgrammingError("json data type not found")
raise conn.ProgrammingError("%s data type not found" % name)
return r
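A usage sketch of the new `!register_default_jsonb()` function, assuming a
connection ``conn`` to a PostgreSQL 9.4 database (the custom *loads* mirrors the
Decimal example in the docs)::

    import json
    from decimal import Decimal
    import psycopg2.extras

    # parse jsonb floats as Decimal instead of float on this connection
    psycopg2.extras.register_default_jsonb(
        conn, loads=lambda s: json.loads(s, parse_float=Decimal))

    cur = conn.cursor()
    cur.execute("""SELECT '{"price": 9.99}'::jsonb""")
    print(cur.fetchone()[0])    # {'price': Decimal('9.99')}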

View File

@ -121,6 +121,8 @@ class Range(object):
return self._bounds is not None
def __eq__(self, other):
if not isinstance(other, Range):
return False
return (self._lower == other._lower
and self._upper == other._upper
and self._bounds == other._bounds)
@ -131,12 +133,43 @@ class Range(object):
def __hash__(self):
return hash((self._lower, self._upper, self._bounds))
def __lt__(self, other):
raise TypeError(
'Range objects cannot be ordered; please refer to the PostgreSQL'
' documentation to perform this operation in the database')
# as the postgres docs describe for the server-side stuff,
# ordering is rather arbitrary, but will remain stable
# and consistent.
__le__ = __gt__ = __ge__ = __lt__
def __lt__(self, other):
if not isinstance(other, Range):
return NotImplemented
for attr in ('_lower', '_upper', '_bounds'):
self_value = getattr(self, attr)
other_value = getattr(other, attr)
if self_value == other_value:
pass
elif self_value is None:
return True
elif other_value is None:
return False
else:
return self_value < other_value
return False
def __le__(self, other):
if self == other:
return True
else:
return self.__lt__(other)
def __gt__(self, other):
if isinstance(other, Range):
return other.__lt__(self)
else:
return NotImplemented
def __ge__(self, other):
if self == other:
return True
else:
return self.__gt__(other)
def register_range(pgrange, pyrange, conn_or_curs, globally=False):
@ -354,7 +387,7 @@ where typname = %s and ns.nspname = %s;
m = self._re_range.match(s)
if m is None:
raise InterfaceError("failed to parse range: %s")
raise InterfaceError("failed to parse range: '%s'" % s)
lower = m.group(3)
if lower is None:
@ -415,7 +448,7 @@ class NumberRangeAdapter(RangeAdapter):
def getquoted(self):
r = self.adapted
if r.isempty:
return "'empty'"
return b("'empty'")
if not r.lower_inf:
# not exactly: we are relying that none of these object is really
@ -431,8 +464,8 @@ class NumberRangeAdapter(RangeAdapter):
else:
upper = ''
return b("'%s%s,%s%s'" % (
r._bounds[0], lower, upper, r._bounds[1]))
return ("'%s%s,%s%s'" % (
r._bounds[0], lower, upper, r._bounds[1])).encode('ascii')
# TODO: probably won't work with infs, nans and other tricky cases.
register_adapter(NumericRange, NumberRangeAdapter)
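For reference, the adapter above produces range literals such as the following
(an illustrative snippet, ``cur`` being any open cursor)::

    >>> from psycopg2.extras import NumericRange
    >>> cur.mogrify("SELECT %s::numrange", (NumericRange(10, 20),))
    "SELECT '[10,20)'::numrange"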

View File

@ -82,7 +82,7 @@ STATUS_SYNC = 3 # currently unused
STATUS_ASYNC = 4 # currently unused
STATUS_PREPARED = 5
# This is a usefull mnemonic to check if the connection is in a transaction
# This is a useful mnemonic to check if the connection is in a transaction
STATUS_IN_TRANSACTION = STATUS_BEGIN
"""psycopg asynchronous connection polling values"""
@ -152,20 +152,22 @@ class NoneAdapter(object):
# Create default json typecasters for PostgreSQL 9.2 oids
from psycopg2._json import register_default_json
from psycopg2._json import register_default_json, register_default_jsonb
try:
JSON, JSONARRAY = register_default_json()
JSONB, JSONBARRAY = register_default_jsonb()
except ImportError:
pass
del register_default_json
del register_default_json, register_default_jsonb
# Create default Range typecasters
from psycopg2._range import Range
del Range
# Add the "cleaned" version of the encodings to the key.
# When the encoding is set its name is cleaned up from - and _ and turned
# uppercase, so an encoding not respecting these rules wouldn't be found in the

View File

@ -1,7 +1,7 @@
"""Miscellaneous goodies for psycopg2
This module is a generic place used to hold little helper functions
and classes untill a better place in the distribution is found.
and classes until a better place in the distribution is found.
"""
# psycopg/extras.py - miscellaneous extra goodies for psycopg
#
@ -131,7 +131,7 @@ class DictCursor(DictCursorBase):
self._query_executed = 0
class DictRow(list):
"""A row object that allow by-colmun-name access to data."""
"""A row object that allow by-column-name access to data."""
__slots__ = ('_index',)
@ -406,7 +406,7 @@ class MinTimeLoggingConnection(LoggingConnection):
This is just an example of how to sub-class `LoggingConnection` to
provide some extra filtering for the logged queries. Both the
`inizialize()` and `filter()` methods are overwritten to make sure
`initialize()` and `filter()` methods are overwritten to make sure
that only queries executing for more than ``mintime`` ms are logged.
Note that this connection uses the specialized cursor
@ -449,13 +449,15 @@ class UUID_adapter(object):
def __init__(self, uuid):
self._uuid = uuid
def prepare(self, conn):
pass
def __conform__(self, proto):
if proto is _ext.ISQLQuote:
return self
def getquoted(self):
return "'"+str(self._uuid)+"'::uuid"
return b("'%s'::uuid" % self._uuid)
__str__ = getquoted
def __str__(self):
return "'%s'::uuid" % self._uuid
def register_uuid(oids=None, conn_or_curs=None):
"""Create the UUID type and an uuid.UUID adapter.
@ -514,8 +516,8 @@ class Inet(object):
obj.prepare(self._conn)
return obj.getquoted() + b("::inet")
def __conform__(self, foo):
if foo is _ext.ISQLQuote:
def __conform__(self, proto):
if proto is _ext.ISQLQuote:
return self
def __str__(self):
@ -963,7 +965,8 @@ def register_composite(name, conn_or_curs, globally=False, factory=None):
# expose the json adaptation stuff into the module
from psycopg2._json import json, Json, register_json, register_default_json
from psycopg2._json import json, Json, register_json
from psycopg2._json import register_default_json, register_default_jsonb
# Expose range-related objects

View File

@ -42,8 +42,8 @@ class AbstractConnectionPool(object):
with given parameters. The connection pool will support a maximum of
about 'maxconn' connections.
"""
self.minconn = minconn
self.maxconn = maxconn
self.minconn = int(minconn)
self.maxconn = int(maxconn)
self.closed = False
self._args = args
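A minimal pool usage sketch (the DSN is illustrative); after this change,
*minconn* and *maxconn* values read e.g. from a config file as strings are
coerced to integers::

    from psycopg2 import pool

    p = pool.SimpleConnectionPool("1", "5", "dbname=psycopg2_test")
    conn = p.getconn()
    try:
        cur = conn.cursor()
        cur.execute("SELECT 1")
        print(cur.fetchone())
    finally:
        p.putconn(conn)
    p.closeall()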

View File

@ -45,7 +45,7 @@ typedef struct {
HIDDEN PyObject *psyco_Binary(PyObject *module, PyObject *args);
#define psyco_Binary_doc \
"Binary(buffer) -> new binary object\n\n" \
"Build an object capable to hold a bynary string value."
"Build an object capable to hold a binary string value."
#ifdef __cplusplus
}

View File

@ -35,9 +35,6 @@
#include <string.h>
extern HIDDEN PyObject *pyPsycopgTzModule;
extern HIDDEN PyObject *pyPsycopgTzLOCAL;
int
psyco_adapter_datetime_init(void)
{
@ -392,9 +389,9 @@ psyco_DateFromTicks(PyObject *self, PyObject *args)
Py_DECREF(args);
}
}
else {
PyErr_SetString(InterfaceError, "failed localtime call");
}
else {
PyErr_SetString(InterfaceError, "failed localtime call");
}
return res;
}
@ -420,9 +417,9 @@ psyco_TimeFromTicks(PyObject *self, PyObject *args)
Py_DECREF(args);
}
}
else {
PyErr_SetString(InterfaceError, "failed localtime call");
}
else {
PyErr_SetString(InterfaceError, "failed localtime call");
}
return res;
}
@ -430,6 +427,8 @@ psyco_TimeFromTicks(PyObject *self, PyObject *args)
PyObject *
psyco_TimestampFromTicks(PyObject *self, PyObject *args)
{
PyObject *m = NULL;
PyObject *tz = NULL;
PyObject *res = NULL;
struct tm tm;
time_t t;
@ -438,18 +437,25 @@ psyco_TimestampFromTicks(PyObject *self, PyObject *args)
if (!PyArg_ParseTuple(args, "d", &ticks))
return NULL;
/* get psycopg2.tz.LOCAL from pythonland */
if (!(m = PyImport_ImportModule("psycopg2.tz"))) { goto exit; }
if (!(tz = PyObject_GetAttrString(m, "LOCAL"))) { goto exit; }
t = (time_t)floor(ticks);
ticks -= (double)t;
if (localtime_r(&t, &tm)) {
res = _psyco_Timestamp(
tm.tm_year + 1900, tm.tm_mon + 1, tm.tm_mday,
tm.tm_hour, tm.tm_min, (double)tm.tm_sec + ticks,
pyPsycopgTzLOCAL);
if (!localtime_r(&t, &tm)) {
PyErr_SetString(InterfaceError, "failed localtime call");
goto exit;
}
else {
PyErr_SetString(InterfaceError, "failed localtime call");
}
res = _psyco_Timestamp(
tm.tm_year + 1900, tm.tm_mon + 1, tm.tm_mday,
tm.tm_hour, tm.tm_min, (double)tm.tm_sec + ticks,
tz);
exit:
Py_DECREF(tz);
Py_XDECREF(m);
return res;
}

View File

@ -272,10 +272,10 @@ PyTypeObject listType = {
PyObject *
psyco_List(PyObject *module, PyObject *args)
{
PyObject *str;
PyObject *obj;
if (!PyArg_ParseTuple(args, "O", &str))
if (!PyArg_ParseTuple(args, "O", &obj))
return NULL;
return PyObject_CallFunctionObjArgs((PyObject *)&listType, "O", str, NULL);
return PyObject_CallFunctionObjArgs((PyObject *)&listType, obj, NULL);
}

View File

@ -141,7 +141,10 @@ static int pthread_mutex_init(pthread_mutex_t *mutex, void* fake)
#endif
#endif
#if (defined(__FreeBSD__) && __FreeBSD_version < 503000) || (defined(_WIN32) && !defined(__GNUC__)) || defined(__sun__) || defined(sun)
#if (defined(__FreeBSD__) && __FreeBSD_version < 503000) \
|| (defined(_WIN32) && !defined(__GNUC__)) \
|| (defined(sun) || defined(__sun__)) \
&& (defined(__SunOS_5_8) || defined(__SunOS_5_9))
/* what's this, we have no round function either? */
static double round(double num)
{

View File

@ -226,7 +226,7 @@ conn_get_standard_conforming_strings(PGconn *pgconn)
* The presence of the 'standard_conforming_strings' parameter
* means that the server _accepts_ the E'' quote.
*
* If the paramer is off, the PQescapeByteaConn returns
* If the parameter is off, the PQescapeByteaConn returns
* backslash escaped strings (e.g. '\001' -> "\\001"),
* so the E'' quotes are required to avoid warnings
* if 'escape_string_warning' is set.
@ -506,10 +506,6 @@ conn_setup(connectionObject *self, PGconn *pgconn)
pthread_mutex_lock(&self->lock);
Py_BLOCK_THREADS;
if (psyco_green() && (0 > pq_set_non_blocking(self, 1))) {
return -1;
}
if (!conn_is_datestyle_ok(self->pgconn)) {
int res;
Py_UNBLOCK_THREADS;
@ -573,6 +569,9 @@ _conn_sync_connect(connectionObject *self)
/* if the connection is green, wait to finish connection */
if (green) {
if (0 > pq_set_non_blocking(self, 1)) {
return -1;
}
if (0 != psyco_wait(self)) {
return -1;
}
@ -614,6 +613,11 @@ _conn_async_connect(connectionObject *self)
PQsetNoticeProcessor(pgconn, conn_notice_callback, (void*)self);
/* Set the connection to nonblocking now. */
if (pq_set_non_blocking(self, 1) != 0) {
return -1;
}
/* The connection will be completed banging on poll():
* First with _conn_poll_connecting() that will finish connection,
* then with _conn_poll_setup_async() that will do the same job
@ -625,14 +629,23 @@ _conn_async_connect(connectionObject *self)
int
conn_connect(connectionObject *self, long int async)
{
if (async == 1) {
int rv;
if (async == 1) {
Dprintf("con_connect: connecting in ASYNC mode");
return _conn_async_connect(self);
rv = _conn_async_connect(self);
}
else {
Dprintf("con_connect: connecting in SYNC mode");
return _conn_sync_connect(self);
rv = _conn_sync_connect(self);
}
if (rv != 0) {
/* connection failed, so let's close ourselves */
self->closed = 2;
}
return rv;
}
@ -642,6 +655,7 @@ static int
_conn_poll_connecting(connectionObject *self)
{
int res = PSYCO_POLL_ERROR;
const char *msg;
Dprintf("conn_poll: poll connecting");
switch (PQconnectPoll(self->pgconn)) {
@ -656,7 +670,11 @@ _conn_poll_connecting(connectionObject *self)
break;
case PGRES_POLLING_FAILED:
case PGRES_POLLING_ACTIVE:
PyErr_SetString(OperationalError, "asynchronous connection failed");
msg = PQerrorMessage(self->pgconn);
if (!(msg && *msg)) {
msg = "asynchronous connection failed";
}
PyErr_SetString(OperationalError, msg);
res = PSYCO_POLL_ERROR;
break;
}
@ -783,11 +801,6 @@ _conn_poll_setup_async(connectionObject *self)
switch (self->status) {
case CONN_STATUS_CONNECTING:
/* Set the connection to nonblocking now. */
if (pq_set_non_blocking(self, 1) != 0) {
break;
}
self->equote = conn_get_standard_conforming_strings(self->pgconn);
self->protocol = conn_get_protocol_version(self->pgconn);
self->server_version = conn_get_server_version(self->pgconn);
@ -1177,7 +1190,7 @@ conn_set_client_encoding(connectionObject *self, const char *enc)
goto endlock;
}
/* no error, we can proceeed and store the new encoding */
/* no error, we can proceed and store the new encoding */
{
char *tmp = self->encoding;
self->encoding = clean_enc;
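
Besides the typo fixes, this file now sets the non-blocking flag earlier in the sync and async paths, marks the connection as closed (closed = 2) when conn_connect fails, and reports the libpq error text for a failed asynchronous connection. A hedged sketch of the last point, mirroring the async test added further down (the database name is assumed not to exist):

    import select
    import psycopg2
    from psycopg2 import extensions

    try:
        cnn = psycopg2.connect('dbname=thisdatabasedoesntexist', async=True)
        while True:
            state = cnn.poll()
            if state == extensions.POLL_OK:
                break
            elif state == extensions.POLL_READ:
                select.select([cnn], [], [], 10)
            elif state == extensions.POLL_WRITE:
                select.select([], [cnn], [], 10)
    except psycopg2.OperationalError as e:
        print(e)    # the libpq message, not just "asynchronous connection failed"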

View File

@ -55,7 +55,7 @@ psyco_conn_cursor(connectionObject *self, PyObject *args, PyObject *kwargs)
PyObject *obj = NULL;
PyObject *rv = NULL;
PyObject *name = Py_None;
PyObject *factory = (PyObject *)&cursorType;
PyObject *factory = Py_None;
PyObject *withhold = Py_False;
PyObject *scrollable = Py_None;
@ -64,16 +64,21 @@ psyco_conn_cursor(connectionObject *self, PyObject *args, PyObject *kwargs)
EXC_IF_CONN_CLOSED(self);
if (self->cursor_factory && self->cursor_factory != Py_None) {
factory = self->cursor_factory;
}
if (!PyArg_ParseTupleAndKeywords(
args, kwargs, "|OOOO", kwlist,
&name, &factory, &withhold, &scrollable)) {
goto exit;
}
if (factory == Py_None) {
if (self->cursor_factory && self->cursor_factory != Py_None) {
factory = self->cursor_factory;
}
else {
factory = (PyObject *)&cursorType;
}
}
if (self->status != CONN_STATUS_READY &&
self->status != CONN_STATUS_BEGIN &&
self->status != CONN_STATUS_PREPARED) {
@ -700,8 +705,6 @@ psyco_conn_set_client_encoding(connectionObject *self, PyObject *args)
static PyObject *
psyco_conn_get_transaction_status(connectionObject *self)
{
EXC_IF_CONN_CLOSED(self);
return PyInt_FromLong((long)PQtransactionStatus(self->pgconn));
}
@ -748,7 +751,7 @@ psyco_conn_get_parameter_status(connectionObject *self, PyObject *args)
static PyObject *
psyco_conn_lobject(connectionObject *self, PyObject *args, PyObject *keywds)
{
int oid = (int)InvalidOid, new_oid = (int)InvalidOid;
Oid oid = InvalidOid, new_oid = InvalidOid;
const char *new_file = NULL;
const char *smode = "";
PyObject *factory = (PyObject *)&lobjectType;
@ -757,7 +760,7 @@ psyco_conn_lobject(connectionObject *self, PyObject *args, PyObject *keywds)
static char *kwlist[] = {"oid", "mode", "new_oid", "new_file",
"cursor_factory", NULL};
if (!PyArg_ParseTupleAndKeywords(args, keywds, "|izizO", kwlist,
if (!PyArg_ParseTupleAndKeywords(args, keywds, "|IzIzO", kwlist,
&oid, &smode, &new_oid, &new_file,
&factory)) {
return NULL;
@ -769,16 +772,16 @@ psyco_conn_lobject(connectionObject *self, PyObject *args, PyObject *keywds)
EXC_IF_TPC_PREPARED(self, lobject);
Dprintf("psyco_conn_lobject: new lobject for connection at %p", self);
Dprintf("psyco_conn_lobject: parameters: oid = %d, mode = %s",
Dprintf("psyco_conn_lobject: parameters: oid = %u, mode = %s",
oid, smode);
Dprintf("psyco_conn_lobject: parameters: new_oid = %d, new_file = %s",
new_oid, new_file);
if (new_file)
obj = PyObject_CallFunction(factory, "Oisis",
obj = PyObject_CallFunction(factory, "OIsIs",
self, oid, smode, new_oid, new_file);
else
obj = PyObject_CallFunction(factory, "Oisi",
obj = PyObject_CallFunction(factory, "OIsI",
self, oid, smode, new_oid);
if (obj == NULL) return NULL;
@ -1099,6 +1102,7 @@ connection_setup(connectionObject *self, const char *dsn, long int async)
res = 0;
}
exit:
/* here we obfuscate the password even if there was a connection error */
pos = strstr(self->dsn, "password");
if (pos != NULL) {
@ -1106,7 +1110,6 @@ connection_setup(connectionObject *self, const char *dsn, long int async)
*pos = 'x';
}
exit:
return res;
}
@ -1128,10 +1131,13 @@ connection_dealloc(PyObject* obj)
{
connectionObject *self = (connectionObject *)obj;
conn_close(self);
/* Make sure to untrack the connection before calling conn_close, which may
* allow a different thread to try and dealloc the connection again,
* resulting in a double-free segfault (ticket #166). */
PyObject_GC_UnTrack(self);
conn_close(self);
if (self->weakreflist) {
PyObject_ClearWeakRefs(obj);
}
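
Two behaviour changes above are easy to miss in the noise: cursor() now treats cursor_factory=None as "use the connection's factory, else the default", and oids are handled as unsigned values ("I" format) in lobject(). A short sketch of the first one, mirroring test_cursor_factory_none below (dsn is assumed to be a valid connection string):

    import psycopg2
    import psycopg2.extras

    # dsn: assumed, any valid connection string
    conn = psycopg2.connect(dsn, cursor_factory=psycopg2.extras.DictCursor)
    cur = conn.cursor(cursor_factory=None)
    print(type(cur))    # psycopg2.extras.DictCursor, not the default cursor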

View File

@ -97,30 +97,44 @@ HIDDEN int psyco_curs_scrollable_set(cursorObject *self, PyObject *pyvalue);
/* exception-raising macros */
#define EXC_IF_CURS_CLOSED(self) \
if ((self)->closed || ((self)->conn && (self)->conn->closed)) { \
PyErr_SetString(InterfaceError, "cursor already closed"); \
return NULL; }
do { \
if (!(self)->conn) { \
PyErr_SetString(InterfaceError, "the cursor has no connection"); \
return NULL; } \
if ((self)->closed || (self)->conn->closed) { \
PyErr_SetString(InterfaceError, "cursor already closed"); \
return NULL; } \
} while (0)
#define EXC_IF_NO_TUPLES(self) \
if ((self)->notuples && (self)->name == NULL) { \
PyErr_SetString(ProgrammingError, "no results to fetch"); \
return NULL; }
do \
if ((self)->notuples && (self)->name == NULL) { \
PyErr_SetString(ProgrammingError, "no results to fetch"); \
return NULL; } \
while (0)
#define EXC_IF_NO_MARK(self) \
if ((self)->mark != (self)->conn->mark && (self)->withhold == 0) { \
PyErr_SetString(ProgrammingError, "named cursor isn't valid anymore"); \
return NULL; }
do \
if ((self)->mark != (self)->conn->mark && (self)->withhold == 0) { \
PyErr_SetString(ProgrammingError, "named cursor isn't valid anymore"); \
return NULL; } \
while (0)
#define EXC_IF_CURS_ASYNC(self, cmd) if ((self)->conn->async == 1) { \
PyErr_SetString(ProgrammingError, #cmd " cannot be used " \
"in asynchronous mode"); \
return NULL; }
#define EXC_IF_CURS_ASYNC(self, cmd) \
do \
if ((self)->conn->async == 1) { \
PyErr_SetString(ProgrammingError, \
#cmd " cannot be used in asynchronous mode"); \
return NULL; } \
while (0)
#define EXC_IF_ASYNC_IN_PROGRESS(self, cmd) \
if ((self)->conn->async_cursor != NULL) { \
PyErr_SetString(ProgrammingError, #cmd " cannot be used " \
"while an asynchronous query is underway"); \
return NULL; }
do \
if ((self)->conn->async_cursor != NULL) { \
PyErr_SetString(ProgrammingError, \
#cmd " cannot be used while an asynchronous query is underway"); \
return NULL; } \
while (0)
#ifdef __cplusplus
}

View File

@ -39,9 +39,6 @@
#include <stdlib.h>
extern PyObject *pyPsycopgTzFixedOffsetTimezone;
/** DBAPI methods **/
/* close method - close the cursor */
@ -63,7 +60,7 @@ psyco_curs_close(cursorObject *self)
EXC_IF_NO_MARK(self);
PyOS_snprintf(buffer, 127, "CLOSE \"%s\"", self->name);
if (pq_execute(self, buffer, 0, 0) == -1) return NULL;
if (pq_execute(self, buffer, 0, 0, 1) == -1) return NULL;
}
self->closed = 1;
@ -109,7 +106,7 @@ _mogrify(PyObject *var, PyObject *fmt, cursorObject *curs, PyObject **new)
/* if we find '%(' then this is a dictionary, we:
1/ find the matching ')' and extract the key name
2/ locate the value in the dictionary (or return an error)
3/ mogrify the value into something usefull (quoting)...
3/ mogrify the value into something useful (quoting)...
4/ ...and add it to the new dictionary to be used as argument
*/
case '(':
@ -314,7 +311,7 @@ _psyco_curs_merge_query_args(cursorObject *self,
"not all arguments converted"
and return the appropriate ProgrammingError. we do that by grabbing
the curren exception (we will later restore it if the type or the
the current exception (we will later restore it if the type or the
strings do not match.) */
if (!(fquery = Bytes_Format(query, args))) {
@ -444,7 +441,7 @@ _psyco_curs_execute(cursorObject *self,
/* At this point, the SQL statement must be str, not unicode */
tmp = pq_execute(self, Bytes_AS_STRING(self->query), async, no_result);
tmp = pq_execute(self, Bytes_AS_STRING(self->query), async, no_result, 0);
Dprintf("psyco_curs_execute: res = %d, pgres = %p", tmp, self->pgres);
if (tmp < 0) { goto exit; }
@ -478,7 +475,7 @@ psyco_curs_execute(cursorObject *self, PyObject *args, PyObject *kwargs)
"can't call .execute() on named cursors more than once");
return NULL;
}
if (self->conn->autocommit) {
if (self->conn->autocommit && !self->withhold) {
psyco_set_error(ProgrammingError, self,
"can't use a named cursor outside of transactions");
return NULL;
@ -506,7 +503,7 @@ psyco_curs_executemany(cursorObject *self, PyObject *args, PyObject *kwargs)
{
PyObject *operation = NULL, *vars = NULL;
PyObject *v, *iter = NULL;
int rowcount = 0;
long rowcount = 0;
static char *kwlist[] = {"query", "vars_list", NULL};
@ -766,7 +763,7 @@ psyco_curs_fetchone(cursorObject *self)
EXC_IF_ASYNC_IN_PROGRESS(self, fetchone);
EXC_IF_TPC_PREPARED(self->conn, fetchone);
PyOS_snprintf(buffer, 127, "FETCH FORWARD 1 FROM \"%s\"", self->name);
if (pq_execute(self, buffer, 0, 0) == -1) return NULL;
if (pq_execute(self, buffer, 0, 0, self->withhold) == -1) return NULL;
if (_psyco_curs_prefetch(self) < 0) return NULL;
}
@ -816,7 +813,7 @@ psyco_curs_next_named(cursorObject *self)
PyOS_snprintf(buffer, 127, "FETCH FORWARD %ld FROM \"%s\"",
self->itersize, self->name);
if (pq_execute(self, buffer, 0, 0) == -1) return NULL;
if (pq_execute(self, buffer, 0, 0, self->withhold) == -1) return NULL;
if (_psyco_curs_prefetch(self) < 0) return NULL;
}
@ -885,7 +882,7 @@ psyco_curs_fetchmany(cursorObject *self, PyObject *args, PyObject *kwords)
EXC_IF_TPC_PREPARED(self->conn, fetchone);
PyOS_snprintf(buffer, 127, "FETCH FORWARD %d FROM \"%s\"",
(int)size, self->name);
if (pq_execute(self, buffer, 0, 0) == -1) { goto exit; }
if (pq_execute(self, buffer, 0, 0, self->withhold) == -1) { goto exit; }
if (_psyco_curs_prefetch(self) < 0) { goto exit; }
}
@ -960,7 +957,7 @@ psyco_curs_fetchall(cursorObject *self)
EXC_IF_ASYNC_IN_PROGRESS(self, fetchall);
EXC_IF_TPC_PREPARED(self->conn, fetchall);
PyOS_snprintf(buffer, 127, "FETCH FORWARD ALL FROM \"%s\"", self->name);
if (pq_execute(self, buffer, 0, 0) == -1) { goto exit; }
if (pq_execute(self, buffer, 0, 0, self->withhold) == -1) { goto exit; }
if (_psyco_curs_prefetch(self) < 0) { goto exit; }
}
@ -1032,8 +1029,7 @@ psyco_curs_callproc(cursorObject *self, PyObject *args)
}
if (parameters != Py_None) {
nparameters = PyObject_Length(parameters);
if (nparameters < 0) nparameters = 0;
if (-1 == (nparameters = PyObject_Length(parameters))) { goto exit; }
}
/* allocate some memory, build the SQL and create a PyString from it */
@ -1169,7 +1165,7 @@ psyco_curs_scroll(cursorObject *self, PyObject *args, PyObject *kwargs)
char buffer[128];
EXC_IF_NO_MARK(self);
EXC_IF_ASYNC_IN_PROGRESS(self, scroll)
EXC_IF_ASYNC_IN_PROGRESS(self, scroll);
EXC_IF_TPC_PREPARED(self->conn, scroll);
if (strcmp(mode, "absolute") == 0) {
@ -1179,7 +1175,7 @@ psyco_curs_scroll(cursorObject *self, PyObject *args, PyObject *kwargs)
else {
PyOS_snprintf(buffer, 127, "MOVE %d FROM \"%s\"", value, self->name);
}
if (pq_execute(self, buffer, 0, 0) == -1) return NULL;
if (pq_execute(self, buffer, 0, 0, self->withhold) == -1) return NULL;
if (_psyco_curs_prefetch(self) < 0) return NULL;
}
@ -1392,7 +1388,7 @@ psyco_curs_copy_from(cursorObject *self, PyObject *args, PyObject *kwargs)
Py_INCREF(file);
self->copyfile = file;
if (pq_execute(self, query, 0, 0) >= 0) {
if (pq_execute(self, query, 0, 0, 0) >= 0) {
res = Py_None;
Py_INCREF(Py_None);
}
@ -1486,7 +1482,7 @@ psyco_curs_copy_to(cursorObject *self, PyObject *args, PyObject *kwargs)
Py_INCREF(file);
self->copyfile = file;
if (pq_execute(self, query, 0, 0) >= 0) {
if (pq_execute(self, query, 0, 0, 0) >= 0) {
res = Py_None;
Py_INCREF(Py_None);
}
@ -1560,7 +1556,7 @@ psyco_curs_copy_expert(cursorObject *self, PyObject *args, PyObject *kwargs)
self->copyfile = file;
/* At this point, the SQL statement must be str, not unicode */
if (pq_execute(self, Bytes_AS_STRING(sql), 0, 0) >= 0) {
if (pq_execute(self, Bytes_AS_STRING(sql), 0, 0, 0) >= 0) {
res = Py_None;
Py_INCREF(res);
}
@ -1822,7 +1818,7 @@ cursor_setup(cursorObject *self, connectionObject *conn, const char *name)
}
}
/* FIXME: why does this raise an excpetion on the _next_ line of code?
/* FIXME: why does this raise an exception on the _next_ line of code?
if (PyObject_IsInstance((PyObject*)conn,
(PyObject *)&connectionType) == 0) {
PyErr_SetString(PyExc_TypeError,
@ -1843,8 +1839,17 @@ cursor_setup(cursorObject *self, connectionObject *conn, const char *name)
self->tuple_factory = Py_None;
/* default tzinfo factory */
Py_INCREF(pyPsycopgTzFixedOffsetTimezone);
self->tzinfo_factory = pyPsycopgTzFixedOffsetTimezone;
{
PyObject *m = NULL;
if ((m = PyImport_ImportModule("psycopg2.tz"))) {
self->tzinfo_factory = PyObject_GetAttrString(
m, "FixedOffsetTimezone");
Py_DECREF(m);
}
if (!self->tzinfo_factory) {
return -1;
}
}
Dprintf("cursor_setup: good cursor object at %p, refcnt = "
FORMAT_CODE_PY_SSIZE_T,
@ -1899,31 +1904,34 @@ cursor_init(PyObject *obj, PyObject *args, PyObject *kwargs)
{
PyObject *conn;
PyObject *name = Py_None;
const char *cname;
PyObject *bname = NULL;
const char *cname = NULL;
int rv = -1;
static char *kwlist[] = {"conn", "name", NULL};
if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O!|O", kwlist,
&connectionType, &conn, &name)) {
return -1;
goto exit;
}
if (name == Py_None) {
cname = NULL;
} else {
if (name != Py_None) {
Py_INCREF(name); /* for ensure_bytes */
if (!(name = psycopg_ensure_bytes(name))) {
if (!(bname = psycopg_ensure_bytes(name))) {
/* name has had a ref stolen */
return -1;
goto exit;
}
Py_DECREF(name);
if (!(cname = Bytes_AsString(name))) {
return -1;
if (!(cname = Bytes_AsString(bname))) {
goto exit;
}
}
return cursor_setup((cursorObject *)obj, (connectionObject *)conn, cname);
rv = cursor_setup((cursorObject *)obj, (connectionObject *)conn, cname);
exit:
Py_XDECREF(bname);
return rv;
}
static PyObject *

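
The pq_execute() calls above now pass the cursor's withhold flag, and the autocommit guard in execute() lets a named WITH HOLD cursor run outside a transaction. A minimal sketch, assuming an open connection conn and the withhold test table created in the tests below:

    conn.autocommit = True
    curs = conn.cursor("w", withhold=True)
    curs.execute("select data from withhold order by data")
    print(curs.fetchone())    # rows can be fetched with no transaction open
    conn.commit()             # does not invalidate the held cursor
    print(curs.fetchone())
    curs.close()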
View File

@ -163,8 +163,16 @@ psyco_error_reduce(errorObject *self)
if (2 != PyTuple_GET_SIZE(tuple)) { goto exit; }
if (!(dict = PyDict_New())) { goto error; }
if (0 != PyDict_SetItemString(dict, "pgerror", self->pgerror)) { goto error; }
if (0 != PyDict_SetItemString(dict, "pgcode", self->pgcode)) { goto error; }
if (self->pgerror) {
if (0 != PyDict_SetItemString(dict, "pgerror", self->pgerror)) {
goto error;
}
}
if (self->pgcode) {
if (0 != PyDict_SetItemString(dict, "pgcode", self->pgcode)) {
goto error;
}
}
{
PyObject *newtuple;

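
The guard above lets __reduce__ cope with errors whose pgerror or pgcode was never set, for instance a failed connection attempt. A hedged sketch, assuming the database name does not exist (see test_pickle_connection_error below):

    import pickle
    import psycopg2

    try:
        psycopg2.connect('dbname=nosuchdatabasemate')
    except psycopg2.Error as e:
        e1 = pickle.loads(pickle.dumps(e))
        print(e1.pgerror, e1.pgcode)    # both may be None; pickling no longer fails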
View File

@ -253,7 +253,7 @@ lobject_close_locked(lobjectObject *self, char **error)
return 0;
break;
default:
PyErr_SetString(OperationalError, "the connection is broken");
*error = strdup("the connection is broken");
return -1;
break;
}

View File

@ -52,7 +52,7 @@ psyco_lobj_close(lobjectObject *self, PyObject *args)
opened large objects */
if (!lobject_is_closed(self)
&& !self->conn->autocommit
&& self->conn->mark == self->mark)
&& self->conn->mark == self->mark)
{
Dprintf("psyco_lobj_close: closing lobject at %p", self);
if (lobject_close(self) < 0)
@ -171,14 +171,14 @@ psyco_lobj_seek(lobjectObject *self, PyObject *args)
int pos=0;
if (!PyArg_ParseTuple(args, "i|i", &offset, &whence))
return NULL;
return NULL;
EXC_IF_LOBJ_CLOSED(self);
EXC_IF_LOBJ_LEVEL0(self);
EXC_IF_LOBJ_UNMARKED(self);
if ((pos = lobject_seek(self, offset, whence)) < 0)
return NULL;
return NULL;
return PyInt_FromLong((long)pos);
}
@ -198,7 +198,7 @@ psyco_lobj_tell(lobjectObject *self, PyObject *args)
EXC_IF_LOBJ_UNMARKED(self);
if ((pos = lobject_tell(self)) < 0)
return NULL;
return NULL;
return PyInt_FromLong((long)pos);
}
@ -333,11 +333,10 @@ lobject_setup(lobjectObject *self, connectionObject *conn,
return -1;
}
Py_INCREF((PyObject*)conn);
self->conn = conn;
self->mark = conn->mark;
Py_INCREF((PyObject*)self->conn);
self->fd = -1;
self->oid = InvalidOid;
@ -355,9 +354,11 @@ lobject_dealloc(PyObject* obj)
{
lobjectObject *self = (lobjectObject *)obj;
if (lobject_close(self) < 0)
PyErr_Print();
Py_XDECREF((PyObject*)self->conn);
if (self->conn && self->fd != -1) {
if (lobject_close(self) < 0)
PyErr_Print();
}
Py_CLEAR(self->conn);
PyMem_Free(self->smode);
Dprintf("lobject_dealloc: deleted lobject object at %p, refcnt = "
@ -369,17 +370,18 @@ lobject_dealloc(PyObject* obj)
static int
lobject_init(PyObject *obj, PyObject *args, PyObject *kwds)
{
int oid = (int)InvalidOid, new_oid = (int)InvalidOid;
Oid oid = InvalidOid, new_oid = InvalidOid;
const char *smode = "";
const char *new_file = NULL;
PyObject *conn;
PyObject *conn = NULL;
if (!PyArg_ParseTuple(args, "O|iziz",
&conn, &oid, &smode, &new_oid, &new_file))
if (!PyArg_ParseTuple(args, "O!|IzIz",
&connectionType, &conn,
&oid, &smode, &new_oid, &new_file))
return -1;
return lobject_setup((lobjectObject *)obj,
(connectionObject *)conn, (Oid)oid, smode, (Oid)new_oid, new_file);
(connectionObject *)conn, oid, smode, new_oid, new_file);
}
static PyObject *

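
With the "O!|IzIz" format the lobject constructor now type-checks the connection argument and parses oids as unsigned, so values above 2**31 no longer overflow. A short sketch, assuming an open connection conn:

    import psycopg2

    try:
        conn.lobject(0xFFFFFFFE)          # large oid, almost certainly unused
    except psycopg2.OperationalError:
        pass                              # "not found" is fine; no OverflowError

    # and a non-connection first argument is now a clean TypeError:
    # psycopg2.extensions.lobject([])  ->  TypeError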
View File

@ -1,4 +1,4 @@
/* microporotocols_proto.h - definiton for psycopg's protocols
/* microporotocols_proto.h - definition for psycopg's protocols
*
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
*

View File

@ -25,7 +25,7 @@
/* IMPORTANT NOTE: no function in this file do its own connection locking
except for pg_execute and pq_fetch (that are somehow high-level). This means
that all the othe functions should be called while holding a lock to the
that all the other functions should be called while holding a lock to the
connection.
*/
@ -153,7 +153,7 @@ exception_from_sqlstate(const char *sqlstate)
This function should be called while holding the GIL.
The function passes the ownership of the pgres to the returned exception,
wherer the pgres was the explicit argument or taken from the cursor.
where the pgres was the explicit argument or taken from the cursor.
So, after calling it curs->pgres will be set to null */
RAISES static void
@ -417,10 +417,20 @@ pq_complete_error(connectionObject *conn, PGresult **pgres, char **error)
pq_raise(conn, NULL, pgres);
/* now *pgres is null */
}
else if (*error != NULL) {
PyErr_SetString(OperationalError, *error);
} else {
PyErr_SetString(OperationalError, "unknown error");
else {
if (*error != NULL) {
PyErr_SetString(OperationalError, *error);
} else {
PyErr_SetString(OperationalError, "unknown error");
}
/* Trivia: with a broken socket connection PQexec returns NULL, so we
* end up here. With a TCP connection we get a pgres with an error
* instead, and the connection gets closed in the pq_raise call above
* (see ticket #196)
*/
if (CONNECTION_BAD == PQstatus(conn->pgconn)) {
conn->closed = 2;
}
}
if (*error) {
@ -781,7 +791,7 @@ exit:
means that there is data available to be collected. -1 means an error, the
exception will be set accordingly.
this fucntion locks the connection object
this function locks the connection object
this function call Py_*_ALLOW_THREADS macros */
int
@ -797,6 +807,12 @@ pq_is_busy(connectionObject *conn)
Dprintf("pq_is_busy: PQconsumeInput() failed");
pthread_mutex_unlock(&(conn->lock));
Py_BLOCK_THREADS;
/* if the libpq says pgconn is lost, close the py conn */
if (CONNECTION_BAD == PQstatus(conn->pgconn)) {
conn->closed = 2;
}
PyErr_SetString(OperationalError, PQerrorMessage(conn->pgconn));
return -1;
}
@ -826,6 +842,12 @@ pq_is_busy_locked(connectionObject *conn)
if (PQconsumeInput(conn->pgconn) == 0) {
Dprintf("pq_is_busy_locked: PQconsumeInput() failed");
/* if the libpq says pgconn is lost, close the py conn */
if (CONNECTION_BAD == PQstatus(conn->pgconn)) {
conn->closed = 2;
}
PyErr_SetString(OperationalError, PQerrorMessage(conn->pgconn));
return -1;
}
@ -871,7 +893,7 @@ pq_flush(connectionObject *conn)
*/
RAISES_NEG int
pq_execute(cursorObject *curs, const char *query, int async, int no_result)
pq_execute(cursorObject *curs, const char *query, int async, int no_result, int no_begin)
{
PGresult *pgres = NULL;
char *error = NULL;
@ -894,7 +916,7 @@ pq_execute(cursorObject *curs, const char *query, int async, int no_result)
Py_BEGIN_ALLOW_THREADS;
pthread_mutex_lock(&(curs->conn->lock));
if (pq_begin_locked(curs->conn, &pgres, &error, &_save) < 0) {
if (!no_begin && pq_begin_locked(curs->conn, &pgres, &error, &_save) < 0) {
pthread_mutex_unlock(&(curs->conn->lock));
Py_BLOCK_THREADS;
pq_complete_error(curs->conn, &pgres, &error);
@ -974,7 +996,7 @@ pq_execute(cursorObject *curs, const char *query, int async, int no_result)
/* if the execute was sync, we call pq_fetch() immediately,
to respect the old DBAPI-2.0 compatible behaviour */
if (async == 0) {
Dprintf("pq_execute: entering syncronous DBAPI compatibility mode");
Dprintf("pq_execute: entering synchronous DBAPI compatibility mode");
if (pq_fetch(curs, no_result) < 0) return -1;
}
else {
@ -1041,7 +1063,7 @@ pq_get_last_result(connectionObject *conn)
/* pq_fetch - fetch data after a query
this fucntion locks the connection object
this function locks the connection object
this function call Py_*_ALLOW_THREADS macros
return value:
@ -1134,9 +1156,8 @@ _pq_fetch_tuples(cursorObject *curs)
cast = psyco_default_cast;
}
Dprintf("_pq_fetch_tuples: using cast at %p (%s) for type %d",
cast, Bytes_AS_STRING(((typecastObject*)cast)->name),
PQftype(curs->pgres,i));
Dprintf("_pq_fetch_tuples: using cast at %p for type %d",
cast, PQftype(curs->pgres,i));
Py_INCREF(cast);
PyTuple_SET_ITEM(casts, i, cast);
@ -1243,6 +1264,20 @@ exit:
return rv;
}
void
_read_rowcount(cursorObject *curs)
{
const char *rowcount;
rowcount = PQcmdTuples(curs->pgres);
Dprintf("_read_rowcount: PQcmdTuples = %s", rowcount);
if (!rowcount || !rowcount[0]) {
curs->rowcount = -1;
} else {
curs->rowcount = atol(rowcount);
}
}
static int
_pq_copy_in_v3(cursorObject *curs)
{
@ -1253,6 +1288,13 @@ _pq_copy_in_v3(cursorObject *curs)
Py_ssize_t length = 0;
int res, error = 0;
if (!curs->copyfile) {
PyErr_SetString(ProgrammingError,
"can't execute COPY FROM: use the copy_from() method instead");
error = 1;
goto exit;
}
if (!(func = PyObject_GetAttrString(curs->copyfile, "read"))) {
Dprintf("_pq_copy_in_v3: can't get o.read");
error = 1;
@ -1335,7 +1377,7 @@ _pq_copy_in_v3(cursorObject *curs)
else if (error == 2)
res = PQputCopyEnd(curs->conn->pgconn, "error in PQputCopyData() call");
else
/* XXX would be nice to propagate the exeption */
/* XXX would be nice to propagate the exception */
res = PQputCopyEnd(curs->conn->pgconn, "error in .read() call");
CLEARPGRES(curs->pgres);
@ -1343,7 +1385,7 @@ _pq_copy_in_v3(cursorObject *curs)
Dprintf("_pq_copy_in_v3: copy ended; res = %d", res);
/* if the result is -1 we should not even try to get a result from the
bacause that will lock the current thread forever */
because that will lock the current thread forever */
if (res == -1) {
pq_raise(curs->conn, curs, NULL);
/* FIXME: pq_raise check the connection but for some reason even
@ -1360,6 +1402,7 @@ _pq_copy_in_v3(cursorObject *curs)
if (NULL == curs->pgres)
break;
_read_rowcount(curs);
if (PQresultStatus(curs->pgres) == PGRES_FATAL_ERROR)
pq_raise(curs->conn, curs, NULL);
CLEARPGRES(curs->pgres);
@ -1375,7 +1418,8 @@ exit:
static int
_pq_copy_out_v3(cursorObject *curs)
{
PyObject *tmp = NULL, *func;
PyObject *tmp = NULL;
PyObject *func = NULL;
PyObject *obj = NULL;
int ret = -1;
int is_text;
@ -1383,6 +1427,12 @@ _pq_copy_out_v3(cursorObject *curs)
char *buffer;
Py_ssize_t len;
if (!curs->copyfile) {
PyErr_SetString(ProgrammingError,
"can't execute COPY TO: use the copy_to() method instead");
goto exit;
}
if (!(func = PyObject_GetAttrString(curs->copyfile, "write"))) {
Dprintf("_pq_copy_out_v3: can't get o.write");
goto exit;
@ -1436,6 +1486,7 @@ _pq_copy_out_v3(cursorObject *curs)
if (NULL == curs->pgres)
break;
_read_rowcount(curs);
if (PQresultStatus(curs->pgres) == PGRES_FATAL_ERROR)
pq_raise(curs->conn, curs, NULL);
CLEARPGRES(curs->pgres);
@ -1451,7 +1502,6 @@ int
pq_fetch(cursorObject *curs, int no_result)
{
int pgstatus, ex = -1;
const char *rowcount;
/* even if we fail, we remove any information about the previous query */
curs_reset(curs);
@ -1483,11 +1533,7 @@ pq_fetch(cursorObject *curs, int no_result)
case PGRES_COMMAND_OK:
Dprintf("pq_fetch: command returned OK (no tuples)");
rowcount = PQcmdTuples(curs->pgres);
if (!rowcount || !rowcount[0])
curs->rowcount = -1;
else
curs->rowcount = atoi(rowcount);
_read_rowcount(curs);
curs->lastoid = PQoidValue(curs->pgres);
CLEARPGRES(curs->pgres);
ex = 1;
@ -1495,8 +1541,8 @@ pq_fetch(cursorObject *curs, int no_result)
case PGRES_COPY_OUT:
Dprintf("pq_fetch: data from a COPY TO (no tuples)");
ex = _pq_copy_out_v3(curs);
curs->rowcount = -1;
ex = _pq_copy_out_v3(curs);
/* error caught by out glorious notice handler */
if (PyErr_Occurred()) ex = -1;
CLEARPGRES(curs->pgres);
@ -1504,8 +1550,8 @@ pq_fetch(cursorObject *curs, int no_result)
case PGRES_COPY_IN:
Dprintf("pq_fetch: data from a COPY FROM (no tuples)");
ex = _pq_copy_in_v3(curs);
curs->rowcount = -1;
ex = _pq_copy_in_v3(curs);
/* error caught by out glorious notice handler */
if (PyErr_Occurred()) ex = -1;
CLEARPGRES(curs->pgres);

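
Summing up the pq.c changes: pq_execute() grows a no_begin flag so WITH HOLD fetches skip the implicit BEGIN, _read_rowcount() records PQcmdTuples() after COPY, and the copy paths bail out with a ProgrammingError when no copyfile is set instead of dereferencing NULL. A hedged sketch, assuming an open connection conn and the tcopy table used in the tests below:

    from cStringIO import StringIO
    import psycopg2

    curs = conn.cursor()
    curs.copy_from(StringIO('aaa\nbbb\nccc\n'), 'tcopy', columns=['data'])
    print(curs.rowcount)                   # 3 on PostgreSQL >= 8.2

    try:
        curs.execute("copy tcopy from stdin")    # COPY via execute()...
    except psycopg2.ProgrammingError:
        pass                                     # ...now errors out instead of crashing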
View File

@ -36,7 +36,7 @@
HIDDEN PGresult *pq_get_last_result(connectionObject *conn);
RAISES_NEG HIDDEN int pq_fetch(cursorObject *curs, int no_result);
RAISES_NEG HIDDEN int pq_execute(cursorObject *curs, const char *query,
int async, int no_result);
int async, int no_result, int no_begin);
HIDDEN int pq_send_query(connectionObject *conn, const char *query);
HIDDEN int pq_begin_locked(connectionObject *conn, PGresult **pgres,
char **error, PyThreadState **tstate);

View File

@ -59,7 +59,7 @@ extern "C" {
HIDDEN psyco_errors_fill_RETURN psyco_errors_fill psyco_errors_fill_PROTO;
HIDDEN psyco_errors_set_RETURN psyco_errors_set psyco_errors_set_PROTO;
/* global excpetions */
/* global exceptions */
extern HIDDEN PyObject *Error, *Warning, *InterfaceError, *DatabaseError,
*InternalError, *OperationalError, *ProgrammingError,
*IntegrityError, *DataError, *NotSupportedError;
@ -169,7 +169,7 @@ STEALS(1) HIDDEN PyObject * psycopg_ensure_text(PyObject *obj);
"Error related to SQL query cancellation."
#define TransactionRollbackError_doc \
"Error causing transaction rollback (deadlocks, serialisation failures, etc)."
"Error causing transaction rollback (deadlocks, serialization failures, etc)."
#endif
#ifdef __cplusplus

View File

@ -58,11 +58,6 @@
#include "psycopg/adapter_datetime.h"
HIDDEN PyObject *pyDateTimeModuleP = NULL;
/* pointers to the psycopg.tz classes */
HIDDEN PyObject *pyPsycopgTzModule = NULL;
HIDDEN PyObject *pyPsycopgTzLOCAL = NULL;
HIDDEN PyObject *pyPsycopgTzFixedOffsetTimezone = NULL;
HIDDEN PyObject *psycoEncodings = NULL;
#ifdef PSYCOPG_DEBUG
@ -437,7 +432,7 @@ static struct {
static int
psyco_errors_init(void)
{
/* the names of the exceptions here reflect the oranization of the
/* the names of the exceptions here reflect the organization of the
psycopg2 module and not the fact the the original error objects
live in _psycopg */
@ -859,18 +854,6 @@ INIT_MODULE(_psycopg)(void)
Py_TYPE(&pydatetimeType) = &PyType_Type;
if (PyType_Ready(&pydatetimeType) == -1) goto exit;
/* import psycopg2.tz anyway (TODO: replace with C-level module?) */
pyPsycopgTzModule = PyImport_ImportModule("psycopg2.tz");
if (pyPsycopgTzModule == NULL) {
Dprintf("initpsycopg: can't import psycopg2.tz module");
PyErr_SetString(PyExc_ImportError, "can't import psycopg2.tz module");
goto exit;
}
pyPsycopgTzLOCAL =
PyObject_GetAttrString(pyPsycopgTzModule, "LOCAL");
pyPsycopgTzFixedOffsetTimezone =
PyObject_GetAttrString(pyPsycopgTzModule, "FixedOffsetTimezone");
/* initialize the module and grab module's dictionary */
#if PY_MAJOR_VERSION < 3
module = Py_InitModule("_psycopg", psycopgMethods);

View File

@ -61,7 +61,7 @@ chunk_getreadbuffer(chunkObject *self, Py_ssize_t segment, void **ptr)
if (segment != 0)
{
PyErr_SetString(PyExc_SystemError,
"acessing non-existant buffer segment");
"accessing non-existant buffer segment");
return -1;
}
*ptr = self->base;
@ -160,7 +160,7 @@ typecast_BINARY_cast(const char *s, Py_ssize_t l, PyObject *curs)
}
else {
/* This is a buffer in the classic bytea format. So we can handle it
* to the PQunescapeBytea to have it parsed, rignt? ...Wrong. We
* to the PQunescapeBytea to have it parsed, right? ...Wrong. We
* could, but then we'd have to record whether buffer was allocated by
* Python or by the libpq to dispose it properly. Furthermore the
* PQunescapeBytea interface is not the most brilliant as it wants a

View File

@ -1,70 +0,0 @@
# =======================================================================
# $Source: /sources/gnumed/gnumed/gnumed/client/testing/test-psycopg2-datetime-systematic.py,v $
__version__ = "$Revision: 1.1 $"
__author__ = "K.Hilbert <Karsten.Hilbert@gmx.net>"
__license__ = 'GPL (details at http://www.gnu.org)'
# =======================================================================
print "testing psycopg2 date/time parsing"
import psycopg2
print "psycopg2:", psycopg2.__version__
#dsn = u'dbname=gnumed_v10 user=any-doc password=any-doc'
dsn = u'dbname=test'
print dsn
conn = psycopg2.connect(dsn=dsn)
curs = conn.cursor()
cmd = u"""
select
name,
abbrev,
utc_offset::text,
case when
is_dst then 'DST'
else 'non-DST'
end
from pg_timezone_names"""
curs.execute(cmd)
rows = curs.fetchall()
curs.close()
conn.rollback()
for row in rows:
curs = conn.cursor()
tz = row[0]
cmd = u"set timezone to '%s'" % tz
try:
curs.execute(cmd)
except StandardError, e:
print "cannot use time zone", row
raise e
curs.close()
conn.rollback()
continue
cmd = u"""select '1920-01-19 23:00:00+01'::timestamp with time zone"""
try:
curs.execute(cmd)
curs.fetchone()
except StandardError, e:
print "%s (%s / %s / %s) failed:" % (tz, row[1], row[2], row[3])
print " ", e
curs.close()
conn.rollback()
conn.close()
# =======================================================================
# $Log: test-psycopg2-datetime-systematic.py,v $
# Revision 1.1 2009/02/10 18:45:32 ncq
# - psycopg2 cannot parse a bunch of settable time zones
#
# Revision 1.1 2009/02/10 13:57:03 ncq
# - test for psycopg2 on Ubuntu-Intrepid
#

View File

@ -4,20 +4,16 @@
#
# This file is part of the psycopg module.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2,
# or (at your option) any later version.
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# this a little script that analyze a file with (TYPE, NUMBER) tuples
# and write out C code ready for inclusion in psycopg. the generated
# code defines the DBAPITypeObject fundamental types and warns for
@ -74,7 +70,7 @@ typecastObject_initlist typecast_builtins[] = {
FOOTER = """ {NULL, NULL, NULL, NULL}\n};\n"""
# usefull error reporting function
# useful error reporting function
def error(msg):
"""Report an error on stderr."""
sys.stderr.write(msg+'\n')

View File

@ -6,16 +6,17 @@ The script can be run at a new PostgreSQL release to refresh the module.
# Copyright (C) 2010 Daniele Varrazzo <daniele.varrazzo@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2, or (at your option) any later
# version.
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTIBILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
import re
import sys
import urllib2
from collections import defaultdict
@ -30,8 +31,9 @@ def main():
filename = sys.argv[1]
file_start = read_base_file(filename)
# If you add a version to the list fix the docs (errorcodes.rst, err.rst)
classes, errors = fetch_errors(
['8.1', '8.2', '8.3', '8.4', '9.0', '9.1', '9.2'])
['8.1', '8.2', '8.3', '8.4', '9.0', '9.1', '9.2', '9.3', '9.4'])
f = open(filename, "w")
for line in file_start:
@ -48,7 +50,41 @@ def read_base_file(filename):
raise ValueError("can't find the separator. Is this the right file?")
def parse_errors(url):
def parse_errors_txt(url):
classes = {}
errors = defaultdict(dict)
page = urllib2.urlopen(url)
for line in page:
# Strip comments and skip blanks
line = line.split('#')[0].strip()
if not line:
continue
# Parse a section
m = re.match(r"Section: (Class (..) - .+)", line)
if m:
label, class_ = m.groups()
classes[class_] = label
continue
# Parse an error
m = re.match(r"(.....)\s+(?:E|W|S)\s+ERRCODE_(\S+)(?:\s+(\S+))?$", line)
if m:
errcode, macro, spec = m.groups()
# skip errcodes without specs as they are not publically visible
if not spec:
continue
errlabel = spec.upper()
errors[class_][errcode] = errlabel
continue
# We don't expect anything else
raise ValueError("unexpected line:\n%s" % line)
return classes, errors
def parse_errors_sgml(url):
page = BS(urllib2.urlopen(url))
table = page('table')[1]('tbody')[0]
@ -87,14 +123,25 @@ def parse_errors(url):
return classes, errors
errors_url="http://www.postgresql.org/docs/%s/static/errcodes-appendix.html"
errors_sgml_url = \
"http://www.postgresql.org/docs/%s/static/errcodes-appendix.html"
errors_txt_url = \
"http://git.postgresql.org/gitweb/?p=postgresql.git;a=blob_plain;" \
"f=src/backend/utils/errcodes.txt;hb=REL%s_STABLE"
def fetch_errors(versions):
classes = {}
errors = defaultdict(dict)
for version in versions:
c1, e1 = parse_errors(errors_url % version)
print >> sys.stderr, version
tver = tuple(map(int, version.split('.')))
if tver < (9, 1):
c1, e1 = parse_errors_sgml(errors_sgml_url % version)
else:
c1, e1 = parse_errors_txt(
errors_txt_url % version.replace('.', '_'))
classes.update(c1)
for c, cerrs in e1.iteritems():
errors[c].update(cerrs)

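
The new parse_errors_txt() reads the errcodes.txt file shipped with the PostgreSQL sources rather than scraping the SGML appendix. A small sketch of the line shape it expects (the sample line is illustrative, paraphrased from the errcodes.txt format):

    import re

    line = "22012    E    ERRCODE_DIVISION_BY_ZERO    division_by_zero"
    m = re.match(r"(.....)\s+(?:E|W|S)\s+ERRCODE_(\S+)(?:\s+(\S+))?$", line)
    print(m.groups())    # ('22012', 'DIVISION_BY_ZERO', 'division_by_zero')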
View File

@ -8,19 +8,15 @@ script exits with error 1.
# Copyright (C) 2011 Daniele Varrazzo <daniele.varrazzo@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
import gc
import sys

View File

@ -28,5 +28,5 @@ have_ssl=0
# Statically link against the postgresql client library.
#static_libpq=1
# Add here eventual extra libreries required to link the module.
# Add here eventual extra libraries required to link the module.
#libraries=

View File

@ -21,17 +21,26 @@ and stable as a rock.
psycopg2 is different from the other database adapter because it was
designed for heavily multi-threaded applications that create and destroy
lots of cursors and make a conspicuous number of concurrent INSERTs or
UPDATEs. psycopg2 also provide full asycronous operations and support
UPDATEs. psycopg2 also provide full asynchronous operations and support
for coroutine libraries.
"""
# note: if you are changing the list of supported Python version please fix
# the docs in install.rst and the /features/ page on the website.
classifiers = """\
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)
License :: OSI Approved :: Zope Public License
Programming Language :: Python
Programming Language :: Python :: 2.5
Programming Language :: Python :: 2.6
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.1
Programming Language :: Python :: 3.2
Programming Language :: Python :: 3.3
Programming Language :: Python :: 3.4
Programming Language :: C
Programming Language :: SQL
Topic :: Database
@ -76,8 +85,7 @@ except ImportError:
# Take a look at http://www.python.org/dev/peps/pep-0386/
# for a consistent versioning pattern.
PSYCOPG_VERSION = '2.5'
PSYCOPG_VERSION = '2.5.4'
version_flags = ['dt', 'dec']
@ -210,7 +218,7 @@ or with the pg_config option in 'setup.cfg'.
class psycopg_build_ext(build_ext):
"""Conditionally complement the setup.cfg options file.
This class configures the include_dirs, libray_dirs, libraries
This class configures the include_dirs, library_dirs, libraries
options as required by the system. Most of the configuration happens
in finalize_options() method.
@ -287,12 +295,15 @@ class psycopg_build_ext(build_ext):
manifest = '_psycopg.vc9.x86.manifest'
if platform == 'win-amd64':
manifest = '_psycopg.vc9.amd64.manifest'
try:
ext_path = self.get_ext_fullpath(extension.name)
except AttributeError:
ext_path = os.path.join(self.build_lib,
'psycopg2', '_psycopg.pyd')
self.compiler.spawn(
['mt.exe', '-nologo', '-manifest',
os.path.join('psycopg', manifest),
'-outputresource:%s;2' % (
os.path.join(self.build_lib,
'psycopg2', '_psycopg.pyd'))])
'-outputresource:%s;2' % ext_path])
def finalize_win32(self):
"""Finalize build system configuration on win32 platform."""
@ -362,7 +373,7 @@ class psycopg_build_ext(build_ext):
finalize_linux3 = finalize_linux
def finalize_options(self):
"""Complete the build system configuation."""
"""Complete the build system configuration."""
build_ext.finalize_options(self)
pg_config_helper = PostgresConfig(self)
@ -499,9 +510,11 @@ you probably need to install its companion -dev or -devel package."""
# generate a nice version string to avoid confusion when users report bugs
version_flags.append('pq3') # no more a choice
for have in parser.get('build_ext', 'define').split(','):
if have == 'PSYCOPG_EXTENSIONS':
version_flags.append('ext')
if version_flags:
PSYCOPG_VERSION_EX = PSYCOPG_VERSION + " (%s)" % ' '.join(version_flags)
else:
@ -522,6 +535,13 @@ if parser.has_option('build_ext', 'static_libpq'):
else:
static_libpq = 0
# And now... explicitly add the defines from the .cfg files.
# Looks like setuptools or some other cog doesn't add them to the command line
# when called e.g. with "pip -e git+url'. This results in declarations
# duplicate on the commandline, which I hope is not a problem.
for define in parser.get('build_ext', 'define').split(','):
define_macros.append((define, '1'))
# build the extension
sources = [ os.path.join('psycopg', x) for x in sources]
@ -549,7 +569,7 @@ setup(name="psycopg2",
author_email="fog@initd.org",
url="http://initd.org/psycopg/",
download_url=download_url,
license="GPL with exceptions or ZPL",
license="LGPL with exceptions or ZPL",
platforms=["any"],
description=__doc__.split("\n")[0],
long_description="\n".join(__doc__.split("\n")[2:]),

View File

@ -60,7 +60,7 @@ import sys
# - Now a subclass of TestCase, to avoid requiring the driver stub
# to use multiple inheritance
# - Reversed the polarity of buggy test in test_description
# - Test exception heirarchy correctly
# - Test exception hierarchy correctly
# - self.populate is now self._populate(), so if a driver stub
# overrides self.ddl1 this change propogates
# - VARCHAR columns now have a width, which will hopefully make the
@ -188,7 +188,7 @@ class DatabaseAPI20Test(unittest.TestCase):
def test_Exceptions(self):
# Make sure required exceptions exist, and are in the
# defined heirarchy.
# defined hierarchy.
if sys.version[0] == '3': #under Python 3 StardardError no longer exists
self.failUnless(issubclass(self.driver.Warning,Exception))
self.failUnless(issubclass(self.driver.Error,Exception))
@ -504,7 +504,7 @@ class DatabaseAPI20Test(unittest.TestCase):
self.assertRaises(self.driver.Error,cur.fetchone)
# cursor.fetchone should raise an Error if called after
# executing a query that cannnot return rows
# executing a query that cannot return rows
self.executeDDL1(cur)
self.assertRaises(self.driver.Error,cur.fetchone)
@ -516,7 +516,7 @@ class DatabaseAPI20Test(unittest.TestCase):
self.failUnless(cur.rowcount in (-1,0))
# cursor.fetchone should raise an Error if called after
# executing a query that cannnot return rows
# executing a query that cannot return rows
cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))

View File

@ -75,9 +75,9 @@ class AsyncTests(ConnectingTestCase):
if state == psycopg2.extensions.POLL_OK:
break
elif state == psycopg2.extensions.POLL_READ:
select.select([pollable], [], [])
select.select([pollable], [], [], 10)
elif state == psycopg2.extensions.POLL_WRITE:
select.select([], [pollable], [])
select.select([], [pollable], [], 10)
else:
raise Exception("Unexpected result from poll: %r", state)
@ -449,6 +449,16 @@ class AsyncTests(ConnectingTestCase):
self.wait(self.conn)
self.assertEqual(cur.fetchone(), (42,))
def test_async_connection_error_message(self):
try:
cnn = psycopg2.connect('dbname=thisdatabasedoesntexist', async=True)
self.wait(cnn)
except psycopg2.Error, e:
self.assertNotEqual(str(e), "asynchronous connection failed",
"connection error reason lost")
else:
self.fail("no exception raised")
def test_suite():
return unittest.TestLoader().loadTestsFromName(__name__)

View File

@ -34,6 +34,7 @@ import psycopg2.extensions
from testutils import unittest, decorate_all_tests, skip_if_no_superuser
from testutils import skip_before_postgres, skip_after_postgres
from testutils import ConnectingTestCase, skip_if_tpc_disabled
from testutils import skip_if_windows
from testconfig import dsn, dbname
@ -64,6 +65,7 @@ class ConnectionTests(ConnectingTestCase):
@skip_before_postgres(8, 4)
@skip_if_no_superuser
@skip_if_windows
def test_cleanup_on_badconn_close(self):
# ticket #148
conn = self.conn
@ -125,9 +127,6 @@ class ConnectionTests(ConnectingTestCase):
cur.execute(sql)
self.assertEqual(50, len(conn.notices))
self.assert_('table50' in conn.notices[0], conn.notices[0])
self.assert_('table51' in conn.notices[1], conn.notices[1])
self.assert_('table98' in conn.notices[-2], conn.notices[-2])
self.assert_('table99' in conn.notices[-1], conn.notices[-1])
def test_server_version(self):
@ -249,6 +248,28 @@ class ConnectionTests(ConnectingTestCase):
cur.execute("select 1 as a")
self.assertRaises(TypeError, (lambda r: r['a']), cur.fetchone())
def test_cursor_factory_none(self):
# issue #210
conn = self.connect()
cur = conn.cursor(cursor_factory=None)
self.assertEqual(type(cur), psycopg2.extensions.cursor)
conn = self.connect(cursor_factory=psycopg2.extras.DictCursor)
cur = conn.cursor(cursor_factory=None)
self.assertEqual(type(cur), psycopg2.extras.DictCursor)
def test_failed_init_status(self):
class SubConnection(psycopg2.extensions.connection):
def __init__(self, dsn):
try:
super(SubConnection, self).__init__(dsn)
except Exception:
pass
c = SubConnection("dbname=thereisnosuchdatabasemate password=foobar")
self.assert_(c.closed, "connection failed so it must be closed")
self.assert_('foobar' not in c.dsn, "password was not obscured")
class IsolationLevelsTestCase(ConnectingTestCase):

View File

@ -25,13 +25,16 @@
import sys
import string
from testutils import unittest, ConnectingTestCase, decorate_all_tests
from testutils import skip_if_no_iobase
from testutils import skip_if_no_iobase, skip_before_postgres
from cStringIO import StringIO
from itertools import cycle, izip
from subprocess import Popen, PIPE
import psycopg2
import psycopg2.extensions
from testutils import skip_copy_if_green
from testutils import skip_copy_if_green, script_to_py3
from testconfig import dsn
if sys.version_info[0] < 3:
_base = object
@ -199,6 +202,20 @@ class CopyTests(ConnectingTestCase):
f.seek(0)
self.assertEqual(f.readline().rstrip(), about)
# same tests with setting size
f = io.StringIO()
f.write(about)
f.seek(0)
exp_size = 123
# hack here to leave file as is, only check size when reading
real_read = f.read
def read(_size, f=f, exp_size=exp_size):
self.assertEqual(_size, exp_size)
return real_read(_size)
f.read = read
curs.copy_expert('COPY tcopy (data) FROM STDIN', f, size=exp_size)
curs.execute("select data from tcopy;")
self.assertEqual(curs.fetchone()[0], abin)
def _copy_from(self, curs, nrecs, srec, copykw):
f = StringIO()
@ -258,6 +275,70 @@ class CopyTests(ConnectingTestCase):
curs.execute("select count(*) from manycols;")
self.assertEqual(curs.fetchone()[0], 2)
@skip_before_postgres(8, 2) # they don't send the count
def test_copy_rowcount(self):
curs = self.conn.cursor()
curs.copy_from(StringIO('aaa\nbbb\nccc\n'), 'tcopy', columns=['data'])
self.assertEqual(curs.rowcount, 3)
curs.copy_expert(
"copy tcopy (data) from stdin",
StringIO('ddd\neee\n'))
self.assertEqual(curs.rowcount, 2)
curs.copy_to(StringIO(), "tcopy")
self.assertEqual(curs.rowcount, 5)
curs.execute("insert into tcopy (data) values ('fff')")
curs.copy_expert("copy tcopy to stdout", StringIO())
self.assertEqual(curs.rowcount, 6)
def test_copy_rowcount_error(self):
curs = self.conn.cursor()
curs.execute("insert into tcopy (data) values ('fff')")
self.assertEqual(curs.rowcount, 1)
self.assertRaises(psycopg2.DataError,
curs.copy_from, StringIO('aaa\nbbb\nccc\n'), 'tcopy')
self.assertEqual(curs.rowcount, -1)
def test_copy_from_segfault(self):
# issue #219
script = ("""\
import psycopg2
conn = psycopg2.connect(%(dsn)r)
curs = conn.cursor()
curs.execute("create table copy_segf (id int)")
try:
curs.execute("copy copy_segf from stdin")
except psycopg2.ProgrammingError:
pass
conn.close()
""" % { 'dsn': dsn,})
proc = Popen([sys.executable, '-c', script_to_py3(script)])
proc.communicate()
self.assertEqual(0, proc.returncode)
def test_copy_to_segfault(self):
# issue #219
script = ("""\
import psycopg2
conn = psycopg2.connect(%(dsn)r)
curs = conn.cursor()
curs.execute("create table copy_segf (id int)")
try:
curs.execute("copy copy_segf to stdout")
except psycopg2.ProgrammingError:
pass
conn.close()
""" % { 'dsn': dsn,})
proc = Popen([sys.executable, '-c', script_to_py3(script)], stdout=PIPE)
proc.communicate()
self.assertEqual(0, proc.returncode)
decorate_all_tests(CopyTests, skip_copy_if_green)

View File

@ -27,7 +27,7 @@ import psycopg2
import psycopg2.extensions
from psycopg2.extensions import b
from testutils import unittest, ConnectingTestCase, skip_before_postgres
from testutils import skip_if_no_namedtuple
from testutils import skip_if_no_namedtuple, skip_if_no_getrefcount
class CursorTests(ConnectingTestCase):
@ -97,6 +97,7 @@ class CursorTests(ConnectingTestCase):
self.assertEqual(b('SELECT 10.3;'),
cur.mogrify("SELECT %s;", (Decimal("10.3"),)))
@skip_if_no_getrefcount
def test_mogrify_leak_on_multiple_reference(self):
# issue #81: reference leak when a parameter value is referenced
# more than once from a dict.
@ -157,6 +158,7 @@ class CursorTests(ConnectingTestCase):
curs = self.conn.cursor()
w = ref(curs)
del curs
import gc; gc.collect()
self.assert_(w() is None)
def test_null_name(self):
@ -174,10 +176,7 @@ class CursorTests(ConnectingTestCase):
curs.execute("select data from invname order by data")
self.assertEqual(curs.fetchall(), [(10,), (20,), (30,)])
def test_withhold(self):
self.assertRaises(psycopg2.ProgrammingError, self.conn.cursor,
withhold=True)
def _create_withhold_table(self):
curs = self.conn.cursor()
try:
curs.execute("drop table withhold")
@ -188,6 +187,11 @@ class CursorTests(ConnectingTestCase):
curs.execute("insert into withhold values (%s)", (i,))
curs.close()
def test_withhold(self):
self.assertRaises(psycopg2.ProgrammingError, self.conn.cursor,
withhold=True)
self._create_withhold_table()
curs = self.conn.cursor("W")
self.assertEqual(curs.withhold, False);
curs.withhold = True
@ -207,6 +211,52 @@ class CursorTests(ConnectingTestCase):
curs.execute("drop table withhold")
self.conn.commit()
def test_withhold_no_begin(self):
self._create_withhold_table()
curs = self.conn.cursor("w", withhold=True)
curs.execute("select data from withhold order by data")
self.assertEqual(curs.fetchone(), (10,))
self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_BEGIN)
self.assertEqual(self.conn.get_transaction_status(),
psycopg2.extensions.TRANSACTION_STATUS_INTRANS)
self.conn.commit()
self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY)
self.assertEqual(self.conn.get_transaction_status(),
psycopg2.extensions.TRANSACTION_STATUS_IDLE)
self.assertEqual(curs.fetchone(), (20,))
self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY)
self.assertEqual(self.conn.get_transaction_status(),
psycopg2.extensions.TRANSACTION_STATUS_IDLE)
curs.close()
self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY)
self.assertEqual(self.conn.get_transaction_status(),
psycopg2.extensions.TRANSACTION_STATUS_IDLE)
def test_withhold_autocommit(self):
self._create_withhold_table()
self.conn.commit()
self.conn.autocommit = True
curs = self.conn.cursor("w", withhold=True)
curs.execute("select data from withhold order by data")
self.assertEqual(curs.fetchone(), (10,))
self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY)
self.assertEqual(self.conn.get_transaction_status(),
psycopg2.extensions.TRANSACTION_STATUS_IDLE)
self.conn.commit()
self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY)
self.assertEqual(self.conn.get_transaction_status(),
psycopg2.extensions.TRANSACTION_STATUS_IDLE)
curs.close()
self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY)
self.assertEqual(self.conn.get_transaction_status(),
psycopg2.extensions.TRANSACTION_STATUS_IDLE)
def test_scrollable(self):
self.assertRaises(psycopg2.ProgrammingError, self.conn.cursor,
scrollable=True)
@ -400,7 +450,7 @@ class CursorTests(ConnectingTestCase):
@skip_before_postgres(8, 0)
def test_scroll_named(self):
cur = self.conn.cursor()
cur = self.conn.cursor('tmp', scrollable=True)
cur.execute("select generate_series(0,9)")
cur.scroll(2)
self.assertEqual(cur.fetchone(), (2,))
@ -410,8 +460,24 @@ class CursorTests(ConnectingTestCase):
self.assertEqual(cur.fetchone(), (8,))
cur.scroll(9, mode='absolute')
self.assertEqual(cur.fetchone(), (9,))
self.assertRaises((IndexError, psycopg2.ProgrammingError),
cur.scroll, 10, mode='absolute')
def test_bad_subclass(self):
# check that we get an error message instead of a segfault
# for badly written subclasses.
# see http://stackoverflow.com/questions/22019341/
class StupidCursor(psycopg2.extensions.cursor):
def __init__(self, *args, **kwargs):
# I am stupid so not calling superclass init
pass
cur = StupidCursor()
self.assertRaises(psycopg2.InterfaceError, cur.execute, 'select 1')
self.assertRaises(psycopg2.InterfaceError, cur.executemany,
'select 1', [])
def test_callproc_badparam(self):
cur = self.conn.cursor()
self.assertRaises(TypeError, cur.callproc, 'lower', 42)
def test_suite():

View File

@ -213,6 +213,14 @@ class DatetimeTests(ConnectingTestCase, CommonDatetimeTestsMixin):
self.assertEqual(value.seconds, 41103)
self.assertEqual(value.microseconds, 876544)
def test_parse_infinity(self):
value = self.DATETIME('-infinity', self.curs)
self.assertEqual(str(value), '0001-01-01 00:00:00')
value = self.DATETIME('infinity', self.curs)
self.assertEqual(str(value), '9999-12-31 23:59:59.999999')
value = self.DATE('infinity', self.curs)
self.assertEqual(str(value), '9999-12-31')
def test_adapt_date(self):
from datetime import date
value = self.execute('select (%s)::date::text',
@ -240,7 +248,7 @@ class DatetimeTests(ConnectingTestCase, CommonDatetimeTestsMixin):
self.assertEqual(seconds, 3674096)
self.assertEqual(int(round((value - seconds) * 1000000)), 123456)
def test_adapt_megative_timedelta(self):
def test_adapt_negative_timedelta(self):
from datetime import timedelta
value = self.execute('select extract(epoch from (%s)::interval)',
[timedelta(days=-42, seconds=45296,
@ -428,7 +436,7 @@ class mxDateTimeTests(ConnectingTestCase, CommonDatetimeTestsMixin):
self.assertEqual(seconds, 3674096)
self.assertEqual(int(round((value - seconds) * 1000000)), 123456)
def test_adapt_megative_timedelta(self):
def test_adapt_negative_timedelta(self):
from mx.DateTime import DateTimeDeltaFrom
value = self.execute('select extract(epoch from (%s)::interval)',
[DateTimeDeltaFrom(days=-42,

View File

@ -77,6 +77,10 @@ class LargeObjectTests(LargeObjectTestCase):
self.assertNotEqual(lo, None)
self.assertEqual(lo.mode[0], "w")
def test_connection_needed(self):
self.assertRaises(TypeError,
psycopg2.extensions.lobject, [])
def test_open_non_existent(self):
# By creating then removing a large object, we get an Oid that
# should be unused.
@ -126,6 +130,7 @@ class LargeObjectTests(LargeObjectTestCase):
self.assertRaises(psycopg2.OperationalError,
self.conn.lobject, 0, "w", lo.oid)
self.assert_(not self.conn.closed)
def test_import(self):
self.tmpdir = tempfile.mkdtemp()
@ -369,6 +374,12 @@ class LargeObjectTests(LargeObjectTestCase):
finally:
self.conn.tpc_commit()
def test_large_oid(self):
# Test we don't overflow with an oid not fitting a signed int
try:
self.conn.lobject(0xFFFFFFFE)
except psycopg2.OperationalError:
pass
decorate_all_tests(LargeObjectTests, skip_if_no_lo, skip_lo_if_green)

View File

@ -22,8 +22,12 @@
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
import os
import sys
from subprocess import Popen
from testutils import unittest, skip_before_python, skip_before_postgres
from testutils import ConnectingTestCase, skip_copy_if_green
from testutils import ConnectingTestCase, skip_copy_if_green, script_to_py3
import psycopg2
@ -199,7 +203,7 @@ class ExceptionsTestCase(ConnectingTestCase):
self.assertEqual(diag.sqlstate, '42P01')
del diag
gc.collect()
gc.collect(); gc.collect()
assert(w() is None)
@skip_copy_if_green
@ -279,6 +283,42 @@ class ExceptionsTestCase(ConnectingTestCase):
self.assertEqual(e.pgcode, e1.pgcode)
self.assert_(e1.cursor is None)
@skip_before_python(2, 5)
def test_pickle_connection_error(self):
# segfaults on psycopg 2.5.1 - see ticket #170
import pickle
try:
psycopg2.connect('dbname=nosuchdatabasemate')
except psycopg2.Error, exc:
e = exc
e1 = pickle.loads(pickle.dumps(e))
self.assertEqual(e.pgerror, e1.pgerror)
self.assertEqual(e.pgcode, e1.pgcode)
self.assert_(e1.cursor is None)
class TestExtensionModule(unittest.TestCase):
def test_import_internal(self):
# check that the internal package can be imported "naked"
# we may break this property if there is a compelling reason to do so,
# however having it allows for some import juggling such as the one
# required in ticket #201.
pkgdir = os.path.dirname(psycopg2.__file__)
pardir = os.path.dirname(pkgdir)
self.assert_(pardir in sys.path)
script = ("""
import sys
sys.path.remove(%r)
sys.path.insert(0, %r)
import _psycopg
""" % (pardir, pkgdir))
proc = Popen([sys.executable, '-c', script_to_py3(script)])
proc.communicate()
self.assertEqual(0, proc.returncode)
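test_import_internal builds a small script to prove that _psycopg can be imported on its own; outside the test suite the same juggling looks roughly like the sketch below (the paths are resolved at runtime, and sys.path.remove assumes the package's parent directory is on the path, as the test asserts):

    import os
    import sys
    import psycopg2

    pkgdir = os.path.dirname(psycopg2.__file__)
    pardir = os.path.dirname(pkgdir)

    sys.path.remove(pardir)        # as in the test script: hide the package dir
    sys.path.insert(0, pkgdir)     # and expose the extension module directly
    import _psycopg
    sys.path.pop(0)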
def test_suite():
return unittest.TestLoader().loadTestsFromName(__name__)

View File

@@ -13,6 +13,7 @@
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
from __future__ import with_statement
import re
import sys
@@ -22,9 +23,11 @@ from functools import wraps
from testutils import unittest, skip_if_no_uuid, skip_before_postgres
from testutils import ConnectingTestCase, decorate_all_tests
from testutils import py3_raises_typeerror
import psycopg2
import psycopg2.extras
import psycopg2.extensions as ext
from psycopg2.extensions import b
@@ -109,9 +112,9 @@ class TypesExtrasTests(ConnectingTestCase):
def test_adapt_fail(self):
class Foo(object): pass
self.assertRaises(psycopg2.ProgrammingError,
psycopg2.extensions.adapt, Foo(), psycopg2.extensions.ISQLQuote, None)
psycopg2.extensions.adapt, Foo(), ext.ISQLQuote, None)
try:
psycopg2.extensions.adapt(Foo(), psycopg2.extensions.ISQLQuote, None)
psycopg2.extensions.adapt(Foo(), ext.ISQLQuote, None)
except psycopg2.ProgrammingError, err:
self.failUnless(str(err) == "can't adapt type 'Foo'")
@@ -458,7 +461,6 @@ class AdaptTypeTestCase(ConnectingTestCase):
def test_none_fast_path(self):
# the None adapter is not actually invoked in regular adaptation
ext = psycopg2.extensions
class WonkyAdapter(object):
def __init__(self, obj): pass
@@ -921,7 +923,7 @@ class JsonTestCase(ConnectingTestCase):
self.assertEqual(curs.mogrify("%s", (obj,)),
b("""'{"a": 123}'"""))
finally:
del psycopg2.extensions.adapters[dict, psycopg2.extensions.ISQLQuote]
del psycopg2.extensions.adapters[dict, ext.ISQLQuote]
def test_type_not_available(self):
@@ -1056,6 +1058,97 @@ class JsonTestCase(ConnectingTestCase):
self.assertEqual(data['b'], None)
def skip_if_no_jsonb_type(f):
return skip_before_postgres(9, 4)(f)
class JsonbTestCase(ConnectingTestCase):
@staticmethod
def myloads(s):
import json
rv = json.loads(s)
rv['test'] = 1
return rv
def test_default_cast(self):
curs = self.conn.cursor()
curs.execute("""select '{"a": 100.0, "b": null}'::jsonb""")
self.assertEqual(curs.fetchone()[0], {'a': 100.0, 'b': None})
curs.execute("""select array['{"a": 100.0, "b": null}']::jsonb[]""")
self.assertEqual(curs.fetchone()[0], [{'a': 100.0, 'b': None}])
def test_register_on_connection(self):
psycopg2.extras.register_json(self.conn, loads=self.myloads, name='jsonb')
curs = self.conn.cursor()
curs.execute("""select '{"a": 100.0, "b": null}'::jsonb""")
self.assertEqual(curs.fetchone()[0], {'a': 100.0, 'b': None, 'test': 1})
def test_register_on_cursor(self):
curs = self.conn.cursor()
psycopg2.extras.register_json(curs, loads=self.myloads, name='jsonb')
curs.execute("""select '{"a": 100.0, "b": null}'::jsonb""")
self.assertEqual(curs.fetchone()[0], {'a': 100.0, 'b': None, 'test': 1})
def test_register_globally(self):
old = psycopg2.extensions.string_types.get(3802)
olda = psycopg2.extensions.string_types.get(3807)
try:
new, newa = psycopg2.extras.register_json(self.conn,
loads=self.myloads, globally=True, name='jsonb')
curs = self.conn.cursor()
curs.execute("""select '{"a": 100.0, "b": null}'::jsonb""")
self.assertEqual(curs.fetchone()[0], {'a': 100.0, 'b': None, 'test': 1})
finally:
psycopg2.extensions.string_types.pop(new.values[0])
psycopg2.extensions.string_types.pop(newa.values[0])
if old:
psycopg2.extensions.register_type(old)
if olda:
psycopg2.extensions.register_type(olda)
def test_loads(self):
json = psycopg2.extras.json
loads = lambda x: json.loads(x, parse_float=Decimal)
psycopg2.extras.register_json(self.conn, loads=loads, name='jsonb')
curs = self.conn.cursor()
curs.execute("""select '{"a": 100.0, "b": null}'::jsonb""")
data = curs.fetchone()[0]
self.assert_(isinstance(data['a'], Decimal))
self.assertEqual(data['a'], Decimal('100.0'))
# make sure we are not mangling plain json too
curs.execute("""select '{"a": 100.0, "b": null}'::json""")
data = curs.fetchone()[0]
self.assert_(isinstance(data['a'], float))
self.assertEqual(data['a'], 100.0)
def test_register_default(self):
curs = self.conn.cursor()
loads = lambda x: psycopg2.extras.json.loads(x, parse_float=Decimal)
psycopg2.extras.register_default_jsonb(curs, loads=loads)
curs.execute("""select '{"a": 100.0, "b": null}'::jsonb""")
data = curs.fetchone()[0]
self.assert_(isinstance(data['a'], Decimal))
self.assertEqual(data['a'], Decimal('100.0'))
curs.execute("""select array['{"a": 100.0, "b": null}']::jsonb[]""")
data = curs.fetchone()[0]
self.assert_(isinstance(data[0]['a'], Decimal))
self.assertEqual(data[0]['a'], Decimal('100.0'))
def test_null(self):
curs = self.conn.cursor()
curs.execute("""select NULL::jsonb""")
self.assertEqual(curs.fetchone()[0], None)
curs.execute("""select NULL::jsonb[]""")
self.assertEqual(curs.fetchone()[0], None)
decorate_all_tests(JsonbTestCase, skip_if_no_json_module)
decorate_all_tests(JsonbTestCase, skip_if_no_jsonb_type)
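As a usage note, the JsonbTestCase methods above all go through psycopg2.extras.register_json(..., name='jsonb') or register_default_jsonb; a minimal sketch with a Decimal-preserving loads, assuming a placeholder DSN and a PostgreSQL 9.4 server with the jsonb type:

    from decimal import Decimal
    import psycopg2
    import psycopg2.extras

    conn = psycopg2.connect("dbname=test")    # placeholder DSN
    loads = lambda s: psycopg2.extras.json.loads(s, parse_float=Decimal)
    psycopg2.extras.register_json(conn, loads=loads, name='jsonb')

    curs = conn.cursor()
    curs.execute("""select '{"a": 100.0, "b": null}'::jsonb""")
    print(curs.fetchone()[0])    # {'a': Decimal('100.0'), 'b': None}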
class RangeTestCase(unittest.TestCase):
def test_noparam(self):
from psycopg2.extras import Range
@@ -1212,12 +1305,86 @@ class RangeTestCase(unittest.TestCase):
assert_not_equal(Range(10, 20), Range(11, 20))
assert_not_equal(Range(10, 20, '[)'), Range(10, 20, '[]'))
def test_not_ordered(self):
def test_eq_wrong_type(self):
from psycopg2.extras import Range
self.assertRaises(TypeError, lambda: Range(empty=True) < Range(0,4))
self.assertRaises(TypeError, lambda: Range(1,2) > Range(0,4))
self.assertRaises(TypeError, lambda: Range(1,2) <= Range())
self.assertRaises(TypeError, lambda: Range(1,2) >= Range())
self.assertNotEqual(Range(10, 20), ())
def test_eq_subclass(self):
from psycopg2.extras import Range, NumericRange
class IntRange(NumericRange): pass
class PositiveIntRange(IntRange): pass
self.assertEqual(Range(10, 20), IntRange(10, 20))
self.assertEqual(PositiveIntRange(10, 20), IntRange(10, 20))
# as the postgres docs describe for the server-side stuff,
# ordering is rather arbitrary, but will remain stable
# and consistent.
def test_lt_ordering(self):
from psycopg2.extras import Range
self.assert_(Range(empty=True) < Range(0, 4))
self.assert_(not Range(1, 2) < Range(0, 4))
self.assert_(Range(0, 4) < Range(1, 2))
self.assert_(not Range(1, 2) < Range())
self.assert_(Range() < Range(1, 2))
self.assert_(not Range(1) < Range(upper=1))
self.assert_(not Range() < Range())
self.assert_(not Range(empty=True) < Range(empty=True))
self.assert_(not Range(1, 2) < Range(1, 2))
with py3_raises_typeerror():
self.assert_(1 < Range(1, 2))
with py3_raises_typeerror():
self.assert_(not Range(1, 2) < 1)
def test_gt_ordering(self):
from psycopg2.extras import Range
self.assert_(not Range(empty=True) > Range(0, 4))
self.assert_(Range(1, 2) > Range(0, 4))
self.assert_(not Range(0, 4) > Range(1, 2))
self.assert_(Range(1, 2) > Range())
self.assert_(not Range() > Range(1, 2))
self.assert_(Range(1) > Range(upper=1))
self.assert_(not Range() > Range())
self.assert_(not Range(empty=True) > Range(empty=True))
self.assert_(not Range(1, 2) > Range(1, 2))
with py3_raises_typeerror():
self.assert_(not 1 > Range(1, 2))
with py3_raises_typeerror():
self.assert_(Range(1, 2) > 1)
def test_le_ordering(self):
from psycopg2.extras import Range
self.assert_(Range(empty=True) <= Range(0, 4))
self.assert_(not Range(1, 2) <= Range(0, 4))
self.assert_(Range(0, 4) <= Range(1, 2))
self.assert_(not Range(1, 2) <= Range())
self.assert_(Range() <= Range(1, 2))
self.assert_(not Range(1) <= Range(upper=1))
self.assert_(Range() <= Range())
self.assert_(Range(empty=True) <= Range(empty=True))
self.assert_(Range(1, 2) <= Range(1, 2))
with py3_raises_typeerror():
self.assert_(1 <= Range(1, 2))
with py3_raises_typeerror():
self.assert_(not Range(1, 2) <= 1)
def test_ge_ordering(self):
from psycopg2.extras import Range
self.assert_(not Range(empty=True) >= Range(0, 4))
self.assert_(Range(1, 2) >= Range(0, 4))
self.assert_(not Range(0, 4) >= Range(1, 2))
self.assert_(Range(1, 2) >= Range())
self.assert_(not Range() >= Range(1, 2))
self.assert_(Range(1) >= Range(upper=1))
self.assert_(Range() >= Range())
self.assert_(Range(empty=True) >= Range(empty=True))
self.assert_(Range(1, 2) >= Range(1, 2))
with py3_raises_typeerror():
self.assert_(not 1 >= Range(1, 2))
with py3_raises_typeerror():
self.assert_(Range(1, 2) >= 1)
def skip_if_no_range(f):
@@ -1454,6 +1621,9 @@ class RangeCasterTestCase(ConnectingTestCase):
self.assert_(not r1.lower_inc)
self.assert_(r1.upper_inc)
# clear the adapters to allow precise count by scripts/refcounter.py
del ext.adapters[rc.range, ext.ISQLQuote]
def test_range_escaping(self):
from psycopg2.extras import register_range
cur = self.conn.cursor()
@@ -1505,6 +1675,9 @@ class RangeCasterTestCase(ConnectingTestCase):
self.assertEqual(ranges[i].lower_inf, r.lower_inf)
self.assertEqual(ranges[i].upper_inf, r.upper_inf)
# clear the adapters to allow precise count by scripts/refcounter.py
del ext.adapters[TextRange, ext.ISQLQuote]
def test_range_not_found(self):
from psycopg2.extras import register_range
cur = self.conn.cursor()
@@ -1538,6 +1711,10 @@ class RangeCasterTestCase(ConnectingTestCase):
register_range, 'rs.r1', 'FailRange', cur)
cur.execute("rollback to savepoint x;")
# clear the adapters to allow precise count by scripts/refcounter.py
for r in [ra1, ra2, rars2, rars3]:
del ext.adapters[r.range, ext.ISQLQuote]
decorate_all_tests(RangeCasterTestCase, skip_if_no_range)

View File

@@ -25,6 +25,7 @@
# Use unittest2 if available. Otherwise mock a skip facility with warnings.
import os
import platform
import sys
from functools import wraps
from testconfig import dsn
@@ -293,6 +294,26 @@ def skip_if_green(reason):
skip_copy_if_green = skip_if_green("copy in async mode currently not supported")
def skip_if_no_getrefcount(f):
@wraps(f)
def skip_if_no_getrefcount_(self):
if not hasattr(sys, 'getrefcount'):
return self.skipTest('skipped, no sys.getrefcount()')
else:
return f(self)
return skip_if_no_getrefcount_
def skip_if_windows(f):
"""Skip a test if run on windows"""
@wraps(f)
def skip_if_windows_(self):
if platform.system() == 'Windows':
return self.skipTest("Not supported on Windows")
else:
return f(self)
return skip_if_windows_
def script_to_py3(script):
"""Convert a script to Python3 syntax if required."""
if sys.version_info[0] < 3:
@@ -320,3 +341,13 @@ def script_to_py3(script):
f2.close()
os.remove(filename)
class py3_raises_typeerror(object):
def __enter__(self):
pass
def __exit__(self, type, exc, tb):
if sys.version_info[0] >= 3:
assert type is TypeError
return True
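A short, hypothetical example of how the two helpers added above are meant to be used, in the style of the existing tests (the class and test names are made up):

    class ExampleTests(ConnectingTestCase):

        @skip_if_windows
        def test_something_unixish(self):
            pass   # skipped when platform.system() == 'Windows'

        def test_range_not_ordered_on_py3(self):
            from psycopg2.extras import Range
            with py3_raises_typeerror():
                Range(1, 2) < 1   # TypeError on Python 3; allowed on Python 2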