Mirror of https://github.com/psycopg/psycopg2.git (synced 2024-11-27 19:33:44 +03:00)

Commit 0bc49147d1: Merge branch 'devel'
NEWS | 15

@@ -1,3 +1,18 @@
+What's new in psycopg 2.4.4
+---------------------------
+
+- 'register_composite()' also works with the types implicitly defined
+  after a table row, not only with the ones created by 'CREATE TYPE'.
+- Values for the isolation level symbolic constants restored to what
+  they were before release 2.4.2 to avoid breaking apps using the
+  values instead of the constants.
+- Named DictCursor/RealDictCursor honour itersize (ticket #80).
+- Fixed rollback on error on Zope (ticket #73).
+- Raise 'DatabaseError' instead of 'Error' with empty libpq errors,
+  consistently with other disconnection-related errors: regression
+  introduced in release 2.4.1 (ticket #82).
+
+
 What's new in psycopg 2.4.3
 ---------------------------
 
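Note: a minimal sketch (not part of the commit) of the "named DictCursor honours itersize" entry above; the DSN, table name and process() handler are placeholders.

    import psycopg2
    import psycopg2.extras

    conn = psycopg2.connect("dbname=test")
    # a named (server-side) cursor with a dict-like row factory
    curs = conn.cursor('stream', cursor_factory=psycopg2.extras.DictCursor)
    curs.itersize = 2000    # rows fetched per round trip to the backend
    curs.execute("SELECT id, payload FROM big_table")
    for row in curs:        # iterates in itersize batches, not one huge fetch
        process(row['payload'])     # hypothetical handler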
README | 2

@@ -8,7 +8,7 @@ and stable as a rock.
 psycopg2 is different from the other database adapter because it was
 designed for heavily multi-threaded applications that create and destroy
 lots of cursors and make a conspicuous number of concurrent INSERTs or
-UPDATEs. psycopg2 also provide full asycronous operations and support
+UPDATEs. psycopg2 also provides full asynchronous operations and support
 for coroutine libraries.
 
 psycopg2 can compile and run on Linux, FreeBSD, Solaris, MacOS X and
@@ -16,7 +16,7 @@
 # their work without bothering about the module dependencies.
 
 
-ALLOWED_PSYCOPG_VERSIONS = ('2.4-beta1', '2.4-beta2', '2.4', '2.4.1', '2.4.2', '2.4.3')
+ALLOWED_PSYCOPG_VERSIONS = ('2.4-beta1', '2.4-beta2', '2.4', '2.4.1', '2.4.2', '2.4.3', '2.4.4')
 
 import sys
 import time
@@ -47,12 +47,15 @@ class DB(TM, dbi_db.DB):
         self.calls = 0
         self.make_mappings()
 
-    def getconn(self, create=True):
+    def getconn(self, init=True):
+        # if init is False we are trying to get hold on an already existing
+        # connection, so we avoid to (re)initialize it risking errors.
         conn = pool.getconn(self.dsn)
-        conn.set_isolation_level(int(self.tilevel))
-        conn.set_client_encoding(self.encoding)
-        for tc in self.typecasts:
-            register_type(tc, conn)
+        if init:
+            conn.set_isolation_level(int(self.tilevel))
+            conn.set_client_encoding(self.encoding)
+            for tc in self.typecasts:
+                register_type(tc, conn)
         return conn
 
     def putconn(self, close=False):
@@ -50,7 +50,7 @@ An example of the available constants defined in the module:
     '42P01'
 
 Constants representing all the error values documented by PostgreSQL versions
-between 8.1 and 9.0 are included in the module.
+between 8.1 and 9.1 are included in the module.
 
 
 .. autofunction:: lookup(code)
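Note: a short sketch (not part of the commit) of the lookup() function documented above; '42P01' is the undefined_table error code already shown in the surrounding context.

    from psycopg2 import errorcodes

    print errorcodes.lookup('42P01')    # maps the code to its constant name: 'UNDEFINED_TABLE'
    print errorcodes.UNDEFINED_TABLE    # the constant itself holds the code: '42P01'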
@@ -128,6 +128,8 @@ Additional data types
 ---------------------
 
 
+.. _adapt-hstore:
+
 .. index::
     pair: hstore; Data types
     pair: dict; Adaptation
@@ -157,6 +159,8 @@ can be enabled using the `register_hstore()` function.
 
 
 
+.. _adapt-composite:
+
 .. index::
     pair: Composite types; Data types
     pair: tuple; Adaptation
@@ -168,8 +172,9 @@ Composite types casting
 .. versionadded:: 2.4
 
 Using `register_composite()` it is possible to cast a PostgreSQL composite
-type (e.g. created with |CREATE TYPE|_ command) into a Python named tuple, or
-into a regular tuple if :py:func:`collections.namedtuple` is not found.
+type (either created with the |CREATE TYPE|_ command or implicitly defined
+after a table row type) into a Python named tuple, or into a regular tuple if
+:py:func:`collections.namedtuple` is not found.
 
 .. |CREATE TYPE| replace:: :sql:`CREATE TYPE`
 .. _CREATE TYPE: http://www.postgresql.org/docs/9.0/static/sql-createtype.html
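Note: a minimal sketch (not part of the commit) of the behaviour described above, casting a composite implicitly defined by a table row rather than by CREATE TYPE; the table and DSN are placeholders.

    import psycopg2
    import psycopg2.extras

    conn = psycopg2.connect("dbname=test")
    curs = conn.cursor()
    curs.execute("CREATE TABLE card (value int, suit text)")
    # the table row type works as a composite: no CREATE TYPE needed
    psycopg2.extras.register_composite('card', curs)
    curs.execute("SELECT (8, 'hearts')::card")
    print curs.fetchone()[0]    # a named tuple, e.g. card(value=8, suit='hearts')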
@@ -4,20 +4,31 @@ Psycopg -- PostgreSQL database adapter for Python
 
 .. sectionauthor:: Daniele Varrazzo <daniele.varrazzo@gmail.com>
 
-Psycopg is a PostgreSQL_ database adapter for the Python_ programming
-language. Its main advantages are that it supports the full Python |DBAPI|_
+Psycopg_ is a PostgreSQL_ database adapter for the Python_ programming
+language. Its main features are that it supports the full Python |DBAPI|_
 and it is thread safe (threads can share the connections). It was designed for
 heavily multi-threaded applications that create and destroy lots of cursors and
-make a conspicuous number of concurrent :sql:`INSERT`\ s or :sql:`UPDATE`\ s.
-The psycopg distribution includes ZPsycopgDA, a Zope_ Database Adapter.
+make a large number of concurrent :sql:`INSERT`\ s or :sql:`UPDATE`\ s.
+The Psycopg distribution includes ZPsycopgDA, a Zope_ Database Adapter.
 
-Psycopg 2 is an almost complete rewrite of the Psycopg 1.1.x branch. Psycopg 2
-features complete libpq_ v3 protocol, |COPY-TO-FROM|__ and full :ref:`object
-adaptation <python-types-adaptation>` for all basic Python types: strings (including unicode), ints,
-longs, floats, buffers (binary objects), booleans, `mx.DateTime`_ and builtin
-datetime types. It also supports unicode queries and Python lists mapped to
-PostgreSQL arrays.
+Psycopg 2 is mostly implemented in C as a libpq_ wrapper, resulting in being
+both efficient and secure. It features client-side and :ref:`server-side
+<server-side-cursors>` cursors, :ref:`asynchronous communication
+<async-support>` and :ref:`notifications <async-notify>`, |COPY-TO-FROM|__
+support, and a flexible :ref:`objects adaptation system
+<python-types-adaptation>`. Many basic Python types are supported
+out-of-the-box and mapped to matching PostgreSQL data types, such as strings
+(both bytes and Unicode), numbers (ints, longs, floats, decimals), booleans and
+datetime objects (both built-in and `mx.DateTime`_), several types of
+:ref:`binary objects <adapt-binary>`. Also available are mappings between lists
+and PostgreSQL arrays of any supported type, between :ref:`dictionaries and
+PostgreSQL hstores <adapt-hstore>`, and between :ref:`tuples/namedtuples and
+PostgreSQL composite types <adapt-composite>`.
+
+Psycopg 2 is both Unicode and Python 3 friendly.
 
 
+.. _Psycopg: http://initd.org/psycopg/
 .. _PostgreSQL: http://www.postgresql.org/
 .. _Python: http://www.python.org/
 .. _Zope: http://www.zope.org/
@@ -256,11 +256,11 @@ the SQL string that would be sent to the database.
     single: memoryview; Adaptation
     single: Binary string
 
-- Binary types: Python types representing binary objects are converted in
+- Binary types: Python types representing binary objects are converted into
   PostgreSQL binary string syntax, suitable for :sql:`bytea` fields. Such
   types are `buffer` (only available in Python 2), `memoryview` (available
   from Python 2.7), `bytearray` (available from Python 2.6) and `bytes`
-  (only form Python 3: the name is available from Python 2.6 but it's only an
+  (only from Python 3: the name is available from Python 2.6 but it's only an
   alias for the type `!str`). Any object implementing the `Revised Buffer
   Protocol`__ should be usable as binary type where the protocol is supported
   (i.e. from Python 2.6). Received data is returned as `!buffer` (in Python 2)
@@ -16,7 +16,7 @@
 
 DSN = 'dbname=test'
 
-## don't modify anything below tis line (except for experimenting)
+## don't modify anything below this line (except for experimenting)
 
 import sys
 import psycopg2
@@ -79,7 +79,7 @@ for row in curs.fetchall():
 print "done"
 print " python type of image data is", type(row[0])
 
-# this rollback is requires because we can't drop a table with a binary cusor
+# this rollback is required because we can't drop a table with a binary cusor
 # declared and still open
 conn.rollback()
 
@@ -17,7 +17,7 @@
 
 DSN = 'dbname=test'
 
-## don't modify anything below tis line (except for experimenting)
+## don't modify anything below this line (except for experimenting)
 
 import sys
 import os
@@ -165,7 +165,7 @@ try:
     curs.copy_from(data, 'test_copy')
 except StandardError, err:
     conn.rollback()
-    print " Catched error (as expected):\n", err
+    print " Caught error (as expected):\n", err
 
 conn.rollback()
 
@@ -17,7 +17,7 @@
 
 DSN = 'dbname=test'
 
-## don't modify anything below tis line (except for experimenting)
+## don't modify anything below this line (except for experimenting)
 
 import sys
 import os
@@ -58,6 +58,6 @@ print "Result of fetchone():", curs.fetchone()
 try:
     curs.fetchone()
 except NoDataError, err:
-    print "Exception caugth:", err
+    print "Exception caught:", err
 
 conn.rollback()
@@ -104,10 +104,10 @@ print adapt(Order()).generateInsert()
 - Discussion
 
 Psycopg 2 has a great new feature: adaptation. The big thing about
-adaptation is that it enable the programmer to glue most of the
+adaptation is that it enables the programmer to glue most of the
 code out there without many difficulties.
 
-This recipe tries to focus the attention on a way to generate SQL queries to
+This recipe tries to focus attention on a way to generate SQL queries to
 insert completely new objects inside a database. As you can see objects do
 not know anything about the code that is handling them. We specify all the
 fields that we need for each object through the persistent_fields dict.
@@ -116,7 +116,7 @@ The most important lines of this recipe are:
     register_adapter(Album, ObjectMapper)
     register_adapter(Order, ObjectMapper)
 
-In these line we notify the system that when we call adapt with an Album instance
+In these lines we notify the system that when we call adapt with an Album instance
 as an argument we want it to istantiate ObjectMapper passing the Album instance
 as argument (self.orig in the ObjectMapper class).
 
@@ -16,7 +16,7 @@
 
 DSN = 'dbname=test'
 
-## don't modify anything below tis line (except for experimenting)
+## don't modify anything below this line (except for experimenting)
 
 import sys
 import psycopg2
@@ -73,7 +73,7 @@ print "Extracting values inserted with mx.DateTime wrappers:"
 curs.execute("SELECT d, t, dt, z FROM test_dt WHERE k = 1")
 for n, x in zip(mx1[1:], curs.fetchone()):
     try:
-        # this will work only is psycopg has been compiled with datetime
+        # this will work only if psycopg has been compiled with datetime
         # as the default typecaster for date/time values
         s = repr(n) + "\n -> " + str(adapt(n)) + \
             "\n -> " + repr(x) + "\n -> " + x.isoformat()
@@ -87,7 +87,7 @@ print "Extracting values inserted with Python datetime wrappers:"
 curs.execute("SELECT d, t, dt, z FROM test_dt WHERE k = 2")
 for n, x in zip(dt1[1:], curs.fetchone()):
     try:
-        # this will work only is psycopg has been compiled with datetime
+        # this will work only if psycopg has been compiled with datetime
         # as the default typecaster for date/time values
         s = repr(n) + "\n -> " + repr(x) + "\n -> " + x.isoformat()
     except:
@@ -16,7 +16,7 @@
 
 DSN = 'dbname=test'
 
-## don't modify anything below tis line (except for experimenting)
+## don't modify anything below this line (except for experimenting)
 
 import sys
 import psycopg2
@@ -52,7 +52,7 @@ conn.commit()
 # does some nice tricks with the transaction and postgres cursors
 # (remember to always commit or rollback before a DECLARE)
 #
-# we don't need to DECLARE ourselves, psycopg now support named
+# we don't need to DECLARE ourselves, psycopg now supports named
 # cursors (but we leave the code here, comments, as an example of
 # what psycopg is doing under the hood)
 #
@@ -16,7 +16,7 @@
 
 DSN = 'dbname=test'
 
-## don't modify anything below tis line (except for experimenting)
+## don't modify anything below this line (except for experimenting)
 
 import sys, psycopg2
 
@@ -16,11 +16,12 @@
 
 DSN = 'dbname=test'
 
-## don't modify anything below tis line (except for experimenting)
+## don't modify anything below this line (except for experimenting)
 
 import sys
-import psycopg2
 import select
+import psycopg2
+from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
 
 if len(sys.argv) > 1:
     DSN = sys.argv[1]
@@ -29,7 +30,7 @@ print "Opening connection using dns:", DSN
 conn = psycopg2.connect(DSN)
 print "Encoding for this connection is", conn.encoding
 
-conn.set_isolation_level(0)
+conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
 curs = conn.cursor()
 
 curs.execute("listen test")
@@ -29,15 +29,16 @@ SELECT_STEP = 500
 SELECT_DIV = 250
 
 # the available modes are:
-# 0 - one connection for all insert and one for all select threads
+# 0 - one connection for all inserts and one for all select threads
 # 1 - connections generated using the connection pool
 
 MODE = 1
 
-## don't modify anything below tis line (except for experimenting)
+## don't modify anything below this line (except for experimenting)
 
 import sys, psycopg2, threading
 from psycopg2.pool import ThreadedConnectionPool
+from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
 
 if len(sys.argv) > 1:
     DSN = sys.argv[1]
@@ -89,21 +90,21 @@ def insert_func(conn_or_pool, rows):
         conn.commit()
 
 ## a nice select function that prints the current number of rows in the
-## database (and transefer them, putting some pressure on the network)
+## database (and transfer them, putting some pressure on the network)
 
 def select_func(conn_or_pool, z):
     name = threading.currentThread().getName()
 
     if MODE == 0:
         conn = conn_or_pool
-        conn.set_isolation_level(0)
+        conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
 
     for i in range(SELECT_SIZE):
         if divmod(i, SELECT_STEP)[1] == 0:
             try:
                 if MODE == 1:
                     conn = conn_or_pool.getconn()
-                    conn.set_isolation_level(0)
+                    conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
                 c = conn.cursor()
                 c.execute("SELECT * FROM test_threads WHERE value2 < %s",
                     (int(i/z),))
@@ -17,7 +17,7 @@
 
 DSN = 'dbname=test'
 
-## don't modify anything below tis line (except for experimenting)
+## don't modify anything below this line (except for experimenting)
 
 import sys
 import psycopg2
@@ -69,10 +69,10 @@ except ImportError:
 
 """Isolation level values."""
 ISOLATION_LEVEL_AUTOCOMMIT = 0
-ISOLATION_LEVEL_READ_UNCOMMITTED = 1
-ISOLATION_LEVEL_READ_COMMITTED = 2
-ISOLATION_LEVEL_REPEATABLE_READ = 3
-ISOLATION_LEVEL_SERIALIZABLE = 4
+ISOLATION_LEVEL_READ_UNCOMMITTED = 4
+ISOLATION_LEVEL_READ_COMMITTED = 1
+ISOLATION_LEVEL_REPEATABLE_READ = 2
+ISOLATION_LEVEL_SERIALIZABLE = 3
 
 """psycopg connection status values."""
 STATUS_SETUP = 0
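Note: a small sketch (not part of the commit) of why applications should use the symbolic names rather than the raw numbers restored above; the DSN is a placeholder.

    import psycopg2
    from psycopg2 import extensions

    conn = psycopg2.connect("dbname=test")
    # portable across releases: the name always means serializable
    conn.set_isolation_level(extensions.ISOLATION_LEVEL_SERIALIZABLE)
    # fragile: the literal 3 meant repeatable read in 2.4.2/2.4.3 and
    # serializable again after this change
    # conn.set_isolation_level(3)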
@@ -86,18 +86,28 @@ class DictCursorBase(_cursor):
             res = _cursor.fetchall(self)
         return res
 
-    def next(self):
+    def __iter__(self):
         if self._prefetch:
-            res = _cursor.fetchone(self)
-            if res is None:
-                raise StopIteration()
+            res = _cursor.fetchmany(self, self.itersize)
+            if not res:
+                return
         if self._query_executed:
             self._build_index()
         if not self._prefetch:
-            res = _cursor.fetchone(self)
-            if res is None:
-                raise StopIteration()
-        return res
+            res = _cursor.fetchmany(self, self.itersize)
+
+        for r in res:
+            yield r
+
+        # the above was the first itersize record. the following are
+        # in a repeated loop.
+        while 1:
+            res = _cursor.fetchmany(self, self.itersize)
+            if not res:
+                return
+            for r in res:
+                yield r
+
 
 class DictConnection(_connection):
     """A connection that uses `DictCursor` automatically."""
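Note: the hunk above replaces the one-row-at-a-time next() with a generator-based __iter__() so that a named (server-side) cursor is consumed in itersize batches. A generic sketch of the same pattern, independent of psycopg2 internals:

    def iter_in_batches(cursor, itersize=100):
        # repeatedly ask the cursor for itersize rows and yield them one by one,
        # so the whole result set is never materialized at once
        while True:
            rows = cursor.fetchmany(itersize)
            if not rows:
                return
            for row in rows:
                yield row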
@@ -694,7 +704,7 @@ WHERE typname = 'hstore';
 
     # revert the status of the connection as before the command
     if (conn_status != _ext.STATUS_IN_TRANSACTION
-            and conn.isolation_level != _ext.ISOLATION_LEVEL_AUTOCOMMIT):
+            and not conn.autocommit):
         conn.rollback()
 
     return tuple(rv0), tuple(rv1)
@@ -831,8 +841,8 @@ class CompositeCaster(object):
         tokens = self.tokenize(s)
         if len(tokens) != len(self.atttypes):
             raise psycopg2.DataError(
-                "expecting %d components for the type %s, %d found instead",
-                (len(self.atttypes), self.name, len(self.tokens)))
+                "expecting %d components for the type %s, %d found instead" %
+                (len(self.atttypes), self.name, len(tokens)))
 
         attrs = [ curs.cast(oid, token)
                   for oid, token in zip(self.atttypes, tokens) ]
@@ -903,7 +913,8 @@ SELECT t.oid, %s, attname, atttypid
 FROM pg_type t
 JOIN pg_namespace ns ON typnamespace = ns.oid
 JOIN pg_attribute a ON attrelid = typrelid
-WHERE typname = %%s and nspname = %%s
+WHERE typname = %%s AND nspname = %%s
+    AND attnum > 0 AND NOT attisdropped
 ORDER BY attnum;
 """ % typarray, (tname, schema))
 
@@ -911,7 +922,7 @@ ORDER BY attnum;
 
     # revert the status of the connection as before the command
     if (conn_status != _ext.STATUS_IN_TRANSACTION
-            and conn.isolation_level != _ext.ISOLATION_LEVEL_AUTOCOMMIT):
+            and not conn.autocommit):
         conn.rollback()
 
     if not recs:
@@ -33,14 +33,14 @@ from psycopg2.extensions import cursor as _2cursor
 from psycopg2.extensions import connection as _2connection
 
 from psycopg2 import *
-del connect
+import psycopg2.extensions as _ext
+_2connect = connect
 
 def connect(*args, **kwargs):
     """connect(dsn, ...) -> new psycopg 1.1.x compatible connection object"""
     kwargs['connection_factory'] = connection
-    conn = _2psycopg.connect(*args, **kwargs)
-    conn.set_isolation_level(2)
+    conn = _2connect(*args, **kwargs)
+    conn.set_isolation_level(_ext.ISOLATION_LEVEL_READ_COMMITTED)
     return conn
 
 class connection(_2connection):
@@ -53,9 +53,9 @@ class connection(_2connection):
     def autocommit(self, on_off=1):
         """autocommit(on_off=1) -> switch autocommit on (1) or off (0)"""
         if on_off > 0:
-            self.set_isolation_level(0)
+            self.set_isolation_level(_ext.ISOLATION_LEVEL_AUTOCOMMIT)
         else:
-            self.set_isolation_level(2)
+            self.set_isolation_level(_ext.ISOLATION_LEVEL_READ_COMMITTED)
 
 
 class cursor(_2cursor):
@@ -32,6 +32,13 @@
 extern "C" {
 #endif
 
+/* isolation levels */
+#define ISOLATION_LEVEL_AUTOCOMMIT 0
+#define ISOLATION_LEVEL_READ_UNCOMMITTED 4
+#define ISOLATION_LEVEL_READ_COMMITTED 1
+#define ISOLATION_LEVEL_REPEATABLE_READ 2
+#define ISOLATION_LEVEL_SERIALIZABLE 3
+
 /* connection status */
 #define CONN_STATUS_SETUP 0
 #define CONN_STATUS_READY 1
@@ -35,15 +35,16 @@
 #include <string.h>
 
 /* Mapping from isolation level name to value exposed by Python.
- * Only used for backward compatibility by the isolation_level property */
+ *
+ * Note: ordering matters: to get a valid pre-PG 8 level from one not valid,
+ * we increase a pointer in this list by one position. */
 const IsolationLevel conn_isolevels[] = {
-    {"", 0},            /* autocommit */
-    {"read uncommitted", 1},
-    {"read committed", 2},
-    {"repeatable read", 3},
-    {"serializable", 4},
-    {"default", -1},
+    {"", ISOLATION_LEVEL_AUTOCOMMIT},
+    {"read uncommitted", ISOLATION_LEVEL_READ_UNCOMMITTED},
+    {"read committed", ISOLATION_LEVEL_READ_COMMITTED},
+    {"repeatable read", ISOLATION_LEVEL_REPEATABLE_READ},
+    {"serializable", ISOLATION_LEVEL_SERIALIZABLE},
+    {"default", -1},    /* never to be found on the server */
     { NULL }
 };
 
@@ -1041,9 +1042,10 @@ conn_switch_isolation_level(connectionObject *self, int level)
 
     /* use only supported levels on older PG versions */
     if (self->server_version < 80000) {
-        if (level == 1 || level == 3) {
-            ++level;
-        }
+        if (level == ISOLATION_LEVEL_READ_UNCOMMITTED)
+            level = ISOLATION_LEVEL_READ_COMMITTED;
+        else if (level == ISOLATION_LEVEL_REPEATABLE_READ)
+            level = ISOLATION_LEVEL_SERIALIZABLE;
     }
 
     if (-1 == (curr_level = conn_get_isolation_level(self))) {
@@ -411,7 +411,9 @@ _psyco_conn_parse_isolevel(connectionObject *self, PyObject *pyval)
             goto exit;
         }
 
-        isolevel = conn_isolevels + level;
+        isolevel = conn_isolevels;
+        while ((++isolevel)->value != level)
+            ;   /* continue */
     }
 
     /* parse from the string -- this includes "default" */
@@ -435,7 +437,8 @@ _psyco_conn_parse_isolevel(connectionObject *self, PyObject *pyval)
 
     /* use only supported levels on older PG versions */
     if (isolevel && self->server_version < 80000) {
-        if (isolevel->value == 1 || isolevel->value == 3) {
+        if (isolevel->value == ISOLATION_LEVEL_READ_UNCOMMITTED
+                || isolevel->value == ISOLATION_LEVEL_REPEATABLE_READ) {
             ++isolevel;
         }
     }
@@ -76,6 +76,7 @@ exception_from_sqlstate(const char *sqlstate)
         break;
     case '2':
         switch (sqlstate[1]) {
+        case '0': /* Class 20 - Case Not Found */
         case '1': /* Class 21 - Cardinality Violation */
             return ProgrammingError;
         case '2': /* Class 22 - Data Exception */
@@ -135,6 +136,8 @@ exception_from_sqlstate(const char *sqlstate)
         return OperationalError;
     case 'F': /* Class F0 - Configuration File Error */
         return InternalError;
+    case 'H': /* Class HV - Foreign Data Wrapper Error (SQL/MED) */
+        return OperationalError;
     case 'P': /* Class P0 - PL/pgSQL Error */
         return InternalError;
     case 'X': /* Class XX - Internal Error */
@@ -157,7 +160,8 @@ pq_raise(connectionObject *conn, cursorObject *curs, PGresult *pgres)
     const char *code = NULL;
 
     if (conn == NULL) {
-        PyErr_SetString(Error, "psycopg went psycotic and raised a null error");
+        PyErr_SetString(DatabaseError,
+            "psycopg went psycotic and raised a null error");
         return;
     }
 
@@ -183,9 +187,11 @@ pq_raise(connectionObject *conn, cursorObject *curs, PGresult *pgres)
 
     /* if the is no error message we probably called pq_raise without reason:
        we need to set an exception anyway because the caller will probably
-       raise and a meaningful message is better than an empty one */
+       raise and a meaningful message is better than an empty one.
+       Note: it can happen without it being our error: see ticket #82 */
     if (err == NULL || err[0] == '\0') {
-        PyErr_SetString(Error, "psycopg went psycotic without error set");
+        PyErr_SetString(DatabaseError,
+            "error with no message from the libpq");
         return;
     }
 
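Note: a sketch (not part of the commit) of the client-side effect of the two PyErr_SetString changes above: an empty libpq error, typically a dropped connection (ticket #82), now surfaces as DatabaseError and can be handled together with other disconnection errors. The DSN and retry logic are placeholders.

    import psycopg2

    conn = psycopg2.connect("dbname=test")
    curs = conn.cursor()
    try:
        curs.execute("SELECT 1")
    except psycopg2.DatabaseError:
        # previously this case could raise the broader psycopg2.Error
        conn.close()    # reconnect/retry logic would go here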
@@ -48,11 +48,7 @@ def read_base_file(filename):
     raise ValueError("can't find the separator. Is this the right file?")
 
 def parse_errors(url):
-    page = urllib2.urlopen(url).read()
-    page = page.replace(    # make things easier
-        '<SPAN CLASS="PRODUCTNAME">PostgreSQL</SPAN>',
-        'PostgreSQL')
-    page = BS(page)
+    page = BS(urllib2.urlopen(url))
     table = page('table')[1]('tbody')[0]
 
     classes = {}
@@ -60,9 +56,9 @@ def parse_errors(url):
 
     for tr in table('tr'):
         if tr.td.get('colspan'): # it's a class
-            label = tr.b.string.encode("ascii")
+            label = ' '.join(' '.join(tr(text=True)).split()) \
+                .replace(u'\u2014', '-').encode('ascii')
             assert label.startswith('Class')
-            label = label.replace("—", "-")
             class_ = label.split()[1]
             assert len(class_) == 2
             classes[class_] = label
@@ -73,14 +69,14 @@ def parse_errors(url):
 
         tds = tr('td')
         if len(tds) == 3:
-            errlabel = tds[1].string.replace(" ", "_").encode("ascii")
+            errlabel = '_'.join(tds[1].string.split()).encode('ascii')
 
             # double check the columns are equal
-            cond_name = tds[2].string.upper().encode("ascii")
+            cond_name = tds[2].string.strip().upper().encode("ascii")
             assert errlabel == cond_name, tr
 
         elif len(tds) == 2:
-            # found in PG 9.1 beta3 docs
+            # found in PG 9.1 docs
             errlabel = tds[1].tt.string.upper().encode("ascii")
 
         else:
setup.py | 2

@@ -73,7 +73,7 @@ except ImportError:
 # Take a look at http://www.python.org/dev/peps/pep-0386/
 # for a consistent versioning pattern.
 
-PSYCOPG_VERSION = '2.4.3'
+PSYCOPG_VERSION = '2.4.4'
 
 version_flags = ['dt', 'dec']
 
@@ -65,6 +65,7 @@ class ExtrasDictCursorTests(unittest.TestCase):
             return row
         self._testWithPlainCursorReal(getter)
 
+
     def testDictCursorWithNamedCursorFetchOne(self):
         self._testWithNamedCursor(lambda curs: curs.fetchone())
 
@@ -80,6 +81,12 @@ class ExtrasDictCursorTests(unittest.TestCase):
             return row
         self._testWithNamedCursor(getter)
 
+    @skip_before_postgres(8, 2)
+    def testDictCursorWithNamedCursorNotGreedy(self):
+        curs = self.conn.cursor('tmp', cursor_factory=psycopg2.extras.DictCursor)
+        self._testNamedCursorNotGreedy(curs)
+
+
     def testDictCursorRealWithNamedCursorFetchOne(self):
         self._testWithNamedCursorReal(lambda curs: curs.fetchone())
 
@@ -95,6 +102,12 @@ class ExtrasDictCursorTests(unittest.TestCase):
             return row
         self._testWithNamedCursorReal(getter)
 
+    @skip_before_postgres(8, 2)
+    def testDictCursorRealWithNamedCursorNotGreedy(self):
+        curs = self.conn.cursor('tmp', cursor_factory=psycopg2.extras.RealDictCursor)
+        self._testNamedCursorNotGreedy(curs)
+
+
     def _testWithPlainCursor(self, getter):
         curs = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
         curs.execute("SELECT * FROM ExtrasDictCursorTests")
@@ -128,6 +141,18 @@ class ExtrasDictCursorTests(unittest.TestCase):
         self.failUnless(row['foo'] == 'qux')
         self.failUnless(row[0] == 'qux')
 
+    def _testNamedCursorNotGreedy(self, curs):
+        curs.itersize = 2
+        curs.execute("""select clock_timestamp() as ts from generate_series(1,3)""")
+        recs = []
+        for t in curs:
+            time.sleep(0.01)
+            recs.append(t)
+
+        # check that the dataset was not fetched in a single gulp
+        self.assert_(recs[1]['ts'] - recs[0]['ts'] < timedelta(seconds=0.005))
+        self.assert_(recs[2]['ts'] - recs[1]['ts'] > timedelta(seconds=0.0099))
+
 
 class NamedTupleCursorTest(unittest.TestCase):
     def setUp(self):
@@ -648,6 +648,61 @@ class AdaptTypeTestCase(unittest.TestCase):
         self.assertEqual(v[1][1], "world")
         self.assertEqual(v[1][2], date(2011,1,3))
 
+    @skip_if_no_composite
+    def test_wrong_schema(self):
+        oid = self._create_type("type_ii", [("a", "integer"), ("b", "integer")])
+        from psycopg2.extras import CompositeCaster
+        c = CompositeCaster('type_ii', oid, [('a', 23), ('b', 23), ('c', 23)])
+        curs = self.conn.cursor()
+        psycopg2.extensions.register_type(c.typecaster, curs)
+        curs.execute("select (1,2)::type_ii")
+        self.assertRaises(psycopg2.DataError, curs.fetchone)
+
+    @skip_if_no_composite
+    @skip_before_postgres(8, 4)
+    def test_from_tables(self):
+        curs = self.conn.cursor()
+        curs.execute("""create table ctest1 (
+            id integer primary key,
+            temp int,
+            label varchar
+        );""")
+
+        curs.execute("""alter table ctest1 drop temp;""")
+
+        curs.execute("""create table ctest2 (
+            id serial primary key,
+            label varchar,
+            test_id integer references ctest1(id)
+        );""")
+
+        curs.execute("""insert into ctest1 (id, label) values
+                (1, 'test1'),
+                (2, 'test2');""")
+        curs.execute("""insert into ctest2 (label, test_id) values
+                ('testa', 1),
+                ('testb', 1),
+                ('testc', 2),
+                ('testd', 2);""")
+
+        psycopg2.extras.register_composite("ctest1", curs)
+        psycopg2.extras.register_composite("ctest2", curs)
+
+        curs.execute("""
+            select ctest1, array_agg(ctest2) as test2s
+            from (
+                select ctest1, ctest2
+                from ctest1 inner join ctest2 on ctest1.id = ctest2.test_id
+                order by ctest1.id, ctest2.label
+            ) x group by ctest1;""")
+
+        r = curs.fetchone()
+        self.assertEqual(r[0], (1, 'test1'))
+        self.assertEqual(r[1], [(1, 'testa', 1), (2, 'testb', 1)])
+        r = curs.fetchone()
+        self.assertEqual(r[0], (2, 'test2'))
+        self.assertEqual(r[1], [(3, 'testc', 2), (4, 'testd', 2)])
+
     def _create_type(self, name, fields):
         curs = self.conn.cursor()
         try: