mirror of
https://github.com/psycopg/psycopg2.git
synced 2025-03-03 15:45:46 +03:00
Merge branch 'devel'
This commit is contained in:
commit
ba34b6942f
26
NEWS
26
NEWS
|
@ -1,3 +1,29 @@
|
|||
What's new in psycopg 2.4.3
|
||||
---------------------------
|
||||
|
||||
- connect() supports all the keyword arguments supported by the
|
||||
database
|
||||
- Added 'new_array_type()' function for easy creation of array
|
||||
typecasters.
|
||||
- Added support for arrays of hstores and composite types (ticket #66).
|
||||
- Fixed segfault in case of transaction started with connection lost
|
||||
(and possibly other events).
|
||||
- Fixed adaptation of Decimal type in sub-interpreters, such as in
|
||||
certain mod_wsgi configurations (ticket #52).
|
||||
- Rollback connections in transaction or in error before putting them
|
||||
back into a pool. Also discard broken connections (ticket #62).
|
||||
- Lazy import of the slow uuid module, thanks to Marko Kreen.
|
||||
- Fixed NamedTupleCursor.executemany() (ticket #65).
|
||||
- Fixed --static-libpq setup option (ticket #64).
|
||||
- Fixed interaction between RealDictCursor and named cursors
|
||||
(ticket #67).
|
||||
- Dropped limit on the columns length in COPY operations (ticket #68).
|
||||
- Fixed reference leak with arguments referenced more than once
|
||||
in queries (ticket #81).
|
||||
- Fixed typecasting of arrays containing consecutive backslashes.
|
||||
- 'errorcodes' map updated to PostgreSQL 9.1.
|
||||
|
||||
|
||||
What's new in psycopg 2.4.2
|
||||
---------------------------
|
||||
|
||||
|
|
|
@ -16,7 +16,7 @@
|
|||
# their work without bothering about the module dependencies.
|
||||
|
||||
|
||||
ALLOWED_PSYCOPG_VERSIONS = ('2.4-beta1', '2.4-beta2', '2.4', '2.4.1', '2.4.2')
|
||||
ALLOWED_PSYCOPG_VERSIONS = ('2.4-beta1', '2.4-beta2', '2.4', '2.4.1', '2.4.2', '2.4.3')
|
||||
|
||||
import sys
|
||||
import time
|
||||
|
|
|
@ -39,7 +39,10 @@ class DB(TM, dbi_db.DB):
|
|||
self.dsn = dsn
|
||||
self.tilevel = tilevel
|
||||
self.typecasts = typecasts
|
||||
self.encoding = enc
|
||||
if enc is None or enc == "":
|
||||
self.encoding = "utf-8"
|
||||
else:
|
||||
self.encoding = enc
|
||||
self.failures = 0
|
||||
self.calls = 0
|
||||
self.make_mappings()
|
||||
|
|
|
@ -147,7 +147,7 @@ geometric type:
|
|||
|
||||
The above function call results in the SQL command::
|
||||
|
||||
INSERT INTO atable (apoint) VALUES ((1.23, 4.56));
|
||||
INSERT INTO atable (apoint) VALUES ('(1.23, 4.56)');
|
||||
|
||||
|
||||
|
||||
|
@ -216,6 +216,9 @@ read:
|
|||
>>> print type(point), point.x, point.y
|
||||
<class 'Point'> 10.2 20.3
|
||||
|
||||
A typecaster created by `!new_type()` can be also used with
|
||||
`~psycopg2.extensions.new_array_type()` to create a typecaster converting a
|
||||
PostgreSQL array into a Python list.
|
||||
|
||||
|
||||
.. index::
|
||||
|
@ -348,7 +351,7 @@ returned, the connection can be safely used:
|
|||
>>> wait(aconn)
|
||||
>>> acurs = aconn.cursor()
|
||||
|
||||
Notice that there are a few other requirements to be met in order to have a
|
||||
Note that there are a few other requirements to be met in order to have a
|
||||
completely non-blocking connection attempt: see the libpq documentation for
|
||||
|PQconnectStart|_.
|
||||
|
||||
|
@ -472,6 +475,7 @@ resources about the topic.
|
|||
.. __: http://www.postgresql.org/docs/9.0/static/libpq-async.html
|
||||
|
||||
.. warning::
|
||||
|
||||
:ref:`COPY commands <copy>` are currently not supported when a wait callback
|
||||
is registered, but they will be probably implemented in a future release.
|
||||
|
||||
|
|
|
@ -113,9 +113,6 @@ rst_epilog = """
|
|||
.. _transaction isolation level:
|
||||
http://www.postgresql.org/docs/9.1/static/transaction-iso.html
|
||||
|
||||
.. _serializable isolation level:
|
||||
http://www.postgresql.org/docs/9.1/static/transaction-iso.html#XACT-SERIALIZABLE
|
||||
|
||||
.. _mx.DateTime: http://www.egenix.com/products/python/mxBase/mxDateTime/
|
||||
|
||||
.. |MVCC| replace:: :abbr:`MVCC (Multiversion concurrency control)`
|
||||
|
|
|
@ -18,16 +18,19 @@ The ``connection`` class
|
|||
Connections are created using the factory function
|
||||
`~psycopg2.connect()`.
|
||||
|
||||
Connections are thread safe and can be shared among many thread. See
|
||||
Connections are thread safe and can be shared among many threads. See
|
||||
:ref:`thread-safety` for details.
|
||||
|
||||
.. method:: cursor([name] [, cursor_factory])
|
||||
.. method:: cursor([name] [, cursor_factory] [, withhold])
|
||||
|
||||
Return a new `cursor` object using the connection.
|
||||
|
||||
If *name* is specified, the returned cursor will be a :ref:`server
|
||||
side cursor <server-side-cursors>` (also known as *named cursor*).
|
||||
Otherwise it will be a regular *client side* cursor.
|
||||
Otherwise it will be a regular *client side* cursor. By default a
|
||||
:sql:`WITHOUT HOLD` cursor is created; to create a :sql:`WITH HOLD`
|
||||
cursor, pass a `!True` value as the *withhold* parameter. See
|
||||
:ref:`server-side-cursors`.
|
||||
|
||||
The name can be a string not valid as a PostgreSQL identifier: for
|
||||
example it may start with a digit and contain non-alphanumeric
|
||||
|
|
|
@ -114,6 +114,19 @@ The ``cursor`` class
|
|||
The `name` attribute is a Psycopg extension to the |DBAPI|.
|
||||
|
||||
|
||||
.. attribute:: withhold
|
||||
|
||||
Read/write attribute: specifies if a named cursor lifetime should
|
||||
extend outside of the current transaction, i.e., it is possible to
|
||||
fetch from the cursor even after a `commection.commit()` (but not after
|
||||
a `connection.rollback()`). See :ref:`server-side-cursors`
|
||||
|
||||
.. versionadded:: 2.4.3
|
||||
|
||||
.. extension::
|
||||
|
||||
The `withhold` attribute is a Psycopg extension to the |DBAPI|.
|
||||
|
||||
|
||||
.. |execute*| replace:: `execute*()`
|
||||
|
||||
|
@ -444,7 +457,7 @@ The ``cursor`` class
|
|||
The :sql:`COPY` command is a PostgreSQL extension to the SQL standard.
|
||||
As such, its support is a Psycopg extension to the |DBAPI|.
|
||||
|
||||
.. method:: copy_from(file, table, sep='\\t', null='\\N', size=8192, columns=None)
|
||||
.. method:: copy_from(file, table, sep='\\t', null='\\\\N', size=8192, columns=None)
|
||||
|
||||
Read data *from* the file-like object *file* appending them to
|
||||
the table named *table*. See :ref:`copy` for an overview.
|
||||
|
@ -454,6 +467,7 @@ The ``cursor`` class
|
|||
:param table: name of the table to copy data into.
|
||||
:param sep: columns separator expected in the file. Defaults to a tab.
|
||||
:param null: textual representation of :sql:`NULL` in the file.
|
||||
The default is the two character string ``\N``.
|
||||
:param size: size of the buffer used to read from the file.
|
||||
:param columns: iterable with name of the columns to import.
|
||||
The length and types should match the content of the file to read.
|
||||
|
@ -476,7 +490,7 @@ The ``cursor`` class
|
|||
are encoded in the connection `~connection.encoding` when sent to
|
||||
the backend.
|
||||
|
||||
.. method:: copy_to(file, table, sep='\\t', null='\\N', columns=None)
|
||||
.. method:: copy_to(file, table, sep='\\t', null='\\\\N', columns=None)
|
||||
|
||||
Write the content of the table named *table* *to* the file-like
|
||||
object *file*. See :ref:`copy` for an overview.
|
||||
|
@ -486,6 +500,7 @@ The ``cursor`` class
|
|||
:param table: name of the table to copy data from.
|
||||
:param sep: columns separator expected in the file. Defaults to a tab.
|
||||
:param null: textual representation of :sql:`NULL` in the file.
|
||||
The default is the two character string ``\N``.
|
||||
:param columns: iterable with name of the columns to export.
|
||||
If not specified, export all the columns.
|
||||
|
||||
|
@ -513,7 +528,7 @@ The ``cursor`` class
|
|||
|
||||
:param sql: the :sql:`COPY` statement to execute.
|
||||
:param file: a file-like object; must be a readable file for
|
||||
:sql:`COPY FROM` or an writeable file for :sql:`COPY TO`.
|
||||
:sql:`COPY FROM` or an writable file for :sql:`COPY TO`.
|
||||
:param size: size of the read buffer to be used in :sql:`COPY FROM`.
|
||||
|
||||
Example:
|
||||
|
|
|
@ -290,7 +290,7 @@ details.
|
|||
.. function:: new_type(oids, name, adapter)
|
||||
|
||||
Create a new type caster to convert from a PostgreSQL type to a Python
|
||||
object. The created object must be registered using
|
||||
object. The object created must be registered using
|
||||
`register_type()` to be used.
|
||||
|
||||
:param oids: tuple of OIDs of the PostgreSQL type to convert.
|
||||
|
@ -309,6 +309,23 @@ details.
|
|||
See :ref:`type-casting-from-sql-to-python` for an usage example.
|
||||
|
||||
|
||||
.. function:: new_array_type(oids, name, base_caster)
|
||||
|
||||
Create a new type caster to convert from a PostgreSQL array type to a list
|
||||
of Python object. The object created must be registered using
|
||||
`register_type()` to be used.
|
||||
|
||||
:param oids: tuple of OIDs of the PostgreSQL type to convert. It should
|
||||
probably be the oid of the array type (e.g. the ``typarray`` field in
|
||||
the ``pg_type`` table.
|
||||
:param name: the name of the new type adapter.
|
||||
:param base_caster: a Psycopg typecaster, e.g. created using the
|
||||
`new_type()` function. The caster should be able to parse a single
|
||||
item of the desired type.
|
||||
|
||||
.. versionadded:: 2.4.3
|
||||
|
||||
|
||||
.. function:: register_type(obj [, scope])
|
||||
|
||||
Register a type caster created using `new_type()`.
|
||||
|
@ -401,26 +418,64 @@ set to one of the following constants:
|
|||
|
||||
.. data:: ISOLATION_LEVEL_READ_COMMITTED
|
||||
|
||||
This is the default value. A new transaction is started at the first
|
||||
`~cursor.execute()` command on a cursor and at each new
|
||||
`!execute()` after a `~connection.commit()` or a
|
||||
This is usually the the default PostgreSQL value, but a different default
|
||||
may be set in the database configuration.
|
||||
|
||||
A new transaction is started at the first `~cursor.execute()` command on a
|
||||
cursor and at each new `!execute()` after a `~connection.commit()` or a
|
||||
`~connection.rollback()`. The transaction runs in the PostgreSQL
|
||||
:sql:`READ COMMITTED` isolation level.
|
||||
:sql:`READ COMMITTED` isolation level: a :sql:`SELECT` query sees only
|
||||
data committed before the query began; it never sees either uncommitted
|
||||
data or changes committed during query execution by concurrent
|
||||
transactions.
|
||||
|
||||
.. seealso:: `Read Committed Isolation Level`__ in PostgreSQL
|
||||
documentation.
|
||||
|
||||
.. __: http://www.postgresql.org/docs/9.1/static/transaction-iso.html#XACT-READ-COMMITTED
|
||||
|
||||
.. data:: ISOLATION_LEVEL_REPEATABLE_READ
|
||||
|
||||
The :sql:`REPEATABLE READ` isolation level is defined in the SQL standard
|
||||
but not available in the |MVCC| model of PostgreSQL: it is replaced by the
|
||||
stricter :sql:`SERIALIZABLE`.
|
||||
As in `!ISOLATION_LEVEL_READ_COMMITTED`, a new transaction is started at
|
||||
the first `~cursor.execute()` command. Transactions run at a
|
||||
:sql:`REPEATABLE READ` isolation level: all the queries in a transaction
|
||||
see a snapshot as of the start of the transaction, not as of the start of
|
||||
the current query within the transaction. However applications using this
|
||||
level must be prepared to retry transactions due to serialization
|
||||
failures.
|
||||
|
||||
While this level provides a guarantee that each transaction sees a
|
||||
completely stable view of the database, this view will not necessarily
|
||||
always be consistent with some serial (one at a time) execution of
|
||||
concurrent transactions of the same level.
|
||||
|
||||
.. versionchanged:: 2.4.2
|
||||
The value was an alias for `!ISOLATION_LEVEL_SERIALIZABLE` before. The
|
||||
two levels are distinct since PostgreSQL 9.1
|
||||
|
||||
.. seealso:: `Repeatable Read Isolation Level`__ in PostgreSQL
|
||||
documentation.
|
||||
|
||||
.. __: http://www.postgresql.org/docs/9.1/static/transaction-iso.html#XACT-REPEATABLE-READ
|
||||
|
||||
.. data:: ISOLATION_LEVEL_SERIALIZABLE
|
||||
|
||||
Transactions are run at a :sql:`SERIALIZABLE` isolation level. This is the
|
||||
strictest transactions isolation level, equivalent to having the
|
||||
transactions executed serially rather than concurrently. However
|
||||
applications using this level must be prepared to retry reansactions due
|
||||
to serialization failures. See `serializable isolation level`_ in
|
||||
PostgreSQL documentation.
|
||||
As in `!ISOLATION_LEVEL_READ_COMMITTED`, a new transaction is started at
|
||||
the first `~cursor.execute()` command. Transactions run at a
|
||||
:sql:`SERIALIZABLE` isolation level. This is the strictest transactions
|
||||
isolation level, equivalent to having the transactions executed serially
|
||||
rather than concurrently. However applications using this level must be
|
||||
prepared to retry reansactions due to serialization failures.
|
||||
|
||||
Starting from PostgreSQL 9.1, this mode monitors for conditions which
|
||||
could make execution of a concurrent set of serializable transactions
|
||||
behave in a manner inconsistent with all possible serial (one at a time)
|
||||
executions of those transaction. In previous version the behaviour was the
|
||||
same of the :sql:`REPEATABLE READ` isolation level.
|
||||
|
||||
.. seealso:: `Serializable Isolation Level`__ in PostgreSQL documentation.
|
||||
|
||||
.. __: http://www.postgresql.org/docs/9.1/static/transaction-iso.html#XACT-SERIALIZABLE
|
||||
|
||||
|
||||
|
||||
|
@ -562,7 +617,7 @@ from the database. See :ref:`unicode-handling` for details.
|
|||
TIME
|
||||
UNICODE
|
||||
|
||||
Typecasters for basic types. Notice that a few other ones (`~psycopg2.BINARY`,
|
||||
Typecasters for basic types. Note that a few other ones (`~psycopg2.BINARY`,
|
||||
`~psycopg2.DATETIME`, `~psycopg2.NUMBER`, `~psycopg2.ROWID`,
|
||||
`~psycopg2.STRING`) are exposed by the `psycopg2` module for |DBAPI|_
|
||||
compliance.
|
||||
|
|
|
@ -18,7 +18,7 @@ Why does `!psycopg2` leave database sessions "idle in transaction"?
|
|||
:sql:`SELECT`. The transaction is not closed until an explicit
|
||||
`~connection.commit()` or `~connection.rollback()`.
|
||||
|
||||
If you are writing a long-living program, you should probably ensure to
|
||||
If you are writing a long-living program, you should probably make sure to
|
||||
call one of the transaction closing methods before leaving the connection
|
||||
unused for a long time (which may also be a few seconds, depending on the
|
||||
concurrency level in your database). Alternatively you can use a
|
||||
|
@ -37,7 +37,7 @@ I receive the error *current transaction is aborted, commands ignored until end
|
|||
|
||||
Why do I get the error *current transaction is aborted, commands ignored until end of transaction block* when I use `!multiprocessing` (or any other forking system) and not when use `!threading`?
|
||||
Psycopg's connections can't be shared across processes (but are thread
|
||||
safe). If you are forking the Python process ensure to create a new
|
||||
safe). If you are forking the Python process make sure to create a new
|
||||
connection in each forked child. See :ref:`thread-safety` for further
|
||||
informations.
|
||||
|
||||
|
|
|
@ -20,7 +20,7 @@ The module interface respects the standard defined in the |DBAPI|_.
|
|||
|
||||
Create a new database session and return a new `connection` object.
|
||||
|
||||
You can specify the connection parameters either as a string::
|
||||
The connection parameters can be specified either as a string::
|
||||
|
||||
conn = psycopg2.connect("dbname=test user=postgres password=secret")
|
||||
|
||||
|
@ -28,17 +28,23 @@ The module interface respects the standard defined in the |DBAPI|_.
|
|||
|
||||
conn = psycopg2.connect(database="test", user="postgres", password="secret")
|
||||
|
||||
The full list of available parameters is:
|
||||
|
||||
The basic connection parameters are:
|
||||
|
||||
- `!dbname` -- the database name (only in dsn string)
|
||||
- `!database` -- the database name (only as keyword argument)
|
||||
- `!user` -- user name used to authenticate
|
||||
- `!password` -- password used to authenticate
|
||||
- `!host` -- database host address (defaults to UNIX socket if not provided)
|
||||
- `!port` -- connection port number (defaults to 5432 if not provided)
|
||||
- `!sslmode` -- `SSL TCP/IP negotiation`__ mode
|
||||
|
||||
.. __: http://www.postgresql.org/docs/9.0/static/libpq-ssl.html#LIBPQ-SSL-SSLMODE-STATEMENTS
|
||||
Any other connection parameter supported by the client library/server can
|
||||
be passed either in the connection string or as keyword. See the
|
||||
PostgreSQL documentation for a complete `list of supported parameters`__.
|
||||
Also note that the same parameters can be passed to the client library
|
||||
using `environment variables`__.
|
||||
|
||||
.. __: http://www.postgresql.org/docs/9.1/static/libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS
|
||||
.. __: http://www.postgresql.org/docs/9.1/static/libpq-envars.html
|
||||
|
||||
Using the *connection_factory* parameter a different class or
|
||||
connections factory can be specified. It should be a callable object
|
||||
|
@ -48,6 +54,10 @@ The module interface respects the standard defined in the |DBAPI|_.
|
|||
Using *async*\=1 an asynchronous connection will be created: see
|
||||
:ref:`async-support` to know about advantages and limitations.
|
||||
|
||||
.. versionchanged:: 2.4.3
|
||||
any keyword argument is passed to the connection. Previously only the
|
||||
basic parameters (plus `!sslmode`) were supported as keywords.
|
||||
|
||||
.. extension::
|
||||
|
||||
The parameters *connection_factory* and *async* are Psycopg extensions
|
||||
|
@ -218,7 +228,9 @@ This is the exception inheritance layout:
|
|||
Type Objects and Constructors
|
||||
-----------------------------
|
||||
|
||||
.. note:: This section is mostly copied verbatim from the |DBAPI|_
|
||||
.. note::
|
||||
|
||||
This section is mostly copied verbatim from the |DBAPI|_
|
||||
specification. While these objects are exposed in compliance to the
|
||||
DB API, Psycopg offers very accurate tools to convert data between Python
|
||||
and PostgreSQL formats. See :ref:`adapting-new-types` and
|
||||
|
|
|
@ -10,7 +10,7 @@
|
|||
|
||||
Creating new PostgreSQL connections can be an expensive operation. This
|
||||
module offers a few pure Python classes implementing simple connection pooling
|
||||
directly into the client application.
|
||||
directly in the client application.
|
||||
|
||||
.. class:: AbstractConnectionPool(minconn, maxconn, \*args, \*\*kwargs)
|
||||
|
||||
|
@ -26,15 +26,17 @@ directly into the client application.
|
|||
|
||||
Get a free connection and assign it to *key* if not `!None`.
|
||||
|
||||
.. method:: putconn(conn, key=None)
|
||||
.. method:: putconn(conn, key=None, close=False)
|
||||
|
||||
Put away a connection.
|
||||
|
||||
If *close* is `!True`, discard the connection from the pool.
|
||||
|
||||
.. method:: closeall
|
||||
|
||||
Close all the connections handled by the pool.
|
||||
|
||||
Notice that all the connections are closed, including ones
|
||||
Note that all the connections are closed, including ones
|
||||
eventually in use by the application.
|
||||
|
||||
|
||||
|
|
|
@ -39,7 +39,7 @@ basic commands::
|
|||
>>> conn.close()
|
||||
|
||||
|
||||
The main entry point of Psycopg are:
|
||||
The main entry points of Psycopg are:
|
||||
|
||||
- The function `~psycopg2.connect()` creates a new database session and
|
||||
returns a new `connection` instance.
|
||||
|
@ -90,7 +90,7 @@ is converted into the SQL command::
|
|||
|
||||
Named arguments are supported too using :samp:`%({name})s` placeholders.
|
||||
Using named arguments the values can be passed to the query in any order and
|
||||
many placeholder can use the same values::
|
||||
many placeholders can use the same values::
|
||||
|
||||
>>> cur.execute(
|
||||
... """INSERT INTO some_table (an_int, a_date, another_date, a_string)
|
||||
|
@ -165,9 +165,9 @@ hang it onto your desk.
|
|||
.. _SQL injection: http://en.wikipedia.org/wiki/SQL_injection
|
||||
.. __: http://xkcd.com/327/
|
||||
|
||||
Psycopg can `convert automatically Python objects into and from SQL
|
||||
literals`__: using this feature your code will result more robust and
|
||||
reliable. It is really the case to stress this point:
|
||||
Psycopg can `automatically convert Python objects to and from SQL
|
||||
literals`__: using this feature your code will be more robust and
|
||||
reliable. We must stress this point:
|
||||
|
||||
.. __: python-types-adaptation_
|
||||
|
||||
|
@ -180,9 +180,9 @@ reliable. It is really the case to stress this point:
|
|||
The correct way to pass variables in a SQL command is using the second
|
||||
argument of the `~cursor.execute()` method::
|
||||
|
||||
>>> SQL = "INSERT INTO authors (name) VALUES (%s);" # Notice: no quotes
|
||||
>>> SQL = "INSERT INTO authors (name) VALUES (%s);" # Note: no quotes
|
||||
>>> data = ("O'Reilly", )
|
||||
>>> cur.execute(SQL, data) # Notice: no % operator
|
||||
>>> cur.execute(SQL, data) # Note: no % operator
|
||||
|
||||
|
||||
|
||||
|
@ -290,8 +290,8 @@ the SQL string that would be sent to the database.
|
|||
emit :sql:`bytea` fields. Starting from Psycopg 2.4.1 the format is
|
||||
correctly supported. If you use a previous version you will need some
|
||||
extra care when receiving bytea from PostgreSQL: you must have at least
|
||||
the libpq 9.0 installed on the client or alternatively you can set the
|
||||
`bytea_output`__ configutation parameter to ``escape``, either in the
|
||||
libpq 9.0 installed on the client or alternatively you can set the
|
||||
`bytea_output`__ configuration parameter to ``escape``, either in the
|
||||
server configuration file or in the client session (using a query such as
|
||||
``SET bytea_output TO escape;``) before receiving binary data.
|
||||
|
||||
|
@ -444,7 +444,7 @@ the connection or globally: see the function
|
|||
|
||||
.. note::
|
||||
|
||||
In Python 2, if you want to receive uniformly all your database input in
|
||||
In Python 2, if you want to uniformly receive all your database input in
|
||||
Unicode, you can register the related typecasters globally as soon as
|
||||
Psycopg is imported::
|
||||
|
||||
|
@ -473,7 +473,7 @@ to a `~psycopg2.tz.FixedOffsetTimezone` instance.
|
|||
>>> cur.fetchone()[0].tzinfo
|
||||
psycopg2.tz.FixedOffsetTimezone(offset=60, name=None)
|
||||
|
||||
Notice that only time zones with an integer number of minutes are supported:
|
||||
Note that only time zones with an integer number of minutes are supported:
|
||||
this is a limitation of the Python `datetime` module. A few historical time
|
||||
zones had seconds in the UTC offset: these time zones will have the offset
|
||||
rounded to the nearest minute, with an error of up to 30 seconds.
|
||||
|
@ -526,7 +526,7 @@ older versions).
|
|||
long-running programs, if no further action is taken, the session will
|
||||
remain "idle in transaction", a condition non desiderable for several
|
||||
reasons (locks are held by the session, tables bloat...). For long lived
|
||||
scripts, either ensure to terminate a transaction as soon as possible or
|
||||
scripts, either make sure to terminate a transaction as soon as possible or
|
||||
use an autocommit connection.
|
||||
|
||||
A few other transaction properties can be set session-wide by the
|
||||
|
@ -562,19 +562,64 @@ subsequently handled using :sql:`MOVE`, :sql:`FETCH` and :sql:`CLOSE` commands.
|
|||
|
||||
Psycopg wraps the database server side cursor in *named cursors*. A named
|
||||
cursor is created using the `~connection.cursor()` method specifying the
|
||||
`name` parameter. Such cursor will behave mostly like a regular cursor,
|
||||
*name* parameter. Such cursor will behave mostly like a regular cursor,
|
||||
allowing the user to move in the dataset using the `~cursor.scroll()`
|
||||
method and to read the data using `~cursor.fetchone()` and
|
||||
`~cursor.fetchmany()` methods.
|
||||
|
||||
Named cursors are also :ref:`iterable <cursor-iterable>` like regular cursors.
|
||||
Notice however that before Psycopg 2.4 iteration was performed fetching one
|
||||
Note however that before Psycopg 2.4 iteration was performed fetching one
|
||||
record at time from the backend, resulting in a large overhead. The attribute
|
||||
`~cursor.itersize` now controls how many records are now fetched at time
|
||||
`~cursor.itersize` now controls how many records are fetched at time
|
||||
during the iteration: the default value of 2000 allows to fetch about 100KB
|
||||
per roundtrip assuming records of 10-20 columns of mixed number and strings;
|
||||
you may decrease this value if you are dealing with huge records.
|
||||
|
||||
Named cursors are usually created :sql:`WITHOUT HOLD`, meaning they live only
|
||||
as long as the current transaction. Trying to fetch from a named cursor after
|
||||
a `~connection.commit()` or to create a named cursor when the `connection`
|
||||
transaction isolation level is set to `AUTOCOMMIT` will result in an exception.
|
||||
It is possible to create a :sql:`WITH HOLD` cursor by specifying a `!True`
|
||||
value for the `withhold` parameter to `~connection.cursor()` or by setting the
|
||||
`~cursor.withhold` attribute to `!True` before calling `~cursor.execute()` on
|
||||
the cursor. It is extremely important to always `~cursor.close()` such cursors,
|
||||
otherwise they will continue to hold server-side resources until the connection
|
||||
will be eventually closed. Also note that while :sql:`WITH HOLD` cursors
|
||||
lifetime extends well after `~connection.commit()`, calling
|
||||
`~connection.rollback()` will automatically close the cursor.
|
||||
|
||||
.. note::
|
||||
|
||||
It is also possible to use a named cursor to consume a cursor created
|
||||
in some other way than using the |DECLARE| executed by
|
||||
`~cursor.execute()`. For example, you may have a PL/pgSQL function
|
||||
returning a cursor::
|
||||
|
||||
CREATE FUNCTION reffunc(refcursor) RETURNS refcursor AS $$
|
||||
BEGIN
|
||||
OPEN $1 FOR SELECT col FROM test;
|
||||
RETURN $1;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
You can read the cursor content by calling the function with a regular,
|
||||
non-named, Psycopg cursor:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
cur1 = conn.cursor()
|
||||
cur1.callproc('reffunc', ['curname'])
|
||||
|
||||
and then use a named cursor in the same transaction to "steal the cursor":
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
cur2 = conn.cursor('curname')
|
||||
for record in cur2: # or cur2.fetchone, fetchmany...
|
||||
# do something with record
|
||||
pass
|
||||
|
||||
|
||||
.. |DECLARE| replace:: :sql:`DECLARE`
|
||||
.. _DECLARE: http://www.postgresql.org/docs/9.0/static/sql-declare.html
|
||||
|
||||
|
@ -589,7 +634,7 @@ Thread and process safety
|
|||
|
||||
The Psycopg module and the `connection` objects are *thread-safe*: many
|
||||
threads can access the same database either using separate sessions and
|
||||
creating a `!connection` per thread or using the same using the same
|
||||
creating a `!connection` per thread or using the same
|
||||
connection and creating separate `cursor`\ s. In |DBAPI|_ parlance, Psycopg is
|
||||
*level 2 thread safe*.
|
||||
|
||||
|
@ -603,7 +648,7 @@ the same connection, all the commands will be executed in the same session
|
|||
The above observations are only valid for regular threads: they don't apply to
|
||||
forked processes nor to green threads. `libpq` connections `shouldn't be used by a
|
||||
forked processes`__, so when using a module such as `multiprocessing` or a
|
||||
forking web deploy method such as FastCGI ensure to create the connections
|
||||
forking web deploy method such as FastCGI make sure to create the connections
|
||||
*after* the fork.
|
||||
|
||||
.. __: http://www.postgresql.org/docs/9.0/static/libpq-connect.html#LIBPQ-CONNECT
|
||||
|
@ -654,7 +699,7 @@ examples.
|
|||
Access to PostgreSQL large objects
|
||||
----------------------------------
|
||||
|
||||
PostgreSQL offers support to `large objects`__, which provide stream-style
|
||||
PostgreSQL offers support for `large objects`__, which provide stream-style
|
||||
access to user data that is stored in a special large-object structure. They
|
||||
are useful with data values too large to be manipulated conveniently as a
|
||||
whole.
|
||||
|
@ -689,7 +734,7 @@ Two-Phase Commit protocol support
|
|||
Psycopg exposes the two-phase commit features available since PostgreSQL 8.1
|
||||
implementing the *two-phase commit extensions* proposed by the |DBAPI|.
|
||||
|
||||
The |DBAPI| model of two-phase commit is inspired to the `XA specification`__,
|
||||
The |DBAPI| model of two-phase commit is inspired by the `XA specification`__,
|
||||
according to which transaction IDs are formed from three components:
|
||||
|
||||
- a format ID (non-negative 32 bit integer)
|
||||
|
|
|
@ -73,7 +73,7 @@ from psycopg2._psycopg import Error, Warning, DataError, DatabaseError, Programm
|
|||
from psycopg2._psycopg import IntegrityError, InterfaceError, InternalError
|
||||
from psycopg2._psycopg import NotSupportedError, OperationalError
|
||||
|
||||
from psycopg2._psycopg import connect, apilevel, threadsafety, paramstyle
|
||||
from psycopg2._psycopg import _connect, apilevel, threadsafety, paramstyle
|
||||
from psycopg2._psycopg import __version__
|
||||
|
||||
from psycopg2 import tz
|
||||
|
@ -85,5 +85,99 @@ import psycopg2.extensions as _ext
|
|||
_ext.register_adapter(tuple, _ext.SQL_IN)
|
||||
_ext.register_adapter(type(None), _ext.NoneAdapter)
|
||||
|
||||
# Register the Decimal adapter here instead of in the C layer.
|
||||
# This way a new class is registered for each sub-interpreter.
|
||||
# See ticket #52
|
||||
try:
|
||||
from decimal import Decimal
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
from psycopg2._psycopg import Decimal as Adapter
|
||||
_ext.register_adapter(Decimal, Adapter)
|
||||
del Decimal, Adapter
|
||||
|
||||
import re
|
||||
|
||||
def _param_escape(s,
|
||||
re_escape=re.compile(r"([\\'])"),
|
||||
re_space=re.compile(r'\s')):
|
||||
"""
|
||||
Apply the escaping rule required by PQconnectdb
|
||||
"""
|
||||
if not s: return "''"
|
||||
|
||||
s = re_escape.sub(r'\\\1', s)
|
||||
if re_space.search(s):
|
||||
s = "'" + s + "'"
|
||||
|
||||
return s
|
||||
|
||||
del re
|
||||
|
||||
|
||||
def connect(dsn=None,
|
||||
database=None, user=None, password=None, host=None, port=None,
|
||||
connection_factory=None, async=False, **kwargs):
|
||||
"""
|
||||
Create a new database connection.
|
||||
|
||||
The connection parameters can be specified either as a string:
|
||||
|
||||
conn = psycopg2.connect("dbname=test user=postgres password=secret")
|
||||
|
||||
or using a set of keyword arguments:
|
||||
|
||||
conn = psycopg2.connect(database="test", user="postgres", password="secret")
|
||||
|
||||
The basic connection parameters are:
|
||||
|
||||
- *dbname*: the database name (only in dsn string)
|
||||
- *database*: the database name (only as keyword argument)
|
||||
- *user*: user name used to authenticate
|
||||
- *password*: password used to authenticate
|
||||
- *host*: database host address (defaults to UNIX socket if not provided)
|
||||
- *port*: connection port number (defaults to 5432 if not provided)
|
||||
|
||||
Using the *connection_factory* parameter a different class or connections
|
||||
factory can be specified. It should be a callable object taking a dsn
|
||||
argument.
|
||||
|
||||
Using *async*=True an asynchronous connection will be created.
|
||||
|
||||
Any other keyword parameter will be passed to the underlying client
|
||||
library: the list of supported parameter depends on the library version.
|
||||
|
||||
"""
|
||||
if dsn is None:
|
||||
# Note: reproducing the behaviour of the previous C implementation:
|
||||
# keyword are silently swallowed if a DSN is specified. I would have
|
||||
# raised an exception. File under "histerical raisins".
|
||||
items = []
|
||||
if database is not None:
|
||||
items.append(('dbname', database))
|
||||
if user is not None:
|
||||
items.append(('user', user))
|
||||
if password is not None:
|
||||
items.append(('password', password))
|
||||
if host is not None:
|
||||
items.append(('host', host))
|
||||
# Reproducing the previous C implementation behaviour: swallow a
|
||||
# negative port. The libpq would raise an exception for it.
|
||||
if port is not None and int(port) > 0:
|
||||
items.append(('port', port))
|
||||
|
||||
items.extend(
|
||||
[(k, v) for (k, v) in kwargs.iteritems() if v is not None])
|
||||
dsn = " ".join(["%s=%s" % (k, _param_escape(str(v)))
|
||||
for (k, v) in items])
|
||||
|
||||
if not dsn:
|
||||
raise InterfaceError('missing dsn and no parameters')
|
||||
|
||||
return _connect(dsn,
|
||||
connection_factory=connection_factory, async=async)
|
||||
|
||||
|
||||
__all__ = filter(lambda k: not k.startswith('_'), locals().keys())
|
||||
|
||||
|
|
|
@ -86,6 +86,7 @@ CLASS_OBJECT_NOT_IN_PREREQUISITE_STATE = '55'
|
|||
CLASS_OPERATOR_INTERVENTION = '57'
|
||||
CLASS_SYSTEM_ERROR = '58'
|
||||
CLASS_CONFIGURATION_FILE_ERROR = 'F0'
|
||||
CLASS_FOREIGN_DATA_WRAPPER_ERROR = 'HV'
|
||||
CLASS_PL_PGSQL_ERROR = 'P0'
|
||||
CLASS_INTERNAL_ERROR = 'XX'
|
||||
|
||||
|
@ -319,6 +320,8 @@ INVALID_OBJECT_DEFINITION = '42P17'
|
|||
INDETERMINATE_DATATYPE = '42P18'
|
||||
INVALID_RECURSION = '42P19'
|
||||
WINDOWING_ERROR = '42P20'
|
||||
COLLATION_MISMATCH = '42P21'
|
||||
INDETERMINATE_COLLATION = '42P22'
|
||||
|
||||
# Class 44 - WITH CHECK OPTION Violation
|
||||
WITH_CHECK_OPTION_VIOLATION = '44000'
|
||||
|
@ -347,6 +350,7 @@ QUERY_CANCELED = '57014'
|
|||
ADMIN_SHUTDOWN = '57P01'
|
||||
CRASH_SHUTDOWN = '57P02'
|
||||
CANNOT_CONNECT_NOW = '57P03'
|
||||
DATABASE_DROPPED = '57P04'
|
||||
|
||||
# Class 58 - System Error (errors external to PostgreSQL itself)
|
||||
IO_ERROR = '58030'
|
||||
|
@ -357,6 +361,35 @@ DUPLICATE_FILE = '58P02'
|
|||
CONFIG_FILE_ERROR = 'F0000'
|
||||
LOCK_FILE_EXISTS = 'F0001'
|
||||
|
||||
# Class HV - Foreign Data Wrapper Error (SQL/MED)
|
||||
FDW_ERROR = 'HV000'
|
||||
FDW_OUT_OF_MEMORY = 'HV001'
|
||||
FDW_DYNAMIC_PARAMETER_VALUE_NEEDED = 'HV002'
|
||||
FDW_INVALID_DATA_TYPE = 'HV004'
|
||||
FDW_COLUMN_NAME_NOT_FOUND = 'HV005'
|
||||
FDW_INVALID_DATA_TYPE_DESCRIPTORS = 'HV006'
|
||||
FDW_INVALID_COLUMN_NAME = 'HV007'
|
||||
FDW_INVALID_COLUMN_NUMBER = 'HV008'
|
||||
FDW_INVALID_USE_OF_NULL_POINTER = 'HV009'
|
||||
FDW_INVALID_STRING_FORMAT = 'HV00A'
|
||||
FDW_INVALID_HANDLE = 'HV00B'
|
||||
FDW_INVALID_OPTION_INDEX = 'HV00C'
|
||||
FDW_INVALID_OPTION_NAME = 'HV00D'
|
||||
FDW_OPTION_NAME_NOT_FOUND = 'HV00J'
|
||||
FDW_REPLY_HANDLE = 'HV00K'
|
||||
FDW_UNABLE_TO_CREATE_EXECUTION = 'HV00L'
|
||||
FDW_UNABLE_TO_CREATE_REPLY = 'HV00M'
|
||||
FDW_UNABLE_TO_ESTABLISH_CONNECTION = 'HV00N'
|
||||
FDW_NO_SCHEMAS = 'HV00P'
|
||||
FDW_SCHEMA_NOT_FOUND = 'HV00Q'
|
||||
FDW_TABLE_NOT_FOUND = 'HV00R'
|
||||
FDW_FUNCTION_SEQUENCE_ERROR = 'HV010'
|
||||
FDW_TOO_MANY_HANDLES = 'HV014'
|
||||
FDW_INCONSISTENT_DESCRIPTOR_INFORMATION = 'HV021'
|
||||
FDW_INVALID_ATTRIBUTE_VALUE = 'HV024'
|
||||
FDW_INVALID_STRING_LENGTH_OR_BUFFER_LENGTH = 'HV090'
|
||||
FDW_INVALID_DESCRIPTOR_FIELD_IDENTIFIER = 'HV091'
|
||||
|
||||
# Class P0 - PL/pgSQL Error
|
||||
PLPGSQL_ERROR = 'P0000'
|
||||
RAISE_EXCEPTION = 'P0001'
|
||||
|
|
|
@ -57,7 +57,7 @@ except ImportError:
|
|||
pass
|
||||
|
||||
from psycopg2._psycopg import adapt, adapters, encodings, connection, cursor, lobject, Xid
|
||||
from psycopg2._psycopg import string_types, binary_types, new_type, register_type
|
||||
from psycopg2._psycopg import string_types, binary_types, new_type, new_array_type, register_type
|
||||
from psycopg2._psycopg import ISQLQuote, Notify
|
||||
|
||||
from psycopg2._psycopg import QueryCanceledError, TransactionRollbackError
|
||||
|
@ -158,5 +158,6 @@ for k, v in encodings.items():
|
|||
k = k.replace('_', '').replace('-', '').upper()
|
||||
encodings[k] = v
|
||||
|
||||
del k, v
|
||||
|
||||
__all__ = filter(lambda k: not k.startswith('_'), locals().keys())
|
||||
|
|
168
lib/extras.py
168
lib/extras.py
|
@ -237,6 +237,10 @@ class RealDictRow(dict):
|
|||
|
||||
def __init__(self, cursor):
|
||||
dict.__init__(self)
|
||||
# Required for named cursors
|
||||
if cursor.description and not cursor.column_mapping:
|
||||
cursor._build_index()
|
||||
|
||||
self._column_mapping = cursor.column_mapping
|
||||
|
||||
def __setitem__(self, name, value):
|
||||
|
@ -275,7 +279,7 @@ class NamedTupleCursor(_cursor):
|
|||
|
||||
def executemany(self, query, vars):
|
||||
self.Record = None
|
||||
return _cursor.executemany(self, vars)
|
||||
return _cursor.executemany(self, query, vars)
|
||||
|
||||
def callproc(self, procname, vars=None):
|
||||
self.Record = None
|
||||
|
@ -427,65 +431,56 @@ class MinTimeLoggingCursor(LoggingCursor):
|
|||
|
||||
# a dbtype and adapter for Python UUID type
|
||||
|
||||
try:
|
||||
class UUID_adapter(object):
|
||||
"""Adapt Python's uuid.UUID__ type to PostgreSQL's uuid__.
|
||||
|
||||
.. __: http://docs.python.org/library/uuid.html
|
||||
.. __: http://www.postgresql.org/docs/8.4/static/datatype-uuid.html
|
||||
"""
|
||||
|
||||
def __init__(self, uuid):
|
||||
self._uuid = uuid
|
||||
|
||||
def prepare(self, conn):
|
||||
pass
|
||||
|
||||
def getquoted(self):
|
||||
return "'"+str(self._uuid)+"'::uuid"
|
||||
|
||||
__str__ = getquoted
|
||||
|
||||
def register_uuid(oids=None, conn_or_curs=None):
|
||||
"""Create the UUID type and an uuid.UUID adapter."""
|
||||
|
||||
import uuid
|
||||
|
||||
class UUID_adapter(object):
|
||||
"""Adapt Python's uuid.UUID__ type to PostgreSQL's uuid__.
|
||||
if not oids:
|
||||
oid1 = 2950
|
||||
oid2 = 2951
|
||||
elif type(oids) == list:
|
||||
oid1, oid2 = oids
|
||||
else:
|
||||
oid1 = oids
|
||||
oid2 = 2951
|
||||
|
||||
.. __: http://docs.python.org/library/uuid.html
|
||||
.. __: http://www.postgresql.org/docs/8.4/static/datatype-uuid.html
|
||||
"""
|
||||
|
||||
def __init__(self, uuid):
|
||||
self._uuid = uuid
|
||||
|
||||
def prepare(self, conn):
|
||||
pass
|
||||
|
||||
def getquoted(self):
|
||||
return "'"+str(self._uuid)+"'::uuid"
|
||||
|
||||
__str__ = getquoted
|
||||
|
||||
def register_uuid(oids=None, conn_or_curs=None):
|
||||
"""Create the UUID type and an uuid.UUID adapter."""
|
||||
if not oids:
|
||||
oid1 = 2950
|
||||
oid2 = 2951
|
||||
elif type(oids) == list:
|
||||
oid1, oid2 = oids
|
||||
def parseUUIDARRAY(data, cursor):
|
||||
if data is None:
|
||||
return None
|
||||
elif data == '{}':
|
||||
return []
|
||||
else:
|
||||
oid1 = oids
|
||||
oid2 = 2951
|
||||
return [((len(x) > 0 and x != 'NULL') and uuid.UUID(x) or None)
|
||||
for x in data[1:-1].split(',')]
|
||||
|
||||
def parseUUIDARRAY(data, cursor):
|
||||
if data is None:
|
||||
return None
|
||||
elif data == '{}':
|
||||
return []
|
||||
else:
|
||||
return [((len(x) > 0 and x != 'NULL') and uuid.UUID(x) or None)
|
||||
for x in data[1:-1].split(',')]
|
||||
_ext.UUID = _ext.new_type((oid1, ), "UUID",
|
||||
lambda data, cursor: data and uuid.UUID(data) or None)
|
||||
_ext.UUIDARRAY = _ext.new_type((oid2,), "UUID[]", parseUUIDARRAY)
|
||||
|
||||
_ext.UUID = _ext.new_type((oid1, ), "UUID",
|
||||
lambda data, cursor: data and uuid.UUID(data) or None)
|
||||
_ext.UUIDARRAY = _ext.new_type((oid2,), "UUID[]", parseUUIDARRAY)
|
||||
_ext.register_type(_ext.UUID, conn_or_curs)
|
||||
_ext.register_type(_ext.UUIDARRAY, conn_or_curs)
|
||||
_ext.register_adapter(uuid.UUID, UUID_adapter)
|
||||
|
||||
_ext.register_type(_ext.UUID, conn_or_curs)
|
||||
_ext.register_type(_ext.UUIDARRAY, conn_or_curs)
|
||||
_ext.register_adapter(uuid.UUID, UUID_adapter)
|
||||
|
||||
return _ext.UUID
|
||||
|
||||
except ImportError, e:
|
||||
def register_uuid(oid=None):
|
||||
"""Create the UUID type and an uuid.UUID adapter.
|
||||
|
||||
This is a fake function that will always raise an error because the
|
||||
import of the uuid module failed.
|
||||
"""
|
||||
raise e
|
||||
return _ext.UUID
|
||||
|
||||
|
||||
# a type, dbtype and adapter for PostgreSQL inet type
|
||||
|
@ -704,7 +699,8 @@ WHERE typname = 'hstore';
|
|||
|
||||
return tuple(rv0), tuple(rv1)
|
||||
|
||||
def register_hstore(conn_or_curs, globally=False, unicode=False, oid=None):
|
||||
def register_hstore(conn_or_curs, globally=False, unicode=False,
|
||||
oid=None, array_oid=None):
|
||||
"""Register adapter and typecaster for `!dict`\-\ |hstore| conversions.
|
||||
|
||||
:param conn_or_curs: a connection or cursor: the typecaster will be
|
||||
|
@ -714,14 +710,18 @@ def register_hstore(conn_or_curs, globally=False, unicode=False, oid=None):
|
|||
will be `!unicode` instead of `!str`. The option is not available on
|
||||
Python 3
|
||||
:param oid: the OID of the |hstore| type if known. If not, it will be
|
||||
queried on *conn_or_curs*
|
||||
queried on *conn_or_curs*.
|
||||
:param array_oid: the OID of the |hstore| array type if known. If not, it
|
||||
will be queried on *conn_or_curs*.
|
||||
|
||||
The connection or cursor passed to the function will be used to query the
|
||||
database and look for the OID of the |hstore| type (which may be different
|
||||
across databases). If querying is not desirable (e.g. with
|
||||
:ref:`asynchronous connections <async-support>`) you may specify it in the
|
||||
*oid* parameter (it can be found using a query such as :sql:`SELECT
|
||||
'hstore'::regtype::oid;`).
|
||||
*oid* parameter, which can be found using a query such as :sql:`SELECT
|
||||
'hstore'::regtype::oid`. Analogously you can obtain a value for *array_oid*
|
||||
using a query such as :sql:`SELECT 'hstore[]'::regtype::oid`.
|
||||
|
||||
|
||||
Note that, when passing a dictionary from Python to the database, both
|
||||
strings and unicode keys and values are supported. Dictionaries returned
|
||||
|
@ -735,6 +735,10 @@ def register_hstore(conn_or_curs, globally=False, unicode=False, oid=None):
|
|||
added the *oid* parameter. If not specified, the typecaster is
|
||||
installed also if |hstore| is not installed in the :sql:`public`
|
||||
schema.
|
||||
|
||||
.. versionchanged:: 2.4.3
|
||||
added support for |hstore| array.
|
||||
|
||||
"""
|
||||
if oid is None:
|
||||
oid = HstoreAdapter.get_oids(conn_or_curs)
|
||||
|
@ -743,11 +747,18 @@ def register_hstore(conn_or_curs, globally=False, unicode=False, oid=None):
|
|||
"hstore type not found in the database. "
|
||||
"please install it from your 'contrib/hstore.sql' file")
|
||||
else:
|
||||
oid = oid[0] # for the moment we don't have a HSTOREARRAY
|
||||
array_oid = oid[1]
|
||||
oid = oid[0]
|
||||
|
||||
if isinstance(oid, int):
|
||||
oid = (oid,)
|
||||
|
||||
if array_oid is not None:
|
||||
if isinstance(array_oid, int):
|
||||
array_oid = (array_oid,)
|
||||
else:
|
||||
array_oid = tuple([x for x in array_oid if x])
|
||||
|
||||
# create and register the typecaster
|
||||
if sys.version_info[0] < 3 and unicode:
|
||||
cast = HstoreAdapter.parse_unicode
|
||||
|
@ -758,11 +769,18 @@ def register_hstore(conn_or_curs, globally=False, unicode=False, oid=None):
|
|||
_ext.register_type(HSTORE, not globally and conn_or_curs or None)
|
||||
_ext.register_adapter(dict, HstoreAdapter)
|
||||
|
||||
if array_oid:
|
||||
HSTOREARRAY = _ext.new_array_type(array_oid, "HSTOREARRAY", HSTORE)
|
||||
_ext.register_type(HSTOREARRAY, not globally and conn_or_curs or None)
|
||||
|
||||
|
||||
class CompositeCaster(object):
|
||||
"""Helps conversion of a PostgreSQL composite type into a Python object.
|
||||
|
||||
The class is usually created by the `register_composite()` function.
|
||||
You may want to create and register manually instances of the class if
|
||||
querying the database at registration time is not desirable (such as when
|
||||
using an :ref:`asynchronous connections <async-support>`).
|
||||
|
||||
.. attribute:: name
|
||||
|
||||
|
@ -772,6 +790,10 @@ class CompositeCaster(object):
|
|||
|
||||
The oid of the PostgreSQL type.
|
||||
|
||||
.. attribute:: array_oid
|
||||
|
||||
The oid of the PostgreSQL array type, if available.
|
||||
|
||||
.. attribute:: type
|
||||
|
||||
The type of the Python objects returned. If :py:func:`collections.namedtuple()`
|
||||
|
@ -787,14 +809,20 @@ class CompositeCaster(object):
|
|||
List of component type oids of the type to be casted.
|
||||
|
||||
"""
|
||||
def __init__(self, name, oid, attrs):
|
||||
def __init__(self, name, oid, attrs, array_oid=None):
|
||||
self.name = name
|
||||
self.oid = oid
|
||||
self.array_oid = array_oid
|
||||
|
||||
self.attnames = [ a[0] for a in attrs ]
|
||||
self.atttypes = [ a[1] for a in attrs ]
|
||||
self._create_type(name, self.attnames)
|
||||
self.typecaster = _ext.new_type((oid,), name, self.parse)
|
||||
if array_oid:
|
||||
self.array_typecaster = _ext.new_array_type(
|
||||
(array_oid,), "%sARRAY" % name, self.typecaster)
|
||||
else:
|
||||
self.array_typecaster = None
|
||||
|
||||
def parse(self, s, curs):
|
||||
if s is None:
|
||||
|
@ -866,15 +894,18 @@ class CompositeCaster(object):
|
|||
tname = name
|
||||
schema = 'public'
|
||||
|
||||
# column typarray not available before PG 8.3
|
||||
typarray = conn.server_version >= 80300 and "typarray" or "NULL"
|
||||
|
||||
# get the type oid and attributes
|
||||
curs.execute("""\
|
||||
SELECT t.oid, attname, atttypid
|
||||
SELECT t.oid, %s, attname, atttypid
|
||||
FROM pg_type t
|
||||
JOIN pg_namespace ns ON typnamespace = ns.oid
|
||||
JOIN pg_attribute a ON attrelid = typrelid
|
||||
WHERE typname = %s and nspname = %s
|
||||
WHERE typname = %%s and nspname = %%s
|
||||
ORDER BY attnum;
|
||||
""", (tname, schema))
|
||||
""" % typarray, (tname, schema))
|
||||
|
||||
recs = curs.fetchall()
|
||||
|
||||
|
@ -888,9 +919,11 @@ ORDER BY attnum;
|
|||
"PostgreSQL type '%s' not found" % name)
|
||||
|
||||
type_oid = recs[0][0]
|
||||
type_attrs = [ (r[1], r[2]) for r in recs ]
|
||||
array_oid = recs[0][1]
|
||||
type_attrs = [ (r[2], r[3]) for r in recs ]
|
||||
|
||||
return CompositeCaster(tname, type_oid, type_attrs)
|
||||
return CompositeCaster(tname, type_oid, type_attrs,
|
||||
array_oid=array_oid)
|
||||
|
||||
def register_composite(name, conn_or_curs, globally=False):
|
||||
"""Register a typecaster to convert a composite type into a tuple.
|
||||
|
@ -904,10 +937,17 @@ def register_composite(name, conn_or_curs, globally=False):
|
|||
*conn_or_curs*, otherwise register it globally
|
||||
:return: the registered `CompositeCaster` instance responsible for the
|
||||
conversion
|
||||
|
||||
.. versionchanged:: 2.4.3
|
||||
added support for array of composite types
|
||||
|
||||
"""
|
||||
caster = CompositeCaster._from_db(name, conn_or_curs)
|
||||
_ext.register_type(caster.typecaster, not globally and conn_or_curs or None)
|
||||
|
||||
if caster.array_typecaster is not None:
|
||||
_ext.register_type(caster.array_typecaster, not globally and conn_or_curs or None)
|
||||
|
||||
return caster
|
||||
|
||||
|
||||
|
|
19
lib/pool.py
19
lib/pool.py
|
@ -25,6 +25,7 @@ This module implements thread-safe (and not) connection pools.
|
|||
# License for more details.
|
||||
|
||||
import psycopg2
|
||||
import psycopg2.extensions as _ext
|
||||
|
||||
try:
|
||||
import logging
|
||||
|
@ -115,13 +116,27 @@ class AbstractConnectionPool(object):
|
|||
def _putconn(self, conn, key=None, close=False):
|
||||
"""Put away a connection."""
|
||||
if self.closed: raise PoolError("connection pool is closed")
|
||||
if key is None: key = self._rused[id(conn)]
|
||||
if key is None: key = self._rused.get(id(conn))
|
||||
|
||||
if not key:
|
||||
raise PoolError("trying to put unkeyed connection")
|
||||
|
||||
if len(self._pool) < self.minconn and not close:
|
||||
self._pool.append(conn)
|
||||
# Return the connection into a consistent state before putting
|
||||
# it back into the pool
|
||||
if not conn.closed:
|
||||
status = conn.get_transaction_status()
|
||||
if status == _ext.TRANSACTION_STATUS_UNKNOWN:
|
||||
# server connection lost
|
||||
conn.close()
|
||||
elif status != _ext.TRANSACTION_STATUS_IDLE:
|
||||
# connection in error or in transaction
|
||||
conn.rollback()
|
||||
self._pool.append(conn)
|
||||
else:
|
||||
# regular idle connection
|
||||
self._pool.append(conn)
|
||||
# If the connection is closed, we just discard it.
|
||||
else:
|
||||
conn.close()
|
||||
|
||||
|
|
|
@ -111,13 +111,12 @@ Bytes_Format(PyObject *format, PyObject *args)
|
|||
Py_ssize_t arglen, argidx;
|
||||
Py_ssize_t reslen, rescnt, fmtcnt;
|
||||
int args_owned = 0;
|
||||
PyObject *result, *orig_args;
|
||||
PyObject *result;
|
||||
PyObject *dict = NULL;
|
||||
if (format == NULL || !Bytes_Check(format) || args == NULL) {
|
||||
PyErr_BadInternalCall();
|
||||
return NULL;
|
||||
}
|
||||
orig_args = args;
|
||||
fmt = Bytes_AS_STRING(format);
|
||||
fmtcnt = Bytes_GET_SIZE(format);
|
||||
reslen = rescnt = fmtcnt + 100;
|
||||
|
|
|
@ -42,7 +42,7 @@
|
|||
/* cursor method - allocate a new cursor */
|
||||
|
||||
#define psyco_conn_cursor_doc \
|
||||
"cursor(cursor_factory=extensions.cursor) -- new cursor\n\n" \
|
||||
"cursor(name=None, cursor_factory=extensions.cursor, withhold=None) -- new cursor\n\n" \
|
||||
"Return a new cursor.\n\nThe ``cursor_factory`` argument can be used to\n" \
|
||||
"create non-standard cursors by passing a class different from the\n" \
|
||||
"default. Note that the new class *should* be a sub-class of\n" \
|
||||
|
@ -53,15 +53,23 @@ static PyObject *
|
|||
psyco_conn_cursor(connectionObject *self, PyObject *args, PyObject *keywds)
|
||||
{
|
||||
const char *name = NULL;
|
||||
PyObject *obj, *factory = NULL;
|
||||
PyObject *obj, *factory = NULL, *withhold = NULL;
|
||||
|
||||
static char *kwlist[] = {"name", "cursor_factory", NULL};
|
||||
static char *kwlist[] = {"name", "cursor_factory", "withhold", NULL};
|
||||
|
||||
if (!PyArg_ParseTupleAndKeywords(args, keywds, "|sO", kwlist,
|
||||
&name, &factory)) {
|
||||
if (!PyArg_ParseTupleAndKeywords(args, keywds, "|sOO", kwlist,
|
||||
&name, &factory, &withhold)) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (withhold != NULL) {
|
||||
if (PyObject_IsTrue(withhold) && name == NULL) {
|
||||
PyErr_SetString(ProgrammingError,
|
||||
"'withhold=True can be specified only for named cursors");
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
|
||||
EXC_IF_CONN_CLOSED(self);
|
||||
|
||||
if (self->status != CONN_STATUS_READY &&
|
||||
|
@ -95,6 +103,9 @@ psyco_conn_cursor(connectionObject *self, PyObject *args, PyObject *keywds)
|
|||
Py_DECREF(obj);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (withhold != NULL && PyObject_IsTrue(withhold))
|
||||
((cursorObject*)obj)->withhold = 1;
|
||||
|
||||
Dprintf("psyco_conn_cursor: new cursor at %p: refcnt = "
|
||||
FORMAT_CODE_PY_SSIZE_T,
|
||||
|
@ -525,7 +536,7 @@ psyco_conn_set_session(connectionObject *self, PyObject *args, PyObject *kwargs)
|
|||
|
||||
|
||||
#define psyco_conn_autocommit_doc \
|
||||
"set or return the autocommit status."
|
||||
"Set or return the autocommit status."
|
||||
|
||||
static PyObject *
|
||||
psyco_conn_autocommit_get(connectionObject *self)
|
||||
|
@ -565,7 +576,12 @@ psyco_conn_autocommit_set(connectionObject *self, PyObject *pyvalue)
|
|||
static PyObject *
|
||||
psyco_conn_isolation_level_get(connectionObject *self)
|
||||
{
|
||||
int rv = conn_get_isolation_level(self);
|
||||
int rv;
|
||||
|
||||
EXC_IF_CONN_CLOSED(self);
|
||||
EXC_IF_TPC_PREPARED(self, set_isolation_level);
|
||||
|
||||
rv = conn_get_isolation_level(self);
|
||||
if (-1 == rv) { return NULL; }
|
||||
return PyInt_FromLong((long)rv);
|
||||
}
|
||||
|
|
|
@ -42,6 +42,7 @@ struct cursorObject {
|
|||
|
||||
int closed:1; /* 1 if the cursor is closed */
|
||||
int notuples:1; /* 1 if the command was not a SELECT query */
|
||||
int withhold:1; /* 1 if the cursor is named and uses WITH HOLD */
|
||||
|
||||
long int rowcount; /* number of rows affected by last execute */
|
||||
long int columns; /* number of columns fetched from the db */
|
||||
|
@ -99,7 +100,7 @@ if ((self)->notuples && (self)->name == NULL) { \
|
|||
return NULL; }
|
||||
|
||||
#define EXC_IF_NO_MARK(self) \
|
||||
if ((self)->mark != (self)->conn->mark) { \
|
||||
if ((self)->mark != (self)->conn->mark && (self)->withhold == 0) { \
|
||||
PyErr_SetString(ProgrammingError, "named cursor isn't valid anymore"); \
|
||||
return NULL; }
|
||||
|
||||
|
|
|
@ -123,23 +123,29 @@ _mogrify(PyObject *var, PyObject *fmt, cursorObject *curs, PyObject **new)
|
|||
for (d = c + 1; *d && *d != ')' && *d != '%'; d++);
|
||||
|
||||
if (*d == ')') {
|
||||
key = Text_FromUTF8AndSize(c+1, (Py_ssize_t) (d-c-1));
|
||||
value = PyObject_GetItem(var, key);
|
||||
/* key has refcnt 1, value the original value + 1 */
|
||||
if (!(key = Text_FromUTF8AndSize(c+1, (Py_ssize_t)(d-c-1)))) {
|
||||
Py_XDECREF(n);
|
||||
return -1;
|
||||
}
|
||||
|
||||
/* if value is NULL we did not find the key (or this is not a
|
||||
dictionary): let python raise a KeyError */
|
||||
if (value == NULL) {
|
||||
if (!(value = PyObject_GetItem(var, key))) {
|
||||
Py_DECREF(key); /* destroy key */
|
||||
Py_XDECREF(n); /* destroy n */
|
||||
return -1;
|
||||
}
|
||||
/* key has refcnt 1, value the original value + 1 */
|
||||
|
||||
Dprintf("_mogrify: value refcnt: "
|
||||
FORMAT_CODE_PY_SSIZE_T " (+1)", Py_REFCNT(value));
|
||||
|
||||
if (n == NULL) {
|
||||
n = PyDict_New();
|
||||
if (!(n = PyDict_New())) {
|
||||
Py_DECREF(key);
|
||||
Py_DECREF(value);
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
if (0 == PyDict_Contains(n, key)) {
|
||||
|
@ -156,24 +162,22 @@ _mogrify(PyObject *var, PyObject *fmt, cursorObject *curs, PyObject **new)
|
|||
}
|
||||
else {
|
||||
t = microprotocol_getquoted(value, curs->conn);
|
||||
|
||||
if (t != NULL) {
|
||||
PyDict_SetItem(n, key, t);
|
||||
/* both key and t refcnt +1, key is at 2 now */
|
||||
}
|
||||
else {
|
||||
/* no adapter found, raise a BIG exception */
|
||||
Py_XDECREF(value);
|
||||
Py_DECREF(key);
|
||||
Py_DECREF(value);
|
||||
Py_DECREF(n);
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
Py_XDECREF(t); /* t dies here */
|
||||
/* after the DECREF value has the original refcnt plus 1
|
||||
if it was added to the dictionary directly; good */
|
||||
Py_XDECREF(value);
|
||||
}
|
||||
Py_DECREF(value);
|
||||
Py_DECREF(key); /* key has the original refcnt now */
|
||||
Dprintf("_mogrify: after value refcnt: "
|
||||
FORMAT_CODE_PY_SSIZE_T, Py_REFCNT(value));
|
||||
|
@ -391,8 +395,10 @@ _psyco_curs_execute(cursorObject *self,
|
|||
|
||||
if (self->name != NULL) {
|
||||
self->query = Bytes_FromFormat(
|
||||
"DECLARE \"%s\" CURSOR WITHOUT HOLD FOR %s",
|
||||
self->name, Bytes_AS_STRING(fquery));
|
||||
"DECLARE \"%s\" CURSOR %s HOLD FOR %s",
|
||||
self->name,
|
||||
self->withhold ? "WITH" : "WITHOUT",
|
||||
Bytes_AS_STRING(fquery));
|
||||
Py_DECREF(fquery);
|
||||
}
|
||||
else {
|
||||
|
@ -402,8 +408,10 @@ _psyco_curs_execute(cursorObject *self,
|
|||
else {
|
||||
if (self->name != NULL) {
|
||||
self->query = Bytes_FromFormat(
|
||||
"DECLARE \"%s\" CURSOR WITHOUT HOLD FOR %s",
|
||||
self->name, Bytes_AS_STRING(operation));
|
||||
"DECLARE \"%s\" CURSOR %s HOLD FOR %s",
|
||||
self->name,
|
||||
self->withhold ? "WITH" : "WITHOUT",
|
||||
Bytes_AS_STRING(operation));
|
||||
}
|
||||
else {
|
||||
/* Transfer reference ownership of the str in operation to
|
||||
|
@ -461,11 +469,7 @@ psyco_curs_execute(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
"can't use a named cursor outside of transactions", NULL, NULL);
|
||||
return NULL;
|
||||
}
|
||||
if (self->conn->mark != self->mark) {
|
||||
psyco_set_error(ProgrammingError, self,
|
||||
"named cursor isn't valid anymore", NULL, NULL);
|
||||
return NULL;
|
||||
}
|
||||
EXC_IF_NO_MARK(self);
|
||||
}
|
||||
|
||||
EXC_IF_CURS_CLOSED(self);
|
||||
|
@ -1161,32 +1165,53 @@ psyco_curs_scroll(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
|
||||
#ifdef PSYCOPG_EXTENSIONS
|
||||
|
||||
static int _psyco_curs_copy_columns(PyObject *columns, char *columnlist)
|
||||
/* Return a newly allocated buffer containing the list of columns to be
|
||||
* copied. On error return NULL and set an exception.
|
||||
*/
|
||||
static char *_psyco_curs_copy_columns(PyObject *columns)
|
||||
{
|
||||
PyObject *col, *coliter;
|
||||
Py_ssize_t collen;
|
||||
char* colname;
|
||||
char *colname;
|
||||
char *columnlist = NULL;
|
||||
Py_ssize_t bufsize = 512;
|
||||
Py_ssize_t offset = 1;
|
||||
|
||||
columnlist[0] = '\0';
|
||||
if (columns == NULL || columns == Py_None) return 0;
|
||||
if (columns == NULL || columns == Py_None) {
|
||||
if (NULL == (columnlist = PyMem_Malloc(2))) {
|
||||
PyErr_NoMemory();
|
||||
goto error;
|
||||
}
|
||||
columnlist[0] = '\0';
|
||||
goto exit;
|
||||
}
|
||||
|
||||
coliter = PyObject_GetIter(columns);
|
||||
if (coliter == NULL) return 0;
|
||||
if (NULL == (coliter = PyObject_GetIter(columns))) {
|
||||
goto error;
|
||||
}
|
||||
|
||||
if (NULL == (columnlist = PyMem_Malloc(bufsize))) {
|
||||
PyErr_NoMemory();
|
||||
goto error;
|
||||
}
|
||||
columnlist[0] = '(';
|
||||
|
||||
while ((col = PyIter_Next(coliter)) != NULL) {
|
||||
if (!(col = psycopg_ensure_bytes(col))) {
|
||||
Py_DECREF(coliter);
|
||||
return -1;
|
||||
goto error;
|
||||
}
|
||||
Bytes_AsStringAndSize(col, &colname, &collen);
|
||||
if (offset + collen > DEFAULT_COPYBUFF - 2) {
|
||||
Py_DECREF(col);
|
||||
Py_DECREF(coliter);
|
||||
PyErr_SetString(PyExc_ValueError, "column list too long");
|
||||
return -1;
|
||||
while (offset + collen > bufsize - 2) {
|
||||
char *tmp;
|
||||
bufsize *= 2;
|
||||
if (NULL == (tmp = PyMem_Realloc(columnlist, bufsize))) {
|
||||
Py_DECREF(col);
|
||||
Py_DECREF(coliter);
|
||||
PyErr_NoMemory();
|
||||
goto error;
|
||||
}
|
||||
columnlist = tmp;
|
||||
}
|
||||
strncpy(&columnlist[offset], colname, collen);
|
||||
offset += collen;
|
||||
|
@ -1197,23 +1222,30 @@ static int _psyco_curs_copy_columns(PyObject *columns, char *columnlist)
|
|||
|
||||
/* Error raised by the coliter generator */
|
||||
if (PyErr_Occurred()) {
|
||||
return -1;
|
||||
goto error;
|
||||
}
|
||||
|
||||
if (offset == 2) {
|
||||
return 0;
|
||||
goto exit;
|
||||
}
|
||||
else {
|
||||
columnlist[offset - 1] = ')';
|
||||
columnlist[offset] = '\0';
|
||||
return 1;
|
||||
goto exit;
|
||||
}
|
||||
|
||||
error:
|
||||
PyMem_Free(columnlist);
|
||||
columnlist = NULL;
|
||||
|
||||
exit:
|
||||
return columnlist;
|
||||
}
|
||||
|
||||
/* extension: copy_from - implements COPY FROM */
|
||||
|
||||
#define psyco_curs_copy_from_doc \
|
||||
"copy_from(file, table, sep='\\t', null='\\N', size=8192, columns=None) -- Copy table from file."
|
||||
"copy_from(file, table, sep='\\t', null='\\\\N', size=8192, columns=None) -- Copy table from file."
|
||||
|
||||
static int
|
||||
_psyco_curs_has_read_check(PyObject* o, void* var)
|
||||
|
@ -1239,19 +1271,23 @@ _psyco_curs_has_read_check(PyObject* o, void* var)
|
|||
static PyObject *
|
||||
psyco_curs_copy_from(cursorObject *self, PyObject *args, PyObject *kwargs)
|
||||
{
|
||||
char *query = NULL;
|
||||
char query_buffer[DEFAULT_COPYBUFF];
|
||||
static char *kwlist[] = {
|
||||
"file", "table", "sep", "null", "size", "columns", NULL};
|
||||
|
||||
const char *sep = "\t";
|
||||
const char *null = "\\N";
|
||||
const char *command =
|
||||
"COPY %s%s FROM stdin WITH DELIMITER AS %s NULL AS %s";
|
||||
|
||||
Py_ssize_t query_size;
|
||||
const char *table_name;
|
||||
const char *sep = "\t", *null = NULL;
|
||||
Py_ssize_t bufsize = DEFAULT_COPYBUFF;
|
||||
PyObject *file, *columns = NULL, *res = NULL;
|
||||
char columnlist[DEFAULT_COPYBUFF];
|
||||
char *query = NULL;
|
||||
char *columnlist = NULL;
|
||||
char *quoted_delimiter = NULL;
|
||||
char *quoted_null = NULL;
|
||||
|
||||
static char *kwlist[] = {
|
||||
"file", "table", "sep", "null", "size", "columns", NULL};
|
||||
const char *table_name;
|
||||
Py_ssize_t bufsize = DEFAULT_COPYBUFF;
|
||||
PyObject *file, *columns = NULL, *res = NULL;
|
||||
|
||||
if (!PyArg_ParseTupleAndKeywords(args, kwargs,
|
||||
"O&s|ss" CONV_CODE_PY_SSIZE_T "O", kwlist,
|
||||
|
@ -1261,57 +1297,36 @@ psyco_curs_copy_from(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
return NULL;
|
||||
}
|
||||
|
||||
if (_psyco_curs_copy_columns(columns, columnlist) == -1)
|
||||
return NULL;
|
||||
|
||||
EXC_IF_CURS_CLOSED(self);
|
||||
EXC_IF_CURS_ASYNC(self, copy_from);
|
||||
EXC_IF_GREEN(copy_from);
|
||||
EXC_IF_TPC_PREPARED(self->conn, copy_from);
|
||||
|
||||
if (NULL == (columnlist = _psyco_curs_copy_columns(columns)))
|
||||
goto exit;
|
||||
|
||||
if (!(quoted_delimiter = psycopg_escape_string(
|
||||
(PyObject*)self->conn, sep, 0, NULL, NULL))) {
|
||||
PyErr_NoMemory();
|
||||
goto exit;
|
||||
}
|
||||
|
||||
query = query_buffer;
|
||||
if (null) {
|
||||
if (!(quoted_null = psycopg_escape_string(
|
||||
(PyObject*)self->conn, null, 0, NULL, NULL))) {
|
||||
PyErr_NoMemory();
|
||||
goto exit;
|
||||
}
|
||||
query_size = PyOS_snprintf(query, DEFAULT_COPYBUFF,
|
||||
"COPY %s%s FROM stdin WITH DELIMITER AS %s NULL AS %s",
|
||||
table_name, columnlist, quoted_delimiter, quoted_null);
|
||||
if (query_size >= DEFAULT_COPYBUFF) {
|
||||
/* Got truncated, allocate dynamically */
|
||||
if (!(query = PyMem_New(char, query_size + 1))) {
|
||||
PyErr_NoMemory();
|
||||
goto exit;
|
||||
}
|
||||
PyOS_snprintf(query, query_size + 1,
|
||||
"COPY %s%s FROM stdin WITH DELIMITER AS %s NULL AS %s",
|
||||
table_name, columnlist, quoted_delimiter, quoted_null);
|
||||
}
|
||||
if (!(quoted_null = psycopg_escape_string(
|
||||
(PyObject*)self->conn, null, 0, NULL, NULL))) {
|
||||
PyErr_NoMemory();
|
||||
goto exit;
|
||||
}
|
||||
else {
|
||||
query_size = PyOS_snprintf(query, DEFAULT_COPYBUFF,
|
||||
"COPY %s%s FROM stdin WITH DELIMITER AS %s",
|
||||
table_name, columnlist, quoted_delimiter);
|
||||
if (query_size >= DEFAULT_COPYBUFF) {
|
||||
/* Got truncated, allocate dynamically */
|
||||
if (!(query = PyMem_New(char, query_size + 1))) {
|
||||
PyErr_NoMemory();
|
||||
goto exit;
|
||||
}
|
||||
PyOS_snprintf(query, query_size + 1,
|
||||
"COPY %s%s FROM stdin WITH DELIMITER AS %s",
|
||||
table_name, columnlist, quoted_delimiter);
|
||||
}
|
||||
|
||||
query_size = strlen(command) + strlen(table_name) + strlen(columnlist)
|
||||
+ strlen(quoted_delimiter) + strlen(quoted_null) + 1;
|
||||
if (!(query = PyMem_New(char, query_size))) {
|
||||
PyErr_NoMemory();
|
||||
goto exit;
|
||||
}
|
||||
|
||||
PyOS_snprintf(query, query_size, command,
|
||||
table_name, columnlist, quoted_delimiter, quoted_null);
|
||||
|
||||
Dprintf("psyco_curs_copy_from: query = %s", query);
|
||||
|
||||
self->copysize = bufsize;
|
||||
|
@ -1323,13 +1338,13 @@ psyco_curs_copy_from(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
Py_INCREF(Py_None);
|
||||
}
|
||||
|
||||
self->copyfile = NULL;
|
||||
Py_DECREF(file);
|
||||
Py_CLEAR(self->copyfile);
|
||||
|
||||
exit:
|
||||
PyMem_Free(columnlist);
|
||||
PyMem_Free(quoted_delimiter);
|
||||
PyMem_Free(quoted_null);
|
||||
if (query != query_buffer) { PyMem_Free(query); }
|
||||
PyMem_Free(query);
|
||||
|
||||
return res;
|
||||
}
|
||||
|
@ -1337,7 +1352,7 @@ exit:
|
|||
/* extension: copy_to - implements COPY TO */
|
||||
|
||||
#define psyco_curs_copy_to_doc \
|
||||
"copy_to(file, table, sep='\\t', null='\\N', columns=None) -- Copy table to file."
|
||||
"copy_to(file, table, sep='\\t', null='\\\\N', columns=None) -- Copy table to file."
|
||||
|
||||
static int
|
||||
_psyco_curs_has_write_check(PyObject* o, void* var)
|
||||
|
@ -1356,17 +1371,21 @@ _psyco_curs_has_write_check(PyObject* o, void* var)
|
|||
static PyObject *
|
||||
psyco_curs_copy_to(cursorObject *self, PyObject *args, PyObject *kwargs)
|
||||
{
|
||||
static char *kwlist[] = {"file", "table", "sep", "null", "columns", NULL};
|
||||
|
||||
const char *sep = "\t";
|
||||
const char *null = "\\N";
|
||||
const char *command =
|
||||
"COPY %s%s TO stdout WITH DELIMITER AS %s NULL AS %s";
|
||||
|
||||
Py_ssize_t query_size;
|
||||
char *query = NULL;
|
||||
char query_buffer[DEFAULT_COPYBUFF];
|
||||
size_t query_size;
|
||||
char columnlist[DEFAULT_COPYBUFF];
|
||||
const char *table_name;
|
||||
const char *sep = "\t", *null = NULL;
|
||||
PyObject *file, *columns = NULL, *res = NULL;
|
||||
char *columnlist = NULL;
|
||||
char *quoted_delimiter = NULL;
|
||||
char *quoted_null = NULL;
|
||||
|
||||
static char *kwlist[] = {"file", "table", "sep", "null", "columns", NULL};
|
||||
const char *table_name;
|
||||
PyObject *file, *columns = NULL, *res = NULL;
|
||||
|
||||
if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&s|ssO", kwlist,
|
||||
_psyco_curs_has_write_check, &file,
|
||||
|
@ -1374,57 +1393,36 @@ psyco_curs_copy_to(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
return NULL;
|
||||
}
|
||||
|
||||
if (_psyco_curs_copy_columns(columns, columnlist) == -1)
|
||||
return NULL;
|
||||
|
||||
EXC_IF_CURS_CLOSED(self);
|
||||
EXC_IF_CURS_ASYNC(self, copy_to);
|
||||
EXC_IF_GREEN(copy_to);
|
||||
EXC_IF_TPC_PREPARED(self->conn, copy_to);
|
||||
|
||||
if (NULL == (columnlist = _psyco_curs_copy_columns(columns)))
|
||||
goto exit;
|
||||
|
||||
if (!(quoted_delimiter = psycopg_escape_string(
|
||||
(PyObject*)self->conn, sep, 0, NULL, NULL))) {
|
||||
PyErr_NoMemory();
|
||||
goto exit;
|
||||
}
|
||||
|
||||
query = query_buffer;
|
||||
if (null) {
|
||||
if (!(quoted_null = psycopg_escape_string(
|
||||
(PyObject*)self->conn, null, 0, NULL, NULL))) {
|
||||
PyErr_NoMemory();
|
||||
goto exit;
|
||||
}
|
||||
query_size = PyOS_snprintf(query, DEFAULT_COPYBUFF,
|
||||
"COPY %s%s TO stdout WITH DELIMITER AS %s"
|
||||
" NULL AS %s", table_name, columnlist, quoted_delimiter, quoted_null);
|
||||
if (query_size >= DEFAULT_COPYBUFF) {
|
||||
/* Got truncated, allocate dynamically */
|
||||
if (!(query = PyMem_New(char, query_size + 1))) {
|
||||
PyErr_NoMemory();
|
||||
goto exit;
|
||||
}
|
||||
PyOS_snprintf(query, query_size + 1,
|
||||
"COPY %s%s TO stdout WITH DELIMITER AS %s"
|
||||
" NULL AS %s", table_name, columnlist, quoted_delimiter, quoted_null);
|
||||
}
|
||||
if (!(quoted_null = psycopg_escape_string(
|
||||
(PyObject*)self->conn, null, 0, NULL, NULL))) {
|
||||
PyErr_NoMemory();
|
||||
goto exit;
|
||||
}
|
||||
else {
|
||||
query_size = PyOS_snprintf(query, DEFAULT_COPYBUFF,
|
||||
"COPY %s%s TO stdout WITH DELIMITER AS %s",
|
||||
table_name, columnlist, quoted_delimiter);
|
||||
if (query_size >= DEFAULT_COPYBUFF) {
|
||||
/* Got truncated, allocate dynamically */
|
||||
if (!(query = PyMem_New(char, query_size + 1))) {
|
||||
PyErr_NoMemory();
|
||||
goto exit;
|
||||
}
|
||||
PyOS_snprintf(query, query_size + 1,
|
||||
"COPY %s%s TO stdout WITH DELIMITER AS %s",
|
||||
table_name, columnlist, quoted_delimiter);
|
||||
}
|
||||
|
||||
query_size = strlen(command) + strlen(table_name) + strlen(columnlist)
|
||||
+ strlen(quoted_delimiter) + strlen(quoted_null) + 1;
|
||||
if (!(query = PyMem_New(char, query_size))) {
|
||||
PyErr_NoMemory();
|
||||
goto exit;
|
||||
}
|
||||
|
||||
PyOS_snprintf(query, query_size, command,
|
||||
table_name, columnlist, quoted_delimiter, quoted_null);
|
||||
|
||||
Dprintf("psyco_curs_copy_to: query = %s", query);
|
||||
|
||||
self->copysize = 0;
|
||||
|
@ -1436,13 +1434,13 @@ psyco_curs_copy_to(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
Py_INCREF(Py_None);
|
||||
}
|
||||
|
||||
Py_DECREF(file);
|
||||
self->copyfile = NULL;
|
||||
Py_CLEAR(self->copyfile);
|
||||
|
||||
exit:
|
||||
PyMem_Free(columnlist);
|
||||
PyMem_Free(quoted_delimiter);
|
||||
PyMem_Free(quoted_null);
|
||||
if (query != query_buffer) { PyMem_Free(query); }
|
||||
PyMem_Free(query);
|
||||
|
||||
return res;
|
||||
}
|
||||
|
@ -1455,7 +1453,7 @@ exit:
|
|||
|
||||
#define psyco_curs_copy_expert_doc \
|
||||
"copy_expert(sql, file, size=8192) -- Submit a user-composed COPY statement.\n" \
|
||||
"`file` must be an open, readable file for COPY FROM or an open, writeable\n" \
|
||||
"`file` must be an open, readable file for COPY FROM or an open, writable\n" \
|
||||
"file for COPY TO. The optional `size` argument, when specified for a COPY\n" \
|
||||
"FROM statement, will be passed to file's read method to control the read\n" \
|
||||
"buffer size."
|
||||
|
@ -1495,7 +1493,7 @@ psyco_curs_copy_expert(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
)
|
||||
{
|
||||
PyErr_SetString(PyExc_TypeError, "file must be a readable file-like"
|
||||
" object for COPY FROM; a writeable file-like object for COPY TO."
|
||||
" object for COPY FROM; a writable file-like object for COPY TO."
|
||||
);
|
||||
goto exit;
|
||||
}
|
||||
|
@ -1510,8 +1508,7 @@ psyco_curs_copy_expert(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
Py_INCREF(res);
|
||||
}
|
||||
|
||||
self->copyfile = NULL;
|
||||
Py_DECREF(file);
|
||||
Py_CLEAR(self->copyfile);
|
||||
|
||||
exit:
|
||||
Py_XDECREF(sql);
|
||||
|
@ -1519,7 +1516,7 @@ exit:
|
|||
return res;
|
||||
}
|
||||
|
||||
/* extension: closed - return true if cursor is closed*/
|
||||
/* extension: closed - return true if cursor is closed */
|
||||
|
||||
#define psyco_curs_closed_doc \
|
||||
"True if cursor is closed, False if cursor is open"
|
||||
|
@ -1535,6 +1532,39 @@ psyco_curs_get_closed(cursorObject *self, void *closure)
|
|||
return closed;
|
||||
}
|
||||
|
||||
/* extension: withhold - get or set "WITH HOLD" for named cursors */
|
||||
|
||||
#define psyco_curs_withhold_doc \
|
||||
"Set or return cursor use of WITH HOLD"
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_withhold_get(cursorObject *self)
|
||||
{
|
||||
PyObject *ret;
|
||||
ret = self->withhold ? Py_True : Py_False;
|
||||
Py_INCREF(ret);
|
||||
return ret;
|
||||
}
|
||||
|
||||
static int
|
||||
psyco_curs_withhold_set(cursorObject *self, PyObject *pyvalue)
|
||||
{
|
||||
int value;
|
||||
|
||||
if (self->name == NULL) {
|
||||
PyErr_SetString(ProgrammingError,
|
||||
"trying to set .withhold on unnamed cursor");
|
||||
return -1;
|
||||
}
|
||||
|
||||
if ((value = PyObject_IsTrue(pyvalue)) == -1)
|
||||
return -1;
|
||||
|
||||
self->withhold = value;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
#endif
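A short sketch of how the new withhold attribute is meant to be used from Python, based on the behaviour exercised in tests/test_cursor.py (dsn and cursor name are made up):

    import psycopg2

    conn = psycopg2.connect("dbname=test")            # hypothetical dsn
    curs = conn.cursor("hold_me", withhold=True)      # or: curs.withhold = True before execute()
    curs.execute("SELECT generate_series(1, 3)")
    conn.commit()                                     # a WITH HOLD cursor survives the commit
    print(curs.fetchall())                            # [(1,), (2,), (3,)]
    curs.close()
    conn.close()

Setting withhold on an unnamed cursor raises ProgrammingError, as enforced by the setter above.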
|
||||
|
||||
|
||||
|
@ -1657,6 +1687,10 @@ static struct PyGetSetDef cursorObject_getsets[] = {
|
|||
#ifdef PSYCOPG_EXTENSIONS
|
||||
{ "closed", (getter)psyco_curs_get_closed, NULL,
|
||||
psyco_curs_closed_doc, NULL },
|
||||
{ "withhold",
|
||||
(getter)psyco_curs_withhold_get,
|
||||
(setter)psyco_curs_withhold_set,
|
||||
psyco_curs_withhold_doc, NULL },
|
||||
#endif
|
||||
{NULL}
|
||||
};
|
||||
|
@ -1686,6 +1720,7 @@ cursor_setup(cursorObject *self, connectionObject *conn, const char *name)
|
|||
self->conn = conn;
|
||||
|
||||
self->closed = 0;
|
||||
self->withhold = 0;
|
||||
self->mark = conn->mark;
|
||||
self->pgres = NULL;
|
||||
self->notuples = 1;
|
||||
|
|
|
@ -344,11 +344,13 @@ pq_execute_command_locked(connectionObject *conn, const char *query,
|
|||
}
|
||||
if (*pgres == NULL) {
|
||||
Dprintf("pq_execute_command_locked: PQexec returned NULL");
|
||||
PyEval_RestoreThread(*tstate);
|
||||
if (!PyErr_Occurred()) {
|
||||
const char *msg;
|
||||
msg = PQerrorMessage(conn->pgconn);
|
||||
if (msg && *msg) { *error = strdup(msg); }
|
||||
}
|
||||
*tstate = PyEval_SaveThread();
|
||||
goto cleanup;
|
||||
}
|
||||
|
||||
|
@ -635,11 +637,13 @@ pq_get_guc_locked(
|
|||
|
||||
if (*pgres == NULL) {
|
||||
Dprintf("pq_get_guc_locked: PQexec returned NULL");
|
||||
PyEval_RestoreThread(*tstate);
|
||||
if (!PyErr_Occurred()) {
|
||||
const char *msg;
|
||||
msg = PQerrorMessage(conn->pgconn);
|
||||
if (msg && *msg) { *error = strdup(msg); }
|
||||
}
|
||||
*tstate = PyEval_SaveThread();
|
||||
goto cleanup;
|
||||
}
|
||||
if (PQresultStatus(*pgres) != PGRES_TUPLES_OK) {
|
||||
|
|
|
@ -75,177 +75,43 @@ HIDDEN PyObject *psyco_DescriptionType = NULL;
|
|||
|
||||
/** connect module-level function **/
|
||||
#define psyco_connect_doc \
|
||||
"connect(dsn, ...) -- Create a new database connection.\n\n" \
|
||||
"This function supports two different but equivalent sets of arguments.\n" \
|
||||
"A single data source name or ``dsn`` string can be used to specify the\n" \
|
||||
"connection parameters, as follows::\n\n" \
|
||||
" psycopg2.connect(\"dbname=xxx user=xxx ...\")\n\n" \
|
||||
"If ``dsn`` is not provided it is possible to pass the parameters as\n" \
|
||||
"keyword arguments; e.g.::\n\n" \
|
||||
" psycopg2.connect(database='xxx', user='xxx', ...)\n\n" \
|
||||
"The full list of available parameters is:\n\n" \
|
||||
"- ``dbname`` -- database name (only in 'dsn')\n" \
|
||||
"- ``database`` -- database name (only as keyword argument)\n" \
|
||||
"- ``host`` -- host address (defaults to UNIX socket if not provided)\n" \
|
||||
"- ``port`` -- port number (defaults to 5432 if not provided)\n" \
|
||||
"- ``user`` -- user name used to authenticate\n" \
|
||||
"- ``password`` -- password used to authenticate\n" \
|
||||
"- ``sslmode`` -- SSL mode (see PostgreSQL documentation)\n\n" \
|
||||
"- ``async`` -- if the connection should provide asynchronous API\n\n" \
|
||||
"If the ``connection_factory`` keyword argument is not provided this\n" \
|
||||
"function always return an instance of the `connection` class.\n" \
|
||||
"Else the given sub-class of `extensions.connection` will be used to\n" \
|
||||
"instantiate the connection object.\n\n" \
|
||||
":return: New database connection\n" \
|
||||
":rtype: `extensions.connection`"
|
||||
|
||||
static size_t
|
||||
_psyco_connect_fill_dsn(char *dsn, const char *kw, const char *v, size_t i)
|
||||
{
|
||||
strcpy(&dsn[i], kw); i += strlen(kw);
|
||||
strcpy(&dsn[i], v); i += strlen(v);
|
||||
return i;
|
||||
}
|
||||
"_connect(dsn, [connection_factory], [async]) -- New database connection.\n\n"
|
||||
|
||||
static PyObject *
|
||||
psyco_connect(PyObject *self, PyObject *args, PyObject *keywds)
|
||||
{
|
||||
PyObject *conn = NULL, *factory = NULL;
|
||||
PyObject *pyport = NULL;
|
||||
|
||||
size_t idsn=-1;
|
||||
int iport=-1;
|
||||
const char *dsn_static = NULL;
|
||||
char *dsn_dynamic=NULL;
|
||||
const char *database=NULL, *user=NULL, *password=NULL;
|
||||
const char *host=NULL, *sslmode=NULL;
|
||||
char port[16];
|
||||
PyObject *conn = NULL;
|
||||
PyObject *factory = NULL;
|
||||
const char *dsn = NULL;
|
||||
int async = 0;
|
||||
|
||||
static char *kwlist[] = {"dsn", "database", "host", "port",
|
||||
"user", "password", "sslmode",
|
||||
"connection_factory", "async", NULL};
|
||||
static char *kwlist[] = {"dsn", "connection_factory", "async", NULL};
|
||||
|
||||
if (!PyArg_ParseTupleAndKeywords(args, keywds, "|sssOsssOi", kwlist,
|
||||
&dsn_static, &database, &host, &pyport,
|
||||
&user, &password, &sslmode,
|
||||
&factory, &async)) {
|
||||
if (!PyArg_ParseTupleAndKeywords(args, keywds, "s|Oi", kwlist,
|
||||
&dsn, &factory, &async)) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
#if PY_MAJOR_VERSION < 3
|
||||
if (pyport && PyString_Check(pyport)) {
|
||||
PyObject *pyint = PyInt_FromString(PyString_AsString(pyport), NULL, 10);
|
||||
if (!pyint) goto fail;
|
||||
/* Must use PyInt_AsLong rather than PyInt_AS_LONG, because
|
||||
* PyInt_FromString can return a PyLongObject: */
|
||||
iport = PyInt_AsLong(pyint);
|
||||
Py_DECREF(pyint);
|
||||
if (iport == -1 && PyErr_Occurred())
|
||||
goto fail;
|
||||
}
|
||||
else if (pyport && PyInt_Check(pyport)) {
|
||||
iport = PyInt_AsLong(pyport);
|
||||
if (iport == -1 && PyErr_Occurred())
|
||||
goto fail;
|
||||
}
|
||||
#else
|
||||
if (pyport && PyUnicode_Check(pyport)) {
|
||||
PyObject *pyint = PyObject_CallFunction((PyObject*)&PyLong_Type,
|
||||
"Oi", pyport, 10);
|
||||
if (!pyint) goto fail;
|
||||
iport = PyLong_AsLong(pyint);
|
||||
Py_DECREF(pyint);
|
||||
if (iport == -1 && PyErr_Occurred())
|
||||
goto fail;
|
||||
}
|
||||
else if (pyport && PyLong_Check(pyport)) {
|
||||
iport = PyLong_AsLong(pyport);
|
||||
if (iport == -1 && PyErr_Occurred())
|
||||
goto fail;
|
||||
}
|
||||
#endif
|
||||
else if (pyport != NULL) {
|
||||
PyErr_SetString(PyExc_TypeError, "port must be a string or int");
|
||||
goto fail;
|
||||
Dprintf("psyco_connect: dsn = '%s', async = %d", dsn, async);
|
||||
|
||||
/* allocate connection, fill with errors and return it */
|
||||
if (factory == NULL || factory == Py_None) {
|
||||
factory = (PyObject *)&connectionType;
|
||||
}
|
||||
|
||||
if (iport > 0)
|
||||
PyOS_snprintf(port, 16, "%d", iport);
|
||||
|
||||
if (dsn_static == NULL) {
|
||||
size_t l = 46; /* len(" dbname= user= password= host= port= sslmode=\0") */
|
||||
|
||||
if (database) l += strlen(database);
|
||||
if (host) l += strlen(host);
|
||||
if (iport > 0) l += strlen(port);
|
||||
if (user) l += strlen(user);
|
||||
if (password) l += strlen(password);
|
||||
if (sslmode) l += strlen(sslmode);
|
||||
|
||||
dsn_dynamic = malloc(l*sizeof(char));
|
||||
if (dsn_dynamic == NULL) {
|
||||
PyErr_SetString(InterfaceError, "dynamic dsn allocation failed");
|
||||
goto fail;
|
||||
}
|
||||
|
||||
idsn = 0;
|
||||
if (database)
|
||||
idsn = _psyco_connect_fill_dsn(dsn_dynamic, " dbname=", database, idsn);
|
||||
if (host)
|
||||
idsn = _psyco_connect_fill_dsn(dsn_dynamic, " host=", host, idsn);
|
||||
if (iport > 0)
|
||||
idsn = _psyco_connect_fill_dsn(dsn_dynamic, " port=", port, idsn);
|
||||
if (user)
|
||||
idsn = _psyco_connect_fill_dsn(dsn_dynamic, " user=", user, idsn);
|
||||
if (password)
|
||||
idsn = _psyco_connect_fill_dsn(dsn_dynamic, " password=", password, idsn);
|
||||
if (sslmode)
|
||||
idsn = _psyco_connect_fill_dsn(dsn_dynamic, " sslmode=", sslmode, idsn);
|
||||
|
||||
if (idsn > 0) {
|
||||
dsn_dynamic[idsn] = '\0';
|
||||
memmove(dsn_dynamic, &dsn_dynamic[1], idsn);
|
||||
}
|
||||
else {
|
||||
PyErr_SetString(InterfaceError, "missing dsn and no parameters");
|
||||
goto fail;
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
const char *dsn = (dsn_static != NULL ? dsn_static : dsn_dynamic);
|
||||
Dprintf("psyco_connect: dsn = '%s', async = %d", dsn, async);
|
||||
|
||||
/* allocate connection, fill with errors and return it */
|
||||
if (factory == NULL) factory = (PyObject *)&connectionType;
|
||||
/* Here we are breaking the connection.__init__ interface defined
|
||||
* by psycopg2. So, if not requiring an async conn, avoid passing
|
||||
* the async parameter. */
|
||||
/* TODO: would it be possible to avoid an additional parameter
|
||||
* to the conn constructor? A subclass? (but it would require mixins
|
||||
* to further subclass) Another dsn parameter (but is not really
|
||||
* a connection parameter that can be configured) */
|
||||
if (!async) {
|
||||
/* Here we are breaking the connection.__init__ interface defined
|
||||
* by psycopg2. So, if not requiring an async conn, avoid passing
|
||||
* the async parameter. */
|
||||
/* TODO: would it be possible to avoid an additional parameter
|
||||
* to the conn constructor? A subclass? (but it would require mixins
|
||||
* to further subclass) Another dsn parameter (but is not really
|
||||
* a connection parameter that can be configured) */
|
||||
if (!async) {
|
||||
conn = PyObject_CallFunction(factory, "s", dsn);
|
||||
} else {
|
||||
} else {
|
||||
conn = PyObject_CallFunction(factory, "si", dsn, async);
|
||||
}
|
||||
}
|
||||
|
||||
goto cleanup;
|
||||
fail:
|
||||
assert (PyErr_Occurred());
|
||||
if (conn != NULL) {
|
||||
Py_DECREF(conn);
|
||||
conn = NULL;
|
||||
}
|
||||
/* Fall through to cleanup: */
|
||||
cleanup:
|
||||
if (dsn_dynamic != NULL) {
|
||||
free(dsn_dynamic);
|
||||
}
|
||||
|
||||
return conn;
|
||||
}
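With the keyword handling removed from the C module, building the dsn string has to happen in the Python-level connect() wrapper before calling _connect(). A rough sketch, not the actual psycopg2 code, of how that could work; the escaping rules mirror the expectations in tests/test_module.py:

    import re

    def _param_escape(s, re_ws=re.compile(r'\s')):
        """Quote one libpq connection parameter value (illustrative helper)."""
        if s == '':
            return "''"                               # empty values must be quoted
        s = s.replace('\\', '\\\\').replace("'", "\\'")
        if re_ws.search(s):
            s = "'%s'" % s                            # quote values containing whitespace
        return s

    def build_dsn(**kwargs):
        """Build a "k=v k=v" dsn string; the 'database' keyword maps to 'dbname'."""
        if 'database' in kwargs:
            kwargs['dbname'] = kwargs.pop('database')
        return ' '.join('%s=%s' % (k, _param_escape(str(v)))
                        for k, v in kwargs.items()
                        if v is not None)

    # e.g. build_dsn(database='hello world', port=5432)
    # -> "dbname='hello world' port=5432" (keyword order may vary)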
|
||||
|
||||
|
@ -257,7 +123,7 @@ psyco_connect(PyObject *self, PyObject *args, PyObject *keywds)
|
|||
" * `conn_or_curs`: A connection, cursor or None"
|
||||
|
||||
#define typecast_from_python_doc \
|
||||
"new_type(oids, name, adapter) -> new type object\n\n" \
|
||||
"new_type(oids, name, castobj) -> new type object\n\n" \
|
||||
"Create a new binding object. The object can be used with the\n" \
|
||||
"`register_type()` function to bind PostgreSQL objects to python objects.\n\n" \
|
||||
":Parameters:\n" \
|
||||
|
@ -268,6 +134,15 @@ psyco_connect(PyObject *self, PyObject *args, PyObject *keywds)
|
|||
" the string representation returned by PostgreSQL (`!None` if ``NULL``)\n" \
|
||||
" and ``cur`` is the cursor from which data are read."
|
||||
|
||||
#define typecast_array_from_python_doc \
|
||||
"new_array_type(oids, name, baseobj) -> new type object\n\n" \
|
||||
"Create a new binding object to parse an array.\n\n" \
|
||||
"The object can be used with `register_type()`.\n\n" \
|
||||
":Parameters:\n" \
|
||||
" * `oids`: Tuple of ``oid`` of the PostgreSQL types to convert.\n" \
|
||||
" * `name`: Name for the new type\n" \
|
||||
" * `baseobj`: Adapter to perform type conversion of a single array item."
|
||||
|
||||
static void
|
||||
_psyco_register_type_set(PyObject **dict, PyObject *type)
|
||||
{
|
||||
|
@ -313,7 +188,6 @@ static void
|
|||
psyco_adapters_init(PyObject *mod)
|
||||
{
|
||||
PyObject *call;
|
||||
PyTypeObject *type;
|
||||
|
||||
microprotocols_add(&PyFloat_Type, NULL, (PyObject*)&pfloatType);
|
||||
#if PY_MAJOR_VERSION < 3
|
||||
|
@ -344,9 +218,6 @@ psyco_adapters_init(PyObject *mod)
|
|||
|
||||
microprotocols_add(&PyList_Type, NULL, (PyObject*)&listType);
|
||||
|
||||
if ((type = (PyTypeObject*)psyco_GetDecimalType()) != NULL)
|
||||
microprotocols_add(type, NULL, (PyObject*)&pdecimalType);
|
||||
|
||||
/* the module has already been initialized, so we can obtain the callable
|
||||
objects directly from its dictionary :) */
|
||||
call = PyMapping_GetItemString(mod, "DateFromPy");
|
||||
|
@ -749,7 +620,7 @@ exit:
|
|||
/** method table and module initialization **/
|
||||
|
||||
static PyMethodDef psycopgMethods[] = {
|
||||
{"connect", (PyCFunction)psyco_connect,
|
||||
{"_connect", (PyCFunction)psyco_connect,
|
||||
METH_VARARGS|METH_KEYWORDS, psyco_connect_doc},
|
||||
{"adapt", (PyCFunction)psyco_microprotocols_adapt,
|
||||
METH_VARARGS, psyco_microprotocols_adapt_doc},
|
||||
|
@ -758,6 +629,8 @@ static PyMethodDef psycopgMethods[] = {
|
|||
METH_VARARGS, psyco_register_type_doc},
|
||||
{"new_type", (PyCFunction)typecast_from_python,
|
||||
METH_VARARGS|METH_KEYWORDS, typecast_from_python_doc},
|
||||
{"new_array_type", (PyCFunction)typecast_array_from_python,
|
||||
METH_VARARGS|METH_KEYWORDS, typecast_array_from_python_doc},
|
||||
|
||||
{"AsIs", (PyCFunction)psyco_AsIs,
|
||||
METH_VARARGS, psyco_AsIs_doc},
|
||||
|
|
|
@ -603,6 +603,29 @@ typecast_from_python(PyObject *self, PyObject *args, PyObject *keywds)
|
|||
return typecast_new(name, v, cast, base);
|
||||
}
|
||||
|
||||
PyObject *
|
||||
typecast_array_from_python(PyObject *self, PyObject *args, PyObject *keywds)
|
||||
{
|
||||
PyObject *values, *name = NULL, *base = NULL;
|
||||
typecastObject *obj = NULL;
|
||||
|
||||
static char *kwlist[] = {"values", "name", "baseobj", NULL};
|
||||
|
||||
if (!PyArg_ParseTupleAndKeywords(args, keywds, "O!O!O!", kwlist,
|
||||
&PyTuple_Type, &values,
|
||||
&Text_Type, &name,
|
||||
&typecastType, &base)) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if ((obj = (typecastObject *)typecast_new(name, values, NULL, base))) {
|
||||
obj->ccast = typecast_GENERIC_ARRAY_cast;
|
||||
obj->pcast = NULL;
|
||||
}
|
||||
|
||||
return (PyObject *)obj;
|
||||
}
|
||||
|
||||
PyObject *
|
||||
typecast_from_c(typecastObject_initlist *type, PyObject *dict)
|
||||
{
|
||||
|
|
|
@ -77,9 +77,11 @@ HIDDEN int typecast_add(PyObject *obj, PyObject *dict, int binary);
|
|||
/* the C callable typecastObject creator function */
|
||||
HIDDEN PyObject *typecast_from_c(typecastObject_initlist *type, PyObject *d);
|
||||
|
||||
/* the python callable typecast creator function */
|
||||
/* the python callable typecast creator functions */
|
||||
HIDDEN PyObject *typecast_from_python(
|
||||
PyObject *self, PyObject *args, PyObject *keywds);
|
||||
HIDDEN PyObject *typecast_array_from_python(
|
||||
PyObject *self, PyObject *args, PyObject *keywds);
|
||||
|
||||
/* the function used to dispatch typecasting calls */
|
||||
HIDDEN PyObject *typecast_cast(
|
||||
|
|
|
@ -133,7 +133,7 @@ typecast_array_tokenize(const char *str, Py_ssize_t strlength,
|
|||
}
|
||||
|
||||
if (res == ASCAN_QUOTED) {
|
||||
Py_ssize_t j;
|
||||
const char *j, *jj;
|
||||
char *buffer = PyMem_Malloc(l+1);
|
||||
if (buffer == NULL) {
|
||||
PyErr_NoMemory();
|
||||
|
@ -142,10 +142,9 @@ typecast_array_tokenize(const char *str, Py_ssize_t strlength,
|
|||
|
||||
*token = buffer;
|
||||
|
||||
for (j = *pos; j < *pos+l; j++) {
|
||||
if (str[j] != '\\'
|
||||
|| (j > *pos && str[j-1] == '\\'))
|
||||
*(buffer++) = str[j];
|
||||
for (j = str + *pos, jj = j + l; j < jj; ++j) {
|
||||
if (*j == '\\') { ++j; }
|
||||
*(buffer++) = *j;
|
||||
}
|
||||
|
||||
*buffer = '\0';
|
||||
|
|
|
@ -117,13 +117,13 @@
|
|||
<None Include="ZPsycopgDA\__init__.py" />
|
||||
<None Include="ZPsycopgDA\pool.py" />
|
||||
<None Include="tests\dbapi20.py" />
|
||||
<None Include="tests\extras_dictcursor.py" />
|
||||
<None Include="tests\test_extras_dictcursor.py" />
|
||||
<None Include="tests\__init__.py" />
|
||||
<None Include="tests\test_psycopg2_dbapi20.py" />
|
||||
<None Include="tests\test_transaction.py" />
|
||||
<None Include="tests\types_basic.py" />
|
||||
<None Include="tests\bugX000.py" />
|
||||
<None Include="tests\types_extras.py" />
|
||||
<None Include="tests\test_types_basic.py" />
|
||||
<None Include="tests\test_bugX000.py" />
|
||||
<None Include="tests\test_types_extras.py" />
|
||||
<None Include="tests\test_connection.py" />
|
||||
<None Include="tests\test_dates.py" />
|
||||
<None Include="tests\test_lobject.py" />
|
||||
|
@ -166,7 +166,7 @@
|
|||
<None Include="doc\src\faq.rst" />
|
||||
<None Include="tests\test_async.py" />
|
||||
<None Include="tests\test_copy.py" />
|
||||
<None Include="tests\bug_gc.py" />
|
||||
<None Include="tests\test_bug_gc.py" />
|
||||
<None Include="tests\test_green.py" />
|
||||
<None Include="tests\test_notify.py" />
|
||||
<None Include="scripts\make_errorcodes.py" />
|
||||
|
@ -202,6 +202,9 @@
|
|||
<None Include="tests\dbapi20_tpc.py" />
|
||||
<None Include="tests\test_cursor.py" />
|
||||
<None Include="NEWS" />
|
||||
<None Include="tests\test_cancel.py" />
|
||||
<None Include="tests\testconfig.py" />
|
||||
<None Include="tests\testutils.py" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<Compile Include="psycopg\adapter_asis.c" />
|
||||
|
@ -234,6 +237,7 @@
|
|||
<Compile Include="psycopg\green.c" />
|
||||
<Compile Include="psycopg\notify_type.c" />
|
||||
<Compile Include="psycopg\xid_type.c" />
|
||||
<Compile Include="psycopg\bytes_format.c" />
|
||||
</ItemGroup>
|
||||
<ProjectExtensions>
|
||||
<MonoDevelop>
|
||||
|
|
|
@ -35,6 +35,10 @@ Global
|
|||
$4.inheritsSet = VisualStudio
|
||||
$4.inheritsScope = text/plain
|
||||
$4.scope = text/x-readme
|
||||
$0.TextStylePolicy = $5
|
||||
$5.inheritsSet = VisualStudio
|
||||
$5.inheritsScope = text/plain
|
||||
$5.scope = text/plain
|
||||
name = psycopg2
|
||||
EndGlobalSection
|
||||
EndGlobal
|
||||
|
|
|
@ -30,7 +30,7 @@ def main():
|
|||
filename = sys.argv[1]
|
||||
|
||||
file_start = read_base_file(filename)
|
||||
classes, errors = fetch_errors(['8.1', '8.2', '8.3', '8.4', '9.0'])
|
||||
classes, errors = fetch_errors(['8.1', '8.2', '8.3', '8.4', '9.0', '9.1'])
|
||||
|
||||
f = open(filename, "w")
|
||||
for line in file_start:
|
||||
|
@ -70,11 +70,21 @@ def parse_errors(url):
|
|||
else: # it's an error
|
||||
errcode = tr.tt.string.encode("ascii")
|
||||
assert len(errcode) == 5
|
||||
errlabel = tr('td')[1].string.replace(" ", "_").encode("ascii")
|
||||
|
||||
# double check the columns are equal
|
||||
cond_name = tr('td')[2].string.upper().encode("ascii")
|
||||
assert errlabel == cond_name, tr
|
||||
tds = tr('td')
|
||||
if len(tds) == 3:
|
||||
errlabel = tds[1].string.replace(" ", "_").encode("ascii")
|
||||
|
||||
# double check the columns are equal
|
||||
cond_name = tds[2].string.upper().encode("ascii")
|
||||
assert errlabel == cond_name, tr
|
||||
|
||||
elif len(tds) == 2:
|
||||
# found in PG 9.1 beta3 docs
|
||||
errlabel = tds[1].tt.string.upper().encode("ascii")
|
||||
|
||||
else:
|
||||
assert False, tr
|
||||
|
||||
errors[class_][errcode] = errlabel
|
||||
|
||||
|
|
setup.py
|
@ -73,7 +73,7 @@ except ImportError:
|
|||
# Take a look at http://www.python.org/dev/peps/pep-0386/
|
||||
# for a consistent versioning pattern.
|
||||
|
||||
PSYCOPG_VERSION = '2.4.2'
|
||||
PSYCOPG_VERSION = '2.4.3'
|
||||
|
||||
version_flags = ['dt', 'dec']
|
||||
|
||||
|
@ -354,6 +354,8 @@ class psycopg_build_ext(build_ext):
|
|||
extension.extra_compile_args.append(
|
||||
'-Wdeclaration-after-statement')
|
||||
|
||||
finalize_linux3 = finalize_linux2
|
||||
|
||||
def finalize_options(self):
|
||||
"""Complete the build system configuation."""
|
||||
build_ext.finalize_options(self)
|
||||
|
@ -362,7 +364,7 @@ class psycopg_build_ext(build_ext):
|
|||
|
||||
self.include_dirs.append(".")
|
||||
if self.static_libpq:
|
||||
if not hasattr(self, 'link_objects'):
|
||||
if not getattr(self, 'link_objects', None):
|
||||
self.link_objects = []
|
||||
self.link_objects.append(
|
||||
os.path.join(pg_config_helper.query("libdir"), "libpq.a"))
|
||||
|
|
|
@ -22,28 +22,28 @@
|
|||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
|
||||
# License for more details.
|
||||
|
||||
import os
|
||||
import sys
|
||||
from testconfig import dsn
|
||||
from testutils import unittest
|
||||
|
||||
import bug_gc
|
||||
import bugX000
|
||||
import extras_dictcursor
|
||||
import test_async
|
||||
import test_bugX000
|
||||
import test_bug_gc
|
||||
import test_cancel
|
||||
import test_connection
|
||||
import test_copy
|
||||
import test_cursor
|
||||
import test_dates
|
||||
import test_extras_dictcursor
|
||||
import test_green
|
||||
import test_lobject
|
||||
import test_module
|
||||
import test_notify
|
||||
import test_psycopg2_dbapi20
|
||||
import test_quote
|
||||
import test_connection
|
||||
import test_cursor
|
||||
import test_transaction
|
||||
import types_basic
|
||||
import types_extras
|
||||
import test_lobject
|
||||
import test_copy
|
||||
import test_notify
|
||||
import test_async
|
||||
import test_green
|
||||
import test_cancel
|
||||
import test_types_basic
|
||||
import test_types_extras
|
||||
|
||||
def test_suite():
|
||||
# If connection to test db fails, bail out early.
|
||||
|
@ -58,23 +58,24 @@ def test_suite():
|
|||
cnn.close()
|
||||
|
||||
suite = unittest.TestSuite()
|
||||
suite.addTest(bug_gc.test_suite())
|
||||
suite.addTest(bugX000.test_suite())
|
||||
suite.addTest(extras_dictcursor.test_suite())
|
||||
suite.addTest(test_async.test_suite())
|
||||
suite.addTest(test_bugX000.test_suite())
|
||||
suite.addTest(test_bug_gc.test_suite())
|
||||
suite.addTest(test_cancel.test_suite())
|
||||
suite.addTest(test_connection.test_suite())
|
||||
suite.addTest(test_copy.test_suite())
|
||||
suite.addTest(test_cursor.test_suite())
|
||||
suite.addTest(test_dates.test_suite())
|
||||
suite.addTest(test_extras_dictcursor.test_suite())
|
||||
suite.addTest(test_green.test_suite())
|
||||
suite.addTest(test_lobject.test_suite())
|
||||
suite.addTest(test_module.test_suite())
|
||||
suite.addTest(test_notify.test_suite())
|
||||
suite.addTest(test_psycopg2_dbapi20.test_suite())
|
||||
suite.addTest(test_quote.test_suite())
|
||||
suite.addTest(test_connection.test_suite())
|
||||
suite.addTest(test_cursor.test_suite())
|
||||
suite.addTest(test_transaction.test_suite())
|
||||
suite.addTest(types_basic.test_suite())
|
||||
suite.addTest(types_extras.test_suite())
|
||||
suite.addTest(test_lobject.test_suite())
|
||||
suite.addTest(test_copy.test_suite())
|
||||
suite.addTest(test_notify.test_suite())
|
||||
suite.addTest(test_async.test_suite())
|
||||
suite.addTest(test_green.test_suite())
|
||||
suite.addTest(test_cancel.test_suite())
|
||||
suite.addTest(test_types_basic.test_suite())
|
||||
suite.addTest(test_types_extras.test_suite())
|
||||
return suite
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
|
@ -344,6 +344,16 @@ class IsolationLevelsTestCase(unittest.TestCase):
|
|||
cur2.execute("select count(*) from isolevel;")
|
||||
self.assertEqual(2, cur2.fetchone()[0])
|
||||
|
||||
def test_isolation_level_closed(self):
|
||||
cnn = self.connect()
|
||||
cnn.close()
|
||||
self.assertRaises(psycopg2.InterfaceError, getattr,
|
||||
cnn, 'isolation_level')
|
||||
self.assertRaises(psycopg2.InterfaceError,
|
||||
cnn.set_isolation_level, 0)
|
||||
self.assertRaises(psycopg2.InterfaceError,
|
||||
cnn.set_isolation_level, 1)
|
||||
|
||||
|
||||
class ConnectionTwoPhaseTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
|
@ -725,6 +735,12 @@ class TransactionControlTests(unittest.TestCase):
|
|||
if not self.conn.closed:
|
||||
self.conn.close()
|
||||
|
||||
def test_closed(self):
|
||||
self.conn.close()
|
||||
self.assertRaises(psycopg2.InterfaceError,
|
||||
self.conn.set_session,
|
||||
psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)
|
||||
|
||||
def test_not_in_transaction(self):
|
||||
cur = self.conn.cursor()
|
||||
cur.execute("select 1")
|
||||
|
@ -868,6 +884,19 @@ class AutocommitTests(unittest.TestCase):
|
|||
if not self.conn.closed:
|
||||
self.conn.close()
|
||||
|
||||
def test_closed(self):
|
||||
self.conn.close()
|
||||
self.assertRaises(psycopg2.InterfaceError,
|
||||
setattr, self.conn, 'autocommit', True)
|
||||
|
||||
# The getter doesn't have a guard. We may change this in future
|
||||
# to make it consistent with other methods; meanwhile let's just check
|
||||
# it doesn't explode.
|
||||
try:
|
||||
self.assert_(self.conn.autocommit in (True, False))
|
||||
except psycopg2.InterfaceError:
|
||||
pass
|
||||
|
||||
def test_default_no_autocommit(self):
|
||||
self.assert_(not self.conn.autocommit)
|
||||
self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY)
|
||||
|
|
|
@ -253,6 +253,25 @@ class CopyTests(unittest.TestCase):
|
|||
self.assertRaises(TypeError,
|
||||
curs.copy_expert, 'COPY tcopy (data) FROM STDIN', f)
|
||||
|
||||
def test_copy_no_column_limit(self):
|
||||
cols = [ "c%050d" % i for i in range(200) ]
|
||||
|
||||
curs = self.conn.cursor()
|
||||
curs.execute('CREATE TEMPORARY TABLE manycols (%s)' % ',\n'.join(
|
||||
[ "%s int" % c for c in cols]))
|
||||
curs.execute("INSERT INTO manycols DEFAULT VALUES")
|
||||
|
||||
f = StringIO()
|
||||
curs.copy_to(f, "manycols", columns = cols)
|
||||
f.seek(0)
|
||||
self.assertEqual(f.read().split(), ['\\N'] * len(cols))
|
||||
|
||||
f.seek(0)
|
||||
curs.copy_from(f, "manycols", columns = cols)
|
||||
curs.execute("select count(*) from manycols;")
|
||||
self.assertEqual(curs.fetchone()[0], 2)
|
||||
|
||||
|
||||
decorate_all_tests(CopyTests, skip_if_green)
|
||||
|
||||
|
||||
|
|
|
@ -97,6 +97,18 @@ class CursorTests(unittest.TestCase):
|
|||
self.assertEqual(b('SELECT 10.3;'),
|
||||
cur.mogrify("SELECT %s;", (Decimal("10.3"),)))
|
||||
|
||||
def test_mogrify_leak_on_multiple_reference(self):
|
||||
# issue #81: reference leak when a parameter value is referenced
|
||||
# more than once from a dict.
|
||||
cur = self.conn.cursor()
|
||||
i = lambda x: x
|
||||
foo = i('foo') * 10
|
||||
import sys
|
||||
nref1 = sys.getrefcount(foo)
|
||||
cur.mogrify("select %(foo)s, %(foo)s, %(foo)s", {'foo': foo})
|
||||
nref2 = sys.getrefcount(foo)
|
||||
self.assertEqual(nref1, nref2)
|
||||
|
||||
def test_bad_placeholder(self):
|
||||
cur = self.conn.cursor()
|
||||
self.assertRaises(psycopg2.ProgrammingError,
|
||||
|
@ -158,6 +170,39 @@ class CursorTests(unittest.TestCase):
|
|||
curs.execute("select data from invname order by data")
|
||||
self.assertEqual(curs.fetchall(), [(10,), (20,), (30,)])
|
||||
|
||||
def test_withhold(self):
|
||||
self.assertRaises(psycopg2.ProgrammingError, self.conn.cursor,
|
||||
withhold=True)
|
||||
|
||||
curs = self.conn.cursor()
|
||||
try:
|
||||
curs.execute("drop table withhold")
|
||||
except psycopg2.ProgrammingError:
|
||||
self.conn.rollback()
|
||||
curs.execute("create table withhold (data int)")
|
||||
for i in (10, 20, 30):
|
||||
curs.execute("insert into withhold values (%s)", (i,))
|
||||
curs.close()
|
||||
|
||||
curs = self.conn.cursor("W")
|
||||
self.assertEqual(curs.withhold, False);
|
||||
curs.withhold = True
|
||||
self.assertEqual(curs.withhold, True);
|
||||
curs.execute("select data from withhold order by data")
|
||||
self.conn.commit()
|
||||
self.assertEqual(curs.fetchall(), [(10,), (20,), (30,)])
|
||||
curs.close()
|
||||
|
||||
curs = self.conn.cursor("W", withhold=True)
|
||||
self.assertEqual(curs.withhold, True);
|
||||
curs.execute("select data from withhold order by data")
|
||||
self.conn.commit()
|
||||
self.assertEqual(curs.fetchall(), [(10,), (20,), (30,)])
|
||||
|
||||
curs = self.conn.cursor()
|
||||
curs.execute("drop table withhold")
|
||||
self.conn.commit()
|
||||
|
||||
@skip_before_postgres(8, 2)
|
||||
def test_iter_named_cursor_efficient(self):
|
||||
curs = self.conn.cursor('tmp')
|
||||
|
@ -225,6 +270,20 @@ class CursorTests(unittest.TestCase):
|
|||
self.assertEqual(c.precision, None)
|
||||
self.assertEqual(c.scale, None)
|
||||
|
||||
@skip_before_postgres(8, 0)
|
||||
def test_named_cursor_stealing(self):
|
||||
# you can use a named cursor to iterate on a refcursor created
|
||||
# somewhere else
|
||||
cur1 = self.conn.cursor()
|
||||
cur1.execute("DECLARE test CURSOR WITHOUT HOLD "
|
||||
" FOR SELECT generate_series(1,7)")
|
||||
|
||||
cur2 = self.conn.cursor('test')
|
||||
# can call fetch without execute
|
||||
self.assertEqual((1,), cur2.fetchone())
|
||||
self.assertEqual([(2,), (3,), (4,)], cur2.fetchmany(3))
|
||||
self.assertEqual([(5,), (6,), (7,)], cur2.fetchall())
|
||||
|
||||
|
||||
def test_suite():
|
||||
return unittest.TestLoader().loadTestsFromName(__name__)
|
||||
|
|
|
@ -36,7 +36,7 @@ class CommonDatetimeTestsMixin:
|
|||
|
||||
def test_parse_date(self):
|
||||
value = self.DATE('2007-01-01', self.curs)
|
||||
self.assertNotEqual(value, None)
|
||||
self.assert_(value is not None)
|
||||
self.assertEqual(value.year, 2007)
|
||||
self.assertEqual(value.month, 1)
|
||||
self.assertEqual(value.day, 1)
|
||||
|
@ -51,7 +51,7 @@ class CommonDatetimeTestsMixin:
|
|||
|
||||
def test_parse_time(self):
|
||||
value = self.TIME('13:30:29', self.curs)
|
||||
self.assertNotEqual(value, None)
|
||||
self.assert_(value is not None)
|
||||
self.assertEqual(value.hour, 13)
|
||||
self.assertEqual(value.minute, 30)
|
||||
self.assertEqual(value.second, 29)
|
||||
|
@ -66,7 +66,7 @@ class CommonDatetimeTestsMixin:
|
|||
|
||||
def test_parse_datetime(self):
|
||||
value = self.DATETIME('2007-01-01 13:30:29', self.curs)
|
||||
self.assertNotEqual(value, None)
|
||||
self.assert_(value is not None)
|
||||
self.assertEqual(value.year, 2007)
|
||||
self.assertEqual(value.month, 1)
|
||||
self.assertEqual(value.day, 1)
|
||||
|
@ -99,10 +99,10 @@ class DatetimeTests(unittest.TestCase, CommonDatetimeTestsMixin):
|
|||
def setUp(self):
|
||||
self.conn = psycopg2.connect(dsn)
|
||||
self.curs = self.conn.cursor()
|
||||
self.DATE = psycopg2._psycopg.PYDATE
|
||||
self.TIME = psycopg2._psycopg.PYTIME
|
||||
self.DATETIME = psycopg2._psycopg.PYDATETIME
|
||||
self.INTERVAL = psycopg2._psycopg.PYINTERVAL
|
||||
self.DATE = psycopg2.extensions.PYDATE
|
||||
self.TIME = psycopg2.extensions.PYTIME
|
||||
self.DATETIME = psycopg2.extensions.PYDATETIME
|
||||
self.INTERVAL = psycopg2.extensions.PYINTERVAL
|
||||
|
||||
def tearDown(self):
|
||||
self.conn.close()
|
||||
|
@ -307,7 +307,7 @@ class DatetimeTests(unittest.TestCase, CommonDatetimeTestsMixin):
|
|||
|
||||
|
||||
# Only run the datetime tests if psycopg was compiled with support.
|
||||
if not hasattr(psycopg2._psycopg, 'PYDATETIME'):
|
||||
if not hasattr(psycopg2.extensions, 'PYDATETIME'):
|
||||
del DatetimeTests
|
||||
|
||||
|
||||
|
@ -336,7 +336,7 @@ class mxDateTimeTests(unittest.TestCase, CommonDatetimeTestsMixin):
|
|||
|
||||
def test_parse_bc_date(self):
|
||||
value = self.DATE('00042-01-01 BC', self.curs)
|
||||
self.assertNotEqual(value, None)
|
||||
self.assert_(value is not None)
|
||||
# mx.DateTime numbers BC dates from 0 rather than 1.
|
||||
self.assertEqual(value.year, -41)
|
||||
self.assertEqual(value.month, 1)
|
||||
|
@ -344,7 +344,7 @@ class mxDateTimeTests(unittest.TestCase, CommonDatetimeTestsMixin):
|
|||
|
||||
def test_parse_bc_datetime(self):
|
||||
value = self.DATETIME('00042-01-01 13:30:29 BC', self.curs)
|
||||
self.assertNotEqual(value, None)
|
||||
self.assert_(value is not None)
|
||||
# mx.DateTime numbers BC dates from 0 rather than 1.
|
||||
self.assertEqual(value.year, -41)
|
||||
self.assertEqual(value.month, 1)
|
||||
|
@ -395,7 +395,7 @@ class mxDateTimeTests(unittest.TestCase, CommonDatetimeTestsMixin):
|
|||
|
||||
def test_parse_interval(self):
|
||||
value = self.INTERVAL('42 days 05:50:05', self.curs)
|
||||
self.assertNotEqual(value, None)
|
||||
self.assert_(value is not None)
|
||||
self.assertEqual(value.day, 42)
|
||||
self.assertEqual(value.hour, 5)
|
||||
self.assertEqual(value.minute, 50)
|
||||
|
@ -484,7 +484,10 @@ class mxDateTimeTests(unittest.TestCase, CommonDatetimeTestsMixin):
|
|||
|
||||
|
||||
# Only run the mx.DateTime tests if psycopg was compiled with support.
|
||||
if not hasattr(psycopg2._psycopg, 'MXDATETIME'):
|
||||
try:
|
||||
if not hasattr(psycopg2._psycopg, 'MXDATETIME'):
|
||||
del mxDateTimeTests
|
||||
except AttributeError:
|
||||
del mxDateTimeTests
|
||||
|
||||
|
||||
|
|
|
@ -80,6 +80,21 @@ class ExtrasDictCursorTests(unittest.TestCase):
|
|||
return row
|
||||
self._testWithNamedCursor(getter)
|
||||
|
||||
def testDictCursorRealWithNamedCursorFetchOne(self):
|
||||
self._testWithNamedCursorReal(lambda curs: curs.fetchone())
|
||||
|
||||
def testDictCursorRealWithNamedCursorFetchMany(self):
|
||||
self._testWithNamedCursorReal(lambda curs: curs.fetchmany(100)[0])
|
||||
|
||||
def testDictCursorRealWithNamedCursorFetchAll(self):
|
||||
self._testWithNamedCursorReal(lambda curs: curs.fetchall()[0])
|
||||
|
||||
def testDictCursorRealWithNamedCursorIter(self):
|
||||
def getter(curs):
|
||||
for row in curs:
|
||||
return row
|
||||
self._testWithNamedCursorReal(getter)
|
||||
|
||||
def _testWithPlainCursor(self, getter):
|
||||
curs = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
|
||||
curs.execute("SELECT * FROM ExtrasDictCursorTests")
|
||||
|
@ -171,6 +186,17 @@ class NamedTupleCursorTest(unittest.TestCase):
|
|||
self.assertEqual(res[2].i, 3)
|
||||
self.assertEqual(res[2].s, 'baz')
|
||||
|
||||
@skip_if_no_namedtuple
|
||||
def test_executemany(self):
|
||||
curs = self.conn.cursor()
|
||||
curs.executemany("delete from nttest where i = %s",
|
||||
[(1,), (2,)])
|
||||
curs.execute("select * from nttest order by 1")
|
||||
res = curs.fetchall()
|
||||
self.assertEqual(1, len(res))
|
||||
self.assertEqual(res[0].i, 3)
|
||||
self.assertEqual(res[0].s, 'baz')
|
||||
|
||||
@skip_if_no_namedtuple
|
||||
def test_iter(self):
|
||||
curs = self.conn.cursor()
|
tests/test_module.py (new executable file)
|
@ -0,0 +1,134 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
# test_module.py - unit test for the module interface
|
||||
#
|
||||
# Copyright (C) 2011 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
# by the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# In addition, as a special exception, the copyright holders give
|
||||
# permission to link this program with the OpenSSL library (or with
|
||||
# modified versions of OpenSSL that use the same license as OpenSSL),
|
||||
# and distribute linked combinations including the two.
|
||||
#
|
||||
# You must obey the GNU Lesser General Public License in all respects for
|
||||
# all of the code used other than OpenSSL.
|
||||
#
|
||||
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
|
||||
# License for more details.
|
||||
|
||||
from testutils import unittest
|
||||
|
||||
import psycopg2
|
||||
|
||||
class ConnectTestCase(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.args = None
|
||||
def conect_stub(dsn, connection_factory=None, async=False):
|
||||
self.args = (dsn, connection_factory, async)
|
||||
|
||||
self._connect_orig = psycopg2._connect
|
||||
psycopg2._connect = conect_stub
|
||||
|
||||
def tearDown(self):
|
||||
psycopg2._connect = self._connect_orig
|
||||
|
||||
def test_there_has_to_be_something(self):
|
||||
self.assertRaises(psycopg2.InterfaceError, psycopg2.connect)
|
||||
self.assertRaises(psycopg2.InterfaceError, psycopg2.connect,
|
||||
connection_factory=lambda dsn, async=False: None)
|
||||
self.assertRaises(psycopg2.InterfaceError, psycopg2.connect,
|
||||
async=True)
|
||||
|
||||
def test_no_keywords(self):
|
||||
psycopg2.connect('')
|
||||
self.assertEqual(self.args[0], '')
|
||||
self.assertEqual(self.args[1], None)
|
||||
self.assertEqual(self.args[2], False)
|
||||
|
||||
def test_dsn(self):
|
||||
psycopg2.connect('dbname=blah x=y')
|
||||
self.assertEqual(self.args[0], 'dbname=blah x=y')
|
||||
self.assertEqual(self.args[1], None)
|
||||
self.assertEqual(self.args[2], False)
|
||||
|
||||
def test_supported_keywords(self):
|
||||
psycopg2.connect(database='foo')
|
||||
self.assertEqual(self.args[0], 'dbname=foo')
|
||||
psycopg2.connect(user='postgres')
|
||||
self.assertEqual(self.args[0], 'user=postgres')
|
||||
psycopg2.connect(password='secret')
|
||||
self.assertEqual(self.args[0], 'password=secret')
|
||||
psycopg2.connect(port=5432)
|
||||
self.assertEqual(self.args[0], 'port=5432')
|
||||
psycopg2.connect(sslmode='require')
|
||||
self.assertEqual(self.args[0], 'sslmode=require')
|
||||
|
||||
psycopg2.connect(database='foo',
|
||||
user='postgres', password='secret', port=5432)
|
||||
self.assert_('dbname=foo' in self.args[0])
|
||||
self.assert_('user=postgres' in self.args[0])
|
||||
self.assert_('password=secret' in self.args[0])
|
||||
self.assert_('port=5432' in self.args[0])
|
||||
self.assertEqual(len(self.args[0].split()), 4)
|
||||
|
||||
def test_generic_keywords(self):
|
||||
psycopg2.connect(foo='bar')
|
||||
self.assertEqual(self.args[0], 'foo=bar')
|
||||
|
||||
def test_factory(self):
|
||||
def f(dsn, async=False):
|
||||
pass
|
||||
|
||||
psycopg2.connect(database='foo', bar='baz', connection_factory=f)
|
||||
self.assertEqual(self.args[0], 'dbname=foo bar=baz')
|
||||
self.assertEqual(self.args[1], f)
|
||||
self.assertEqual(self.args[2], False)
|
||||
|
||||
psycopg2.connect("dbname=foo bar=baz", connection_factory=f)
|
||||
self.assertEqual(self.args[0], 'dbname=foo bar=baz')
|
||||
self.assertEqual(self.args[1], f)
|
||||
self.assertEqual(self.args[2], False)
|
||||
|
||||
def test_async(self):
|
||||
psycopg2.connect(database='foo', bar='baz', async=1)
|
||||
self.assertEqual(self.args[0], 'dbname=foo bar=baz')
|
||||
self.assertEqual(self.args[1], None)
|
||||
self.assert_(self.args[2])
|
||||
|
||||
psycopg2.connect("dbname=foo bar=baz", async=True)
|
||||
self.assertEqual(self.args[0], 'dbname=foo bar=baz')
|
||||
self.assertEqual(self.args[1], None)
|
||||
self.assert_(self.args[2])
|
||||
|
||||
def test_empty_param(self):
|
||||
psycopg2.connect(database='sony', password='')
|
||||
self.assertEqual(self.args[0], "dbname=sony password=''")
|
||||
|
||||
def test_escape(self):
|
||||
psycopg2.connect(database='hello world')
|
||||
self.assertEqual(self.args[0], "dbname='hello world'")
|
||||
|
||||
psycopg2.connect(database=r'back\slash')
|
||||
self.assertEqual(self.args[0], r"dbname=back\\slash")
|
||||
|
||||
psycopg2.connect(database="quo'te")
|
||||
self.assertEqual(self.args[0], r"dbname=quo\'te")
|
||||
|
||||
psycopg2.connect(database="with\ttab")
|
||||
self.assertEqual(self.args[0], "dbname='with\ttab'")
|
||||
|
||||
psycopg2.connect(database=r"\every thing'")
|
||||
self.assertEqual(self.args[0], r"dbname='\\every thing\''")
|
||||
|
||||
|
||||
def test_suite():
|
||||
return unittest.TestLoader().loadTestsFromName(__name__)
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
|
@ -189,6 +189,17 @@ class TypesBasicTests(unittest.TestCase):
|
|||
s = self.execute("SELECT '{}'::text AS foo")
|
||||
self.failUnlessEqual(s, "{}")
|
||||
|
||||
def testArrayEscape(self):
|
||||
ss = ['', '\\', '"', '\\\\', '\\"']
|
||||
for s in ss:
|
||||
r = self.execute("SELECT %s AS foo", (s,))
|
||||
self.failUnlessEqual(s, r)
|
||||
r = self.execute("SELECT %s AS foo", ([s],))
|
||||
self.failUnlessEqual([s], r)
|
||||
|
||||
r = self.execute("SELECT %s AS foo", (ss,))
|
||||
self.failUnlessEqual(ss, r)
|
||||
|
||||
@testutils.skip_from_python(3)
|
||||
def testTypeRoundtripBuffer(self):
|
||||
o1 = buffer("".join(map(chr, range(256))))
|
||||
|
@ -285,6 +296,26 @@ class TypesBasicTests(unittest.TestCase):
|
|||
l1 = self.execute("select -%s;", (-1L,))
|
||||
self.assertEqual(1, l1)
|
||||
|
||||
def testGenericArray(self):
|
||||
a = self.execute("select '{1,2,3}'::int4[]")
|
||||
self.assertEqual(a, [1,2,3])
|
||||
a = self.execute("select array['a','b','''']::text[]")
|
||||
self.assertEqual(a, ['a','b',"'"])
|
||||
|
||||
@testutils.skip_before_postgres(8, 2)
|
||||
def testGenericArrayNull(self):
|
||||
def caster(s, cur):
|
||||
if s is None: return "nada"
|
||||
return int(s) * 2
|
||||
base = psycopg2.extensions.new_type((23,), "INT4", caster)
|
||||
array = psycopg2.extensions.new_array_type((1007,), "INT4ARRAY", base)
|
||||
|
||||
psycopg2.extensions.register_type(array, self.conn)
|
||||
a = self.execute("select '{1,2,3}'::int4[]")
|
||||
self.assertEqual(a, [2,4,6])
|
||||
a = self.execute("select '{1,2,NULL}'::int4[]")
|
||||
self.assertEqual(a, [2,4,'nada'])
|
||||
|
||||
|
||||
class AdaptSubclassTest(unittest.TestCase):
|
||||
def test_adapt_subtype(self):
|
|
@ -22,7 +22,7 @@ import re
|
|||
import sys
|
||||
from datetime import date
|
||||
|
||||
from testutils import unittest, skip_if_no_uuid
|
||||
from testutils import unittest, skip_if_no_uuid, skip_before_postgres
|
||||
|
||||
import psycopg2
|
||||
import psycopg2.extras
|
||||
|
@ -357,6 +357,63 @@ class HstoreTestCase(unittest.TestCase):
|
|||
finally:
|
||||
psycopg2.extensions.string_types.pop(oid)
|
||||
|
||||
@skip_if_no_hstore
|
||||
@skip_before_postgres(8, 3)
|
||||
def test_roundtrip_array(self):
|
||||
from psycopg2.extras import register_hstore
|
||||
register_hstore(self.conn)
|
||||
|
||||
ds = []
|
||||
ds.append({})
|
||||
ds.append({'a': 'b', 'c': None})
|
||||
|
||||
ab = map(chr, range(32, 128))
|
||||
ds.append(dict(zip(ab, ab)))
|
||||
ds.append({''.join(ab): ''.join(ab)})
|
||||
|
||||
self.conn.set_client_encoding('latin1')
|
||||
if sys.version_info[0] < 3:
|
||||
ab = map(chr, range(32, 127) + range(160, 255))
|
||||
else:
|
||||
ab = bytes(range(32, 127) + range(160, 255)).decode('latin1')
|
||||
|
||||
ds.append({''.join(ab): ''.join(ab)})
|
||||
ds.append(dict(zip(ab, ab)))
|
||||
|
||||
cur = self.conn.cursor()
|
||||
cur.execute("select %s", (ds,))
|
||||
ds1 = cur.fetchone()[0]
|
||||
self.assertEqual(ds, ds1)
|
||||
|
||||
@skip_if_no_hstore
|
||||
@skip_before_postgres(8, 3)
|
||||
def test_array_cast(self):
|
||||
from psycopg2.extras import register_hstore
|
||||
register_hstore(self.conn)
|
||||
cur = self.conn.cursor()
|
||||
cur.execute("select array['a=>1'::hstore, 'b=>2'::hstore];")
|
||||
a = cur.fetchone()[0]
|
||||
self.assertEqual(a, [{'a': '1'}, {'b': '2'}])
|
||||
|
||||
@skip_if_no_hstore
|
||||
def test_array_cast_oid(self):
|
||||
cur = self.conn.cursor()
|
||||
cur.execute("select 'hstore'::regtype::oid, 'hstore[]'::regtype::oid")
|
||||
oid, aoid = cur.fetchone()
|
||||
|
||||
from psycopg2.extras import register_hstore
|
||||
register_hstore(None, globally=True, oid=oid, array_oid=aoid)
|
||||
try:
|
||||
cur.execute("select null::hstore, ''::hstore, 'a => b'::hstore, '{a=>b}'::hstore[]")
|
||||
t = cur.fetchone()
|
||||
self.assert_(t[0] is None)
|
||||
self.assertEqual(t[1], {})
|
||||
self.assertEqual(t[2], {'a': 'b'})
|
||||
self.assertEqual(t[3], [{'a': 'b'}])
|
||||
|
||||
finally:
|
||||
psycopg2.extensions.string_types.pop(oid)
|
||||
psycopg2.extensions.string_types.pop(aoid)
|
||||
|
||||
def skip_if_no_composite(f):
|
||||
def skip_if_no_composite_(self):
|
||||
|
@ -539,7 +596,12 @@ class AdaptTypeTestCase(unittest.TestCase):
|
|||
curs2.execute("select (1,2)::type_ii")
|
||||
self.assertEqual(curs2.fetchone()[0], (1,2))
|
||||
finally:
|
||||
del psycopg2.extensions.string_types[t.oid]
|
||||
# drop the registered typecasters to help the refcounting
|
||||
# script to return precise values.
|
||||
del psycopg2.extensions.string_types[t.typecaster.values[0]]
|
||||
if t.array_typecaster:
|
||||
del psycopg2.extensions.string_types[
|
||||
t.array_typecaster.values[0]]
|
||||
|
||||
finally:
|
||||
conn1.close()
|
||||
|
@ -563,6 +625,29 @@ class AdaptTypeTestCase(unittest.TestCase):
|
|||
curs.execute("select (4,8)::typens.typens_ii")
|
||||
self.assertEqual(curs.fetchone()[0], (4,8))
|
||||
|
||||
@skip_if_no_composite
|
||||
@skip_before_postgres(8, 4)
|
||||
def test_composite_array(self):
|
||||
oid = self._create_type("type_isd",
|
||||
[('anint', 'integer'), ('astring', 'text'), ('adate', 'date')])
|
||||
|
||||
t = psycopg2.extras.register_composite("type_isd", self.conn)
|
||||
|
||||
curs = self.conn.cursor()
|
||||
r1 = (10, 'hello', date(2011,1,2))
|
||||
r2 = (20, 'world', date(2011,1,3))
|
||||
curs.execute("select %s::type_isd[];", ([r1, r2],))
|
||||
v = curs.fetchone()[0]
|
||||
self.assertEqual(len(v), 2)
|
||||
self.assert_(isinstance(v[0], t.type))
|
||||
self.assertEqual(v[0][0], 10)
|
||||
self.assertEqual(v[0][1], "hello")
|
||||
self.assertEqual(v[0][2], date(2011,1,2))
|
||||
self.assert_(isinstance(v[1], t.type))
|
||||
self.assertEqual(v[1][0], 20)
|
||||
self.assertEqual(v[1][1], "world")
|
||||
self.assertEqual(v[1][2], date(2011,1,3))
|
||||
|
||||
def _create_type(self, name, fields):
|
||||
curs = self.conn.cursor()
|
||||
try:
|