2005-10-18 09:42:00 +04:00
|
|
|
"""Miscellaneous goodies for psycopg2
|
2005-01-20 08:49:40 +03:00
|
|
|
|
2005-10-18 09:42:00 +04:00
|
|
|
This module is a generic place used to hold little helper functions
|
2005-01-20 08:49:40 +03:00
|
|
|
and classes untill a better place in the distribution is found.
|
|
|
|
"""
|
2004-10-19 07:17:12 +04:00
|
|
|
# psycopg/extras.py - miscellaneous extra goodies for psycopg
|
|
|
|
#
|
2010-02-13 01:34:53 +03:00
|
|
|
# Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
2004-10-19 07:17:12 +04:00
|
|
|
#
|
2010-02-13 01:34:53 +03:00
|
|
|
# psycopg2 is free software: you can redistribute it and/or modify it
|
|
|
|
# under the terms of the GNU Lesser General Public License as published
|
|
|
|
# by the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
2004-10-19 07:17:12 +04:00
|
|
|
#
|
2010-02-13 01:34:53 +03:00
|
|
|
# In addition, as a special exception, the copyright holders give
|
|
|
|
# permission to link this program with the OpenSSL library (or with
|
|
|
|
# modified versions of OpenSSL that use the same license as OpenSSL),
|
|
|
|
# and distribute linked combinations including the two.
|
|
|
|
#
|
|
|
|
# You must obey the GNU Lesser General Public License in all respects for
|
|
|
|
# all of the code used other than OpenSSL.
|
|
|
|
#
|
|
|
|
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
|
|
|
|
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
|
|
|
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
|
|
|
|
# License for more details.
|
2004-10-19 07:17:12 +04:00
|
|
|
|
2006-01-20 07:07:23 +03:00
|
|
|
import os
|
2010-12-12 19:45:21 +03:00
|
|
|
import sys
|
2006-01-20 07:07:23 +03:00
|
|
|
import time
|
2010-07-13 16:26:52 +04:00
|
|
|
import warnings
|
2009-03-02 12:59:52 +03:00
|
|
|
import re as regex
|
2006-01-20 07:07:23 +03:00
|
|
|
|
|
|
|
try:
|
|
|
|
import logging
|
|
|
|
except:
|
|
|
|
logging = None
|
2008-09-19 23:25:16 +04:00
|
|
|
|
2010-09-27 00:59:54 +04:00
|
|
|
import psycopg2
|
2008-09-19 23:25:16 +04:00
|
|
|
from psycopg2 import extensions as _ext
|
2005-06-24 11:11:44 +04:00
|
|
|
from psycopg2.extensions import cursor as _cursor
|
2005-07-17 08:08:08 +04:00
|
|
|
from psycopg2.extensions import connection as _connection
|
2005-06-24 11:11:44 +04:00
|
|
|
from psycopg2.extensions import adapt as _A
|
2010-12-29 05:45:24 +03:00
|
|
|
from psycopg2.extensions import b
|
2005-02-27 18:03:53 +03:00
|
|
|
|
2006-01-20 07:07:23 +03:00
|
|
|
|
2007-01-16 16:45:41 +03:00
|
|
|
class DictCursorBase(_cursor):
|
|
|
|
"""Base class for all dict-like cursors."""
|
|
|
|
|
|
|
|
def __init__(self, *args, **kwargs):
|
2010-12-12 19:45:21 +03:00
|
|
|
if 'row_factory' in kwargs:
|
2007-01-16 16:45:41 +03:00
|
|
|
row_factory = kwargs['row_factory']
|
|
|
|
del kwargs['row_factory']
|
|
|
|
else:
|
|
|
|
raise NotImplementedError(
|
|
|
|
"DictCursorBase can't be instantiated without a row factory.")
|
|
|
|
_cursor.__init__(self, *args, **kwargs)
|
|
|
|
self._query_executed = 0
|
2009-05-09 16:44:59 +04:00
|
|
|
self._prefetch = 0
|
2007-01-16 16:45:41 +03:00
|
|
|
self.row_factory = row_factory
|
|
|
|
|
|
|
|
def fetchone(self):
|
2009-05-09 16:44:59 +04:00
|
|
|
if self._prefetch:
|
|
|
|
res = _cursor.fetchone(self)
|
2007-01-16 16:45:41 +03:00
|
|
|
if self._query_executed:
|
|
|
|
self._build_index()
|
2009-05-09 16:44:59 +04:00
|
|
|
if not self._prefetch:
|
|
|
|
res = _cursor.fetchone(self)
|
2009-03-02 12:59:52 +03:00
|
|
|
return res
|
2007-01-16 16:45:41 +03:00
|
|
|
|
|
|
|
def fetchmany(self, size=None):
|
2009-05-09 16:44:59 +04:00
|
|
|
if self._prefetch:
|
|
|
|
res = _cursor.fetchmany(self, size)
|
2007-01-16 16:45:41 +03:00
|
|
|
if self._query_executed:
|
|
|
|
self._build_index()
|
2009-05-09 16:44:59 +04:00
|
|
|
if not self._prefetch:
|
|
|
|
res = _cursor.fetchmany(self, size)
|
2009-03-02 12:59:52 +03:00
|
|
|
return res
|
2007-01-16 16:45:41 +03:00
|
|
|
|
|
|
|
def fetchall(self):
|
2009-05-09 16:44:59 +04:00
|
|
|
if self._prefetch:
|
|
|
|
res = _cursor.fetchall(self)
|
2007-01-16 16:45:41 +03:00
|
|
|
if self._query_executed:
|
|
|
|
self._build_index()
|
2009-05-09 16:44:59 +04:00
|
|
|
if not self._prefetch:
|
|
|
|
res = _cursor.fetchall(self)
|
2009-03-02 12:59:52 +03:00
|
|
|
return res
|
2009-05-09 16:44:59 +04:00
|
|
|
|
2011-12-12 02:04:42 +04:00
|
|
|
def __iter__(self):
|
2009-05-09 16:44:59 +04:00
|
|
|
if self._prefetch:
|
2011-12-12 02:04:42 +04:00
|
|
|
res = _cursor.fetchmany(self, self.itersize)
|
|
|
|
if not res:
|
|
|
|
return
|
2009-03-02 12:59:52 +03:00
|
|
|
if self._query_executed:
|
|
|
|
self._build_index()
|
2009-05-09 16:44:59 +04:00
|
|
|
if not self._prefetch:
|
2011-12-12 02:04:42 +04:00
|
|
|
res = _cursor.fetchmany(self, self.itersize)
|
|
|
|
|
|
|
|
for r in res:
|
|
|
|
yield r
|
|
|
|
|
|
|
|
# the above was the first itersize record. the following are
|
|
|
|
# in a repeated loop.
|
|
|
|
while 1:
|
|
|
|
res = _cursor.fetchmany(self, self.itersize)
|
|
|
|
if not res:
|
|
|
|
return
|
|
|
|
for r in res:
|
|
|
|
yield r
|
|
|
|
|
2007-01-16 16:45:41 +03:00
|
|
|
|
2005-07-17 08:08:08 +04:00
|
|
|
class DictConnection(_connection):
|
2010-02-26 03:17:52 +03:00
|
|
|
"""A connection that uses `DictCursor` automatically."""
|
2007-09-01 13:32:42 +04:00
|
|
|
def cursor(self, name=None):
|
|
|
|
if name is None:
|
|
|
|
return _connection.cursor(self, cursor_factory=DictCursor)
|
|
|
|
else:
|
2009-03-02 12:59:52 +03:00
|
|
|
return _connection.cursor(self, name, cursor_factory=DictCursor)
|
2004-10-19 07:17:12 +04:00
|
|
|
|
2007-01-16 16:45:41 +03:00
|
|
|
class DictCursor(DictCursorBase):
|
2004-10-19 07:17:12 +04:00
|
|
|
"""A cursor that keeps a list of column name -> index mappings."""
|
|
|
|
|
2007-01-16 16:45:41 +03:00
|
|
|
def __init__(self, *args, **kwargs):
|
|
|
|
kwargs['row_factory'] = DictRow
|
|
|
|
DictCursorBase.__init__(self, *args, **kwargs)
|
2009-05-09 16:44:59 +04:00
|
|
|
self._prefetch = 1
|
2007-01-16 16:45:41 +03:00
|
|
|
|
2010-03-31 03:43:07 +04:00
|
|
|
def execute(self, query, vars=None):
|
2004-10-19 07:17:12 +04:00
|
|
|
self.index = {}
|
2007-01-16 16:45:41 +03:00
|
|
|
self._query_executed = 1
|
2010-03-31 03:43:07 +04:00
|
|
|
return _cursor.execute(self, query, vars)
|
2009-05-09 16:44:59 +04:00
|
|
|
|
2005-12-06 08:55:58 +03:00
|
|
|
def callproc(self, procname, vars=None):
|
|
|
|
self.index = {}
|
2007-01-16 16:45:41 +03:00
|
|
|
self._query_executed = 1
|
2009-05-09 16:44:59 +04:00
|
|
|
return _cursor.callproc(self, procname, vars)
|
2004-10-19 07:17:12 +04:00
|
|
|
|
|
|
|
def _build_index(self):
|
2007-01-16 16:45:41 +03:00
|
|
|
if self._query_executed == 1 and self.description:
|
2004-10-19 07:17:12 +04:00
|
|
|
for i in range(len(self.description)):
|
|
|
|
self.index[self.description[i][0]] = i
|
2009-05-09 16:44:59 +04:00
|
|
|
self._query_executed = 0
|
2005-03-01 19:41:02 +03:00
|
|
|
|
2004-10-19 07:17:12 +04:00
|
|
|
class DictRow(list):
|
2010-02-15 20:41:30 +03:00
|
|
|
"""A row object that allow by-colmun-name access to data."""
|
2004-10-19 07:17:12 +04:00
|
|
|
|
2009-05-09 12:19:15 +04:00
|
|
|
__slots__ = ('_index',)
|
|
|
|
|
2004-10-19 07:17:12 +04:00
|
|
|
def __init__(self, cursor):
|
2005-05-10 06:29:24 +04:00
|
|
|
self._index = cursor.index
|
2004-10-19 07:17:12 +04:00
|
|
|
self[:] = [None] * len(cursor.description)
|
|
|
|
|
|
|
|
def __getitem__(self, x):
|
2010-12-23 05:28:19 +03:00
|
|
|
if not isinstance(x, (int, slice)):
|
2005-05-10 06:29:24 +04:00
|
|
|
x = self._index[x]
|
2004-10-19 07:17:12 +04:00
|
|
|
return list.__getitem__(self, x)
|
2005-02-27 18:03:53 +03:00
|
|
|
|
2010-12-01 16:17:12 +03:00
|
|
|
def __setitem__(self, x, v):
|
2010-12-23 05:28:19 +03:00
|
|
|
if not isinstance(x, (int, slice)):
|
2010-12-01 16:17:12 +03:00
|
|
|
x = self._index[x]
|
|
|
|
list.__setitem__(self, x, v)
|
|
|
|
|
2005-04-11 11:20:46 +04:00
|
|
|
def items(self):
|
2010-12-12 19:45:21 +03:00
|
|
|
return list(self.iteritems())
|
2009-05-09 16:44:59 +04:00
|
|
|
|
2005-04-25 08:58:38 +04:00
|
|
|
def keys(self):
|
2006-01-06 05:58:24 +03:00
|
|
|
return self._index.keys()
|
2005-04-25 08:58:38 +04:00
|
|
|
|
2005-05-19 08:48:26 +04:00
|
|
|
def values(self):
|
|
|
|
return tuple(self[:])
|
|
|
|
|
|
|
|
def has_key(self, x):
|
2010-12-12 19:45:21 +03:00
|
|
|
return x in self._index
|
2006-01-06 05:58:24 +03:00
|
|
|
|
2005-05-19 08:48:26 +04:00
|
|
|
def get(self, x, default=None):
|
|
|
|
try:
|
|
|
|
return self[x]
|
|
|
|
except:
|
|
|
|
return default
|
|
|
|
|
2006-10-06 09:22:54 +04:00
|
|
|
def iteritems(self):
|
2010-12-12 19:45:21 +03:00
|
|
|
for n, v in self._index.iteritems():
|
2006-10-06 09:22:54 +04:00
|
|
|
yield n, list.__getitem__(self, v)
|
2005-02-27 18:03:53 +03:00
|
|
|
|
2009-04-19 18:25:12 +04:00
|
|
|
def iterkeys(self):
|
|
|
|
return self._index.iterkeys()
|
|
|
|
|
|
|
|
def itervalues(self):
|
|
|
|
return list.__iter__(self)
|
|
|
|
|
|
|
|
def copy(self):
|
2010-12-12 19:45:21 +03:00
|
|
|
return dict(self.iteritems())
|
2009-04-19 18:25:12 +04:00
|
|
|
|
|
|
|
def __contains__(self, x):
|
2010-12-12 19:45:21 +03:00
|
|
|
return x in self._index
|
|
|
|
|
|
|
|
# grop the crusty Py2 methods
|
|
|
|
if sys.version_info[0] > 2:
|
|
|
|
items = iteritems; del iteritems
|
|
|
|
keys = iterkeys; del iterkeys
|
|
|
|
values = itervalues; del itervalues
|
|
|
|
del has_key
|
|
|
|
|
2007-01-16 16:45:41 +03:00
|
|
|
|
|
|
|
class RealDictConnection(_connection):
|
2010-02-26 03:17:52 +03:00
|
|
|
"""A connection that uses `RealDictCursor` automatically."""
|
2007-09-01 13:32:42 +04:00
|
|
|
def cursor(self, name=None):
|
|
|
|
if name is None:
|
|
|
|
return _connection.cursor(self, cursor_factory=RealDictCursor)
|
|
|
|
else:
|
2009-02-23 23:39:25 +03:00
|
|
|
return _connection.cursor(self, name, cursor_factory=RealDictCursor)
|
2007-01-16 16:45:41 +03:00
|
|
|
|
|
|
|
class RealDictCursor(DictCursorBase):
|
|
|
|
"""A cursor that uses a real dict as the base type for rows.
|
|
|
|
|
|
|
|
Note that this cursor is extremely specialized and does not allow
|
|
|
|
the normal access (using integer indices) to fetched data. If you need
|
|
|
|
to access database rows both as a dictionary and a list, then use
|
2010-02-26 03:17:52 +03:00
|
|
|
the generic `DictCursor` instead of `!RealDictCursor`.
|
2007-01-16 16:45:41 +03:00
|
|
|
"""
|
|
|
|
|
|
|
|
def __init__(self, *args, **kwargs):
|
|
|
|
kwargs['row_factory'] = RealDictRow
|
|
|
|
DictCursorBase.__init__(self, *args, **kwargs)
|
2009-05-09 16:44:59 +04:00
|
|
|
self._prefetch = 0
|
2007-01-16 16:45:41 +03:00
|
|
|
|
2010-03-31 03:43:07 +04:00
|
|
|
def execute(self, query, vars=None):
|
2007-01-16 16:45:41 +03:00
|
|
|
self.column_mapping = []
|
|
|
|
self._query_executed = 1
|
2010-03-31 03:43:07 +04:00
|
|
|
return _cursor.execute(self, query, vars)
|
2009-05-09 16:44:59 +04:00
|
|
|
|
2007-01-16 16:45:41 +03:00
|
|
|
def callproc(self, procname, vars=None):
|
|
|
|
self.column_mapping = []
|
|
|
|
self._query_executed = 1
|
2009-05-09 16:44:59 +04:00
|
|
|
return _cursor.callproc(self, procname, vars)
|
2007-01-16 16:45:41 +03:00
|
|
|
|
|
|
|
def _build_index(self):
|
|
|
|
if self._query_executed == 1 and self.description:
|
2009-05-09 16:44:59 +04:00
|
|
|
for i in range(len(self.description)):
|
|
|
|
self.column_mapping.append(self.description[i][0])
|
|
|
|
self._query_executed = 0
|
2007-01-16 16:45:41 +03:00
|
|
|
|
|
|
|
class RealDictRow(dict):
|
2011-02-19 19:16:28 +03:00
|
|
|
"""A `!dict` subclass representing a data record."""
|
2009-05-09 12:19:15 +04:00
|
|
|
|
2009-05-09 16:44:59 +04:00
|
|
|
__slots__ = ('_column_mapping')
|
2009-05-09 12:19:15 +04:00
|
|
|
|
2007-01-16 16:45:41 +03:00
|
|
|
def __init__(self, cursor):
|
|
|
|
dict.__init__(self)
|
2011-09-12 05:20:53 +04:00
|
|
|
# Required for named cursors
|
|
|
|
if cursor.description and not cursor.column_mapping:
|
|
|
|
cursor._build_index()
|
|
|
|
|
2007-01-16 16:45:41 +03:00
|
|
|
self._column_mapping = cursor.column_mapping
|
|
|
|
|
|
|
|
def __setitem__(self, name, value):
|
2009-05-09 16:44:59 +04:00
|
|
|
if type(name) == int:
|
2007-01-16 16:45:41 +03:00
|
|
|
name = self._column_mapping[name]
|
|
|
|
return dict.__setitem__(self, name, value)
|
|
|
|
|
|
|
|
|
2010-11-06 04:39:43 +03:00
|
|
|
class NamedTupleConnection(_connection):
|
|
|
|
"""A connection that uses `NamedTupleCursor` automatically."""
|
|
|
|
def cursor(self, *args, **kwargs):
|
|
|
|
kwargs['cursor_factory'] = NamedTupleCursor
|
|
|
|
return _connection.cursor(self, *args, **kwargs)
|
|
|
|
|
|
|
|
class NamedTupleCursor(_cursor):
|
2011-02-19 19:16:28 +03:00
|
|
|
"""A cursor that generates results as `~collections.namedtuple`.
|
2010-11-06 04:39:43 +03:00
|
|
|
|
|
|
|
`!fetch*()` methods will return named tuples instead of regular tuples, so
|
|
|
|
their elements can be accessed both as regular numeric items as well as
|
|
|
|
attributes.
|
|
|
|
|
|
|
|
>>> nt_cur = conn.cursor(cursor_factory=psycopg2.extras.NamedTupleCursor)
|
|
|
|
>>> rec = nt_cur.fetchone()
|
|
|
|
>>> rec
|
|
|
|
Record(id=1, num=100, data="abc'def")
|
|
|
|
>>> rec[1]
|
|
|
|
100
|
|
|
|
>>> rec.data
|
|
|
|
"abc'def"
|
|
|
|
"""
|
2010-11-11 13:26:36 +03:00
|
|
|
Record = None
|
|
|
|
|
|
|
|
def execute(self, query, vars=None):
|
|
|
|
self.Record = None
|
|
|
|
return _cursor.execute(self, query, vars)
|
|
|
|
|
|
|
|
def executemany(self, query, vars):
|
|
|
|
self.Record = None
|
2011-08-09 14:29:15 +04:00
|
|
|
return _cursor.executemany(self, query, vars)
|
2010-11-11 13:26:36 +03:00
|
|
|
|
|
|
|
def callproc(self, procname, vars=None):
|
|
|
|
self.Record = None
|
|
|
|
return _cursor.callproc(self, procname, vars)
|
|
|
|
|
2010-11-06 04:39:43 +03:00
|
|
|
def fetchone(self):
|
|
|
|
t = _cursor.fetchone(self)
|
|
|
|
if t is not None:
|
2010-11-11 13:26:36 +03:00
|
|
|
nt = self.Record
|
|
|
|
if nt is None:
|
|
|
|
nt = self.Record = self._make_nt()
|
2010-11-06 04:39:43 +03:00
|
|
|
return nt(*t)
|
|
|
|
|
|
|
|
def fetchmany(self, size=None):
|
2011-04-26 22:18:39 +04:00
|
|
|
ts = _cursor.fetchmany(self, size)
|
2010-11-11 13:26:36 +03:00
|
|
|
nt = self.Record
|
|
|
|
if nt is None:
|
|
|
|
nt = self.Record = self._make_nt()
|
2010-11-06 04:39:43 +03:00
|
|
|
return [nt(*t) for t in ts]
|
|
|
|
|
|
|
|
def fetchall(self):
|
2011-04-26 22:18:39 +04:00
|
|
|
ts = _cursor.fetchall(self)
|
2010-11-11 13:26:36 +03:00
|
|
|
nt = self.Record
|
|
|
|
if nt is None:
|
|
|
|
nt = self.Record = self._make_nt()
|
2010-11-06 04:39:43 +03:00
|
|
|
return [nt(*t) for t in ts]
|
|
|
|
|
|
|
|
def __iter__(self):
|
2011-04-26 22:26:19 +04:00
|
|
|
# Invoking _cursor.__iter__(self) goes to infinite recursion,
|
|
|
|
# so we do pagination by hand
|
|
|
|
while 1:
|
|
|
|
recs = self.fetchmany(self.itersize)
|
|
|
|
if not recs:
|
|
|
|
return
|
|
|
|
for rec in recs:
|
|
|
|
yield rec
|
2010-11-06 04:39:43 +03:00
|
|
|
|
|
|
|
try:
|
|
|
|
from collections import namedtuple
|
|
|
|
except ImportError, _exc:
|
|
|
|
def _make_nt(self):
|
|
|
|
raise self._exc
|
|
|
|
else:
|
|
|
|
def _make_nt(self, namedtuple=namedtuple):
|
2010-11-11 13:30:01 +03:00
|
|
|
return namedtuple("Record", [d[0] for d in self.description or ()])
|
2010-11-06 04:39:43 +03:00
|
|
|
|
|
|
|
|
2006-01-20 07:07:23 +03:00
|
|
|
class LoggingConnection(_connection):
|
2010-02-13 05:10:51 +03:00
|
|
|
"""A connection that logs all queries to a file or logger__ object.
|
|
|
|
|
|
|
|
.. __: http://docs.python.org/library/logging.html
|
|
|
|
"""
|
2006-01-20 07:07:23 +03:00
|
|
|
|
|
|
|
def initialize(self, logobj):
|
2011-02-19 19:16:28 +03:00
|
|
|
"""Initialize the connection to log to `!logobj`.
|
2009-05-09 16:44:59 +04:00
|
|
|
|
2011-02-19 19:16:28 +03:00
|
|
|
The `!logobj` parameter can be an open file object or a Logger
|
2010-02-13 05:10:51 +03:00
|
|
|
instance from the standard logging module.
|
2006-01-20 07:07:23 +03:00
|
|
|
"""
|
|
|
|
self._logobj = logobj
|
|
|
|
if logging and isinstance(logobj, logging.Logger):
|
|
|
|
self.log = self._logtologger
|
|
|
|
else:
|
|
|
|
self.log = self._logtofile
|
|
|
|
|
|
|
|
def filter(self, msg, curs):
|
|
|
|
"""Filter the query before logging it.
|
2009-05-09 16:44:59 +04:00
|
|
|
|
2006-01-20 07:07:23 +03:00
|
|
|
This is the method to overwrite to filter unwanted queries out of the
|
|
|
|
log or to add some extra data to the output. The default implementation
|
|
|
|
just does nothing.
|
|
|
|
"""
|
|
|
|
return msg
|
|
|
|
|
|
|
|
def _logtofile(self, msg, curs):
|
|
|
|
msg = self.filter(msg, curs)
|
|
|
|
if msg: self._logobj.write(msg + os.linesep)
|
|
|
|
|
|
|
|
def _logtologger(self, msg, curs):
|
|
|
|
msg = self.filter(msg, curs)
|
|
|
|
if msg: self._logobj.debug(msg)
|
|
|
|
|
|
|
|
def _check(self):
|
|
|
|
if not hasattr(self, '_logobj'):
|
|
|
|
raise self.ProgrammingError(
|
|
|
|
"LoggingConnection object has not been initialize()d")
|
|
|
|
|
2007-09-01 13:32:42 +04:00
|
|
|
def cursor(self, name=None):
|
2006-01-20 07:07:23 +03:00
|
|
|
self._check()
|
2009-02-23 23:39:25 +03:00
|
|
|
if name is None:
|
2007-09-01 13:32:42 +04:00
|
|
|
return _connection.cursor(self, cursor_factory=LoggingCursor)
|
|
|
|
else:
|
2009-02-23 23:39:25 +03:00
|
|
|
return _connection.cursor(self, name, cursor_factory=LoggingCursor)
|
2007-09-01 13:32:42 +04:00
|
|
|
|
2006-01-20 07:07:23 +03:00
|
|
|
class LoggingCursor(_cursor):
|
|
|
|
"""A cursor that logs queries using its connection logging facilities."""
|
|
|
|
|
2010-03-31 03:43:07 +04:00
|
|
|
def execute(self, query, vars=None):
|
2006-01-20 07:07:23 +03:00
|
|
|
try:
|
2010-03-31 03:43:07 +04:00
|
|
|
return _cursor.execute(self, query, vars)
|
2006-01-20 07:07:23 +03:00
|
|
|
finally:
|
|
|
|
self.connection.log(self.query, self)
|
|
|
|
|
|
|
|
def callproc(self, procname, vars=None):
|
|
|
|
try:
|
|
|
|
return _cursor.callproc(self, procname, vars)
|
|
|
|
finally:
|
|
|
|
self.connection.log(self.query, self)
|
|
|
|
|
2009-05-09 16:44:59 +04:00
|
|
|
|
2006-01-20 07:07:23 +03:00
|
|
|
class MinTimeLoggingConnection(LoggingConnection):
|
|
|
|
"""A connection that logs queries based on execution time.
|
|
|
|
|
2010-02-26 03:17:52 +03:00
|
|
|
This is just an example of how to sub-class `LoggingConnection` to
|
2010-02-13 05:10:51 +03:00
|
|
|
provide some extra filtering for the logged queries. Both the
|
2010-02-26 03:17:52 +03:00
|
|
|
`inizialize()` and `filter()` methods are overwritten to make sure
|
2010-02-13 05:10:51 +03:00
|
|
|
that only queries executing for more than ``mintime`` ms are logged.
|
2006-01-20 07:07:23 +03:00
|
|
|
|
2010-02-13 05:10:51 +03:00
|
|
|
Note that this connection uses the specialized cursor
|
2010-02-26 03:17:52 +03:00
|
|
|
`MinTimeLoggingCursor`.
|
2006-01-20 07:07:23 +03:00
|
|
|
"""
|
|
|
|
def initialize(self, logobj, mintime=0):
|
|
|
|
LoggingConnection.initialize(self, logobj)
|
|
|
|
self._mintime = mintime
|
2009-05-09 16:44:59 +04:00
|
|
|
|
2006-01-20 07:07:23 +03:00
|
|
|
def filter(self, msg, curs):
|
|
|
|
t = (time.time() - curs.timestamp) * 1000
|
|
|
|
if t > self._mintime:
|
|
|
|
return msg + os.linesep + " (execution time: %d ms)" % t
|
|
|
|
|
2007-09-01 13:32:42 +04:00
|
|
|
def cursor(self, name=None):
|
2006-01-20 07:07:23 +03:00
|
|
|
self._check()
|
2009-02-23 23:39:25 +03:00
|
|
|
if name is None:
|
2007-09-01 13:32:42 +04:00
|
|
|
return _connection.cursor(self, cursor_factory=MinTimeLoggingCursor)
|
|
|
|
else:
|
2009-02-23 23:39:25 +03:00
|
|
|
return _connection.cursor(self, name, cursor_factory=MinTimeLoggingCursor)
|
2006-01-20 07:07:23 +03:00
|
|
|
|
|
|
|
class MinTimeLoggingCursor(LoggingCursor):
|
2010-02-26 03:17:52 +03:00
|
|
|
"""The cursor sub-class companion to `MinTimeLoggingConnection`."""
|
2006-01-20 07:07:23 +03:00
|
|
|
|
2010-03-31 03:43:07 +04:00
|
|
|
def execute(self, query, vars=None):
|
2006-01-20 07:07:23 +03:00
|
|
|
self.timestamp = time.time()
|
2010-03-31 03:43:07 +04:00
|
|
|
return LoggingCursor.execute(self, query, vars)
|
2006-01-20 07:07:23 +03:00
|
|
|
|
|
|
|
def callproc(self, procname, vars=None):
|
|
|
|
self.timestamp = time.time()
|
2006-09-10 18:50:03 +04:00
|
|
|
return LoggingCursor.execute(self, procname, vars)
|
|
|
|
|
2008-09-19 23:25:16 +04:00
|
|
|
|
|
|
|
# a dbtype and adapter for Python UUID type
|
|
|
|
|
2011-06-28 19:12:43 +04:00
|
|
|
class UUID_adapter(object):
|
|
|
|
"""Adapt Python's uuid.UUID__ type to PostgreSQL's uuid__.
|
2008-09-19 23:25:16 +04:00
|
|
|
|
2011-06-28 19:12:43 +04:00
|
|
|
.. __: http://docs.python.org/library/uuid.html
|
|
|
|
.. __: http://www.postgresql.org/docs/8.4/static/datatype-uuid.html
|
|
|
|
"""
|
2010-02-13 05:10:51 +03:00
|
|
|
|
2011-06-28 19:12:43 +04:00
|
|
|
def __init__(self, uuid):
|
|
|
|
self._uuid = uuid
|
|
|
|
|
|
|
|
def prepare(self, conn):
|
|
|
|
pass
|
|
|
|
|
|
|
|
def getquoted(self):
|
|
|
|
return "'"+str(self._uuid)+"'::uuid"
|
|
|
|
|
|
|
|
__str__ = getquoted
|
|
|
|
|
|
|
|
def register_uuid(oids=None, conn_or_curs=None):
|
|
|
|
"""Create the UUID type and an uuid.UUID adapter."""
|
|
|
|
|
|
|
|
import uuid
|
|
|
|
|
|
|
|
if not oids:
|
|
|
|
oid1 = 2950
|
|
|
|
oid2 = 2951
|
|
|
|
elif type(oids) == list:
|
|
|
|
oid1, oid2 = oids
|
|
|
|
else:
|
|
|
|
oid1 = oids
|
|
|
|
oid2 = 2951
|
|
|
|
|
|
|
|
def parseUUIDARRAY(data, cursor):
|
|
|
|
if data is None:
|
|
|
|
return None
|
|
|
|
elif data == '{}':
|
|
|
|
return []
|
2009-10-04 14:34:02 +04:00
|
|
|
else:
|
2011-06-28 19:12:43 +04:00
|
|
|
return [((len(x) > 0 and x != 'NULL') and uuid.UUID(x) or None)
|
|
|
|
for x in data[1:-1].split(',')]
|
|
|
|
|
|
|
|
_ext.UUID = _ext.new_type((oid1, ), "UUID",
|
|
|
|
lambda data, cursor: data and uuid.UUID(data) or None)
|
|
|
|
_ext.UUIDARRAY = _ext.new_type((oid2,), "UUID[]", parseUUIDARRAY)
|
|
|
|
|
|
|
|
_ext.register_type(_ext.UUID, conn_or_curs)
|
|
|
|
_ext.register_type(_ext.UUIDARRAY, conn_or_curs)
|
|
|
|
_ext.register_adapter(uuid.UUID, UUID_adapter)
|
|
|
|
|
|
|
|
return _ext.UUID
|
2008-09-19 23:25:16 +04:00
|
|
|
|
|
|
|
|
2008-09-24 03:27:52 +04:00
|
|
|
# a type, dbtype and adapter for PostgreSQL inet type
|
|
|
|
|
|
|
|
class Inet(object):
|
|
|
|
"""Wrap a string to allow for correct SQL-quoting of inet values.
|
|
|
|
|
|
|
|
Note that this adapter does NOT check the passed value to make
|
|
|
|
sure it really is an inet-compatible address but DOES call adapt()
|
|
|
|
on it to make sure it is impossible to execute an SQL-injection
|
|
|
|
by passing an evil value to the initializer.
|
|
|
|
"""
|
|
|
|
def __init__(self, addr):
|
2010-02-13 05:14:38 +03:00
|
|
|
self.addr = addr
|
2008-09-24 03:27:52 +04:00
|
|
|
|
2010-02-14 23:14:17 +03:00
|
|
|
def __repr__(self):
|
|
|
|
return "%s(%r)" % (self.__class__.__name__, self.addr)
|
|
|
|
|
2008-09-24 03:27:52 +04:00
|
|
|
def prepare(self, conn):
|
|
|
|
self._conn = conn
|
|
|
|
|
|
|
|
def getquoted(self):
|
2010-02-14 23:14:17 +03:00
|
|
|
obj = _A(self.addr)
|
2008-09-24 03:27:52 +04:00
|
|
|
if hasattr(obj, 'prepare'):
|
|
|
|
obj.prepare(self._conn)
|
2010-12-29 05:46:36 +03:00
|
|
|
return obj.getquoted() + b("::inet")
|
2008-09-24 03:27:52 +04:00
|
|
|
|
2010-09-26 02:55:55 +04:00
|
|
|
def __conform__(self, foo):
|
|
|
|
if foo is _ext.ISQLQuote:
|
|
|
|
return self
|
|
|
|
|
2008-09-24 03:27:52 +04:00
|
|
|
def __str__(self):
|
|
|
|
return str(self.addr)
|
|
|
|
|
2009-03-02 12:59:52 +03:00
|
|
|
def register_inet(oid=None, conn_or_curs=None):
|
2008-09-24 03:27:52 +04:00
|
|
|
"""Create the INET type and an Inet adapter."""
|
|
|
|
if not oid: oid = 869
|
|
|
|
_ext.INET = _ext.new_type((oid, ), "INET",
|
|
|
|
lambda data, cursor: data and Inet(data) or None)
|
2009-03-02 12:59:52 +03:00
|
|
|
_ext.register_type(_ext.INET, conn_or_curs)
|
2008-09-24 03:27:52 +04:00
|
|
|
return _ext.INET
|
|
|
|
|
|
|
|
|
2009-03-02 13:07:17 +03:00
|
|
|
def register_tstz_w_secs(oids=None, conn_or_curs=None):
|
2010-05-20 05:10:33 +04:00
|
|
|
"""The function used to register an alternate type caster for
|
|
|
|
:sql:`TIMESTAMP WITH TIME ZONE` to deal with historical time zones with
|
|
|
|
seconds in the UTC offset.
|
2009-03-02 13:07:17 +03:00
|
|
|
|
2010-05-20 05:10:33 +04:00
|
|
|
These are now correctly handled by the default type caster, so currently
|
|
|
|
the function doesn't do anything.
|
|
|
|
"""
|
2010-07-13 16:26:52 +04:00
|
|
|
warnings.warn("deprecated", DeprecationWarning)
|
2009-03-02 13:07:17 +03:00
|
|
|
|
|
|
|
|
2010-04-02 04:56:38 +04:00
|
|
|
import select
|
|
|
|
from psycopg2.extensions import POLL_OK, POLL_READ, POLL_WRITE
|
|
|
|
from psycopg2 import OperationalError
|
|
|
|
|
2010-04-04 06:07:43 +04:00
|
|
|
def wait_select(conn):
|
2010-04-02 04:56:38 +04:00
|
|
|
"""Wait until a connection or cursor has data available.
|
|
|
|
|
|
|
|
The function is an example of a wait callback to be registered with
|
|
|
|
`~psycopg2.extensions.set_wait_callback()`. This function uses `!select()`
|
|
|
|
to wait for data available.
|
|
|
|
"""
|
|
|
|
while 1:
|
2010-04-04 06:07:43 +04:00
|
|
|
state = conn.poll()
|
2010-04-02 04:56:38 +04:00
|
|
|
if state == POLL_OK:
|
|
|
|
break
|
|
|
|
elif state == POLL_READ:
|
|
|
|
select.select([conn.fileno()], [], [])
|
|
|
|
elif state == POLL_WRITE:
|
|
|
|
select.select([], [conn.fileno()], [])
|
|
|
|
else:
|
|
|
|
raise OperationalError("bad state from poll: %s" % state)
|
|
|
|
|
|
|
|
|
2010-09-26 02:13:51 +04:00
|
|
|
class HstoreAdapter(object):
|
|
|
|
"""Adapt a Python dict to the hstore syntax."""
|
|
|
|
def __init__(self, wrapped):
|
|
|
|
self.wrapped = wrapped
|
|
|
|
|
|
|
|
def prepare(self, conn):
|
|
|
|
self.conn = conn
|
|
|
|
|
2010-09-27 04:45:16 +04:00
|
|
|
# use an old-style getquoted implementation if required
|
|
|
|
if conn.server_version < 90000:
|
|
|
|
self.getquoted = self._getquoted_8
|
2010-09-26 02:13:51 +04:00
|
|
|
|
|
|
|
def _getquoted_8(self):
|
|
|
|
"""Use the operators available in PG pre-9.0."""
|
2010-09-27 01:30:23 +04:00
|
|
|
if not self.wrapped:
|
2010-12-29 05:45:24 +03:00
|
|
|
return b("''::hstore")
|
2010-09-27 01:30:23 +04:00
|
|
|
|
2010-09-26 02:13:51 +04:00
|
|
|
adapt = _ext.adapt
|
|
|
|
rv = []
|
|
|
|
for k, v in self.wrapped.iteritems():
|
|
|
|
k = adapt(k)
|
|
|
|
k.prepare(self.conn)
|
|
|
|
k = k.getquoted()
|
|
|
|
|
|
|
|
if v is not None:
|
|
|
|
v = adapt(v)
|
|
|
|
v.prepare(self.conn)
|
|
|
|
v = v.getquoted()
|
|
|
|
else:
|
2010-12-29 05:45:24 +03:00
|
|
|
v = b('NULL')
|
2010-09-26 02:13:51 +04:00
|
|
|
|
2010-12-29 05:45:24 +03:00
|
|
|
# XXX this b'ing is painfully inefficient!
|
|
|
|
rv.append(b("(") + k + b(" => ") + v + b(")"))
|
2010-09-26 02:13:51 +04:00
|
|
|
|
2010-12-29 05:45:24 +03:00
|
|
|
return b("(") + b('||').join(rv) + b(")")
|
2010-09-26 02:13:51 +04:00
|
|
|
|
|
|
|
def _getquoted_9(self):
|
|
|
|
"""Use the hstore(text[], text[]) function."""
|
2010-09-27 01:30:23 +04:00
|
|
|
if not self.wrapped:
|
2010-12-29 05:45:24 +03:00
|
|
|
return b("''::hstore")
|
2010-09-27 01:30:23 +04:00
|
|
|
|
2010-09-26 02:13:51 +04:00
|
|
|
k = _ext.adapt(self.wrapped.keys())
|
|
|
|
k.prepare(self.conn)
|
|
|
|
v = _ext.adapt(self.wrapped.values())
|
|
|
|
v.prepare(self.conn)
|
2010-12-29 05:45:24 +03:00
|
|
|
return b("hstore(") + k.getquoted() + b(", ") + v.getquoted() + b(")")
|
2010-09-26 02:13:51 +04:00
|
|
|
|
2010-09-27 04:45:16 +04:00
|
|
|
getquoted = _getquoted_9
|
|
|
|
|
2010-09-27 00:59:54 +04:00
|
|
|
_re_hstore = regex.compile(r"""
|
|
|
|
# hstore key:
|
2010-09-27 01:53:02 +04:00
|
|
|
# a string of normal or escaped chars
|
|
|
|
"((?: [^"\\] | \\. )*)"
|
2010-09-27 00:59:54 +04:00
|
|
|
\s*=>\s* # hstore value
|
|
|
|
(?:
|
|
|
|
NULL # the value can be null - not catched
|
2010-09-27 01:53:02 +04:00
|
|
|
# or a quoted string like the key
|
|
|
|
| "((?: [^"\\] | \\. )*)"
|
2010-09-27 00:59:54 +04:00
|
|
|
)
|
|
|
|
(?:\s*,\s*|$) # pairs separated by comma or end of string.
|
|
|
|
""", regex.VERBOSE)
|
|
|
|
|
2010-12-29 05:45:24 +03:00
|
|
|
@classmethod
|
|
|
|
def parse(self, s, cur, _bsdec=regex.compile(r"\\(.)")):
|
2010-09-27 00:59:54 +04:00
|
|
|
"""Parse an hstore representation in a Python string.
|
|
|
|
|
|
|
|
The hstore is represented as something like::
|
|
|
|
|
|
|
|
"a"=>"1", "b"=>"2"
|
|
|
|
|
|
|
|
with backslash-escaped strings.
|
|
|
|
"""
|
|
|
|
if s is None:
|
|
|
|
return None
|
|
|
|
|
|
|
|
rv = {}
|
|
|
|
start = 0
|
|
|
|
for m in self._re_hstore.finditer(s):
|
|
|
|
if m is None or m.start() != start:
|
|
|
|
raise psycopg2.InterfaceError(
|
|
|
|
"error parsing hstore pair at char %d" % start)
|
2010-12-29 05:45:24 +03:00
|
|
|
k = _bsdec.sub(r'\1', m.group(1))
|
2010-09-27 00:59:54 +04:00
|
|
|
v = m.group(2)
|
|
|
|
if v is not None:
|
2010-12-29 05:45:24 +03:00
|
|
|
v = _bsdec.sub(r'\1', v)
|
2010-09-27 00:59:54 +04:00
|
|
|
|
|
|
|
rv[k] = v
|
|
|
|
start = m.end()
|
|
|
|
|
|
|
|
if start < len(s):
|
|
|
|
raise psycopg2.InterfaceError(
|
|
|
|
"error parsing hstore: unparsed data after char %d" % start)
|
|
|
|
|
|
|
|
return rv
|
|
|
|
|
2010-12-29 05:45:24 +03:00
|
|
|
@classmethod
|
2010-09-27 03:49:31 +04:00
|
|
|
def parse_unicode(self, s, cur):
|
|
|
|
"""Parse an hstore returning unicode keys and values."""
|
2010-12-29 05:45:24 +03:00
|
|
|
if s is None:
|
|
|
|
return None
|
2010-09-27 03:49:31 +04:00
|
|
|
|
2010-12-29 05:45:24 +03:00
|
|
|
s = s.decode(_ext.encodings[cur.connection.encoding])
|
|
|
|
return self.parse(s, cur)
|
2010-09-27 03:49:31 +04:00
|
|
|
|
2010-11-19 06:51:53 +03:00
|
|
|
@classmethod
|
2010-09-27 04:46:54 +04:00
|
|
|
def get_oids(self, conn_or_curs):
|
2011-02-25 03:19:49 +03:00
|
|
|
"""Return the lists of OID of the hstore and hstore[] types.
|
2010-09-27 04:46:54 +04:00
|
|
|
"""
|
|
|
|
if hasattr(conn_or_curs, 'execute'):
|
|
|
|
conn = conn_or_curs.connection
|
|
|
|
curs = conn_or_curs
|
|
|
|
else:
|
|
|
|
conn = conn_or_curs
|
|
|
|
curs = conn_or_curs.cursor()
|
2010-09-27 01:11:06 +04:00
|
|
|
|
2010-09-27 04:46:54 +04:00
|
|
|
# Store the transaction status of the connection to revert it after use
|
|
|
|
conn_status = conn.status
|
2010-09-27 01:11:06 +04:00
|
|
|
|
2010-11-19 06:51:53 +03:00
|
|
|
# column typarray not available before PG 8.3
|
|
|
|
typarray = conn.server_version >= 80300 and "typarray" or "NULL"
|
|
|
|
|
2011-02-25 03:19:49 +03:00
|
|
|
rv0, rv1 = [], []
|
|
|
|
|
2010-09-27 04:46:54 +04:00
|
|
|
# get the oid for the hstore
|
|
|
|
curs.execute("""\
|
2010-11-19 06:51:53 +03:00
|
|
|
SELECT t.oid, %s
|
2010-09-27 01:11:06 +04:00
|
|
|
FROM pg_type t JOIN pg_namespace ns
|
|
|
|
ON typnamespace = ns.oid
|
2011-02-25 03:19:49 +03:00
|
|
|
WHERE typname = 'hstore';
|
2010-11-19 06:51:53 +03:00
|
|
|
""" % typarray)
|
2011-02-25 03:19:49 +03:00
|
|
|
for oids in curs:
|
|
|
|
rv0.append(oids[0])
|
|
|
|
rv1.append(oids[1])
|
2010-09-27 01:11:06 +04:00
|
|
|
|
2010-09-27 04:46:54 +04:00
|
|
|
# revert the status of the connection as before the command
|
|
|
|
if (conn_status != _ext.STATUS_IN_TRANSACTION
|
2011-12-15 23:28:04 +04:00
|
|
|
and not conn.autocommit):
|
2010-09-27 04:46:54 +04:00
|
|
|
conn.rollback()
|
2010-09-27 01:11:06 +04:00
|
|
|
|
2011-02-25 03:19:49 +03:00
|
|
|
return tuple(rv0), tuple(rv1)
|
2010-09-27 04:46:54 +04:00
|
|
|
|
2011-09-22 21:53:21 +04:00
|
|
|
def register_hstore(conn_or_curs, globally=False, unicode=False,
        oid=None, array_oid=None):
    """Register adapter and typecaster for `!dict`\-\ |hstore| conversions.

    :param conn_or_curs: a connection or cursor: the typecaster will be
        registered only on this object unless *globally* is set to `!True`
    :param globally: register the adapter globally, not only on *conn_or_curs*
    :param unicode: if `!True`, keys and values returned from the database
        will be `!unicode` instead of `!str`. The option is not available on
        Python 3
    :param oid: the OID of the |hstore| type if known. If not, it will be
        queried on *conn_or_curs*.
    :param array_oid: the OID of the |hstore| array type if known. If not, it
        will be queried on *conn_or_curs*.

    The connection or cursor passed to the function will be used to query the
    database and look for the OID of the |hstore| type (which may be different
    across databases). If querying is not desirable (e.g. with
    :ref:`asynchronous connections <async-support>`) you may specify it in the
    *oid* parameter, which can be found using a query such as :sql:`SELECT
    'hstore'::regtype::oid`. Analogously you can obtain a value for *array_oid*
    using a query such as :sql:`SELECT 'hstore[]'::regtype::oid`.

    Note that, when passing a dictionary from Python to the database, both
    strings and unicode keys and values are supported. Dictionaries returned
    from the database have keys/values according to the *unicode* parameter.

    The |hstore| contrib module must be already installed in the database
    (executing the ``hstore.sql`` script in your ``contrib`` directory).
    Raise `~psycopg2.ProgrammingError` if the type is not found.

    .. versionchanged:: 2.4
        added the *oid* parameter. If not specified, the typecaster is
        installed also if |hstore| is not installed in the :sql:`public`
        schema.

    .. versionchanged:: 2.4.3
        added support for |hstore| array.

    """
    # If the caller didn't provide the oids, look them up in the database.
    if oid is None:
        oid = HstoreAdapter.get_oids(conn_or_curs)
        if oid is None or not oid[0]:
            raise psycopg2.ProgrammingError(
                "hstore type not found in the database. "
                "please install it from your 'contrib/hstore.sql' file")
        else:
            array_oid = oid[1]
            oid = oid[0]

    # new_type() wants tuples of oids.
    if isinstance(oid, int):
        oid = (oid,)

    if array_oid is not None:
        if isinstance(array_oid, int):
            array_oid = (array_oid,)
        else:
            # drop any null/zero entry returned by get_oids()
            array_oid = tuple(x for x in array_oid if x)

    # Scope of the registration: the passed object, or everywhere.
    scope = not globally and conn_or_curs or None

    # Choose the parsing function: the unicode flavour is only
    # meaningful on Python 2.
    if sys.version_info[0] < 3 and unicode:
        caster = HstoreAdapter.parse_unicode
    else:
        caster = HstoreAdapter.parse

    # Create and register the typecaster and the dict adapter.
    HSTORE = _ext.new_type(oid, "HSTORE", caster)
    _ext.register_type(HSTORE, scope)
    _ext.register_adapter(dict, HstoreAdapter)

    if array_oid:
        HSTOREARRAY = _ext.new_array_type(array_oid, "HSTOREARRAY", HSTORE)
        _ext.register_type(HSTOREARRAY, scope)
|
|
|
|
|
2010-09-26 02:13:51 +04:00
|
|
|
|
2011-01-02 02:34:13 +03:00
|
|
|
class CompositeCaster(object):
    """Helps conversion of a PostgreSQL composite type into a Python object.

    The class is usually created by the `register_composite()` function.
    You may want to create and register manually instances of the class if
    querying the database at registration time is not desirable (such as when
    using an :ref:`asynchronous connections <async-support>`).

    .. attribute:: name

        The name of the PostgreSQL type.

    .. attribute:: oid

        The oid of the PostgreSQL type.

    .. attribute:: array_oid

        The oid of the PostgreSQL array type, if available.

    .. attribute:: type

        The type of the Python objects returned. If :py:func:`collections.namedtuple()`
        is available, it is a named tuple with attributes equal to the type
        components. Otherwise it is just the `!tuple` object.

    .. attribute:: attnames

        List of component names of the type to be casted.

    .. attribute:: atttypes

        List of component type oids of the type to be casted.

    """
    def __init__(self, name, oid, attrs, array_oid=None):
        self.name = name
        self.oid = oid
        self.array_oid = array_oid

        # *attrs* is a sequence of (attribute name, attribute type oid) pairs.
        self.attnames = [ a[0] for a in attrs ]
        self.atttypes = [ a[1] for a in attrs ]
        self._create_type(name, self.attnames)
        self.typecaster = _ext.new_type((oid,), name, self.parse)
        if array_oid:
            self.array_typecaster = _ext.new_array_type(
                (array_oid,), "%sARRAY" % name, self.typecaster)
        else:
            self.array_typecaster = None

    def parse(self, s, curs):
        """Parse the PostgreSQL record literal *s* into a Python object.

        Return `!None` for a NULL record; otherwise return an instance of
        `type` with each component casted using *curs* according to its oid.
        Raise `~psycopg2.DataError` if the number of components found doesn't
        match the registered type.
        """
        if s is None:
            return None

        tokens = self.tokenize(s)
        if len(tokens) != len(self.atttypes):
            raise psycopg2.DataError(
                "expecting %d components for the type %s, %d found instead" %
                (len(self.atttypes), self.name, len(tokens)))

        attrs = [ curs.cast(oid, token)
            for oid, token in zip(self.atttypes, tokens) ]
        return self._ctor(*attrs)

    _re_tokenize = regex.compile(r"""
  \(? ([,\)])                        # an empty token, representing NULL
| \(? " ((?: [^"] | "")*) " [,)]    # or a quoted string
| \(? ([^",\)]+) [,\)]              # or an unquoted string
    """, regex.VERBOSE)

    _re_undouble = regex.compile(r'(["\\])\1')

    @classmethod
    def tokenize(self, s):
        """Split the record literal *s* into a list of component strings.

        NULL components are returned as `!None`.
        """
        rv = []
        for m in self._re_tokenize.finditer(s):
            if m is None:
                raise psycopg2.InterfaceError("can't parse type: %r" % s)
            # Compare the groups against None, not for truth: an empty
            # quoted string ("") is a valid, empty component and must not
            # be confused with a NULL (matched as an empty token, group 1,
            # which always captures exactly one of ',' or ')').
            if m.group(1) is not None:
                rv.append(None)
            elif m.group(2) is not None:
                # quoted string: undouble the escaped quotes/backslashes
                rv.append(self._re_undouble.sub(r"\1", m.group(2)))
            else:
                rv.append(m.group(3))

        return rv

    def _create_type(self, name, attnames):
        # Build the Python type returned by parse(): a namedtuple when
        # available (Python >= 2.6), a plain tuple otherwise.
        try:
            from collections import namedtuple
        except ImportError:
            self.type = tuple
            # tuple(*args) doesn't accept multiple arguments: wrap it.
            self._ctor = lambda *args: tuple(args)
        else:
            self.type = namedtuple(name, attnames)
            self._ctor = self.type

    @classmethod
    def _from_db(self, name, conn_or_curs):
        """Return a `CompositeCaster` instance for the type *name*.

        Raise `ProgrammingError` if the type is not found.
        """
        # Accept either a cursor or a connection.
        if hasattr(conn_or_curs, 'execute'):
            conn = conn_or_curs.connection
            curs = conn_or_curs
        else:
            conn = conn_or_curs
            curs = conn_or_curs.cursor()

        # Store the transaction status of the connection to revert it after use
        conn_status = conn.status

        # Use the correct schema
        if '.' in name:
            schema, tname = name.split('.', 1)
        else:
            tname = name
            schema = 'public'

        # column typarray not available before PG 8.3
        typarray = conn.server_version >= 80300 and "typarray" or "NULL"

        # get the type oid and attributes
        curs.execute("""\
SELECT t.oid, %s, attname, atttypid
FROM pg_type t
JOIN pg_namespace ns ON typnamespace = ns.oid
JOIN pg_attribute a ON attrelid = typrelid
WHERE typname = %%s AND nspname = %%s
    AND attnum > 0 AND NOT attisdropped
ORDER BY attnum;
""" % typarray, (tname, schema))

        recs = curs.fetchall()

        # revert the status of the connection as before the command
        if (conn_status != _ext.STATUS_IN_TRANSACTION
        and not conn.autocommit):
            conn.rollback()

        if not recs:
            raise psycopg2.ProgrammingError(
                "PostgreSQL type '%s' not found" % name)

        type_oid = recs[0][0]
        array_oid = recs[0][1]
        type_attrs = [ (r[2], r[3]) for r in recs ]

        return CompositeCaster(tname, type_oid, type_attrs,
            array_oid=array_oid)
|
2011-01-02 02:34:13 +03:00
|
|
|
|
|
|
|
def register_composite(name, conn_or_curs, globally=False):
    """Register a typecaster to convert a composite type into a tuple.

    :param name: the name of a PostgreSQL composite type, e.g. created using
        the |CREATE TYPE|_ command
    :param conn_or_curs: a connection or cursor used to find the type oid and
        components; the typecaster is registered in a scope limited to this
        object, unless *globally* is set to `!True`
    :param globally: if `!False` (default) register the typecaster only on
        *conn_or_curs*, otherwise register it globally
    :return: the registered `CompositeCaster` instance responsible for the
        conversion

    .. versionchanged:: 2.4.3
        added support for array of composite types

    """
    # Query the database to discover the type oid and structure.
    caster = CompositeCaster._from_db(name, conn_or_curs)

    # Register on the requested scope: the passed object only, or everywhere.
    scope = not globally and conn_or_curs or None
    _ext.register_type(caster.typecaster, scope)
    if caster.array_typecaster is not None:
        _ext.register_type(caster.array_typecaster, scope)

    return caster
|
|
|
|
|
|
|
|
|
2010-02-08 23:13:10 +03:00
|
|
|
# Export every public name defined above. Materialize an actual list:
# on Python 3 filter() returns a lazy, one-shot iterator, which would be
# exhausted after its first use and doesn't follow the convention of
# __all__ being a list of strings. Snapshot the keys before filtering so
# the namespace isn't read while being iterated.
__all__ = [k for k in list(locals().keys()) if not k.startswith('_')]
|