2005-10-18 09:42:00 +04:00
|
|
|
"""Miscellaneous goodies for psycopg2
|
2005-01-20 08:49:40 +03:00
|
|
|
|
2005-10-18 09:42:00 +04:00
|
|
|
This module is a generic place used to hold little helper functions
|
2005-01-20 08:49:40 +03:00
|
|
|
and classes until a better place in the distribution is found.
|
|
|
|
"""
|
2004-10-19 07:17:12 +04:00
|
|
|
# psycopg/extras.py - miscellaneous extra goodies for psycopg
|
|
|
|
#
|
2010-02-13 01:34:53 +03:00
|
|
|
# Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
2004-10-19 07:17:12 +04:00
|
|
|
#
|
2010-02-13 01:34:53 +03:00
|
|
|
# psycopg2 is free software: you can redistribute it and/or modify it
|
|
|
|
# under the terms of the GNU Lesser General Public License as published
|
|
|
|
# by the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
2004-10-19 07:17:12 +04:00
|
|
|
#
|
2010-02-13 01:34:53 +03:00
|
|
|
# In addition, as a special exception, the copyright holders give
|
|
|
|
# permission to link this program with the OpenSSL library (or with
|
|
|
|
# modified versions of OpenSSL that use the same license as OpenSSL),
|
|
|
|
# and distribute linked combinations including the two.
|
|
|
|
#
|
|
|
|
# You must obey the GNU Lesser General Public License in all respects for
|
|
|
|
# all of the code used other than OpenSSL.
|
|
|
|
#
|
|
|
|
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
|
|
|
|
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
|
|
|
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
|
|
|
|
# License for more details.
|
2004-10-19 07:17:12 +04:00
|
|
|
|
2006-01-20 07:07:23 +03:00
|
|
|
import os
|
2010-12-12 19:45:21 +03:00
|
|
|
import sys
|
2006-01-20 07:07:23 +03:00
|
|
|
import time
|
2010-09-27 03:49:31 +04:00
|
|
|
import codecs
|
2010-07-13 16:26:52 +04:00
|
|
|
import warnings
|
2009-03-02 12:59:52 +03:00
|
|
|
import re as regex
|
2006-01-20 07:07:23 +03:00
|
|
|
|
|
|
|
# logging is only needed by LoggingConnection; degrade gracefully when it
# is unavailable.  Catch ImportError specifically instead of a bare except,
# which would also swallow KeyboardInterrupt/SystemExit.
try:
    import logging
except ImportError:
    logging = None
|
2008-09-19 23:25:16 +04:00
|
|
|
|
2010-09-27 00:59:54 +04:00
|
|
|
import psycopg2
|
2008-09-19 23:25:16 +04:00
|
|
|
from psycopg2 import extensions as _ext
|
2005-06-24 11:11:44 +04:00
|
|
|
from psycopg2.extensions import cursor as _cursor
|
2005-07-17 08:08:08 +04:00
|
|
|
from psycopg2.extensions import connection as _connection
|
2005-06-24 11:11:44 +04:00
|
|
|
from psycopg2.extensions import adapt as _A
|
2005-02-27 18:03:53 +03:00
|
|
|
|
2006-01-20 07:07:23 +03:00
|
|
|
|
2007-01-16 16:45:41 +03:00
|
|
|
class DictCursorBase(_cursor):
    """Base class for all dict-like cursors.

    Subclasses must pass a ``row_factory`` keyword argument to the
    constructor and set the ``_prefetch`` flag to choose whether rows are
    fetched before or after `_build_index()` rebuilds the column lookup
    state for a new result set (see `DictCursor` and `RealDictCursor`).
    """

    def __init__(self, *args, **kwargs):
        # row_factory is mandatory: pop it before delegating to _cursor,
        # which doesn't know the keyword.
        if 'row_factory' in kwargs:
            row_factory = kwargs['row_factory']
            del kwargs['row_factory']
        else:
            raise NotImplementedError(
                "DictCursorBase can't be instantiated without a row factory.")
        _cursor.__init__(self, *args, **kwargs)
        # set to 1 by execute()/callproc() in subclasses; cleared again by
        # the subclass _build_index() once the column lookup is rebuilt.
        self._query_executed = 0
        # subclasses override this right after __init__ returns.
        self._prefetch = 0
        self.row_factory = row_factory

    def fetchone(self):
        # Fetch either before or after rebuilding the index: the order is
        # controlled by _prefetch to match the needs of the row factory.
        if self._prefetch:
            res = _cursor.fetchone(self)
        if self._query_executed:
            self._build_index()
        if not self._prefetch:
            res = _cursor.fetchone(self)
        return res

    def fetchmany(self, size=None):
        # same prefetch dance as fetchone()
        if self._prefetch:
            res = _cursor.fetchmany(self, size)
        if self._query_executed:
            self._build_index()
        if not self._prefetch:
            res = _cursor.fetchmany(self, size)
        return res

    def fetchall(self):
        # same prefetch dance as fetchone()
        if self._prefetch:
            res = _cursor.fetchall(self)
        if self._query_executed:
            self._build_index()
        if not self._prefetch:
            res = _cursor.fetchall(self)
        return res

    def next(self):
        # NOTE(review): Python 2 iterator protocol; presumably the Python 3
        # build renames this to __next__ at install time -- confirm.
        if self._prefetch:
            res = _cursor.fetchone(self)
            if res is None:
                raise StopIteration()
        if self._query_executed:
            self._build_index()
        if not self._prefetch:
            res = _cursor.fetchone(self)
            if res is None:
                raise StopIteration()
        return res
|
|
|
|
|
2005-07-17 08:08:08 +04:00
|
|
|
class DictConnection(_connection):
    """A connection that uses `DictCursor` automatically."""

    def cursor(self, name=None):
        """Return a `DictCursor`; server-side if *name* is given."""
        if name is None:
            return _connection.cursor(self, cursor_factory=DictCursor)
        return _connection.cursor(self, name, cursor_factory=DictCursor)
|
2004-10-19 07:17:12 +04:00
|
|
|
|
2007-01-16 16:45:41 +03:00
|
|
|
class DictCursor(DictCursorBase):
    """A cursor that keeps a list of column name -> index mappings."""

    def __init__(self, *args, **kwargs):
        kwargs['row_factory'] = DictRow
        DictCursorBase.__init__(self, *args, **kwargs)
        # rows are fetched before the index is rebuilt
        self._prefetch = 1

    def execute(self, query, vars=None):
        # reset the name -> position map and mark it as stale
        self.index = {}
        self._query_executed = 1
        return _cursor.execute(self, query, vars)

    def callproc(self, procname, vars=None):
        self.index = {}
        self._query_executed = 1
        return _cursor.callproc(self, procname, vars)

    def _build_index(self):
        """Populate ``self.index`` from the current result description."""
        if self._query_executed == 1 and self.description:
            for pos, col in enumerate(self.description):
                self.index[col[0]] = pos
            self._query_executed = 0
|
2005-03-01 19:41:02 +03:00
|
|
|
|
2004-10-19 07:17:12 +04:00
|
|
|
class DictRow(list):
|
2010-02-15 20:41:30 +03:00
|
|
|
"""A row object that allow by-colmun-name access to data."""
|
2004-10-19 07:17:12 +04:00
|
|
|
|
2009-05-09 12:19:15 +04:00
|
|
|
__slots__ = ('_index',)
|
|
|
|
|
2004-10-19 07:17:12 +04:00
|
|
|
def __init__(self, cursor):
|
2005-05-10 06:29:24 +04:00
|
|
|
self._index = cursor.index
|
2004-10-19 07:17:12 +04:00
|
|
|
self[:] = [None] * len(cursor.description)
|
|
|
|
|
|
|
|
def __getitem__(self, x):
|
2010-12-12 19:45:21 +03:00
|
|
|
if not isinstance(x, int):
|
2005-05-10 06:29:24 +04:00
|
|
|
x = self._index[x]
|
2004-10-19 07:17:12 +04:00
|
|
|
return list.__getitem__(self, x)
|
2005-02-27 18:03:53 +03:00
|
|
|
|
2010-12-01 16:17:12 +03:00
|
|
|
def __setitem__(self, x, v):
|
2010-12-12 19:45:21 +03:00
|
|
|
if not isinstance(x, int):
|
2010-12-01 16:17:12 +03:00
|
|
|
x = self._index[x]
|
|
|
|
list.__setitem__(self, x, v)
|
|
|
|
|
2005-04-11 11:20:46 +04:00
|
|
|
def items(self):
|
2010-12-12 19:45:21 +03:00
|
|
|
return list(self.iteritems())
|
2009-05-09 16:44:59 +04:00
|
|
|
|
2005-04-25 08:58:38 +04:00
|
|
|
def keys(self):
|
2006-01-06 05:58:24 +03:00
|
|
|
return self._index.keys()
|
2005-04-25 08:58:38 +04:00
|
|
|
|
2005-05-19 08:48:26 +04:00
|
|
|
def values(self):
|
|
|
|
return tuple(self[:])
|
|
|
|
|
|
|
|
def has_key(self, x):
|
2010-12-12 19:45:21 +03:00
|
|
|
return x in self._index
|
2006-01-06 05:58:24 +03:00
|
|
|
|
2005-05-19 08:48:26 +04:00
|
|
|
def get(self, x, default=None):
|
|
|
|
try:
|
|
|
|
return self[x]
|
|
|
|
except:
|
|
|
|
return default
|
|
|
|
|
2006-10-06 09:22:54 +04:00
|
|
|
def iteritems(self):
|
2010-12-12 19:45:21 +03:00
|
|
|
for n, v in self._index.iteritems():
|
2006-10-06 09:22:54 +04:00
|
|
|
yield n, list.__getitem__(self, v)
|
2005-02-27 18:03:53 +03:00
|
|
|
|
2009-04-19 18:25:12 +04:00
|
|
|
def iterkeys(self):
|
|
|
|
return self._index.iterkeys()
|
|
|
|
|
|
|
|
def itervalues(self):
|
|
|
|
return list.__iter__(self)
|
|
|
|
|
|
|
|
def copy(self):
|
2010-12-12 19:45:21 +03:00
|
|
|
return dict(self.iteritems())
|
2009-04-19 18:25:12 +04:00
|
|
|
|
|
|
|
def __contains__(self, x):
|
2010-12-12 19:45:21 +03:00
|
|
|
return x in self._index
|
|
|
|
|
|
|
|
# grop the crusty Py2 methods
|
|
|
|
if sys.version_info[0] > 2:
|
|
|
|
items = iteritems; del iteritems
|
|
|
|
keys = iterkeys; del iterkeys
|
|
|
|
values = itervalues; del itervalues
|
|
|
|
del has_key
|
|
|
|
|
2007-01-16 16:45:41 +03:00
|
|
|
|
|
|
|
class RealDictConnection(_connection):
    """A connection that uses `RealDictCursor` automatically."""

    def cursor(self, name=None):
        """Return a `RealDictCursor`; server-side if *name* is given."""
        if name is None:
            return _connection.cursor(self, cursor_factory=RealDictCursor)
        return _connection.cursor(self, name, cursor_factory=RealDictCursor)
|
2007-01-16 16:45:41 +03:00
|
|
|
|
|
|
|
class RealDictCursor(DictCursorBase):
    """A cursor that uses a real dict as the base type for rows.

    Note that this cursor is extremely specialized and does not allow
    the normal access (using integer indices) to fetched data. If you need
    to access database rows both as a dictionary and a list, then use
    the generic `DictCursor` instead of `!RealDictCursor`.
    """

    def __init__(self, *args, **kwargs):
        kwargs['row_factory'] = RealDictRow
        DictCursorBase.__init__(self, *args, **kwargs)
        # the column mapping is rebuilt before rows are fetched
        self._prefetch = 0

    def execute(self, query, vars=None):
        # forget the previous result's columns and mark the mapping stale
        self.column_mapping = []
        self._query_executed = 1
        return _cursor.execute(self, query, vars)

    def callproc(self, procname, vars=None):
        self.column_mapping = []
        self._query_executed = 1
        return _cursor.callproc(self, procname, vars)

    def _build_index(self):
        """Record the column names of the current result set, in order."""
        if self._query_executed == 1 and self.description:
            self.column_mapping.extend(d[0] for d in self.description)
            self._query_executed = 0
|
2007-01-16 16:45:41 +03:00
|
|
|
|
|
|
|
class RealDictRow(dict):
    """A ``dict`` subclass representing a data record.

    Positional assignments are translated into column names through the
    ``_column_mapping`` list shared with the originating cursor.
    """

    # a tuple, not a bare string: a single string happens to work for
    # __slots__ but silently breaks if a second slot name is ever appended
    __slots__ = ('_column_mapping',)

    def __init__(self, cursor):
        dict.__init__(self)
        self._column_mapping = cursor.column_mapping

    def __setitem__(self, name, value):
        # positional assignment: translate the index into a column name
        # (isinstance, for consistency with DictRow's key handling)
        if isinstance(name, int):
            name = self._column_mapping[name]
        return dict.__setitem__(self, name, value)
|
|
|
|
|
|
|
|
|
2010-11-06 04:39:43 +03:00
|
|
|
class NamedTupleConnection(_connection):
    """A connection that uses `NamedTupleCursor` automatically."""

    def cursor(self, *args, **kwargs):
        """Return a `NamedTupleCursor`, overriding any factory passed in."""
        kw = dict(kwargs, cursor_factory=NamedTupleCursor)
        return _connection.cursor(self, *args, **kw)
|
|
|
|
|
|
|
|
class NamedTupleCursor(_cursor):
    """A cursor that generates results as |namedtuple|__.

    `!fetch*()` methods will return named tuples instead of regular tuples, so
    their elements can be accessed both as regular numeric items as well as
    attributes.

        >>> nt_cur = conn.cursor(cursor_factory=psycopg2.extras.NamedTupleCursor)
        >>> rec = nt_cur.fetchone()
        >>> rec
        Record(id=1, num=100, data="abc'def")
        >>> rec[1]
        100
        >>> rec.data
        "abc'def"

    .. |namedtuple| replace:: `!namedtuple`
    .. __: http://docs.python.org/release/2.6/library/collections.html#collections.namedtuple
    """

    # cached namedtuple class for the current result set; invalidated by
    # execute*()/callproc() and rebuilt lazily at the first fetch
    Record = None

    def execute(self, query, vars=None):
        self.Record = None
        return _cursor.execute(self, query, vars)

    def executemany(self, query, vars):
        self.Record = None
        # bug fix: the query used to be dropped from this call
        # (_cursor.executemany(self, vars)), making executemany() fail
        return _cursor.executemany(self, query, vars)

    def callproc(self, procname, vars=None):
        self.Record = None
        return _cursor.callproc(self, procname, vars)

    def fetchone(self):
        t = _cursor.fetchone(self)
        if t is not None:
            nt = self.Record
            if nt is None:
                nt = self.Record = self._make_nt()
            return nt(*t)

    def fetchmany(self, size=None):
        nt = self.Record
        if nt is None:
            nt = self.Record = self._make_nt()
        ts = _cursor.fetchmany(self, size)
        return [nt(*t) for t in ts]

    def fetchall(self):
        nt = self.Record
        if nt is None:
            nt = self.Record = self._make_nt()
        ts = _cursor.fetchall(self)
        return [nt(*t) for t in ts]

    def __iter__(self):
        return iter(self.fetchall())

    try:
        from collections import namedtuple
    except ImportError as _nt_exc:
        # keep a reference: on Python 3 the "as" target is unbound when the
        # except block ends, so the original exception must be stashed here
        _exc = _nt_exc

        def _make_nt(self):
            raise self._exc
    else:
        def _make_nt(self, namedtuple=namedtuple):
            return namedtuple("Record", [d[0] for d in self.description or ()])
|
2010-11-06 04:39:43 +03:00
|
|
|
|
|
|
|
|
2006-01-20 07:07:23 +03:00
|
|
|
class LoggingConnection(_connection):
    """A connection that logs all queries to a file or logger__ object.

    .. __: http://docs.python.org/library/logging.html
    """

    def initialize(self, logobj):
        """Initialize the connection to log to ``logobj``.

        The ``logobj`` parameter can be an open file object or a Logger
        instance from the standard logging module.
        """
        self._logobj = logobj
        # bind the right writer once, so log() doesn't re-test the type
        # on every query
        if logging and isinstance(logobj, logging.Logger):
            self.log = self._logtologger
        else:
            self.log = self._logtofile

    def filter(self, msg, curs):
        """Filter the query before logging it.

        This is the method to overwrite to filter unwanted queries out of the
        log or to add some extra data to the output. The default implementation
        just does nothing.
        """
        return msg

    def _logtofile(self, msg, curs):
        # write to a file-like object; a falsy filtered message is dropped
        msg = self.filter(msg, curs)
        if msg: self._logobj.write(msg + os.linesep)

    def _logtologger(self, msg, curs):
        # send to a logging.Logger at DEBUG level
        msg = self.filter(msg, curs)
        if msg: self._logobj.debug(msg)

    def _check(self):
        # guard against using the connection before initialize() was called
        if not hasattr(self, '_logobj'):
            raise self.ProgrammingError(
                "LoggingConnection object has not been initialize()d")

    def cursor(self, name=None):
        # every cursor created from this connection is a LoggingCursor
        self._check()
        if name is None:
            return _connection.cursor(self, cursor_factory=LoggingCursor)
        else:
            return _connection.cursor(self, name, cursor_factory=LoggingCursor)
|
2007-09-01 13:32:42 +04:00
|
|
|
|
2006-01-20 07:07:23 +03:00
|
|
|
class LoggingCursor(_cursor):
    """A cursor that logs queries using its connection logging facilities."""

    def _run_logged(self, method, *args):
        # run *method* and log the executed query, whether it raised or not
        try:
            return method(self, *args)
        finally:
            self.connection.log(self.query, self)

    def execute(self, query, vars=None):
        return self._run_logged(_cursor.execute, query, vars)

    def callproc(self, procname, vars=None):
        return self._run_logged(_cursor.callproc, procname, vars)
|
|
|
|
|
2009-05-09 16:44:59 +04:00
|
|
|
|
2006-01-20 07:07:23 +03:00
|
|
|
class MinTimeLoggingConnection(LoggingConnection):
    """A connection that logs queries based on execution time.

    This is just an example of how to sub-class `LoggingConnection` to
    provide some extra filtering for the logged queries. Both the
    `initialize()` and `filter()` methods are overwritten to make sure
    that only queries executing for more than ``mintime`` ms are logged.

    Note that this connection uses the specialized cursor
    `MinTimeLoggingCursor`.
    """
    def initialize(self, logobj, mintime=0):
        """Initialize logging; only queries over *mintime* ms are logged."""
        LoggingConnection.initialize(self, logobj)
        self._mintime = mintime

    def filter(self, msg, curs):
        # append the execution time; returning None (fast query) suppresses
        # the log line in LoggingConnection._logto*
        t = (time.time() - curs.timestamp) * 1000
        if t > self._mintime:
            return msg + os.linesep + " (execution time: %d ms)" % t

    def cursor(self, name=None):
        # cursors must be MinTimeLoggingCursor so curs.timestamp is set
        self._check()
        if name is None:
            return _connection.cursor(self, cursor_factory=MinTimeLoggingCursor)
        else:
            return _connection.cursor(self, name, cursor_factory=MinTimeLoggingCursor)
|
2006-01-20 07:07:23 +03:00
|
|
|
|
|
|
|
class MinTimeLoggingCursor(LoggingCursor):
    """The cursor sub-class companion to `MinTimeLoggingConnection`."""

    def execute(self, query, vars=None):
        # record the start time read by MinTimeLoggingConnection.filter()
        self.timestamp = time.time()
        return LoggingCursor.execute(self, query, vars)

    def callproc(self, procname, vars=None):
        self.timestamp = time.time()
        # bug fix: this used to delegate to LoggingCursor.execute(), which
        # would try to run the procedure name as a SQL statement instead of
        # invoking the procedure
        return LoggingCursor.callproc(self, procname, vars)
|
|
|
|
|
2008-09-19 23:25:16 +04:00
|
|
|
|
|
|
|
# a dbtype and adapter for Python UUID type
|
|
|
|
|
|
|
|
try:
    import uuid

    class UUID_adapter(object):
        """Adapt Python's uuid.UUID__ type to PostgreSQL's uuid__.

        .. __: http://docs.python.org/library/uuid.html
        .. __: http://www.postgresql.org/docs/8.4/static/datatype-uuid.html
        """

        def __init__(self, uuid):
            self._uuid = uuid

        def prepare(self, conn):
            # quoting does not depend on the connection: nothing to do
            pass

        def getquoted(self):
            return "'"+str(self._uuid)+"'::uuid"

        __str__ = getquoted

    def register_uuid(oids=None, conn_or_curs=None):
        """Create the UUID type and an uuid.UUID adapter."""
        if not oids:
            # default oids of the uuid and uuid[] types
            oid1 = 2950
            oid2 = 2951
        elif type(oids) == list:
            oid1, oid2 = oids
        else:
            # a single oid: assume it is the scalar type's
            oid1 = oids
            oid2 = 2951

        def parseUUIDARRAY(data, cursor):
            if data is None:
                return None
            elif data == '{}':
                return []
            else:
                return [((len(x) > 0 and x != 'NULL') and uuid.UUID(x) or None)
                        for x in data[1:-1].split(',')]

        _ext.UUID = _ext.new_type((oid1, ), "UUID",
                lambda data, cursor: data and uuid.UUID(data) or None)
        _ext.UUIDARRAY = _ext.new_type((oid2,), "UUID[]", parseUUIDARRAY)

        _ext.register_type(_ext.UUID, conn_or_curs)
        _ext.register_type(_ext.UUIDARRAY, conn_or_curs)
        _ext.register_adapter(uuid.UUID, UUID_adapter)

        return _ext.UUID

# "except ... as" requires Python 2.6, consistent with the module's
# Python 3 support elsewhere in this file
except ImportError as e:
    # stash the exception: on Python 3 the "as" target is unbound as soon
    # as the except block ends, so the fake function below can't use "e"
    _uuid_import_error = e

    def register_uuid(oids=None, conn_or_curs=None):
        """Create the UUID type and an uuid.UUID adapter.

        This is a fake function that will always raise an error because the
        import of the uuid module failed.  The signature matches the real
        implementation's so callers fail with the intended ImportError
        rather than a TypeError.
        """
        raise _uuid_import_error
|
|
|
|
|
|
|
|
|
2008-09-24 03:27:52 +04:00
|
|
|
# a type, dbtype and adapter for PostgreSQL inet type
|
|
|
|
|
|
|
|
class Inet(object):
    """Wrap a string to allow for correct SQL-quoting of inet values.

    Note that this adapter does NOT check the passed value to make
    sure it really is an inet-compatible address but DOES call adapt()
    on it to make sure it is impossible to execute an SQL-injection
    by passing an evil value to the initializer.
    """

    def __init__(self, addr):
        self.addr = addr

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.addr)

    def prepare(self, conn):
        # remember the connection: quoting the wrapped value may need it
        self._conn = conn

    def getquoted(self):
        wrapped = _A(self.addr)
        if hasattr(wrapped, 'prepare'):
            wrapped.prepare(self._conn)
        return wrapped.getquoted() + "::inet"

    def __conform__(self, proto):
        if proto is _ext.ISQLQuote:
            return self

    def __str__(self):
        return str(self.addr)
|
|
|
|
|
2009-03-02 12:59:52 +03:00
|
|
|
def register_inet(oid=None, conn_or_curs=None):
    """Create the INET type and an Inet adapter."""
    if not oid:
        oid = 869  # builtin oid of the inet type
    caster = lambda data, cursor: data and Inet(data) or None
    _ext.INET = _ext.new_type((oid, ), "INET", caster)
    _ext.register_type(_ext.INET, conn_or_curs)
    return _ext.INET
|
|
|
|
|
|
|
|
|
2009-03-02 13:07:17 +03:00
|
|
|
def register_tstz_w_secs(oids=None, conn_or_curs=None):
    """The function used to register an alternate type caster for
    :sql:`TIMESTAMP WITH TIME ZONE` to deal with historical time zones with
    seconds in the UTC offset.

    These are now correctly handled by the default type caster, so currently
    the function doesn't do anything.
    """
    # kept only for backwards compatibility: both arguments are ignored
    warnings.warn("deprecated", DeprecationWarning)
|
2009-03-02 13:07:17 +03:00
|
|
|
|
|
|
|
|
2010-04-02 04:56:38 +04:00
|
|
|
import select
|
|
|
|
from psycopg2.extensions import POLL_OK, POLL_READ, POLL_WRITE
|
|
|
|
from psycopg2 import OperationalError
|
|
|
|
|
2010-04-04 06:07:43 +04:00
|
|
|
def wait_select(conn):
    """Wait until a connection or cursor has data available.

    The function is an example of a wait callback to be registered with
    `~psycopg2.extensions.set_wait_callback()`. This function uses `!select()`
    to wait for data available.
    """
    while True:
        state = conn.poll()
        if state == POLL_OK:
            return
        if state == POLL_READ:
            # block until the connection socket is readable
            select.select([conn.fileno()], [], [])
        elif state == POLL_WRITE:
            # block until the connection socket is writable
            select.select([], [conn.fileno()], [])
        else:
            raise OperationalError("bad state from poll: %s" % state)
|
|
|
|
|
|
|
|
|
2010-09-26 02:13:51 +04:00
|
|
|
class HstoreAdapter(object):
    """Adapt a Python dict to the hstore syntax."""
    def __init__(self, wrapped):
        self.wrapped = wrapped

    def prepare(self, conn):
        self.conn = conn

        # use an old-style getquoted implementation if required
        if conn.server_version < 90000:
            self.getquoted = self._getquoted_8

    def _getquoted_8(self):
        """Use the operators available in PG pre-9.0."""
        if not self.wrapped:
            return "''::hstore"

        adapt = _ext.adapt
        rv = []
        # NOTE(review): dict.iteritems() is Python 2 only -- presumably the
        # Python 3 build is converted at install time; confirm.
        for k, v in self.wrapped.iteritems():
            # quote each key and value as a SQL literal
            k = adapt(k)
            k.prepare(self.conn)
            k = k.getquoted()

            if v is not None:
                v = adapt(v)
                v.prepare(self.conn)
                v = v.getquoted()
            else:
                v = 'NULL'

            # emit one "(key => value)" pair per entry
            rv.append("(%s => %s)" % (k, v))

        # concatenate the single-pair hstores with the || operator
        return "(" + '||'.join(rv) + ")"

    def _getquoted_9(self):
        """Use the hstore(text[], text[]) function."""
        if not self.wrapped:
            return "''::hstore"

        # adapt the keys and values as two parallel text arrays
        k = _ext.adapt(self.wrapped.keys())
        k.prepare(self.conn)
        v = _ext.adapt(self.wrapped.values())
        v.prepare(self.conn)
        return "hstore(%s, %s)" % (k.getquoted(), v.getquoted())

    getquoted = _getquoted_9

    _re_hstore = regex.compile(r"""
        # hstore key:
        # a string of normal or escaped chars
        "((?: [^"\\] | \\. )*)"
        \s*=>\s* # hstore value
        (?:
            NULL # the value can be null - not catched
            # or a quoted string like the key
            | "((?: [^"\\] | \\. )*)"
        )
        (?:\s*,\s*|$) # pairs separated by comma or end of string.
        """, regex.VERBOSE)

    # backslash decoder
    if sys.version_info[0] < 3:
        _bsdec = codecs.getdecoder("string_escape")
    else:
        _bsdec = codecs.getdecoder("unicode_escape")

    def parse(self, s, cur, _decoder=_bsdec):
        """Parse an hstore representation in a Python string.

        The hstore is represented as something like::

            "a"=>"1", "b"=>"2"

        with backslash-escaped strings.
        """
        if s is None:
            return None

        rv = {}
        start = 0
        for m in self._re_hstore.finditer(s):
            # each match must start exactly where the previous one ended,
            # otherwise some of the input was not valid hstore syntax
            if m is None or m.start() != start:
                raise psycopg2.InterfaceError(
                    "error parsing hstore pair at char %d" % start)
            # undo the backslash escaping; codecs decoders return
            # a (decoded, length) pair, so take item 0
            k = _decoder(m.group(1))[0]
            v = m.group(2)
            if v is not None:
                v = _decoder(v)[0]

            rv[k] = v
            start = m.end()

        # trailing garbage after the last pair is an error too
        if start < len(s):
            raise psycopg2.InterfaceError(
                "error parsing hstore: unparsed data after char %d" % start)

        return rv

    parse = classmethod(parse)

    def parse_unicode(self, s, cur):
        """Parse an hstore returning unicode keys and values."""
        # chain the backslash decoder with the connection's text codec
        codec = codecs.getdecoder(_ext.encodings[cur.connection.encoding])
        bsdec = self._bsdec
        decoder = lambda s: codec(bsdec(s)[0])
        return self.parse(s, cur, _decoder=decoder)

    parse_unicode = classmethod(parse_unicode)

    @classmethod
    def get_oids(self, conn_or_curs):
        """Return the oid of the hstore and hstore[] types.

        Return None if hstore is not available.
        """
        # accept either a connection or a cursor
        if hasattr(conn_or_curs, 'execute'):
            conn = conn_or_curs.connection
            curs = conn_or_curs
        else:
            conn = conn_or_curs
            curs = conn_or_curs.cursor()

        # Store the transaction status of the connection to revert it after use
        conn_status = conn.status

        # column typarray not available before PG 8.3
        typarray = conn.server_version >= 80300 and "typarray" or "NULL"

        # get the oid for the hstore
        curs.execute("""\
SELECT t.oid, %s
FROM pg_type t JOIN pg_namespace ns
    ON typnamespace = ns.oid
WHERE typname = 'hstore' and nspname = 'public';
""" % typarray)
        oids = curs.fetchone()

        # revert the status of the connection as before the command
        if (conn_status != _ext.STATUS_IN_TRANSACTION
        and conn.isolation_level != _ext.ISOLATION_LEVEL_AUTOCOMMIT):
            conn.rollback()

        return oids
|
|
|
|
|
|
|
|
def register_hstore(conn_or_curs, globally=False, unicode=False):
    """Register adapter and typecaster for `dict`\-\ |hstore| conversions.

    A connection or cursor is required because the |hstore| oid differs
    from database to database. The typecaster is normally registered only
    on that connection or cursor; pass *globally*\=True to register it on
    every connection if your application uses a single database.

    By default the returned dicts will have `str` objects as keys and values:
    use *unicode*\=True to return `unicode` objects instead. When adapting a
    dictionary both `str` and `unicode` keys and values are handled (the
    `unicode` values will be converted according to the current
    `~connection.encoding`).

    The |hstore| contrib module must be already installed in the database
    (executing the ``hstore.sql`` script in your ``contrib`` directory).
    Raise `~psycopg2.ProgrammingError` if the type is not found.
    """
    oids = HstoreAdapter.get_oids(conn_or_curs)
    if oids is None:
        raise psycopg2.ProgrammingError(
            "hstore type not found in the database. "
            "please install it from your 'contrib/hstore.sql' file")

    # choose the typecaster implementation
    cast = HstoreAdapter.parse_unicode if unicode else HstoreAdapter.parse

    # create and register the typecaster
    HSTORE = _ext.new_type((oids[0],), "HSTORE", cast)
    _ext.register_type(HSTORE, not globally and conn_or_curs or None)
    _ext.register_adapter(dict, HstoreAdapter)
|
|
|
|
|
2010-09-26 02:13:51 +04:00
|
|
|
|
2010-02-08 23:13:10 +03:00
|
|
|
__all__ = filter(lambda k: not k.startswith('_'), locals().keys())
|