"""Miscellaneous goodies for psycopg2

This module is a generic place used to hold little helper functions
and classes until a better place in the distribution is found.
"""
# psycopg/extras.py - miscellaneous extra goodies for psycopg
#
# Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# In addition, as a special exception, the copyright holders give
# permission to link this program with the OpenSSL library (or with
# modified versions of OpenSSL that use the same license as OpenSSL),
# and distribute linked combinations including the two.
#
# You must obey the GNU Lesser General Public License in all respects for
# all of the code used other than OpenSSL.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.

import os
import time
import re as regex

try:
    import logging
except ImportError:
    logging = None

from psycopg2 import DATETIME, DataError
from psycopg2 import extensions as _ext
from psycopg2.extensions import cursor as _cursor
from psycopg2.extensions import connection as _connection
from psycopg2.extensions import adapt as _A


class DictCursorBase(_cursor):
    """Base class for all dict-like cursors."""

    def __init__(self, *args, **kwargs):
        if kwargs.has_key('row_factory'):
            row_factory = kwargs['row_factory']
            del kwargs['row_factory']
        else:
            raise NotImplementedError(
                "DictCursorBase can't be instantiated without a row factory.")
        _cursor.__init__(self, *args, **kwargs)
        self._query_executed = 0
        self._prefetch = 0
        self.row_factory = row_factory

    def fetchone(self):
        # fetch the rows either before or after building the column index:
        # cursors that set _prefetch fetch first (their row factory picks the
        # name/index mapping up afterwards by reference), the others need the
        # mapping ready while the rows are being built.
        if self._prefetch:
            res = _cursor.fetchone(self)
        if self._query_executed:
            self._build_index()
        if not self._prefetch:
            res = _cursor.fetchone(self)
        return res

    def fetchmany(self, size=None):
        if self._prefetch:
            res = _cursor.fetchmany(self, size)
        if self._query_executed:
            self._build_index()
        if not self._prefetch:
            res = _cursor.fetchmany(self, size)
        return res

    def fetchall(self):
        if self._prefetch:
            res = _cursor.fetchall(self)
        if self._query_executed:
            self._build_index()
        if not self._prefetch:
            res = _cursor.fetchall(self)
        return res

    def next(self):
        if self._prefetch:
            res = _cursor.fetchone(self)
            if res is None:
                raise StopIteration()
        if self._query_executed:
            self._build_index()
        if not self._prefetch:
            res = _cursor.fetchone(self)
            if res is None:
                raise StopIteration()
        return res


class DictConnection(_connection):
    """A connection that uses :class:`DictCursor` automatically."""
    def cursor(self, name=None):
        if name is None:
            return _connection.cursor(self, cursor_factory=DictCursor)
        else:
            return _connection.cursor(self, name, cursor_factory=DictCursor)
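
# A minimal usage sketch (assuming a reachable database; the DSN below is
# illustrative).  Passing DictConnection as connection_factory makes every
# cursor created by the connection a DictCursor:
#
#   import psycopg2
#   import psycopg2.extras
#
#   conn = psycopg2.connect("dbname=test",
#       connection_factory=psycopg2.extras.DictConnection)
#   curs = conn.cursor()              # already a DictCursor
#   curs.execute("SELECT 1 AS answer")
#   print curs.fetchone()['answer']   # 1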


class DictCursor(DictCursorBase):
    """A cursor that keeps a list of column name -> index mappings."""

    def __init__(self, *args, **kwargs):
        kwargs['row_factory'] = DictRow
        DictCursorBase.__init__(self, *args, **kwargs)
        self._prefetch = 1

    def execute(self, query, vars=None, async=0):
        self.index = {}
        self._query_executed = 1
        return _cursor.execute(self, query, vars, async)

    def callproc(self, procname, vars=None):
        self.index = {}
        self._query_executed = 1
        return _cursor.callproc(self, procname, vars)

    def _build_index(self):
        if self._query_executed == 1 and self.description:
            for i in range(len(self.description)):
                self.index[self.description[i][0]] = i
            self._query_executed = 0


class DictRow(list):
    """A row object that allows by-column-name access to data."""

    __slots__ = ('_index',)

    def __init__(self, cursor):
        self._index = cursor.index
        self[:] = [None] * len(cursor.description)

    def __getitem__(self, x):
        if type(x) != int:
            x = self._index[x]
        return list.__getitem__(self, x)

    def items(self):
        res = []
        for n, v in self._index.items():
            res.append((n, list.__getitem__(self, v)))
        return res

    def keys(self):
        return self._index.keys()

    def values(self):
        return tuple(self[:])

    def has_key(self, x):
        return self._index.has_key(x)

    def get(self, x, default=None):
        try:
            return self[x]
        except:
            return default

    def iteritems(self):
        for n, v in self._index.items():
            yield n, list.__getitem__(self, v)

    def iterkeys(self):
        return self._index.iterkeys()

    def itervalues(self):
        return list.__iter__(self)

    def copy(self):
        return dict(self.items())

    def __contains__(self, x):
        return self._index.__contains__(x)
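
# A minimal usage sketch (assuming a reachable database; the DSN below is
# illustrative).  DictRow supports both positional and by-name access:
#
#   import psycopg2
#   import psycopg2.extras
#
#   conn = psycopg2.connect("dbname=test")
#   curs = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
#   curs.execute("SELECT 1 AS foo, 'x' AS bar")
#   row = curs.fetchone()
#   print row[0], row['bar']          # 1 x
#   print row.keys()                  # ['foo', 'bar'] (order not guaranteed)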


class RealDictConnection(_connection):
    """A connection that uses :class:`RealDictCursor` automatically."""
    def cursor(self, name=None):
        if name is None:
            return _connection.cursor(self, cursor_factory=RealDictCursor)
        else:
            return _connection.cursor(self, name, cursor_factory=RealDictCursor)


class RealDictCursor(DictCursorBase):
    """A cursor that uses a real dict as the base type for rows.

    Note that this cursor is extremely specialized and does not allow
    the normal access (using integer indices) to fetched data. If you need
    to access database rows both as a dictionary and a list, then use
    the generic :class:`DictCursor` instead of :class:`!RealDictCursor`.
    """
    def __init__(self, *args, **kwargs):
        kwargs['row_factory'] = RealDictRow
        DictCursorBase.__init__(self, *args, **kwargs)
        self._prefetch = 0

    def execute(self, query, vars=None, async=0):
        self.column_mapping = []
        self._query_executed = 1
        return _cursor.execute(self, query, vars, async)

    def callproc(self, procname, vars=None):
        self.column_mapping = []
        self._query_executed = 1
        return _cursor.callproc(self, procname, vars)

    def _build_index(self):
        if self._query_executed == 1 and self.description:
            for i in range(len(self.description)):
                self.column_mapping.append(self.description[i][0])
            self._query_executed = 0


class RealDictRow(dict):
    """A ``dict`` subclass representing a data record."""

    __slots__ = ('_column_mapping',)

    def __init__(self, cursor):
        dict.__init__(self)
        self._column_mapping = cursor.column_mapping

    def __setitem__(self, name, value):
        if type(name) == int:
            name = self._column_mapping[name]
        return dict.__setitem__(self, name, value)
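
# A minimal usage sketch (assuming a reachable database; the DSN below is
# illustrative).  RealDictCursor rows are plain mappings, so only key
# access is available:
#
#   import psycopg2
#   import psycopg2.extras
#
#   conn = psycopg2.connect("dbname=test")
#   curs = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
#   curs.execute("SELECT 1 AS foo, 'x' AS bar")
#   row = curs.fetchone()
#   print row['foo'], row['bar']      # 1 x  (row[0] would raise KeyError)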


class LoggingConnection(_connection):
    """A connection that logs all queries to a file or logger__ object.

    .. __: http://docs.python.org/library/logging.html
    """
    def initialize(self, logobj):
        """Initialize the connection to log to ``logobj``.

        The ``logobj`` parameter can be an open file object or a Logger
        instance from the standard logging module.
        """
        self._logobj = logobj
        if logging and isinstance(logobj, logging.Logger):
            self.log = self._logtologger
        else:
            self.log = self._logtofile

    def filter(self, msg, curs):
        """Filter the query before logging it.

        This is the method to override in order to filter unwanted queries
        out of the log or to add some extra data to the output. The default
        implementation just returns the message unchanged.
        """
        return msg

    def _logtofile(self, msg, curs):
        msg = self.filter(msg, curs)
        if msg: self._logobj.write(msg + os.linesep)

    def _logtologger(self, msg, curs):
        msg = self.filter(msg, curs)
        if msg: self._logobj.debug(msg)

    def _check(self):
        if not hasattr(self, '_logobj'):
            raise self.ProgrammingError(
                "LoggingConnection object has not been initialize()d")

    def cursor(self, name=None):
        self._check()
        if name is None:
            return _connection.cursor(self, cursor_factory=LoggingCursor)
        else:
            return _connection.cursor(self, name, cursor_factory=LoggingCursor)


class LoggingCursor(_cursor):
    """A cursor that logs queries using its connection logging facilities."""

    def execute(self, query, vars=None, async=0):
        try:
            return _cursor.execute(self, query, vars, async)
        finally:
            self.connection.log(self.query, self)

    def callproc(self, procname, vars=None):
        try:
            return _cursor.callproc(self, procname, vars)
        finally:
            self.connection.log(self.query, self)
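
# A minimal usage sketch (assuming a reachable database; the DSN below is
# illustrative).  The connection must be initialize()d with a file-like
# object or a logging.Logger before any cursor is created:
#
#   import logging
#   import psycopg2
#   import psycopg2.extras
#
#   logging.basicConfig(level=logging.DEBUG)
#   conn = psycopg2.connect("dbname=test",
#       connection_factory=psycopg2.extras.LoggingConnection)
#   conn.initialize(logging.getLogger("sql"))
#   curs = conn.cursor()
#   curs.execute("SELECT 1")          # the statement is logged at DEBUG level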


class MinTimeLoggingConnection(LoggingConnection):
    """A connection that logs queries based on execution time.

    This is just an example of how to sub-class :class:`LoggingConnection` to
    provide some extra filtering for the logged queries. Both the
    :meth:`initialize` and :meth:`filter` methods are overridden to make sure
    that only queries executing for more than ``mintime`` ms are logged.

    Note that this connection uses the specialized cursor
    :class:`MinTimeLoggingCursor`.
    """
    def initialize(self, logobj, mintime=0):
        LoggingConnection.initialize(self, logobj)
        self._mintime = mintime

    def filter(self, msg, curs):
        t = (time.time() - curs.timestamp) * 1000
        if t > self._mintime:
            return msg + os.linesep + " (execution time: %d ms)" % t

    def cursor(self, name=None):
        self._check()
        if name is None:
            return _connection.cursor(self, cursor_factory=MinTimeLoggingCursor)
        else:
            return _connection.cursor(self, name, cursor_factory=MinTimeLoggingCursor)


class MinTimeLoggingCursor(LoggingCursor):
    """The cursor sub-class companion to :class:`MinTimeLoggingConnection`."""

    def execute(self, query, vars=None, async=0):
        self.timestamp = time.time()
        return LoggingCursor.execute(self, query, vars, async)

    def callproc(self, procname, vars=None):
        self.timestamp = time.time()
        return LoggingCursor.callproc(self, procname, vars)
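
# A minimal usage sketch (assuming a reachable database; the DSN below is
# illustrative).  Only statements running longer than ``mintime`` ms are
# logged:
#
#   import sys
#   import psycopg2
#   import psycopg2.extras
#
#   conn = psycopg2.connect("dbname=test",
#       connection_factory=psycopg2.extras.MinTimeLoggingConnection)
#   conn.initialize(sys.stderr, mintime=500)
#   curs = conn.cursor()
#   curs.execute("SELECT pg_sleep(1)")    # takes ~1000 ms: logged
#   curs.execute("SELECT 1")              # fast: not logged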


# a dbtype and adapter for Python UUID type

try:
    import uuid

    class UUID_adapter(object):
        """Adapt Python's uuid.UUID__ type to PostgreSQL's uuid__.

        .. __: http://docs.python.org/library/uuid.html
        .. __: http://www.postgresql.org/docs/8.4/static/datatype-uuid.html
        """

        def __init__(self, uuid):
            self._uuid = uuid

        def prepare(self, conn):
            pass

        def getquoted(self):
            return "'"+str(self._uuid)+"'::uuid"

        __str__ = getquoted

    def register_uuid(oids=None, conn_or_curs=None):
        """Create the UUID type and a uuid.UUID adapter."""
        if not oids:
            oid1 = 2950     # builtin OID of the uuid type
            oid2 = 2951     # builtin OID of the uuid[] array type
        elif type(oids) == list:
            oid1, oid2 = oids
        else:
            oid1 = oids
            oid2 = 2951

        def parseUUIDARRAY(data, cursor):
            if data is None:
                return None
            elif data == '{}':
                return []
            else:
                return [((len(x) > 0 and x != 'NULL') and uuid.UUID(x) or None)
                        for x in data[1:-1].split(',')]

        _ext.UUID = _ext.new_type((oid1, ), "UUID",
                lambda data, cursor: data and uuid.UUID(data) or None)
        _ext.UUIDARRAY = _ext.new_type((oid2,), "UUID[]", parseUUIDARRAY)

        _ext.register_type(_ext.UUID, conn_or_curs)
        _ext.register_type(_ext.UUIDARRAY, conn_or_curs)
        _ext.register_adapter(uuid.UUID, UUID_adapter)

        return _ext.UUID

except ImportError, e:
    def register_uuid(oids=None, conn_or_curs=None):
        """Create the UUID type and a uuid.UUID adapter.

        This is a fake function that will always raise an error because the
        import of the uuid module failed.
        """
        raise e
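
# A minimal usage sketch (assuming a reachable database; the DSN below is
# illustrative).  After registration uuid.UUID values can be passed as query
# parameters and uuid columns are returned as uuid.UUID instances:
#
#   import uuid
#   import psycopg2
#   import psycopg2.extras
#
#   psycopg2.extras.register_uuid()
#   conn = psycopg2.connect("dbname=test")
#   curs = conn.cursor()
#   u = uuid.uuid4()
#   curs.execute("SELECT %s", (u,))
#   assert curs.fetchone()[0] == u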


# a type, dbtype and adapter for PostgreSQL inet type

class Inet(object):
    """Wrap a string to allow for correct SQL-quoting of inet values.

    Note that this adapter does NOT check the passed value to make
    sure it really is an inet-compatible address but DOES call adapt()
    on it to make sure it is impossible to execute an SQL-injection
    by passing an evil value to the initializer.
    """
    def __init__(self, addr):
        self.addr = addr

    def prepare(self, conn):
        self._conn = conn

    def getquoted(self):
        obj = _A(self.addr)     # adapt() is imported above as _A
        if hasattr(obj, 'prepare'):
            obj.prepare(self._conn)
        return obj.getquoted()+"::inet"

    def __str__(self):
        return str(self.addr)


def register_inet(oid=None, conn_or_curs=None):
    """Create the INET type and an Inet adapter."""
    if not oid: oid = 869       # builtin OID of the inet type
    _ext.INET = _ext.new_type((oid, ), "INET",
            lambda data, cursor: data and Inet(data) or None)
    _ext.register_type(_ext.INET, conn_or_curs)
    return _ext.INET
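
# A minimal usage sketch (assuming a reachable database; the DSN below is
# illustrative).  After registration inet values read from the database are
# wrapped in Inet instances:
#
#   import psycopg2
#   import psycopg2.extras
#
#   conn = psycopg2.connect("dbname=test")
#   psycopg2.extras.register_inet(conn_or_curs=conn)
#   curs = conn.cursor()
#   curs.execute("SELECT '192.168.0.1/24'::inet")
#   addr = curs.fetchone()[0]
#   print str(addr)                   # 192.168.0.1/24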


# safe management of times with a non-standard time zone

def _convert_tstz_w_secs(s, cursor):
    try:
        return DATETIME(s, cursor)

    except (DataError,), exc:
        if exc.message != "unable to parse time":
            raise

        if regex.match(r'(\+|-)\d\d:\d\d:\d\d', s[-9:]) is None:
            raise

        # parsing doesn't succeed even if seconds are ":00" so truncate in
        # any case
        return DATETIME(s[:-3], cursor)


def register_tstz_w_secs(oids=None, conn_or_curs=None):
    """Register alternate type caster for :sql:`TIMESTAMP WITH TIME ZONE`.

    The Python datetime module cannot handle time zones with
    seconds in the UTC offset. There are, however, historical
    "time zones" which contain such offsets, e.g. "Asia/Calcutta".
    In many cases those offsets represent true local time.

    If you encounter "unable to parse time" on a perfectly valid
    timestamp you likely want to try this type caster. It truncates
    the seconds from the time zone data and retries casting
    the timestamp. Note that this will generate timestamps
    which are **inaccurate** by the number of seconds truncated
    (unless the seconds were 00).

    :param oids:
        which OIDs to use this type caster for,
        defaults to :sql:`TIMESTAMP WITH TIME ZONE`
    :param conn_or_curs:
        a cursor or connection if you want to attach
        this type caster to that only, defaults to
        ``None`` meaning all connections and cursors
    """
    if oids is None:
        oids = (1184,)      # hardcoded from PostgreSQL headers

    _ext.TSTZ_W_SECS = _ext.new_type(oids, 'TSTZ_W_SECS', _convert_tstz_w_secs)
    _ext.register_type(_ext.TSTZ_W_SECS, conn_or_curs)

    return _ext.TSTZ_W_SECS
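
# A minimal usage sketch (assuming a reachable database; the DSN below is
# illustrative, and whether a legacy hh:mm:ss UTC offset actually shows up
# depends on the server's timezone setting):
#
#   import psycopg2
#   import psycopg2.extras
#
#   conn = psycopg2.connect("dbname=test")
#   psycopg2.extras.register_tstz_w_secs(conn_or_curs=conn)
#   curs = conn.cursor()
#   curs.execute("SELECT '1900-01-01 10:00:00'::timestamptz")
#   print curs.fetchone()[0]          # parses even with seconds in the offset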
__all__ = filter(lambda k: not k.startswith('_'), locals().keys())