From 6aff2af0d8d47bb7cf64e0048dd38010a6eee3e4 Mon Sep 17 00:00:00 2001 From: Mads Jensen Date: Sat, 26 Dec 2015 21:48:05 +0100 Subject: [PATCH] pep8 enforced for all files under lib, except for E501 (I cut a single line to < 120 characters), and E701 multiple statements on one line (colon) --- lib/__init__.py | 18 ++++++++-------- lib/_json.py | 22 ++++++++++--------- lib/_range.py | 36 ++++++++++++++++--------------- lib/errorcodes.py | 1 + lib/extensions.py | 35 ++++++++++++++++-------------- lib/extras.py | 54 +++++++++++++++++++++++++++++------------------ lib/pool.py | 40 ++++++++++++++++++++--------------- lib/psycopg1.py | 15 +++++++------ lib/tz.py | 12 ++++++----- 9 files changed, 131 insertions(+), 102 deletions(-) diff --git a/lib/__init__.py b/lib/__init__.py index 994b15a8..7b3be772 100644 --- a/lib/__init__.py +++ b/lib/__init__.py @@ -82,13 +82,14 @@ else: import re -def _param_escape(s, - re_escape=re.compile(r"([\\'])"), - re_space=re.compile(r'\s')): + +def _param_escape(s, re_escape=re.compile(r"([\\'])"), + re_space=re.compile(r'\s')): """ Apply the escaping rule required by PQconnectdb """ - if not s: return "''" + if not s: + return "''" s = re_escape.sub(r'\\\1', s) if re_space.search(s): @@ -99,9 +100,8 @@ def _param_escape(s, del re -def connect(dsn=None, - database=None, user=None, password=None, host=None, port=None, - connection_factory=None, cursor_factory=None, async=False, **kwargs): +def connect(dsn=None, database=None, user=None, password=None, host=None, port=None, + connection_factory=None, cursor_factory=None, async=False, **kwargs): """ Create a new database connection. @@ -152,14 +152,14 @@ def connect(dsn=None, if dsn is not None and items: raise TypeError( "'%s' is an invalid keyword argument when the dsn is specified" - % items[0][0]) + % items[0][0]) if dsn is None: if not items: raise TypeError('missing dsn and no parameters') else: dsn = " ".join(["%s=%s" % (k, _param_escape(str(v))) - for (k, v) in items]) + for (k, v) in items]) conn = _connect(dsn, connection_factory=connection_factory, async=async) if cursor_factory is not None: diff --git a/lib/_json.py b/lib/_json.py index 26e32f2f..35e0f478 100644 --- a/lib/_json.py +++ b/lib/_json.py @@ -34,7 +34,7 @@ from psycopg2._psycopg import new_type, new_array_type, register_type # import the best json implementation available -if sys.version_info[:2] >= (2,6): +if sys.version_info[:2] >= (2, 6): import json else: try: @@ -51,6 +51,7 @@ JSONARRAY_OID = 199 JSONB_OID = 3802 JSONBARRAY_OID = 3807 + class Json(object): """ An `~psycopg2.extensions.ISQLQuote` wrapper to adapt a Python object to @@ -106,7 +107,7 @@ class Json(object): def register_json(conn_or_curs=None, globally=False, loads=None, - oid=None, array_oid=None, name='json'): + oid=None, array_oid=None, name='json'): """Create and register typecasters converting :sql:`json` type to Python objects. :param conn_or_curs: a connection or cursor used to find the :sql:`json` @@ -143,6 +144,7 @@ def register_json(conn_or_curs=None, globally=False, loads=None, return JSON, JSONARRAY + def register_default_json(conn_or_curs=None, globally=False, loads=None): """ Create and register :sql:`json` typecasters for PostgreSQL 9.2 and following. @@ -153,7 +155,8 @@ def register_default_json(conn_or_curs=None, globally=False, loads=None): All the parameters have the same meaning of `register_json()`. 
""" return register_json(conn_or_curs=conn_or_curs, globally=globally, - loads=loads, oid=JSON_OID, array_oid=JSONARRAY_OID) + loads=loads, oid=JSON_OID, array_oid=JSONARRAY_OID) + def register_default_jsonb(conn_or_curs=None, globally=False, loads=None): """ @@ -165,7 +168,9 @@ def register_default_jsonb(conn_or_curs=None, globally=False, loads=None): meaning of `register_json()`. """ return register_json(conn_or_curs=conn_or_curs, globally=globally, - loads=loads, oid=JSONB_OID, array_oid=JSONBARRAY_OID, name='jsonb') + loads=loads, oid=JSONB_OID, array_oid=JSONBARRAY_OID, + name='jsonb') + def _create_json_typecasters(oid, array_oid, loads=None, name='JSON'): """Create typecasters for json data type.""" @@ -188,6 +193,7 @@ def _create_json_typecasters(oid, array_oid, loads=None, name='JSON'): return JSON, JSONARRAY + def _get_json_oids(conn_or_curs, name='json'): # lazy imports from psycopg2.extensions import STATUS_IN_TRANSACTION @@ -202,9 +208,8 @@ def _get_json_oids(conn_or_curs, name='json'): typarray = conn.server_version >= 80300 and "typarray" or "NULL" # get the oid for the hstore - curs.execute( - "SELECT t.oid, %s FROM pg_type t WHERE t.typname = %%s;" - % typarray, (name,)) + curs.execute("SELECT t.oid, %s FROM pg_type t WHERE t.typname = %%s;" + % typarray, (name,)) r = curs.fetchone() # revert the status of the connection as before the command @@ -215,6 +220,3 @@ def _get_json_oids(conn_or_curs, name='json'): raise conn.ProgrammingError("%s data type not found" % name) return r - - - diff --git a/lib/_range.py b/lib/_range.py index 47b82086..6ac8083b 100644 --- a/lib/_range.py +++ b/lib/_range.py @@ -30,6 +30,7 @@ from psycopg2._psycopg import ProgrammingError, InterfaceError from psycopg2.extensions import ISQLQuote, adapt, register_adapter, b from psycopg2.extensions import new_type, new_array_type, register_type + class Range(object): """Python representation for a PostgreSQL |range|_ type. 
@@ -57,7 +58,8 @@ class Range(object): if self._bounds is None: return "%s(empty=True)" % self.__class__.__name__ else: - return "%s(%r, %r, %r)" % (self.__class__.__name__, + return "%s(%r, %r, %r)" % ( + self.__class__.__name__, self._lower, self._upper, self._bounds) @property @@ -124,8 +126,8 @@ class Range(object): if not isinstance(other, Range): return False return (self._lower == other._lower - and self._upper == other._upper - and self._bounds == other._bounds) + and self._upper == other._upper + and self._bounds == other._bounds) def __ne__(self, other): return not self.__eq__(other) @@ -248,7 +250,7 @@ class RangeAdapter(object): upper = b('NULL') return b(self.name + '(') + lower + b(', ') + upper \ - + b(", '%s')" % r._bounds) + + b(", '%s')" % r._bounds) class RangeCaster(object): @@ -318,7 +320,7 @@ class RangeCaster(object): if conn.server_version < 90200: raise ProgrammingError("range types not available in version %s" - % conn.server_version) + % conn.server_version) # Store the transaction status of the connection to revert it after use conn_status = conn.status @@ -349,8 +351,7 @@ where typname = %s and ns.nspname = %s; rec = curs.fetchone() # revert the status of the connection as before the command - if (conn_status != STATUS_IN_TRANSACTION - and not conn.autocommit): + if (conn_status != STATUS_IN_TRANSACTION and not conn.autocommit): conn.rollback() if not rec: @@ -359,8 +360,8 @@ where typname = %s and ns.nspname = %s; type, subtype, array = rec - return RangeCaster(name, pyrange, - oid=type, subtype_oid=subtype, array_oid=array) + return RangeCaster( + name, pyrange, oid=type, subtype_oid=subtype, array_oid=array) _re_range = re.compile(r""" ( \(|\[ ) # lower bound flag @@ -425,14 +426,17 @@ class NumericRange(Range): """ pass + class DateRange(Range): """Represents :sql:`daterange` values.""" pass + class DateTimeRange(Range): """Represents :sql:`tsrange` values.""" pass + class DateTimeTZRange(Range): """Represents :sql:`tstzrange` values.""" pass @@ -475,27 +479,25 @@ register_adapter(NumericRange, NumberRangeAdapter) # note: the adapter is registered more than once, but this is harmless. int4range_caster = RangeCaster(NumberRangeAdapter, NumericRange, - oid=3904, subtype_oid=23, array_oid=3905) + oid=3904, subtype_oid=23, array_oid=3905) int4range_caster._register() int8range_caster = RangeCaster(NumberRangeAdapter, NumericRange, - oid=3926, subtype_oid=20, array_oid=3927) + oid=3926, subtype_oid=20, array_oid=3927) int8range_caster._register() numrange_caster = RangeCaster(NumberRangeAdapter, NumericRange, - oid=3906, subtype_oid=1700, array_oid=3907) + oid=3906, subtype_oid=1700, array_oid=3907) numrange_caster._register() daterange_caster = RangeCaster('daterange', DateRange, - oid=3912, subtype_oid=1082, array_oid=3913) + oid=3912, subtype_oid=1082, array_oid=3913) daterange_caster._register() tsrange_caster = RangeCaster('tsrange', DateTimeRange, - oid=3908, subtype_oid=1114, array_oid=3909) + oid=3908, subtype_oid=1114, array_oid=3909) tsrange_caster._register() tstzrange_caster = RangeCaster('tstzrange', DateTimeTZRange, - oid=3910, subtype_oid=1184, array_oid=3911) + oid=3910, subtype_oid=1184, array_oid=3911) tstzrange_caster._register() - - diff --git a/lib/errorcodes.py b/lib/errorcodes.py index aa5a723c..bf46bf80 100644 --- a/lib/errorcodes.py +++ b/lib/errorcodes.py @@ -29,6 +29,7 @@ This module contains symbolic names for all PostgreSQL error codes. 
# http://www.postgresql.org/docs/current/static/errcodes-appendix.html # + def lookup(code, _cache={}): """Lookup an error code or class code and return its symbolic name. diff --git a/lib/extensions.py b/lib/extensions.py index b40e28b8..b8baf9d8 100644 --- a/lib/extensions.py +++ b/lib/extensions.py @@ -7,7 +7,7 @@ This module holds all the extensions to the DBAPI-2.0 provided by psycopg. - `lobject` -- the new-type inheritable large object class - `adapt()` -- exposes the PEP-246_ compatible adapting mechanism used by psycopg to adapt Python types to PostgreSQL ones - + .. _PEP-246: http://www.python.org/peps/pep-0246.html """ # psycopg/extensions.py - DBAPI-2.0 extensions specific to psycopg @@ -56,7 +56,9 @@ try: except ImportError: pass -from psycopg2._psycopg import adapt, adapters, encodings, connection, cursor, lobject, Xid, libpq_version, parse_dsn, quote_ident +from psycopg2._psycopg import ( + adapt, adapters, encodings, connection, cursor, lobject, Xid, libpq_version, + parse_dsn, quote_ident) from psycopg2._psycopg import string_types, binary_types, new_type, new_array_type, register_type from psycopg2._psycopg import ISQLQuote, Notify, Diagnostics, Column @@ -68,32 +70,32 @@ except ImportError: pass """Isolation level values.""" -ISOLATION_LEVEL_AUTOCOMMIT = 0 -ISOLATION_LEVEL_READ_UNCOMMITTED = 4 -ISOLATION_LEVEL_READ_COMMITTED = 1 -ISOLATION_LEVEL_REPEATABLE_READ = 2 -ISOLATION_LEVEL_SERIALIZABLE = 3 +ISOLATION_LEVEL_AUTOCOMMIT = 0 +ISOLATION_LEVEL_READ_UNCOMMITTED = 4 +ISOLATION_LEVEL_READ_COMMITTED = 1 +ISOLATION_LEVEL_REPEATABLE_READ = 2 +ISOLATION_LEVEL_SERIALIZABLE = 3 """psycopg connection status values.""" -STATUS_SETUP = 0 -STATUS_READY = 1 -STATUS_BEGIN = 2 -STATUS_SYNC = 3 # currently unused -STATUS_ASYNC = 4 # currently unused +STATUS_SETUP = 0 +STATUS_READY = 1 +STATUS_BEGIN = 2 +STATUS_SYNC = 3 # currently unused +STATUS_ASYNC = 4 # currently unused STATUS_PREPARED = 5 # This is a useful mnemonic to check if the connection is in a transaction STATUS_IN_TRANSACTION = STATUS_BEGIN """psycopg asynchronous connection polling values""" -POLL_OK = 0 -POLL_READ = 1 +POLL_OK = 0 +POLL_READ = 1 POLL_WRITE = 2 POLL_ERROR = 3 """Backend transaction status values.""" -TRANSACTION_STATUS_IDLE = 0 -TRANSACTION_STATUS_ACTIVE = 1 +TRANSACTION_STATUS_IDLE = 0 +TRANSACTION_STATUS_ACTIVE = 1 TRANSACTION_STATUS_INTRANS = 2 TRANSACTION_STATUS_INERROR = 3 TRANSACTION_STATUS_UNKNOWN = 4 @@ -108,6 +110,7 @@ else: def b(s): return s.encode('utf8') + def register_adapter(typ, callable): """Register 'callable' as an ISQLQuote adapter for type 'typ'.""" adapters[(typ, ISQLQuote)] = callable diff --git a/lib/extras.py b/lib/extras.py index 2713d6fc..5aa36be7 100644 --- a/lib/extras.py +++ b/lib/extras.py @@ -106,6 +106,7 @@ class DictConnection(_connection): kwargs.setdefault('cursor_factory', DictCursor) return super(DictConnection, self).cursor(*args, **kwargs) + class DictCursor(DictCursorBase): """A cursor that keeps a list of column name -> index mappings.""" @@ -130,6 +131,7 @@ class DictCursor(DictCursorBase): self.index[self.description[i][0]] = i self._query_executed = 0 + class DictRow(list): """A row object that allow by-column-name access to data.""" @@ -192,10 +194,10 @@ class DictRow(list): # drop the crusty Py2 methods if _sys.version_info[0] > 2: - items = iteritems; del iteritems - keys = iterkeys; del iterkeys - values = itervalues; del itervalues - del has_key + items = iteritems + keys = iterkeys + values = itervalues + del itervalues, has_key, iteritems, iterkeys 
class RealDictConnection(_connection): @@ -204,6 +206,7 @@ class RealDictConnection(_connection): kwargs.setdefault('cursor_factory', RealDictCursor) return super(RealDictConnection, self).cursor(*args, **kwargs) + class RealDictCursor(DictCursorBase): """A cursor that uses a real dict as the base type for rows. @@ -233,6 +236,7 @@ class RealDictCursor(DictCursorBase): self.column_mapping.append(self.description[i][0]) self._query_executed = 0 + class RealDictRow(dict): """A `!dict` subclass representing a data record.""" @@ -265,6 +269,7 @@ class NamedTupleConnection(_connection): kwargs.setdefault('cursor_factory', NamedTupleCursor) return super(NamedTupleConnection, self).cursor(*args, **kwargs) + class NamedTupleCursor(_cursor): """A cursor that generates results as `~collections.namedtuple`. @@ -369,11 +374,13 @@ class LoggingConnection(_connection): def _logtofile(self, msg, curs): msg = self.filter(msg, curs) - if msg: self._logobj.write(msg + _os.linesep) + if msg: + self._logobj.write(msg + _os.linesep) def _logtologger(self, msg, curs): msg = self.filter(msg, curs) - if msg: self._logobj.debug(msg) + if msg: + self._logobj.debug(msg) def _check(self): if not hasattr(self, '_logobj'): @@ -385,6 +392,7 @@ class LoggingConnection(_connection): kwargs.setdefault('cursor_factory', LoggingCursor) return super(LoggingConnection, self).cursor(*args, **kwargs) + class LoggingCursor(_cursor): """A cursor that logs queries using its connection logging facilities.""" @@ -425,6 +433,7 @@ class MinTimeLoggingConnection(LoggingConnection): kwargs.setdefault('cursor_factory', MinTimeLoggingCursor) return LoggingConnection.cursor(self, *args, **kwargs) + class MinTimeLoggingCursor(LoggingCursor): """The cursor sub-class companion to `MinTimeLoggingConnection`.""" @@ -459,6 +468,7 @@ class UUID_adapter(object): def __str__(self): return "'%s'::uuid" % self._uuid + def register_uuid(oids=None, conn_or_curs=None): """Create the UUID type and an uuid.UUID adapter. @@ -480,8 +490,9 @@ def register_uuid(oids=None, conn_or_curs=None): oid1 = oids oid2 = 2951 - _ext.UUID = _ext.new_type((oid1, ), "UUID", - lambda data, cursor: data and uuid.UUID(data) or None) + _ext.UUID = _ext.new_type( + (oid1, ), "UUID", + lambda data, cursor: data and uuid.UUID(data) or None) _ext.UUIDARRAY = _ext.new_array_type((oid2,), "UUID[]", _ext.UUID) _ext.register_type(_ext.UUID, conn_or_curs) @@ -523,6 +534,7 @@ class Inet(object): def __str__(self): return str(self.addr) + def register_inet(oid=None, conn_or_curs=None): """Create the INET type and an Inet adapter. @@ -542,7 +554,7 @@ def register_inet(oid=None, conn_or_curs=None): oid2 = 1041 _ext.INET = _ext.new_type((oid1, ), "INET", - lambda data, cursor: data and Inet(data) or None) + lambda data, cursor: data and Inet(data) or None) _ext.INETARRAY = _ext.new_array_type((oid2, ), "INETARRAY", _ext.INET) _ext.register_type(_ext.INET, conn_or_curs) @@ -736,14 +748,14 @@ WHERE typname = 'hstore'; rv1.append(oids[1]) # revert the status of the connection as before the command - if (conn_status != _ext.STATUS_IN_TRANSACTION - and not conn.autocommit): - conn.rollback() + if (conn_status != _ext.STATUS_IN_TRANSACTION and not conn.autocommit): + conn.rollback() return tuple(rv0), tuple(rv1) + def register_hstore(conn_or_curs, globally=False, unicode=False, - oid=None, array_oid=None): + oid=None, array_oid=None): """Register adapter and typecaster for `!dict`\-\ |hstore| conversions. 
:param conn_or_curs: a connection or cursor: the typecaster will be @@ -822,8 +834,8 @@ class CompositeCaster(object): self.oid = oid self.array_oid = array_oid - self.attnames = [ a[0] for a in attrs ] - self.atttypes = [ a[1] for a in attrs ] + self.attnames = [a[0] for a in attrs] + self.atttypes = [a[1] for a in attrs] self._create_type(name, self.attnames) self.typecaster = _ext.new_type((oid,), name, self.parse) if array_oid: @@ -842,8 +854,8 @@ class CompositeCaster(object): "expecting %d components for the type %s, %d found instead" % (len(self.atttypes), self.name, len(tokens))) - values = [ curs.cast(oid, token) - for oid, token in zip(self.atttypes, tokens) ] + values = [curs.cast(oid, token) + for oid, token in zip(self.atttypes, tokens)] return self.make(values) @@ -927,8 +939,7 @@ ORDER BY attnum; recs = curs.fetchall() # revert the status of the connection as before the command - if (conn_status != _ext.STATUS_IN_TRANSACTION - and not conn.autocommit): + if (conn_status != _ext.STATUS_IN_TRANSACTION and not conn.autocommit): conn.rollback() if not recs: @@ -937,10 +948,11 @@ ORDER BY attnum; type_oid = recs[0][0] array_oid = recs[0][1] - type_attrs = [ (r[2], r[3]) for r in recs ] + type_attrs = [(r[2], r[3]) for r in recs] return self(tname, type_oid, type_attrs, - array_oid=array_oid, schema=schema) + array_oid=array_oid, schema=schema) + def register_composite(name, conn_or_curs, globally=False, factory=None): """Register a typecaster to convert a composite type into a tuple. diff --git a/lib/pool.py b/lib/pool.py index 8d7c4afb..72159c53 100644 --- a/lib/pool.py +++ b/lib/pool.py @@ -40,18 +40,18 @@ class AbstractConnectionPool(object): New 'minconn' connections are created immediately calling 'connfunc' with given parameters. The connection pool will support a maximum of - about 'maxconn' connections. + about 'maxconn' connections. """ self.minconn = int(minconn) self.maxconn = int(maxconn) self.closed = False - + self._args = args self._kwargs = kwargs self._pool = [] self._used = {} - self._rused = {} # id(conn) -> key map + self._rused = {} # id(conn) -> key map self._keys = 0 for i in range(self.minconn): @@ -71,12 +71,14 @@ class AbstractConnectionPool(object): """Return a new unique key.""" self._keys += 1 return self._keys - + def _getconn(self, key=None): """Get a free connection and assign it to 'key' if not None.""" - if self.closed: raise PoolError("connection pool is closed") - if key is None: key = self._getkey() - + if self.closed: + raise PoolError("connection pool is closed") + if key is None: + key = self._getkey() + if key in self._used: return self._used[key] @@ -88,11 +90,13 @@ class AbstractConnectionPool(object): if len(self._used) == self.maxconn: raise PoolError("connection pool exhausted") return self._connect(key) - + def _putconn(self, conn, key=None, close=False): """Put away a connection.""" - if self.closed: raise PoolError("connection pool is closed") - if key is None: key = self._rused.get(id(conn)) + if self.closed: + raise PoolError("connection pool is closed") + if key is None: + key = self._rused.get(id(conn)) if not key: raise PoolError("trying to put unkeyed connection") @@ -129,21 +133,22 @@ class AbstractConnectionPool(object): an already closed connection. If you call .closeall() make sure your code can deal with it. 
""" - if self.closed: raise PoolError("connection pool is closed") + if self.closed: + raise PoolError("connection pool is closed") for conn in self._pool + list(self._used.values()): try: conn.close() except: pass self.closed = True - + class SimpleConnectionPool(AbstractConnectionPool): """A connection pool that can't be shared across different threads.""" getconn = AbstractConnectionPool._getconn putconn = AbstractConnectionPool._putconn - closeall = AbstractConnectionPool._closeall + closeall = AbstractConnectionPool._closeall class ThreadedConnectionPool(AbstractConnectionPool): @@ -182,7 +187,7 @@ class ThreadedConnectionPool(AbstractConnectionPool): class PersistentConnectionPool(AbstractConnectionPool): - """A pool that assigns persistent connections to different threads. + """A pool that assigns persistent connections to different threads. Note that this connection pool generates by itself the required keys using the current thread id. This means that until a thread puts away @@ -195,7 +200,7 @@ class PersistentConnectionPool(AbstractConnectionPool): """Initialize the threading lock.""" import warnings warnings.warn("deprecated: use ZPsycopgDA.pool implementation", - DeprecationWarning) + DeprecationWarning) import threading AbstractConnectionPool.__init__( @@ -204,7 +209,7 @@ class PersistentConnectionPool(AbstractConnectionPool): # we we'll need the thread module, to determine thread ids, so we # import it here and copy it in an instance variable - import thread as _thread # work around for 2to3 bug - see ticket #348 + import thread as _thread # work around for 2to3 bug - see ticket #348 self.__thread = _thread def getconn(self): @@ -221,7 +226,8 @@ class PersistentConnectionPool(AbstractConnectionPool): key = self.__thread.get_ident() self._lock.acquire() try: - if not conn: conn = self._used[key] + if not conn: + conn = self._used[key] self._putconn(conn, key, close) finally: self._lock.release() diff --git a/lib/psycopg1.py b/lib/psycopg1.py index 7a24c5f2..bd51aa3f 100644 --- a/lib/psycopg1.py +++ b/lib/psycopg1.py @@ -36,16 +36,18 @@ from psycopg2 import * import psycopg2.extensions as _ext _2connect = connect + def connect(*args, **kwargs): """connect(dsn, ...) -> new psycopg 1.1.x compatible connection object""" kwargs['connection_factory'] = connection conn = _2connect(*args, **kwargs) conn.set_isolation_level(_ext.ISOLATION_LEVEL_READ_COMMITTED) return conn - + + class connection(_2connection): """psycopg 1.1.x connection.""" - + def cursor(self): """cursor() -> new psycopg 1.1.x compatible cursor object""" return _2connection.cursor(self, cursor_factory=cursor) @@ -56,7 +58,7 @@ class connection(_2connection): self.set_isolation_level(_ext.ISOLATION_LEVEL_AUTOCOMMIT) else: self.set_isolation_level(_ext.ISOLATION_LEVEL_READ_COMMITTED) - + class cursor(_2cursor): """psycopg 1.1.x cursor. 
@@ -71,25 +73,24 @@ class cursor(_2cursor): for i in range(len(self.description)): res[self.description[i][0]] = row[i] return res - + def dictfetchone(self): row = _2cursor.fetchone(self) if row: return self.__build_dict(row) else: return row - + def dictfetchmany(self, size): res = [] rows = _2cursor.fetchmany(self, size) for row in rows: res.append(self.__build_dict(row)) return res - + def dictfetchall(self): res = [] rows = _2cursor.fetchall(self) for row in rows: res.append(self.__build_dict(row)) return res - diff --git a/lib/tz.py b/lib/tz.py index 695a9253..92a16041 100644 --- a/lib/tz.py +++ b/lib/tz.py @@ -2,7 +2,7 @@ This module holds two different tzinfo implementations that can be used as the 'tzinfo' argument to datetime constructors, directly passed to psycopg -functions or used to set the .tzinfo_factory attribute in cursors. +functions or used to set the .tzinfo_factory attribute in cursors. """ # psycopg/tz.py - tzinfo implementation # @@ -31,6 +31,7 @@ import time ZERO = datetime.timedelta(0) + class FixedOffsetTimezone(datetime.tzinfo): """Fixed offset in minutes east from UTC. @@ -52,7 +53,7 @@ class FixedOffsetTimezone(datetime.tzinfo): def __init__(self, offset=None, name=None): if offset is not None: - self._offset = datetime.timedelta(minutes = offset) + self._offset = datetime.timedelta(minutes=offset) if name is not None: self._name = name @@ -85,7 +86,7 @@ class FixedOffsetTimezone(datetime.tzinfo): else: seconds = self._offset.seconds + self._offset.days * 86400 hours, seconds = divmod(seconds, 3600) - minutes = seconds/60 + minutes = seconds / 60 if minutes: return "%+03d:%d" % (hours, minutes) else: @@ -95,13 +96,14 @@ class FixedOffsetTimezone(datetime.tzinfo): return ZERO -STDOFFSET = datetime.timedelta(seconds = -time.timezone) +STDOFFSET = datetime.timedelta(seconds=-time.timezone) if time.daylight: - DSTOFFSET = datetime.timedelta(seconds = -time.altzone) + DSTOFFSET = datetime.timedelta(seconds=-time.altzone) else: DSTOFFSET = STDOFFSET DSTDIFF = DSTOFFSET - STDOFFSET + class LocalTimezone(datetime.tzinfo): """Platform idea of local timezone.
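
For reference, a minimal, hypothetical sketch (not code from psycopg2) of the kinds of cleanups the hunks above make: one-line "if x: y" statements are split so the body gets its own line, spaces around '=' are dropped in keyword defaults and arguments (as in the tz.py hunk), inline comments get two spaces before the '#', continuation lines are aligned under the opening delimiter, and top-level definitions are separated by two blank lines.

import datetime

ZERO = datetime.timedelta(0)  # inline comment: two spaces before the '#'


def minute_offset(minutes=None):  # was: minutes = None
    """Return a timedelta 'minutes' east of UTC (illustrative only)."""
    if minutes is None:  # was: if minutes is None: return ZERO
        return ZERO
    return datetime.timedelta(minutes=minutes)


def describe(offset, name=None):
    # continuation lines align under the opening delimiter
    return "%s: %s" % (name or "offset",
                       offset)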