Merge branch 'rm-2.7'

This commit is contained in:
commit 19ddbc47ca by Daniele Varrazzo, 2021-05-20 16:59:43 +02:00
62 changed files with 372 additions and 1307 deletions

4
NEWS
View File

@ -4,7 +4,7 @@ Current release
What's new in psycopg 2.9 What's new in psycopg 2.9
------------------------- -------------------------
- Dropped support for Python 3.4, 3.5 (:tickets:#1000, #1197). - Dropped support for Python 2.7, 3.4, 3.5 (:tickets:#1198, #1000, #1197).
- Reclassified SQLSTATE connection exceptions (08XXX) as - Reclassified SQLSTATE connection exceptions (08XXX) as
`~psycopg2.errors.OperationalError` (subclass of previously used `~psycopg2.errors.OperationalError` (subclass of previously used
`~psycopg2.errors.DatabaseError`) (:ticket:`#1148`). `~psycopg2.errors.DatabaseError`) (:ticket:`#1148`).
@ -25,7 +25,7 @@ What's new in psycopg 2.8.6
(:ticket:`#1101`). (:ticket:`#1101`).
- Fixed search of mxDateTime headers in virtualenvs (:ticket:`#996`). - Fixed search of mxDateTime headers in virtualenvs (:ticket:`#996`).
- Added missing values from errorcodes (:ticket:`#1133`). - Added missing values from errorcodes (:ticket:`#1133`).
- `cursor.query` reports the query of the last :sql:`COPY` opearation too - `cursor.query` reports the query of the last :sql:`COPY` operation too
(:ticket:`#1141`). (:ticket:`#1141`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to - `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
PostgreSQL 13. PostgreSQL 13.

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# #
# Psycopg documentation build configuration file, created by # Psycopg documentation build configuration file, created by
# sphinx-quickstart on Sun Feb 7 13:48:41 2010. # sphinx-quickstart on Sun Feb 7 13:48:41 2010.
@ -48,9 +47,9 @@ source_suffix = '.rst'
master_doc = 'index' master_doc = 'index'
# General information about the project. # General information about the project.
project = u'Psycopg' project = 'Psycopg'
copyright = ( copyright = (
u'2001-2020, Federico Di Gregorio, Daniele Varrazzo, The Psycopg Team' '2001-2020, Federico Di Gregorio, Daniele Varrazzo, The Psycopg Team'
) )
# The version info for the project you're documenting, acts as replacement for # The version info for the project you're documenting, acts as replacement for
@ -235,8 +234,8 @@ latex_documents = [
( (
'index', 'index',
'psycopg.tex', 'psycopg.tex',
u'Psycopg Documentation', 'Psycopg Documentation',
u'Federico Di Gregorio', 'Federico Di Gregorio',
'manual', 'manual',
) )
] ]

View File

@ -131,8 +131,7 @@ The current `!psycopg2` implementation supports:
.. ..
NOTE: keep consistent with setup.py and the /features/ page. NOTE: keep consistent with setup.py and the /features/ page.
- Python version 2.7 - Python versions from 3.6 to 3.9
- Python 3 versions from 3.6 to 3.9
- PostgreSQL server versions from 7.4 to 13 - PostgreSQL server versions from 7.4 to 13
- PostgreSQL client library version from 9.1 - PostgreSQL client library version from 9.1

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
""" """
extension extension
~~~~~~~~~ ~~~~~~~~~

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
""" """
sql role sql role
~~~~~~~~ ~~~~~~~~

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
""" """
ticket role ticket role
~~~~~~~~~~~ ~~~~~~~~~~~

View File

@ -2,7 +2,6 @@
"""Create the docs table of the sqlstate errors. """Create the docs table of the sqlstate errors.
""" """
from __future__ import print_function
import re import re
import sys import sys
@ -26,8 +25,8 @@ def main():
for k in sorted(sqlstate_errors): for k in sorted(sqlstate_errors):
exc = sqlstate_errors[k] exc = sqlstate_errors[k]
lines.append(Line( lines.append(Line(
"``%s``" % k, "`!%s`" % exc.__name__, f"``{k}``", f"`!{exc.__name__}`",
"`!%s`" % get_base_exception(exc).__name__, k)) f"`!{get_base_exception(exc).__name__}`", k))
widths = [max(len(l[c]) for l in lines) for c in range(3)] widths = [max(len(l[c]) for l in lines) for c in range(3)]
h = Line(*(['=' * w for w in widths] + [None])) h = Line(*(['=' * w for w in widths] + [None]))
@ -40,7 +39,7 @@ def main():
for l in lines: for l in lines:
cls = l.sqlstate[:2] if l.sqlstate else None cls = l.sqlstate[:2] if l.sqlstate else None
if cls and cls != sqlclass: if cls and cls != sqlclass:
print("**Class %s**: %s" % (cls, sqlclasses[cls])) print(f"**Class {cls}**: {sqlclasses[cls]}")
print(h1) print(h1)
sqlclass = cls sqlclass = cls

View File

@ -26,7 +26,6 @@
from psycopg2.extensions import ( from psycopg2.extensions import (
new_type, new_array_type, register_type, register_adapter, QuotedString) new_type, new_array_type, register_type, register_adapter, QuotedString)
from psycopg2.compat import text_type
# The module is imported on register_ipaddress # The module is imported on register_ipaddress
ipaddress = None ipaddress = None
@ -78,13 +77,13 @@ def cast_interface(s, cur=None):
if s is None: if s is None:
return None return None
# Py2 version force the use of unicode. meh. # Py2 version force the use of unicode. meh.
return ipaddress.ip_interface(text_type(s)) return ipaddress.ip_interface(str(s))
def cast_network(s, cur=None): def cast_network(s, cur=None):
if s is None: if s is None:
return None return None
return ipaddress.ip_network(text_type(s)) return ipaddress.ip_network(str(s))
def adapt_ipaddress(obj): def adapt_ipaddress(obj):

View File

@ -32,7 +32,6 @@ import json
from psycopg2._psycopg import ISQLQuote, QuotedString from psycopg2._psycopg import ISQLQuote, QuotedString
from psycopg2._psycopg import new_type, new_array_type, register_type from psycopg2._psycopg import new_type, new_array_type, register_type
from psycopg2.compat import PY2
# oids from PostgreSQL 9.2 # oids from PostgreSQL 9.2
@ -44,7 +43,7 @@ JSONB_OID = 3802
JSONBARRAY_OID = 3807 JSONBARRAY_OID = 3807
class Json(object): class Json:
""" """
An `~psycopg2.extensions.ISQLQuote` wrapper to adapt a Python object to An `~psycopg2.extensions.ISQLQuote` wrapper to adapt a Python object to
:sql:`json` data type. :sql:`json` data type.
@ -82,12 +81,8 @@ class Json(object):
qs.prepare(self._conn) qs.prepare(self._conn)
return qs.getquoted() return qs.getquoted()
if PY2:
def __str__(self): def __str__(self):
return self.getquoted() # getquoted is binary
else:
def __str__(self):
# getquoted is binary in Py3
return self.getquoted().decode('ascii', 'replace') return self.getquoted().decode('ascii', 'replace')
@ -168,7 +163,7 @@ def _create_json_typecasters(oid, array_oid, loads=None, name='JSON'):
JSON = new_type((oid, ), name, typecast_json) JSON = new_type((oid, ), name, typecast_json)
if array_oid is not None: if array_oid is not None:
JSONARRAY = new_array_type((array_oid, ), "%sARRAY" % name, JSON) JSONARRAY = new_array_type((array_oid, ), f"{name}ARRAY", JSON)
else: else:
JSONARRAY = None JSONARRAY = None
@ -199,6 +194,6 @@ def _get_json_oids(conn_or_curs, name='json'):
conn.rollback() conn.rollback()
if not r: if not r:
raise conn.ProgrammingError("%s data type not found" % name) raise conn.ProgrammingError(f"{name} data type not found")
return r return r

View File

@ -1,104 +0,0 @@
"""
LRU cache implementation for Python 2.7
Ported from http://code.activestate.com/recipes/578078/ and simplified for our
use (only support maxsize > 0 and positional arguments).
"""
from collections import namedtuple
from functools import update_wrapper
from threading import RLock
_CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])
def lru_cache(maxsize=100):
"""Least-recently-used cache decorator.
Arguments to the cached function must be hashable.
See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
"""
def decorating_function(user_function):
cache = dict()
stats = [0, 0] # make statistics updateable non-locally
HITS, MISSES = 0, 1 # names for the stats fields
cache_get = cache.get # bound method to lookup key or return None
_len = len # localize the global len() function
lock = RLock() # linkedlist updates aren't threadsafe
root = [] # root of the circular doubly linked list
root[:] = [root, root, None, None] # initialize by pointing to self
nonlocal_root = [root] # make updateable non-locally
PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
assert maxsize and maxsize > 0, "maxsize %s not supported" % maxsize
def wrapper(*args):
# size limited caching that tracks accesses by recency
key = args
with lock:
link = cache_get(key)
if link is not None:
# record recent use of the key by moving it to the
# front of the list
root, = nonlocal_root
link_prev, link_next, key, result = link
link_prev[NEXT] = link_next
link_next[PREV] = link_prev
last = root[PREV]
last[NEXT] = root[PREV] = link
link[PREV] = last
link[NEXT] = root
stats[HITS] += 1
return result
result = user_function(*args)
with lock:
root, = nonlocal_root
if key in cache:
# getting here means that this same key was added to the
# cache while the lock was released. since the link
# update is already done, we need only return the
# computed result and update the count of misses.
pass
elif _len(cache) >= maxsize:
# use the old root to store the new key and result
oldroot = root
oldroot[KEY] = key
oldroot[RESULT] = result
# empty the oldest link and make it the new root
root = nonlocal_root[0] = oldroot[NEXT]
oldkey = root[KEY]
# oldvalue = root[RESULT]
root[KEY] = root[RESULT] = None
# now update the cache dictionary for the new links
del cache[oldkey]
cache[key] = oldroot
else:
# put result in a new link at the front of the list
last = root[PREV]
link = [last, root, key, result]
last[NEXT] = root[PREV] = cache[key] = link
stats[MISSES] += 1
return result
def cache_info():
"""Report cache statistics"""
with lock:
return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))
def cache_clear():
"""Clear the cache and cache statistics"""
with lock:
cache.clear()
root = nonlocal_root[0]
root[:] = [root, root, None, None]
stats[:] = [0, 0]
wrapper.__wrapped__ = user_function
wrapper.cache_info = cache_info
wrapper.cache_clear = cache_clear
return update_wrapper(wrapper, user_function)
return decorating_function

View File

@ -30,10 +30,9 @@ import re
from psycopg2._psycopg import ProgrammingError, InterfaceError from psycopg2._psycopg import ProgrammingError, InterfaceError
from psycopg2.extensions import ISQLQuote, adapt, register_adapter from psycopg2.extensions import ISQLQuote, adapt, register_adapter
from psycopg2.extensions import new_type, new_array_type, register_type from psycopg2.extensions import new_type, new_array_type, register_type
from psycopg2.compat import string_types
class Range(object): class Range:
"""Python representation for a PostgreSQL |range|_ type. """Python representation for a PostgreSQL |range|_ type.
:param lower: lower bound for the range. `!None` means unbound :param lower: lower bound for the range. `!None` means unbound
@ -48,7 +47,7 @@ class Range(object):
def __init__(self, lower=None, upper=None, bounds='[)', empty=False): def __init__(self, lower=None, upper=None, bounds='[)', empty=False):
if not empty: if not empty:
if bounds not in ('[)', '(]', '()', '[]'): if bounds not in ('[)', '(]', '()', '[]'):
raise ValueError("bound flags not valid: %r" % bounds) raise ValueError(f"bound flags not valid: {bounds!r}")
self._lower = lower self._lower = lower
self._upper = upper self._upper = upper
@ -58,9 +57,9 @@ class Range(object):
def __repr__(self): def __repr__(self):
if self._bounds is None: if self._bounds is None:
return "%s(empty=True)" % self.__class__.__name__ return f"{self.__class__.__name__}(empty=True)"
else: else:
return "%s(%r, %r, %r)" % (self.__class__.__name__, return "{}({!r}, {!r}, {!r})".format(self.__class__.__name__,
self._lower, self._upper, self._bounds) self._lower, self._upper, self._bounds)
def __str__(self): def __str__(self):
@ -239,7 +238,7 @@ def register_range(pgrange, pyrange, conn_or_curs, globally=False):
return caster return caster
class RangeAdapter(object): class RangeAdapter:
"""`ISQLQuote` adapter for `Range` subclasses. """`ISQLQuote` adapter for `Range` subclasses.
This is an abstract class: concrete classes must set a `name` class This is an abstract class: concrete classes must set a `name` class
@ -287,7 +286,7 @@ class RangeAdapter(object):
+ b", '" + r._bounds.encode('utf8') + b"')" + b", '" + r._bounds.encode('utf8') + b"')"
class RangeCaster(object): class RangeCaster:
"""Helper class to convert between `Range` and PostgreSQL range types. """Helper class to convert between `Range` and PostgreSQL range types.
Objects of this class are usually created by `register_range()`. Manual Objects of this class are usually created by `register_range()`. Manual
@ -315,7 +314,7 @@ class RangeCaster(object):
# an implementation detail and is not documented. It is currently used # an implementation detail and is not documented. It is currently used
# for the numeric ranges. # for the numeric ranges.
self.adapter = None self.adapter = None
if isinstance(pgrange, string_types): if isinstance(pgrange, str):
self.adapter = type(pgrange, (RangeAdapter,), {}) self.adapter = type(pgrange, (RangeAdapter,), {})
self.adapter.name = pgrange self.adapter.name = pgrange
else: else:
@ -332,7 +331,7 @@ class RangeCaster(object):
self.range = None self.range = None
try: try:
if isinstance(pyrange, string_types): if isinstance(pyrange, str):
self.range = type(pyrange, (Range,), {}) self.range = type(pyrange, (Range,), {})
if issubclass(pyrange, Range) and pyrange is not Range: if issubclass(pyrange, Range) and pyrange is not Range:
self.range = pyrange self.range = pyrange
@ -392,7 +391,7 @@ where typname = %s and ns.nspname = %s;
if not rec: if not rec:
raise ProgrammingError( raise ProgrammingError(
"PostgreSQL type '%s' not found" % name) f"PostgreSQL type '{name}' not found")
type, subtype, array = rec type, subtype, array = rec
@ -424,7 +423,7 @@ where typname = %s and ns.nspname = %s;
m = self._re_range.match(s) m = self._re_range.match(s)
if m is None: if m is None:
raise InterfaceError("failed to parse range: '%s'" % s) raise InterfaceError(f"failed to parse range: '{s}'")
lower = m.group(3) lower = m.group(3)
if lower is None: if lower is None:
@ -504,8 +503,7 @@ class NumberRangeAdapter(RangeAdapter):
else: else:
upper = '' upper = ''
return ("'%s%s,%s%s'" % ( return (f"'{r._bounds[0]}{lower},{upper}{r._bounds[1]}'").encode('ascii')
r._bounds[0], lower, upper, r._bounds[1])).encode('ascii')
# TODO: probably won't work with infs, nans and other tricky cases. # TODO: probably won't work with infs, nans and other tricky cases.

View File

@ -1,19 +0,0 @@
import sys
__all__ = ['string_types', 'text_type', 'lru_cache']
if sys.version_info[0] == 2:
# Python 2
PY2 = True
PY3 = False
string_types = basestring,
text_type = unicode
from ._lru_cache import lru_cache
else:
# Python 3
PY2 = False
PY3 = True
string_types = str,
text_type = str
from functools import lru_cache

View File

@ -106,7 +106,7 @@ def register_adapter(typ, callable):
# The SQL_IN class is the official adapter for tuples starting from 2.0.6. # The SQL_IN class is the official adapter for tuples starting from 2.0.6.
class SQL_IN(object): class SQL_IN:
"""Adapt any iterable to an SQL quotable object.""" """Adapt any iterable to an SQL quotable object."""
def __init__(self, seq): def __init__(self, seq):
self._seq = seq self._seq = seq
@ -130,7 +130,7 @@ class SQL_IN(object):
return str(self.getquoted()) return str(self.getquoted())
class NoneAdapter(object): class NoneAdapter:
"""Adapt None to NULL. """Adapt None to NULL.
This adapter is not used normally as a fast path in mogrify uses NULL, This adapter is not used normally as a fast path in mogrify uses NULL,
@ -168,7 +168,7 @@ def make_dsn(dsn=None, **kwargs):
tmp.update(kwargs) tmp.update(kwargs)
kwargs = tmp kwargs = tmp
dsn = " ".join(["%s=%s" % (k, _param_escape(str(v))) dsn = " ".join(["{}={}".format(k, _param_escape(str(v)))
for (k, v) in kwargs.items()]) for (k, v) in kwargs.items()])
# verify that the returned dsn is valid # verify that the returned dsn is valid

View File

@ -38,7 +38,7 @@ from psycopg2 import extensions as _ext
from .extensions import cursor as _cursor from .extensions import cursor as _cursor
from .extensions import connection as _connection from .extensions import connection as _connection
from .extensions import adapt as _A, quote_ident from .extensions import adapt as _A, quote_ident
from .compat import PY2, PY3, lru_cache from functools import lru_cache
from psycopg2._psycopg import ( # noqa from psycopg2._psycopg import ( # noqa
REPLICATION_PHYSICAL, REPLICATION_LOGICAL, REPLICATION_PHYSICAL, REPLICATION_LOGICAL,
@ -72,47 +72,47 @@ class DictCursorBase(_cursor):
else: else:
raise NotImplementedError( raise NotImplementedError(
"DictCursorBase can't be instantiated without a row factory.") "DictCursorBase can't be instantiated without a row factory.")
super(DictCursorBase, self).__init__(*args, **kwargs) super().__init__(*args, **kwargs)
self._query_executed = False self._query_executed = False
self._prefetch = False self._prefetch = False
self.row_factory = row_factory self.row_factory = row_factory
def fetchone(self): def fetchone(self):
if self._prefetch: if self._prefetch:
res = super(DictCursorBase, self).fetchone() res = super().fetchone()
if self._query_executed: if self._query_executed:
self._build_index() self._build_index()
if not self._prefetch: if not self._prefetch:
res = super(DictCursorBase, self).fetchone() res = super().fetchone()
return res return res
def fetchmany(self, size=None): def fetchmany(self, size=None):
if self._prefetch: if self._prefetch:
res = super(DictCursorBase, self).fetchmany(size) res = super().fetchmany(size)
if self._query_executed: if self._query_executed:
self._build_index() self._build_index()
if not self._prefetch: if not self._prefetch:
res = super(DictCursorBase, self).fetchmany(size) res = super().fetchmany(size)
return res return res
def fetchall(self): def fetchall(self):
if self._prefetch: if self._prefetch:
res = super(DictCursorBase, self).fetchall() res = super().fetchall()
if self._query_executed: if self._query_executed:
self._build_index() self._build_index()
if not self._prefetch: if not self._prefetch:
res = super(DictCursorBase, self).fetchall() res = super().fetchall()
return res return res
def __iter__(self): def __iter__(self):
try: try:
if self._prefetch: if self._prefetch:
res = super(DictCursorBase, self).__iter__() res = super().__iter__()
first = next(res) first = next(res)
if self._query_executed: if self._query_executed:
self._build_index() self._build_index()
if not self._prefetch: if not self._prefetch:
res = super(DictCursorBase, self).__iter__() res = super().__iter__()
first = next(res) first = next(res)
yield first yield first
@ -126,7 +126,7 @@ class DictConnection(_connection):
"""A connection that uses `DictCursor` automatically.""" """A connection that uses `DictCursor` automatically."""
def cursor(self, *args, **kwargs): def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', self.cursor_factory or DictCursor) kwargs.setdefault('cursor_factory', self.cursor_factory or DictCursor)
return super(DictConnection, self).cursor(*args, **kwargs) return super().cursor(*args, **kwargs)
class DictCursor(DictCursorBase): class DictCursor(DictCursorBase):
@ -137,18 +137,18 @@ class DictCursor(DictCursorBase):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
kwargs['row_factory'] = DictRow kwargs['row_factory'] = DictRow
super(DictCursor, self).__init__(*args, **kwargs) super().__init__(*args, **kwargs)
self._prefetch = True self._prefetch = True
def execute(self, query, vars=None): def execute(self, query, vars=None):
self.index = OrderedDict() self.index = OrderedDict()
self._query_executed = True self._query_executed = True
return super(DictCursor, self).execute(query, vars) return super().execute(query, vars)
def callproc(self, procname, vars=None): def callproc(self, procname, vars=None):
self.index = OrderedDict() self.index = OrderedDict()
self._query_executed = True self._query_executed = True
return super(DictCursor, self).callproc(procname, vars) return super().callproc(procname, vars)
def _build_index(self): def _build_index(self):
if self._query_executed and self.description: if self._query_executed and self.description:
@ -169,22 +169,22 @@ class DictRow(list):
def __getitem__(self, x): def __getitem__(self, x):
if not isinstance(x, (int, slice)): if not isinstance(x, (int, slice)):
x = self._index[x] x = self._index[x]
return super(DictRow, self).__getitem__(x) return super().__getitem__(x)
def __setitem__(self, x, v): def __setitem__(self, x, v):
if not isinstance(x, (int, slice)): if not isinstance(x, (int, slice)):
x = self._index[x] x = self._index[x]
super(DictRow, self).__setitem__(x, v) super().__setitem__(x, v)
def items(self): def items(self):
g = super(DictRow, self).__getitem__ g = super().__getitem__
return ((n, g(self._index[n])) for n in self._index) return ((n, g(self._index[n])) for n in self._index)
def keys(self): def keys(self):
return iter(self._index) return iter(self._index)
def values(self): def values(self):
g = super(DictRow, self).__getitem__ g = super().__getitem__
return (g(self._index[n]) for n in self._index) return (g(self._index[n]) for n in self._index)
def get(self, x, default=None): def get(self, x, default=None):
@ -201,7 +201,7 @@ class DictRow(list):
def __reduce__(self): def __reduce__(self):
# this is apparently useless, but it fixes #1073 # this is apparently useless, but it fixes #1073
return super(DictRow, self).__reduce__() return super().__reduce__()
def __getstate__(self): def __getstate__(self):
return self[:], self._index.copy() return self[:], self._index.copy()
@ -210,27 +210,12 @@ class DictRow(list):
self[:] = data[0] self[:] = data[0]
self._index = data[1] self._index = data[1]
if PY2:
iterkeys = keys
itervalues = values
iteritems = items
has_key = __contains__
def keys(self):
return list(self.iterkeys())
def values(self):
return tuple(self.itervalues())
def items(self):
return list(self.iteritems())
class RealDictConnection(_connection): class RealDictConnection(_connection):
"""A connection that uses `RealDictCursor` automatically.""" """A connection that uses `RealDictCursor` automatically."""
def cursor(self, *args, **kwargs): def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', self.cursor_factory or RealDictCursor) kwargs.setdefault('cursor_factory', self.cursor_factory or RealDictCursor)
return super(RealDictConnection, self).cursor(*args, **kwargs) return super().cursor(*args, **kwargs)
class RealDictCursor(DictCursorBase): class RealDictCursor(DictCursorBase):
@ -243,17 +228,17 @@ class RealDictCursor(DictCursorBase):
""" """
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
kwargs['row_factory'] = RealDictRow kwargs['row_factory'] = RealDictRow
super(RealDictCursor, self).__init__(*args, **kwargs) super().__init__(*args, **kwargs)
def execute(self, query, vars=None): def execute(self, query, vars=None):
self.column_mapping = [] self.column_mapping = []
self._query_executed = True self._query_executed = True
return super(RealDictCursor, self).execute(query, vars) return super().execute(query, vars)
def callproc(self, procname, vars=None): def callproc(self, procname, vars=None):
self.column_mapping = [] self.column_mapping = []
self._query_executed = True self._query_executed = True
return super(RealDictCursor, self).callproc(procname, vars) return super().callproc(procname, vars)
def _build_index(self): def _build_index(self):
if self._query_executed and self.description: if self._query_executed and self.description:
@ -271,7 +256,7 @@ class RealDictRow(OrderedDict):
else: else:
cursor = None cursor = None
super(RealDictRow, self).__init__(*args, **kwargs) super().__init__(*args, **kwargs)
if cursor is not None: if cursor is not None:
# Required for named cursors # Required for named cursors
@ -287,20 +272,20 @@ class RealDictRow(OrderedDict):
if RealDictRow in self: if RealDictRow in self:
# We are in the row building phase # We are in the row building phase
mapping = self[RealDictRow] mapping = self[RealDictRow]
super(RealDictRow, self).__setitem__(mapping[key], value) super().__setitem__(mapping[key], value)
if key == len(mapping) - 1: if key == len(mapping) - 1:
# Row building finished # Row building finished
del self[RealDictRow] del self[RealDictRow]
return return
super(RealDictRow, self).__setitem__(key, value) super().__setitem__(key, value)
class NamedTupleConnection(_connection): class NamedTupleConnection(_connection):
"""A connection that uses `NamedTupleCursor` automatically.""" """A connection that uses `NamedTupleCursor` automatically."""
def cursor(self, *args, **kwargs): def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', self.cursor_factory or NamedTupleCursor) kwargs.setdefault('cursor_factory', self.cursor_factory or NamedTupleCursor)
return super(NamedTupleConnection, self).cursor(*args, **kwargs) return super().cursor(*args, **kwargs)
class NamedTupleCursor(_cursor): class NamedTupleCursor(_cursor):
@ -324,18 +309,18 @@ class NamedTupleCursor(_cursor):
def execute(self, query, vars=None): def execute(self, query, vars=None):
self.Record = None self.Record = None
return super(NamedTupleCursor, self).execute(query, vars) return super().execute(query, vars)
def executemany(self, query, vars): def executemany(self, query, vars):
self.Record = None self.Record = None
return super(NamedTupleCursor, self).executemany(query, vars) return super().executemany(query, vars)
def callproc(self, procname, vars=None): def callproc(self, procname, vars=None):
self.Record = None self.Record = None
return super(NamedTupleCursor, self).callproc(procname, vars) return super().callproc(procname, vars)
def fetchone(self): def fetchone(self):
t = super(NamedTupleCursor, self).fetchone() t = super().fetchone()
if t is not None: if t is not None:
nt = self.Record nt = self.Record
if nt is None: if nt is None:
@ -343,14 +328,14 @@ class NamedTupleCursor(_cursor):
return nt._make(t) return nt._make(t)
def fetchmany(self, size=None): def fetchmany(self, size=None):
ts = super(NamedTupleCursor, self).fetchmany(size) ts = super().fetchmany(size)
nt = self.Record nt = self.Record
if nt is None: if nt is None:
nt = self.Record = self._make_nt() nt = self.Record = self._make_nt()
return list(map(nt._make, ts)) return list(map(nt._make, ts))
def fetchall(self): def fetchall(self):
ts = super(NamedTupleCursor, self).fetchall() ts = super().fetchall()
nt = self.Record nt = self.Record
if nt is None: if nt is None:
nt = self.Record = self._make_nt() nt = self.Record = self._make_nt()
@ -358,7 +343,7 @@ class NamedTupleCursor(_cursor):
def __iter__(self): def __iter__(self):
try: try:
it = super(NamedTupleCursor, self).__iter__() it = super().__iter__()
t = next(it) t = next(it)
nt = self.Record nt = self.Record
@ -436,7 +421,7 @@ class LoggingConnection(_connection):
def _logtofile(self, msg, curs): def _logtofile(self, msg, curs):
msg = self.filter(msg, curs) msg = self.filter(msg, curs)
if msg: if msg:
if PY3 and isinstance(msg, bytes): if isinstance(msg, bytes):
msg = msg.decode(_ext.encodings[self.encoding], 'replace') msg = msg.decode(_ext.encodings[self.encoding], 'replace')
self._logobj.write(msg + _os.linesep) self._logobj.write(msg + _os.linesep)
@ -453,7 +438,7 @@ class LoggingConnection(_connection):
def cursor(self, *args, **kwargs): def cursor(self, *args, **kwargs):
self._check() self._check()
kwargs.setdefault('cursor_factory', self.cursor_factory or LoggingCursor) kwargs.setdefault('cursor_factory', self.cursor_factory or LoggingCursor)
return super(LoggingConnection, self).cursor(*args, **kwargs) return super().cursor(*args, **kwargs)
class LoggingCursor(_cursor): class LoggingCursor(_cursor):
@ -461,13 +446,13 @@ class LoggingCursor(_cursor):
def execute(self, query, vars=None): def execute(self, query, vars=None):
try: try:
return super(LoggingCursor, self).execute(query, vars) return super().execute(query, vars)
finally: finally:
self.connection.log(self.query, self) self.connection.log(self.query, self)
def callproc(self, procname, vars=None): def callproc(self, procname, vars=None):
try: try:
return super(LoggingCursor, self).callproc(procname, vars) return super().callproc(procname, vars)
finally: finally:
self.connection.log(self.query, self) self.connection.log(self.query, self)
@ -490,9 +475,9 @@ class MinTimeLoggingConnection(LoggingConnection):
def filter(self, msg, curs): def filter(self, msg, curs):
t = (_time.time() - curs.timestamp) * 1000 t = (_time.time() - curs.timestamp) * 1000
if t > self._mintime: if t > self._mintime:
if PY3 and isinstance(msg, bytes): if isinstance(msg, bytes):
msg = msg.decode(_ext.encodings[self.encoding], 'replace') msg = msg.decode(_ext.encodings[self.encoding], 'replace')
return msg + _os.linesep + " (execution time: %d ms)" % t return f"{msg}{_os.linesep} (execution time: {t} ms)"
def cursor(self, *args, **kwargs): def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', kwargs.setdefault('cursor_factory',
@ -516,14 +501,14 @@ class LogicalReplicationConnection(_replicationConnection):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
kwargs['replication_type'] = REPLICATION_LOGICAL kwargs['replication_type'] = REPLICATION_LOGICAL
super(LogicalReplicationConnection, self).__init__(*args, **kwargs) super().__init__(*args, **kwargs)
class PhysicalReplicationConnection(_replicationConnection): class PhysicalReplicationConnection(_replicationConnection):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
kwargs['replication_type'] = REPLICATION_PHYSICAL kwargs['replication_type'] = REPLICATION_PHYSICAL
super(PhysicalReplicationConnection, self).__init__(*args, **kwargs) super().__init__(*args, **kwargs)
class StopReplication(Exception): class StopReplication(Exception):
@ -544,7 +529,7 @@ class ReplicationCursor(_replicationCursor):
def create_replication_slot(self, slot_name, slot_type=None, output_plugin=None): def create_replication_slot(self, slot_name, slot_type=None, output_plugin=None):
"""Create streaming replication slot.""" """Create streaming replication slot."""
command = "CREATE_REPLICATION_SLOT %s " % quote_ident(slot_name, self) command = f"CREATE_REPLICATION_SLOT {quote_ident(slot_name, self)} "
if slot_type is None: if slot_type is None:
slot_type = self.connection.replication_type slot_type = self.connection.replication_type
@ -555,7 +540,7 @@ class ReplicationCursor(_replicationCursor):
"output plugin name is required to create " "output plugin name is required to create "
"logical replication slot") "logical replication slot")
command += "LOGICAL %s" % quote_ident(output_plugin, self) command += f"LOGICAL {quote_ident(output_plugin, self)}"
elif slot_type == REPLICATION_PHYSICAL: elif slot_type == REPLICATION_PHYSICAL:
if output_plugin is not None: if output_plugin is not None:
@ -567,14 +552,14 @@ class ReplicationCursor(_replicationCursor):
else: else:
raise psycopg2.ProgrammingError( raise psycopg2.ProgrammingError(
"unrecognized replication type: %s" % repr(slot_type)) f"unrecognized replication type: {repr(slot_type)}")
self.execute(command) self.execute(command)
def drop_replication_slot(self, slot_name): def drop_replication_slot(self, slot_name):
"""Drop streaming replication slot.""" """Drop streaming replication slot."""
command = "DROP_REPLICATION_SLOT %s" % quote_ident(slot_name, self) command = f"DROP_REPLICATION_SLOT {quote_ident(slot_name, self)}"
self.execute(command) self.execute(command)
def start_replication( def start_replication(
@ -589,7 +574,7 @@ class ReplicationCursor(_replicationCursor):
if slot_type == REPLICATION_LOGICAL: if slot_type == REPLICATION_LOGICAL:
if slot_name: if slot_name:
command += "SLOT %s " % quote_ident(slot_name, self) command += f"SLOT {quote_ident(slot_name, self)} "
else: else:
raise psycopg2.ProgrammingError( raise psycopg2.ProgrammingError(
"slot name is required for logical replication") "slot name is required for logical replication")
@ -598,19 +583,18 @@ class ReplicationCursor(_replicationCursor):
elif slot_type == REPLICATION_PHYSICAL: elif slot_type == REPLICATION_PHYSICAL:
if slot_name: if slot_name:
command += "SLOT %s " % quote_ident(slot_name, self) command += f"SLOT {quote_ident(slot_name, self)} "
# don't add "PHYSICAL", before 9.4 it was just START_REPLICATION XXX/XXX # don't add "PHYSICAL", before 9.4 it was just START_REPLICATION XXX/XXX
else: else:
raise psycopg2.ProgrammingError( raise psycopg2.ProgrammingError(
"unrecognized replication type: %s" % repr(slot_type)) f"unrecognized replication type: {repr(slot_type)}")
if type(start_lsn) is str: if type(start_lsn) is str:
lsn = start_lsn.split('/') lsn = start_lsn.split('/')
lsn = "%X/%08X" % (int(lsn[0], 16), int(lsn[1], 16)) lsn = f"{int(lsn[0], 16):X}/{int(lsn[1], 16):08X}"
else: else:
lsn = "%X/%08X" % ((start_lsn >> 32) & 0xFFFFFFFF, lsn = f"{start_lsn >> 32 & 4294967295:X}/{start_lsn & 4294967295:08X}"
start_lsn & 0xFFFFFFFF)
command += lsn command += lsn
@ -619,7 +603,7 @@ class ReplicationCursor(_replicationCursor):
raise psycopg2.ProgrammingError( raise psycopg2.ProgrammingError(
"cannot specify timeline for logical replication") "cannot specify timeline for logical replication")
command += " TIMELINE %d" % timeline command += f" TIMELINE {timeline}"
if options: if options:
if slot_type == REPLICATION_PHYSICAL: if slot_type == REPLICATION_PHYSICAL:
@ -630,7 +614,7 @@ class ReplicationCursor(_replicationCursor):
for k, v in options.items(): for k, v in options.items():
if not command.endswith('('): if not command.endswith('('):
command += ", " command += ", "
command += "%s %s" % (quote_ident(k, self), _A(str(v))) command += f"{quote_ident(k, self)} {_A(str(v))}"
command += ")" command += ")"
self.start_replication_expert( self.start_replication_expert(
@ -643,7 +627,7 @@ class ReplicationCursor(_replicationCursor):
# a dbtype and adapter for Python UUID type # a dbtype and adapter for Python UUID type
class UUID_adapter(object): class UUID_adapter:
"""Adapt Python's uuid.UUID__ type to PostgreSQL's uuid__. """Adapt Python's uuid.UUID__ type to PostgreSQL's uuid__.
.. __: https://docs.python.org/library/uuid.html .. __: https://docs.python.org/library/uuid.html
@ -658,10 +642,10 @@ class UUID_adapter(object):
return self return self
def getquoted(self): def getquoted(self):
return ("'%s'::uuid" % self._uuid).encode('utf8') return (f"'{self._uuid}'::uuid").encode('utf8')
def __str__(self): def __str__(self):
return "'%s'::uuid" % self._uuid return f"'{self._uuid}'::uuid"
def register_uuid(oids=None, conn_or_curs=None): def register_uuid(oids=None, conn_or_curs=None):
@ -698,7 +682,7 @@ def register_uuid(oids=None, conn_or_curs=None):
# a type, dbtype and adapter for PostgreSQL inet type # a type, dbtype and adapter for PostgreSQL inet type
class Inet(object): class Inet:
"""Wrap a string to allow for correct SQL-quoting of inet values. """Wrap a string to allow for correct SQL-quoting of inet values.
Note that this adapter does NOT check the passed value to make Note that this adapter does NOT check the passed value to make
@ -710,7 +694,7 @@ class Inet(object):
self.addr = addr self.addr = addr
def __repr__(self): def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self.addr) return f"{self.__class__.__name__}({self.addr!r})"
def prepare(self, conn): def prepare(self, conn):
self._conn = conn self._conn = conn
@ -783,7 +767,7 @@ def wait_select(conn):
elif state == POLL_WRITE: elif state == POLL_WRITE:
select.select([], [conn.fileno()], []) select.select([], [conn.fileno()], [])
else: else:
raise conn.OperationalError("bad state from poll: %s" % state) raise conn.OperationalError(f"bad state from poll: {state}")
except KeyboardInterrupt: except KeyboardInterrupt:
conn.cancel() conn.cancel()
# the loop will be broken by a server error # the loop will be broken by a server error
@ -805,7 +789,7 @@ def _solve_conn_curs(conn_or_curs):
return conn, curs return conn, curs
class HstoreAdapter(object): class HstoreAdapter:
"""Adapt a Python dict to the hstore syntax.""" """Adapt a Python dict to the hstore syntax."""
def __init__(self, wrapped): def __init__(self, wrapped):
self.wrapped = wrapped self.wrapped = wrapped
@ -885,7 +869,7 @@ class HstoreAdapter(object):
for m in self._re_hstore.finditer(s): for m in self._re_hstore.finditer(s):
if m is None or m.start() != start: if m is None or m.start() != start:
raise psycopg2.InterfaceError( raise psycopg2.InterfaceError(
"error parsing hstore pair at char %d" % start) f"error parsing hstore pair at char {start}")
k = _bsdec.sub(r'\1', m.group(1)) k = _bsdec.sub(r'\1', m.group(1))
v = m.group(2) v = m.group(2)
if v is not None: if v is not None:
@ -896,7 +880,7 @@ class HstoreAdapter(object):
if start < len(s): if start < len(s):
raise psycopg2.InterfaceError( raise psycopg2.InterfaceError(
"error parsing hstore: unparsed data after char %d" % start) f"error parsing hstore: unparsed data after char {start}")
return rv return rv
@ -924,12 +908,11 @@ class HstoreAdapter(object):
rv0, rv1 = [], [] rv0, rv1 = [], []
# get the oid for the hstore # get the oid for the hstore
curs.execute("""\ curs.execute(f"""SELECT t.oid, {typarray}
SELECT t.oid, %s
FROM pg_type t JOIN pg_namespace ns FROM pg_type t JOIN pg_namespace ns
ON typnamespace = ns.oid ON typnamespace = ns.oid
WHERE typname = 'hstore'; WHERE typname = 'hstore';
""" % typarray) """)
for oids in curs: for oids in curs:
rv0.append(oids[0]) rv0.append(oids[0])
rv1.append(oids[1]) rv1.append(oids[1])
@ -993,12 +976,7 @@ def register_hstore(conn_or_curs, globally=False, unicode=False,
array_oid = tuple([x for x in array_oid if x]) array_oid = tuple([x for x in array_oid if x])
# create and register the typecaster # create and register the typecaster
if PY2 and unicode: HSTORE = _ext.new_type(oid, "HSTORE", HstoreAdapter.parse)
cast = HstoreAdapter.parse_unicode
else:
cast = HstoreAdapter.parse
HSTORE = _ext.new_type(oid, "HSTORE", cast)
_ext.register_type(HSTORE, not globally and conn_or_curs or None) _ext.register_type(HSTORE, not globally and conn_or_curs or None)
_ext.register_adapter(dict, HstoreAdapter) _ext.register_adapter(dict, HstoreAdapter)
@ -1007,7 +985,7 @@ def register_hstore(conn_or_curs, globally=False, unicode=False,
_ext.register_type(HSTOREARRAY, not globally and conn_or_curs or None) _ext.register_type(HSTOREARRAY, not globally and conn_or_curs or None)
class CompositeCaster(object): class CompositeCaster:
"""Helps conversion of a PostgreSQL composite type into a Python object. """Helps conversion of a PostgreSQL composite type into a Python object.
The class is usually created by the `register_composite()` function. The class is usually created by the `register_composite()` function.
@ -1028,7 +1006,7 @@ class CompositeCaster(object):
self.typecaster = _ext.new_type((oid,), name, self.parse) self.typecaster = _ext.new_type((oid,), name, self.parse)
if array_oid: if array_oid:
self.array_typecaster = _ext.new_array_type( self.array_typecaster = _ext.new_array_type(
(array_oid,), "%sARRAY" % name, self.typecaster) (array_oid,), f"{name}ARRAY", self.typecaster)
else: else:
self.array_typecaster = None self.array_typecaster = None
@ -1072,7 +1050,7 @@ class CompositeCaster(object):
rv = [] rv = []
for m in self._re_tokenize.finditer(s): for m in self._re_tokenize.finditer(s):
if m is None: if m is None:
raise psycopg2.InterfaceError("can't parse type: %r" % s) raise psycopg2.InterfaceError(f"can't parse type: {s!r}")
if m.group(1) is not None: if m.group(1) is not None:
rv.append(None) rv.append(None)
elif m.group(2) is not None: elif m.group(2) is not None:
@ -1127,7 +1105,7 @@ ORDER BY attnum;
if not recs: if not recs:
raise psycopg2.ProgrammingError( raise psycopg2.ProgrammingError(
"PostgreSQL type '%s' not found" % name) f"PostgreSQL type '{name}' not found")
type_oid = recs[0][0] type_oid = recs[0][0]
array_oid = recs[0][1] array_oid = recs[0][1]

View File

@ -33,7 +33,7 @@ class PoolError(psycopg2.Error):
pass pass
class AbstractConnectionPool(object): class AbstractConnectionPool:
"""Generic key-based pooling code.""" """Generic key-based pooling code."""
def __init__(self, minconn, maxconn, *args, **kwargs): def __init__(self, minconn, maxconn, *args, **kwargs):

View File

@ -27,13 +27,12 @@
import string import string
from psycopg2 import extensions as ext from psycopg2 import extensions as ext
from psycopg2.compat import PY3, string_types
_formatter = string.Formatter() _formatter = string.Formatter()
class Composable(object): class Composable:
""" """
Abstract base class for objects that can be used to compose an SQL string. Abstract base class for objects that can be used to compose an SQL string.
@ -51,7 +50,7 @@ class Composable(object):
self._wrapped = wrapped self._wrapped = wrapped
def __repr__(self): def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self._wrapped) return f"{self.__class__.__name__}({self._wrapped!r})"
def as_string(self, context): def as_string(self, context):
""" """
@ -107,10 +106,10 @@ class Composed(Composable):
for i in seq: for i in seq:
if not isinstance(i, Composable): if not isinstance(i, Composable):
raise TypeError( raise TypeError(
"Composed elements must be Composable, got %r instead" % i) f"Composed elements must be Composable, got {i!r} instead")
wrapped.append(i) wrapped.append(i)
super(Composed, self).__init__(wrapped) super().__init__(wrapped)
@property @property
def seq(self): def seq(self):
@ -148,7 +147,7 @@ class Composed(Composable):
"foo", "bar" "foo", "bar"
""" """
if isinstance(joiner, string_types): if isinstance(joiner, str):
joiner = SQL(joiner) joiner = SQL(joiner)
elif not isinstance(joiner, SQL): elif not isinstance(joiner, SQL):
raise TypeError( raise TypeError(
@ -180,9 +179,9 @@ class SQL(Composable):
select "foo", "bar" from "table" select "foo", "bar" from "table"
""" """
def __init__(self, string): def __init__(self, string):
if not isinstance(string, string_types): if not isinstance(string, str):
raise TypeError("SQL values must be strings") raise TypeError("SQL values must be strings")
super(SQL, self).__init__(string) super().__init__(string)
@property @property
def string(self): def string(self):
@ -324,10 +323,10 @@ class Identifier(Composable):
raise TypeError("Identifier cannot be empty") raise TypeError("Identifier cannot be empty")
for s in strings: for s in strings:
if not isinstance(s, string_types): if not isinstance(s, str):
raise TypeError("SQL identifier parts must be strings") raise TypeError("SQL identifier parts must be strings")
super(Identifier, self).__init__(strings) super().__init__(strings)
@property @property
def strings(self): def strings(self):
@ -345,9 +344,7 @@ class Identifier(Composable):
"the Identifier wraps more than one than one string") "the Identifier wraps more than one than one string")
def __repr__(self): def __repr__(self):
return "%s(%s)" % ( return f"{self.__class__.__name__}({', '.join(map(repr, self._wrapped))})"
self.__class__.__name__,
', '.join(map(repr, self._wrapped)))
def as_string(self, context): def as_string(self, context):
return '.'.join(ext.quote_ident(s, context) for s in self._wrapped) return '.'.join(ext.quote_ident(s, context) for s in self._wrapped)
@ -392,7 +389,7 @@ class Literal(Composable):
a.prepare(conn) a.prepare(conn)
rv = a.getquoted() rv = a.getquoted()
if PY3 and isinstance(rv, bytes): if isinstance(rv, bytes):
rv = rv.decode(ext.encodings[conn.encoding]) rv = rv.decode(ext.encodings[conn.encoding])
return rv return rv
@ -426,14 +423,14 @@ class Placeholder(Composable):
""" """
def __init__(self, name=None): def __init__(self, name=None):
if isinstance(name, string_types): if isinstance(name, str):
if ')' in name: if ')' in name:
raise ValueError("invalid name: %r" % name) raise ValueError(f"invalid name: {name!r}")
elif name is not None: elif name is not None:
raise TypeError("expected string or None as name, got %r" % name) raise TypeError(f"expected string or None as name, got {name!r}")
super(Placeholder, self).__init__(name) super().__init__(name)
@property @property
def name(self): def name(self):
@ -441,12 +438,11 @@ class Placeholder(Composable):
return self._wrapped return self._wrapped
def __repr__(self): def __repr__(self):
return "Placeholder(%r)" % ( return f"Placeholder({self._wrapped if self._wrapped is not None else ''!r})"
self._wrapped if self._wrapped is not None else '',)
def as_string(self, context): def as_string(self, context):
if self._wrapped is not None: if self._wrapped is not None:
return "%%(%s)s" % self._wrapped return f"%({self._wrapped})"
else: else:
return "%s" return "%s"

View File

@ -65,7 +65,7 @@ class FixedOffsetTimezone(datetime.tzinfo):
try: try:
return cls._cache[key] return cls._cache[key]
except KeyError: except KeyError:
tz = super(FixedOffsetTimezone, cls).__new__(cls, offset, name) tz = super().__new__(cls, offset, name)
cls._cache[key] = tz cls._cache[key] = tz
return tz return tz

View File

@ -45,14 +45,12 @@ asis_getquoted(asisObject *self, PyObject *args)
} }
else { else {
rv = PyObject_Str(self->wrapped); rv = PyObject_Str(self->wrapped);
#if PY_3 /* unicode to bytes */
/* unicode to bytes in Py3 */
if (rv) { if (rv) {
PyObject *tmp = PyUnicode_AsUTF8String(rv); PyObject *tmp = PyUnicode_AsUTF8String(rv);
Py_DECREF(rv); Py_DECREF(rv);
rv = tmp; rv = tmp;
} }
#endif
} }
return rv; return rv;

View File

@ -76,15 +76,6 @@ binary_quote(binaryObject *self)
buffer_len = view.len; buffer_len = view.len;
} }
#if PY_2
if (!buffer && (Bytes_Check(self->wrapped) || PyBuffer_Check(self->wrapped))) {
if (PyObject_AsReadBuffer(self->wrapped, (const void **)&buffer,
&buffer_len) < 0) {
goto exit;
}
}
#endif
if (!buffer) { if (!buffer) {
goto exit; goto exit;
} }

View File

@ -81,8 +81,7 @@ pdecimal_getquoted(pdecimalObject *self, PyObject *args)
/* res may be unicode and may suffer for issue #57 */ /* res may be unicode and may suffer for issue #57 */
output: output:
#if PY_3 /* unicode to bytes */
/* unicode to bytes in Py3 */
{ {
PyObject *tmp = PyUnicode_AsUTF8String(res); PyObject *tmp = PyUnicode_AsUTF8String(res);
Py_DECREF(res); Py_DECREF(res);
@ -90,7 +89,6 @@ output:
goto end; goto end;
} }
} }
#endif
if ('-' == Bytes_AS_STRING(res)[0]) { if ('-' == Bytes_AS_STRING(res)[0]) {
/* Prepend a space in front of negative numbers (ticket #57) */ /* Prepend a space in front of negative numbers (ticket #57) */

View File

@ -54,8 +54,7 @@ pfloat_getquoted(pfloatObject *self, PyObject *args)
goto exit; goto exit;
} }
#if PY_3 /* unicode to bytes */
/* unicode to bytes in Py3 */
{ {
PyObject *tmp = PyUnicode_AsUTF8String(rv); PyObject *tmp = PyUnicode_AsUTF8String(rv);
Py_DECREF(rv); Py_DECREF(rv);
@ -63,7 +62,6 @@ pfloat_getquoted(pfloatObject *self, PyObject *args)
goto exit; goto exit;
} }
} }
#endif
if ('-' == Bytes_AS_STRING(rv)[0]) { if ('-' == Bytes_AS_STRING(rv)[0]) {
/* Prepend a space in front of negative numbers (ticket #57) */ /* Prepend a space in front of negative numbers (ticket #57) */

View File

@ -40,11 +40,7 @@ pint_getquoted(pintObject *self, PyObject *args)
/* Convert subclass to int to handle IntEnum and other subclasses /* Convert subclass to int to handle IntEnum and other subclasses
* whose str() is not the number. */ * whose str() is not the number. */
if (PyLong_CheckExact(self->wrapped) if (PyLong_CheckExact(self->wrapped)) {
#if PY_2
|| PyInt_CheckExact(self->wrapped)
#endif
) {
res = PyObject_Str(self->wrapped); res = PyObject_Str(self->wrapped);
} else { } else {
PyObject *tmp; PyObject *tmp;
@ -60,8 +56,7 @@ pint_getquoted(pintObject *self, PyObject *args)
goto exit; goto exit;
} }
#if PY_3 /* unicode to bytes */
/* unicode to bytes in Py3 */
{ {
PyObject *tmp = PyUnicode_AsUTF8String(res); PyObject *tmp = PyUnicode_AsUTF8String(res);
Py_DECREF(res); Py_DECREF(res);
@ -69,7 +64,6 @@ pint_getquoted(pintObject *self, PyObject *args)
goto exit; goto exit;
} }
} }
#endif
if ('-' == Bytes_AS_STRING(res)[0]) { if ('-' == Bytes_AS_STRING(res)[0]) {
/* Prepend a space in front of negative numbers (ticket #57) */ /* Prepend a space in front of negative numbers (ticket #57) */

View File

@ -85,11 +85,7 @@ _lobject_parse_mode(const char *mode)
pos += 1; pos += 1;
break; break;
default: default:
#if PY_2
rv |= LOBJECT_BINARY;
#else
rv |= LOBJECT_TEXT; rv |= LOBJECT_TEXT;
#endif
break; break;
} }

View File

@ -92,11 +92,7 @@ _get_superclass_adapter(PyObject *obj, PyObject *proto)
Py_ssize_t i, ii; Py_ssize_t i, ii;
type = Py_TYPE(obj); type = Py_TYPE(obj);
if (!( if (!(type->tp_mro)) {
#if PY_2
(Py_TPFLAGS_HAVE_CLASS & type->tp_flags) &&
#endif
type->tp_mro)) {
/* has no mro */ /* has no mro */
return Py_None; return Py_None;
} }

View File

@ -309,11 +309,6 @@ adapters_init(PyObject *module)
if (0 > microprotocols_add(&PyFloat_Type, NULL, (PyObject*)&pfloatType)) { if (0 > microprotocols_add(&PyFloat_Type, NULL, (PyObject*)&pfloatType)) {
goto exit; goto exit;
} }
#if PY_2
if (0 > microprotocols_add(&PyInt_Type, NULL, (PyObject*)&pintType)) {
goto exit;
}
#endif
if (0 > microprotocols_add(&PyLong_Type, NULL, (PyObject*)&pintType)) { if (0 > microprotocols_add(&PyLong_Type, NULL, (PyObject*)&pintType)) {
goto exit; goto exit;
} }
@ -322,25 +317,14 @@ adapters_init(PyObject *module)
} }
/* strings */ /* strings */
#if PY_2
if (0 > microprotocols_add(&PyString_Type, NULL, (PyObject*)&qstringType)) {
goto exit;
}
#endif
if (0 > microprotocols_add(&PyUnicode_Type, NULL, (PyObject*)&qstringType)) { if (0 > microprotocols_add(&PyUnicode_Type, NULL, (PyObject*)&qstringType)) {
goto exit; goto exit;
} }
/* binary */ /* binary */
#if PY_2
if (0 > microprotocols_add(&PyBuffer_Type, NULL, (PyObject*)&binaryType)) {
goto exit;
}
#else
if (0 > microprotocols_add(&PyBytes_Type, NULL, (PyObject*)&binaryType)) { if (0 > microprotocols_add(&PyBytes_Type, NULL, (PyObject*)&binaryType)) {
goto exit; goto exit;
} }
#endif
if (0 > microprotocols_add(&PyByteArray_Type, NULL, (PyObject*)&binaryType)) { if (0 > microprotocols_add(&PyByteArray_Type, NULL, (PyObject*)&binaryType)) {
goto exit; goto exit;
@ -1052,7 +1036,6 @@ static PyMethodDef psycopgMethods[] = {
{NULL, NULL, 0, NULL} /* Sentinel */ {NULL, NULL, 0, NULL} /* Sentinel */
}; };
#if PY_3
static struct PyModuleDef psycopgmodule = { static struct PyModuleDef psycopgmodule = {
PyModuleDef_HEAD_INIT, PyModuleDef_HEAD_INIT,
"_psycopg", "_psycopg",
@ -1064,7 +1047,6 @@ static struct PyModuleDef psycopgmodule = {
NULL, NULL,
NULL NULL
}; };
#endif
#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */ #ifndef PyMODINIT_FUNC /* declarations for DLL import/export */
#define PyMODINIT_FUNC void #define PyMODINIT_FUNC void
@ -1098,11 +1080,7 @@ INIT_MODULE(_psycopg)(void)
if (!(psyco_null = Bytes_FromString("NULL"))) { goto exit; } if (!(psyco_null = Bytes_FromString("NULL"))) { goto exit; }
/* initialize the module */ /* initialize the module */
#if PY_2
module = Py_InitModule("_psycopg", psycopgMethods);
#else
module = PyModule_Create(&psycopgmodule); module = PyModule_Create(&psycopgmodule);
#endif
if (!module) { goto exit; } if (!module) { goto exit; }
if (0 > add_module_constants(module)) { goto exit; } if (0 > add_module_constants(module)) { goto exit; }
@ -1118,9 +1096,5 @@ INIT_MODULE(_psycopg)(void)
Dprintf("psycopgmodule: module initialization complete"); Dprintf("psycopgmodule: module initialization complete");
exit: exit:
#if PY_3
return module; return module;
#else
return;
#endif
} }

View File

@ -27,30 +27,11 @@
#ifndef PSYCOPG_PYTHON_H #ifndef PSYCOPG_PYTHON_H
#define PSYCOPG_PYTHON_H 1 #define PSYCOPG_PYTHON_H 1
#define PY_2 (PY_MAJOR_VERSION == 2) #if PY_VERSION_HEX < 0x03060000
#define PY_3 (PY_MAJOR_VERSION == 3)
#if PY_2 && PY_VERSION_HEX < 0x02070000
#error "psycopg requires Python 2.7"
#endif
#if PY_3 && PY_VERSION_HEX < 0x03060000
#error "psycopg requires Python 3.6" #error "psycopg requires Python 3.6"
#endif #endif
#include <structmember.h> #include <structmember.h>
#if PY_2
#include <stringobject.h>
#endif
/* hash() return size changed around version 3.2a4 on 64bit platforms. Before
* this, the return size was always a long, regardless of arch. ~3.2
* introduced the Py_hash_t & Py_uhash_t typedefs with the resulting sizes
* based upon arch. */
#if PY_VERSION_HEX < 0x030200A4
typedef long Py_hash_t;
typedef unsigned long Py_uhash_t;
#endif
/* Since Py_TYPE() is changed to the inline static function, /* Since Py_TYPE() is changed to the inline static function,
* Py_TYPE(obj) = new_type must be replaced with Py_SET_TYPE(obj, new_type) * Py_TYPE(obj) = new_type must be replaced with Py_SET_TYPE(obj, new_type)
@ -72,43 +53,6 @@ typedef unsigned long Py_uhash_t;
#define FORMAT_CODE_SIZE_T "%zu" #define FORMAT_CODE_SIZE_T "%zu"
#endif #endif
#if PY_2
#define Text_Type PyString_Type
#define Text_Check(s) PyString_Check(s)
#define Text_Format(f,a) PyString_Format(f,a)
#define Text_FromUTF8(s) PyString_FromString(s)
#define Text_FromUTF8AndSize(s,n) PyString_FromStringAndSize(s,n)
#define Bytes_Type PyString_Type
#define Bytes_Check PyString_Check
#define Bytes_CheckExact PyString_CheckExact
#define Bytes_AS_STRING PyString_AS_STRING
#define Bytes_GET_SIZE PyString_GET_SIZE
#define Bytes_Size PyString_Size
#define Bytes_AsString PyString_AsString
#define Bytes_AsStringAndSize PyString_AsStringAndSize
#define Bytes_FromString PyString_FromString
#define Bytes_FromStringAndSize PyString_FromStringAndSize
#define Bytes_FromFormat PyString_FromFormat
#define Bytes_ConcatAndDel PyString_ConcatAndDel
#define _Bytes_Resize _PyString_Resize
#define PyDateTime_DELTA_GET_DAYS(o) (((PyDateTime_Delta*)o)->days)
#define PyDateTime_DELTA_GET_SECONDS(o) (((PyDateTime_Delta*)o)->seconds)
#define PyDateTime_DELTA_GET_MICROSECONDS(o) (((PyDateTime_Delta*)o)->microseconds)
#define INIT_MODULE(m) init ## m
/* fix #961, but don't change all types to longs. Sure someone will complain. */
#define PyLong_FromOid(x) (((x) & 0x80000000) ? \
PyLong_FromUnsignedLong((unsigned long)(x)) : \
PyInt_FromLong((x)))
#endif /* PY_2 */
#if PY_3
#define Text_Type PyUnicode_Type #define Text_Type PyUnicode_Type
#define Text_Check(s) PyUnicode_Check(s) #define Text_Check(s) PyUnicode_Check(s)
#define Text_Format(f,a) PyUnicode_Format(f,a) #define Text_Format(f,a) PyUnicode_Format(f,a)
@ -149,8 +93,6 @@ typedef unsigned long Py_uhash_t;
#define PyLong_FromOid(x) (PyLong_FromUnsignedLong((unsigned long)(x))) #define PyLong_FromOid(x) (PyLong_FromUnsignedLong((unsigned long)(x)))
#endif /* PY_3 */
/* expose Oid attributes in Python C objects */ /* expose Oid attributes in Python C objects */
#define T_OID T_UINT #define T_OID T_UINT

View File

@ -475,11 +475,7 @@ PyTypeObject typecastType = {
0, /*tp_print*/ 0, /*tp_print*/
0, /*tp_getattr*/ 0, /*tp_getattr*/
0, /*tp_setattr*/ 0, /*tp_setattr*/
#if PY_VERSION_HEX < 0x03000000
typecast_cmp, /*tp_compare*/
#else
0, /*tp_reserved*/ 0, /*tp_reserved*/
#endif
typecast_repr, /*tp_repr*/ typecast_repr, /*tp_repr*/
0, /*tp_as_number*/ 0, /*tp_as_number*/
0, /*tp_as_sequence*/ 0, /*tp_as_sequence*/
@ -651,11 +647,7 @@ typecast_cast(PyObject *obj, const char *str, Py_ssize_t len, PyObject *curs)
* Notice that this way it is about impossible to create a python * Notice that this way it is about impossible to create a python
* typecaster on a binary type. */ * typecaster on a binary type. */
if (str) { if (str) {
#if PY_2
s = PyString_FromStringAndSize(str, len);
#else
s = conn_decode(((cursorObject *)curs)->conn, str, len); s = conn_decode(((cursorObject *)curs)->conn, str, len);
#endif
} }
else { else {
Py_INCREF(Py_None); Py_INCREF(Py_None);

View File

@ -26,22 +26,7 @@
/** INTEGER - cast normal integers (4 bytes) to python int **/ /** INTEGER - cast normal integers (4 bytes) to python int **/
#if PY_2
static PyObject *
typecast_INTEGER_cast(const char *s, Py_ssize_t len, PyObject *curs)
{
char buffer[12];
if (s == NULL) { Py_RETURN_NONE; }
if (s[len] != '\0') {
strncpy(buffer, s, (size_t) len); buffer[len] = '\0';
s = buffer;
}
return PyInt_FromString((char *)s, NULL, 0);
}
#else
#define typecast_INTEGER_cast typecast_LONGINTEGER_cast #define typecast_INTEGER_cast typecast_LONGINTEGER_cast
#endif
/** LONGINTEGER - cast long integers (8 bytes) to python long **/ /** LONGINTEGER - cast long integers (8 bytes) to python long **/
@ -67,11 +52,7 @@ typecast_FLOAT_cast(const char *s, Py_ssize_t len, PyObject *curs)
if (s == NULL) { Py_RETURN_NONE; } if (s == NULL) { Py_RETURN_NONE; }
if (!(str = Text_FromUTF8AndSize(s, len))) { return NULL; } if (!(str = Text_FromUTF8AndSize(s, len))) { return NULL; }
#if PY_2
flo = PyFloat_FromString(str, NULL);
#else
flo = PyFloat_FromString(str); flo = PyFloat_FromString(str);
#endif
Py_DECREF(str); Py_DECREF(str);
return flo; return flo;
} }
@ -103,11 +84,7 @@ typecast_UNICODE_cast(const char *s, Py_ssize_t len, PyObject *curs)
/** STRING - cast strings of any type to python string **/ /** STRING - cast strings of any type to python string **/
#if PY_2
#define typecast_STRING_cast typecast_BYTES_cast
#else
#define typecast_STRING_cast typecast_UNICODE_cast #define typecast_STRING_cast typecast_UNICODE_cast
#endif
/** BOOLEAN - cast boolean value into right python object **/ /** BOOLEAN - cast boolean value into right python object **/

View File

@ -54,39 +54,6 @@ chunk_repr(chunkObject *self)
); );
} }
#if PY_2
static Py_ssize_t
chunk_getreadbuffer(chunkObject *self, Py_ssize_t segment, void **ptr)
{
if (segment != 0)
{
PyErr_SetString(PyExc_SystemError,
"accessing non-existant buffer segment");
return -1;
}
*ptr = self->base;
return self->len;
}
static Py_ssize_t
chunk_getsegcount(chunkObject *self, Py_ssize_t *lenp)
{
if (lenp != NULL)
*lenp = self->len;
return 1;
}
static PyBufferProcs chunk_as_buffer =
{
(readbufferproc) chunk_getreadbuffer,
(writebufferproc) NULL,
(segcountproc) chunk_getsegcount,
(charbufferproc) NULL
};
#else
/* 3.0 buffer interface */ /* 3.0 buffer interface */
int chunk_getbuffer(PyObject *_self, Py_buffer *view, int flags) int chunk_getbuffer(PyObject *_self, Py_buffer *view, int flags)
{ {
@ -105,8 +72,6 @@ static PyBufferProcs chunk_as_buffer =
NULL, NULL,
}; };
#endif
#define chunk_doc "memory chunk" #define chunk_doc "memory chunk"
PyTypeObject chunkType = { PyTypeObject chunkType = {
@ -183,13 +148,8 @@ typecast_BINARY_cast(const char *s, Py_ssize_t l, PyObject *curs)
buffer = NULL; buffer = NULL;
chunk->len = (Py_ssize_t)len; chunk->len = (Py_ssize_t)len;
#if PY_2
if ((res = PyBuffer_FromObject((PyObject *)chunk, 0, chunk->len)) == NULL)
goto exit;
#else
if ((res = PyMemoryView_FromObject((PyObject*)chunk)) == NULL) if ((res = PyMemoryView_FromObject((PyObject*)chunk)) == NULL)
goto exit; goto exit;
#endif
exit: exit:
Py_XDECREF((PyObject *)chunk); Py_XDECREF((PyObject *)chunk);

View File

@ -190,7 +190,7 @@ psyco_ensure_bytes(PyObject *obj)
/* Take a Python object and return text from it. /* Take a Python object and return text from it.
* *
* On Py3 this means converting bytes to unicode. On Py2 bytes are fine. * This means converting bytes to unicode.
* *
* The function is ref neutral: steals a ref from obj and adds one to the * The function is ref neutral: steals a ref from obj and adds one to the
* return value. It is safe to call it on NULL. * return value. It is safe to call it on NULL.
@ -198,9 +198,6 @@ psyco_ensure_bytes(PyObject *obj)
STEALS(1) PyObject * STEALS(1) PyObject *
psyco_ensure_text(PyObject *obj) psyco_ensure_text(PyObject *obj)
{ {
#if PY_2
return obj;
#else
if (obj) { if (obj) {
/* bytes to unicode in Py3 */ /* bytes to unicode in Py3 */
PyObject *rv = PyUnicode_FromEncodedObject(obj, "utf8", "replace"); PyObject *rv = PyUnicode_FromEncodedObject(obj, "utf8", "replace");
@ -210,7 +207,6 @@ psyco_ensure_text(PyObject *obj)
else { else {
return NULL; return NULL;
} }
#endif
} }
/* Check if a file derives from TextIOBase. /* Check if a file derives from TextIOBase.
@ -309,24 +305,13 @@ exit:
/* Convert a C string into Python Text using a specified codec. /* Convert a C string into Python Text using a specified codec.
* *
* The codec is the python function codec.getdecoder(enc). It is only used on * The codec is the python function codec.getdecoder(enc).
* Python 3 to return unicode: in Py2 the function returns a string.
* *
* len is optional: use -1 to have it calculated by the function. * len is optional: use -1 to have it calculated by the function.
*/ */
PyObject * PyObject *
psyco_text_from_chars_safe(const char *str, Py_ssize_t len, PyObject *decoder) psyco_text_from_chars_safe(const char *str, Py_ssize_t len, PyObject *decoder)
{ {
#if PY_2
if (!str) { Py_RETURN_NONE; }
if (len < 0) { len = strlen(str); }
return PyString_FromStringAndSize(str, len);
#else
static PyObject *replace = NULL; static PyObject *replace = NULL;
PyObject *rv = NULL; PyObject *rv = NULL;
PyObject *b = NULL; PyObject *b = NULL;
@ -356,8 +341,6 @@ exit:
Py_XDECREF(t); Py_XDECREF(t);
Py_XDECREF(b); Py_XDECREF(b);
return rv; return rv;
#endif
} }

View File

@ -426,10 +426,7 @@ def check_libpq_version():
.decode('ascii') .decode('ascii')
.rstrip() .rstrip()
) )
assert want_ver == got_ver, "libpq version mismatch: %r != %r" % ( assert want_ver == got_ver, f"libpq version mismatch: {want_ver!r} != {got_ver!r}"
want_ver,
got_ver,
)
def run_test_suite(): def run_test_suite():
@ -671,7 +668,7 @@ def which(name):
if os.path.isfile(fn): if os.path.isfile(fn):
return fn return fn
raise Exception("couldn't find program on path: %s" % name) raise Exception(f"couldn't find program on path: {name}")
class Options: class Options:
@ -683,7 +680,7 @@ class Options:
def py_ver(self): def py_ver(self):
"""The Python version to build as 2 digits string.""" """The Python version to build as 2 digits string."""
rv = os.environ['PY_VER'] rv = os.environ['PY_VER']
assert rv in ('27', '36', '37', '38', '39'), rv assert rv in ('36', '37', '38', '39'), rv
return rv return rv
@property @property
@ -766,11 +763,9 @@ class Options:
def vs_ver(self): def vs_ver(self):
# https://wiki.python.org/moin/WindowsCompilers # https://wiki.python.org/moin/WindowsCompilers
# https://www.appveyor.com/docs/windows-images-software/#python # https://www.appveyor.com/docs/windows-images-software/#python
# Py 2.7 = VS Ver. 9.0 (VS 2008)
# Py 3.6--3.8 = VS Ver. 14.0 (VS 2015) # Py 3.6--3.8 = VS Ver. 14.0 (VS 2015)
# Py 3.9 = VS Ver. 16.0 (VS 2019) # Py 3.9 = VS Ver. 16.0 (VS 2019)
vsvers = { vsvers = {
'27': '9.0',
'36': '14.0', '36': '14.0',
'37': '14.0', '37': '14.0',
'38': '14.0', '38': '14.0',
@ -835,7 +830,7 @@ class Options:
def dist_dir(self): def dist_dir(self):
"""The directory where to build packages to distribute.""" """The directory where to build packages to distribute."""
return ( return (
self.package_dir / 'dist' / ('psycopg2-%s' % self.package_version) self.package_dir / 'dist' / (f'psycopg2-{self.package_version}')
) )

View File

@ -16,7 +16,6 @@ The script can be run at a new PostgreSQL release to refresh the module.
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details. # License for more details.
from __future__ import print_function
import re import re
import sys import sys
@ -26,7 +25,7 @@ from collections import defaultdict
def main(): def main():
if len(sys.argv) != 2: if len(sys.argv) != 2:
print("usage: %s /path/to/errorcodes.py" % sys.argv[0], file=sys.stderr) print(f"usage: {sys.argv[0]} /path/to/errorcodes.py", file=sys.stderr)
return 2 return 2
filename = sys.argv[1] filename = sys.argv[1]
@ -85,7 +84,7 @@ def parse_errors_txt(url):
continue continue
# We don't expect anything else # We don't expect anything else
raise ValueError("unexpected line:\n%s" % line) raise ValueError(f"unexpected line:\n{line}")
return classes, errors return classes, errors
@ -102,9 +101,7 @@ def fetch_errors(versions):
for version in versions: for version in versions:
print(version, file=sys.stderr) print(version, file=sys.stderr)
tver = tuple(map(int, version.split()[0].split('.'))) tver = tuple(map(int, version.split()[0].split('.')))
tag = '%s%s_STABLE' % ( tag = f"{tver[0] >= 10 and 'REL_' or 'REL'}{version.replace('.', '_')}_STABLE"
(tver[0] >= 10 and 'REL_' or 'REL'),
version.replace('.', '_'))
c1, e1 = parse_errors_txt(errors_txt_url % tag) c1, e1 = parse_errors_txt(errors_txt_url % tag)
classes.update(c1) classes.update(c1)
@ -136,19 +133,19 @@ def generate_module_data(classes, errors):
for clscode, clslabel in sorted(classes.items()): for clscode, clslabel in sorted(classes.items()):
err = clslabel.split(" - ")[1].split("(")[0] \ err = clslabel.split(" - ")[1].split("(")[0] \
.strip().replace(" ", "_").replace('/', "_").upper() .strip().replace(" ", "_").replace('/', "_").upper()
yield "CLASS_%s = %r" % (err, clscode) yield f"CLASS_{err} = {clscode!r}"
seen = set() seen = set()
for clscode, clslabel in sorted(classes.items()): for clscode, clslabel in sorted(classes.items()):
yield "" yield ""
yield "# %s" % clslabel yield f"# {clslabel}"
for errcode, errlabel in sorted(errors[clscode].items()): for errcode, errlabel in sorted(errors[clscode].items()):
if errlabel in seen: if errlabel in seen:
raise Exception("error label already seen: %s" % errlabel) raise Exception(f"error label already seen: {errlabel}")
seen.add(errlabel) seen.add(errlabel)
yield "%s = %r" % (errlabel, errcode) yield f"{errlabel} = {errcode!r}"
if __name__ == '__main__': if __name__ == '__main__':

View File

@ -16,7 +16,6 @@ The script can be run at a new PostgreSQL release to refresh the module.
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details. # License for more details.
from __future__ import print_function
import os import os
import re import re
@ -69,7 +68,7 @@ def parse_errors_txt(url):
continue continue
# We don't expect anything else # We don't expect anything else
raise ValueError("unexpected line:\n%s" % line) raise ValueError(f"unexpected line:\n{line}")
return classes, errors return classes, errors
@ -86,9 +85,7 @@ def fetch_errors(versions):
for version in versions: for version in versions:
print(version, file=sys.stderr) print(version, file=sys.stderr)
tver = tuple(map(int, version.split()[0].split('.'))) tver = tuple(map(int, version.split()[0].split('.')))
tag = '%s%s_STABLE' % ( tag = f"{tver[0] >= 10 and 'REL_' or 'REL'}{version.replace('.', '_')}_STABLE"
(tver[0] >= 10 and 'REL_' or 'REL'),
version.replace('.', '_'))
c1, e1 = parse_errors_txt(errors_txt_url % tag) c1, e1 = parse_errors_txt(errors_txt_url % tag)
classes.update(c1) classes.update(c1)
@ -119,7 +116,7 @@ def generate_module_data(classes, errors):
# success and warning - never raised # success and warning - never raised
continue continue
yield "\n/* %s */" % clslabel yield f"\n/* {clslabel} */"
for errcode, errlabel in sorted(errors[clscode].items()): for errcode, errlabel in sorted(errors[clscode].items()):
if errcode in specific: if errcode in specific:
@ -127,7 +124,7 @@ def generate_module_data(classes, errors):
else: else:
clsname = errlabel.title().replace('_', '') clsname = errlabel.title().replace('_', '')
if clsname in seen: if clsname in seen:
raise Exception("class already existing: %s" % clsname) raise Exception(f"class already existing: {clsname}")
seen.add(clsname) seen.add(clsname)
yield tmpl % { yield tmpl % {

View File

@ -18,7 +18,6 @@ script exits with error 1.
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details. # License for more details.
from __future__ import print_function
import argparse import argparse
import gc import gc
@ -37,27 +36,27 @@ def main():
if opt.suite: if opt.suite:
test = getattr(test, opt.suite) test = getattr(test, opt.suite)
sys.stdout.write("test suite %s\n" % test.__name__) sys.stdout.write(f"test suite {test.__name__}\n")
for i in range(1, opt.nruns + 1): for i in range(1, opt.nruns + 1):
sys.stdout.write("test suite run %d of %d\n" % (i, opt.nruns)) sys.stdout.write(f"test suite run {i} of {opt.nruns}\n")
runner = unittest.TextTestRunner() runner = unittest.TextTestRunner()
runner.run(test.test_suite()) runner.run(test.test_suite())
dump(i, opt) dump(i, opt)
f1 = open('debug-%02d.txt' % (opt.nruns - 1)).readlines() f1 = open(f'debug-{(opt.nruns - 1):02}.txt').readlines()
f2 = open('debug-%02d.txt' % opt.nruns).readlines() f2 = open(f'debug-{opt.nruns:02}.txt').readlines()
for line in difflib.unified_diff(f1, f2, for line in difflib.unified_diff(f1, f2,
"run %d" % (opt.nruns - 1), "run %d" % opt.nruns): f"run {opt.nruns - 1}", f"run {opt.nruns}"):
sys.stdout.write(line) sys.stdout.write(line)
rv = f1 != f2 and 1 or 0 rv = f1 != f2 and 1 or 0
if opt.objs: if opt.objs:
f1 = open('objs-%02d.txt' % (opt.nruns - 1)).readlines() f1 = open(f'objs-{(opt.nruns - 1):02}.txt').readlines()
f2 = open('objs-%02d.txt' % opt.nruns).readlines() f2 = open(f'objs-{opt.nruns:02}.txt').readlines()
for line in difflib.unified_diff(f1, f2, for line in difflib.unified_diff(f1, f2,
"run %d" % (opt.nruns - 1), "run %d" % opt.nruns): f"run {opt.nruns - 1}", f"run {opt.nruns}"):
sys.stdout.write(line) sys.stdout.write(line)
return rv return rv
@ -86,7 +85,7 @@ def dump(i, opt):
pprint( pprint(
sorted(((v, str(k)) for k, v in c.items()), reverse=True), sorted(((v, str(k)) for k, v in c.items()), reverse=True),
stream=open("debug-%02d.txt" % i, "w")) stream=open(f"debug-{i:02}.txt", "w"))
if opt.objs: if opt.objs:
co = [] co = []
@ -101,7 +100,7 @@ def dump(i, opt):
else: else:
co.sort() co.sort()
pprint(co, stream=open("objs-%02d.txt" % i, "w")) pprint(co, stream=open(f"objs-{i:02}.txt", "w"))
if __name__ == '__main__': if __name__ == '__main__':

View File

@ -26,8 +26,6 @@ UPDATEs. psycopg2 also provide full asynchronous operations and support
for coroutine libraries. for coroutine libraries.
""" """
# Note: The setup.py must be compatible with both Python 2 and 3
import os import os
import sys import sys
@ -38,7 +36,6 @@ from distutils.command.build_ext import build_ext
from distutils.sysconfig import get_python_inc from distutils.sysconfig import get_python_inc
from distutils.ccompiler import get_default_compiler from distutils.ccompiler import get_default_compiler
from distutils.errors import CompileError from distutils.errors import CompileError
from distutils.util import get_platform
try: try:
import configparser import configparser
@ -58,13 +55,12 @@ Development Status :: 5 - Production/Stable
Intended Audience :: Developers Intended Audience :: Developers
License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)
Programming Language :: Python Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3 Programming Language :: Python :: 3
Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.9
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: Implementation :: CPython Programming Language :: Python :: Implementation :: CPython
Programming Language :: C Programming Language :: C
Programming Language :: SQL Programming Language :: SQL
@ -118,8 +114,7 @@ For further information please check the 'doc/src/install.rst' file (also at
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE) stderr=subprocess.PIPE)
except OSError: except OSError:
raise Warning("Unable to find 'pg_config' file in '%s'" % raise Warning(f"Unable to find 'pg_config' file in '{self.pg_config_exe}'")
self.pg_config_exe)
pg_config_process.stdin.close() pg_config_process.stdin.close()
result = pg_config_process.stdout.readline().strip() result = pg_config_process.stdout.readline().strip()
if not result: if not result:
@ -174,7 +169,7 @@ For further information please check the 'doc/src/install.rst' file (also at
try: try:
pg_inst_list_key = winreg.OpenKey(reg, pg_inst_list_key = winreg.OpenKey(reg,
'SOFTWARE\\PostgreSQL\\Installations') 'SOFTWARE\\PostgreSQL\\Installations')
except EnvironmentError: except OSError:
# No PostgreSQL installation, as best as we can tell. # No PostgreSQL installation, as best as we can tell.
return None return None
@ -182,7 +177,7 @@ For further information please check the 'doc/src/install.rst' file (also at
# Determine the name of the first subkey, if any: # Determine the name of the first subkey, if any:
try: try:
first_sub_key_name = winreg.EnumKey(pg_inst_list_key, 0) first_sub_key_name = winreg.EnumKey(pg_inst_list_key, 0)
except EnvironmentError: except OSError:
return None return None
pg_first_inst_key = winreg.OpenKey(reg, pg_first_inst_key = winreg.OpenKey(reg,
@ -201,12 +196,6 @@ For further information please check the 'doc/src/install.rst' file (also at
if not os.path.exists(pg_config_path): if not os.path.exists(pg_config_path):
return None return None
# Support unicode paths, if this version of Python provides the
# necessary infrastructure:
if sys.version_info[0] < 3:
pg_config_path = pg_config_path.encode(
sys.getfilesystemencoding())
return pg_config_path return pg_config_path
@ -307,30 +296,6 @@ For further information please check the 'doc/src/install.rst' file (also at
""") """)
raise raise
sysVer = sys.version_info[:2]
# For Python versions that use MSVC compiler 2008, re-insert the
# manifest into the resulting .pyd file.
if self.compiler_is_msvc() and sysVer == (2, 7):
platform = get_platform()
# Default to the x86 manifest
manifest = '_psycopg.vc9.x86.manifest'
if platform == 'win-amd64':
manifest = '_psycopg.vc9.amd64.manifest'
try:
ext_path = self.get_ext_fullpath(extension.name)
except AttributeError:
ext_path = os.path.join(self.build_lib,
'psycopg2', '_psycopg.pyd')
# Make sure spawn() will work if compile() was never
# called. https://github.com/psycopg/psycopg2/issues/380
if not self.compiler.initialized:
self.compiler.initialize()
self.compiler.spawn(
['mt.exe', '-nologo', '-manifest',
os.path.join('psycopg', manifest),
'-outputresource:%s;2' % ext_path])
def finalize_win32(self): def finalize_win32(self):
"""Finalize build system configuration on win32 platform.""" """Finalize build system configuration on win32 platform."""
@ -430,8 +395,7 @@ For further information please check the 'doc/src/install.rst' file (also at
pgpatch = int(pgpatch) pgpatch = int(pgpatch)
else: else:
sys.stderr.write( sys.stderr.write(
"Error: could not determine PostgreSQL version from '%s'" f"Error: could not determine PostgreSQL version from '{pgversion}'")
% pgversion)
sys.exit(1) sys.exit(1)
define_macros.append(("PG_VERSION_NUM", "%d%02d%02d" % define_macros.append(("PG_VERSION_NUM", "%d%02d%02d" %
@ -453,7 +417,7 @@ For further information please check the 'doc/src/install.rst' file (also at
except Warning: except Warning:
w = sys.exc_info()[1] # work around py 2/3 different syntax w = sys.exc_info()[1] # work around py 2/3 different syntax
sys.stderr.write("Error: %s\n" % w) sys.stderr.write(f"Error: {w}\n")
sys.exit(1) sys.exit(1)
if hasattr(self, "finalize_" + sys.platform): if hasattr(self, "finalize_" + sys.platform):
@ -546,7 +510,7 @@ version_flags.append('pq3') # no more a choice
version_flags.append('ext') # no more a choice version_flags.append('ext') # no more a choice
if version_flags: if version_flags:
PSYCOPG_VERSION_EX = PSYCOPG_VERSION + " (%s)" % ' '.join(version_flags) PSYCOPG_VERSION_EX = PSYCOPG_VERSION + f" ({' '.join(version_flags)})"
else: else:
PSYCOPG_VERSION_EX = PSYCOPG_VERSION PSYCOPG_VERSION_EX = PSYCOPG_VERSION
@ -598,7 +562,7 @@ setup(name="psycopg2",
url="https://psycopg.org/", url="https://psycopg.org/",
license="LGPL with exceptions", license="LGPL with exceptions",
platforms=["any"], platforms=["any"],
python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*', python_requires='>=3.6',
description=readme.split("\n")[0], description=readme.split("\n")[0],
long_description="\n".join(readme.split("\n")[2:]).lstrip(), long_description="\n".join(readme.split("\n")[2:]).lstrip(),
classifiers=[x for x in classifiers.split("\n") if x], classifiers=[x for x in classifiers.split("\n") if x],

View File

@ -58,9 +58,6 @@ from . import test_types_basic
from . import test_types_extras from . import test_types_extras
from . import test_with from . import test_with
if sys.version_info[:2] < (3, 6):
from . import test_async_keyword
def test_suite(): def test_suite():
# If connection to test db fails, bail out early. # If connection to test db fails, bail out early.
@ -76,8 +73,6 @@ def test_suite():
suite = unittest.TestSuite() suite = unittest.TestSuite()
suite.addTest(test_async.test_suite()) suite.addTest(test_async.test_suite())
if sys.version_info[:2] < (3, 6):
suite.addTest(test_async_keyword.test_suite())
suite.addTest(test_bugX000.test_suite()) suite.addTest(test_bugX000.test_suite())
suite.addTest(test_bug_gc.test_suite()) suite.addTest(test_bug_gc.test_suite())
suite.addTest(test_cancel.test_suite()) suite.addTest(test_cancel.test_suite())

View File

@ -185,13 +185,8 @@ class DatabaseAPI20Test(unittest.TestCase):
def test_Exceptions(self): def test_Exceptions(self):
# Make sure required exceptions exist, and are in the # Make sure required exceptions exist, and are in the
# defined hierarchy. # defined hierarchy.
if sys.version[0] == '3': #under Python 3 StardardError no longer exists
self.failUnless(issubclass(self.driver.Warning,Exception)) self.failUnless(issubclass(self.driver.Warning,Exception))
self.failUnless(issubclass(self.driver.Error,Exception)) self.failUnless(issubclass(self.driver.Error,Exception))
else:
self.failUnless(issubclass(self.driver.Warning,StandardError))
self.failUnless(issubclass(self.driver.Error,StandardError))
self.failUnless( self.failUnless(
issubclass(self.driver.InterfaceError,self.driver.Error) issubclass(self.driver.InterfaceError,self.driver.Error)
) )
@ -547,7 +542,7 @@ class DatabaseAPI20Test(unittest.TestCase):
tests. tests.
''' '''
populate = [ populate = [
"insert into %sbooze values ('%s')" % (self.table_prefix,s) f"insert into {self.table_prefix}booze values ('{s}')"
for s in self.samples for s in self.samples
] ]
return populate return populate

View File

@ -19,7 +19,7 @@ class TwoPhaseCommitTests(unittest.TestCase):
def make_xid(self, con): def make_xid(self, con):
id = TwoPhaseCommitTests._last_id id = TwoPhaseCommitTests._last_id
TwoPhaseCommitTests._last_id += 1 TwoPhaseCommitTests._last_id += 1
return con.xid(42, "%s%d" % (self._global_id_prefix, id), "qualifier") return con.xid(42, f"{self._global_id_prefix}{id}", "qualifier")
def test_xid(self): def test_xid(self):
con = self.connect() con = self.connect()

View File

@ -1,5 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python
# -*- coding: utf-8 -*-
# test_async.py - unit test for asynchronous API # test_async.py - unit test for asynchronous API
# #
@ -37,7 +36,7 @@ from .testutils import (ConnectingTestCase, StringIO, skip_before_postgres,
skip_if_crdb, crdb_version, slow) skip_if_crdb, crdb_version, slow)
class PollableStub(object): class PollableStub:
"""A 'pollable' wrapper allowing analysis of the `poll()` calls.""" """A 'pollable' wrapper allowing analysis of the `poll()` calls."""
def __init__(self, pollable): def __init__(self, pollable):
self.pollable = pollable self.pollable = pollable

View File

@ -1,225 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# test_async_keyword.py - test for objects using 'async' as attribute/param
#
# Copyright (C) 2017-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
# Copyright (C) 2020 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# In addition, as a special exception, the copyright holders give
# permission to link this program with the OpenSSL library (or with
# modified versions of OpenSSL that use the same license as OpenSSL),
# and distribute linked combinations including the two.
#
# You must obey the GNU Lesser General Public License in all respects for
# all of the code used other than OpenSSL.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
import time
from select import select
import psycopg2
from psycopg2 import extras
from .testconfig import dsn
import unittest
from .testutils import ConnectingTestCase, skip_before_postgres, slow
from .test_replication import ReplicationTestCase, skip_repl_if_green
from psycopg2.extras import LogicalReplicationConnection, StopReplication
class AsyncTests(ConnectingTestCase):
def setUp(self):
ConnectingTestCase.setUp(self)
self.sync_conn = self.conn
self.conn = self.connect(async=True)
self.wait(self.conn)
curs = self.conn.cursor()
curs.execute('''
CREATE TEMPORARY TABLE table1 (
id int PRIMARY KEY
)''')
self.wait(curs)
def test_connection_setup(self):
cur = self.conn.cursor()
sync_cur = self.sync_conn.cursor()
del cur, sync_cur
self.assert_(self.conn.async)
self.assert_(not self.sync_conn.async)
# the async connection should be autocommit
self.assert_(self.conn.autocommit)
# check other properties to be found on the connection
self.assert_(self.conn.server_version)
self.assert_(self.conn.protocol_version in (2, 3))
self.assert_(self.conn.encoding in psycopg2.extensions.encodings)
def test_async_subclass(self):
class MyConn(psycopg2.extensions.connection):
def __init__(self, dsn, async=0):
psycopg2.extensions.connection.__init__(self, dsn, async=async)
conn = self.connect(connection_factory=MyConn, async=True)
self.assert_(isinstance(conn, MyConn))
self.assert_(conn.async)
conn.close()
def test_async_connection_error_message(self):
try:
cnn = psycopg2.connect('dbname=thisdatabasedoesntexist', async=True)
self.wait(cnn)
except psycopg2.Error as e:
self.assertNotEqual(str(e), "asynchronous connection failed",
"connection error reason lost")
else:
self.fail("no exception raised")
class CancelTests(ConnectingTestCase):
def setUp(self):
ConnectingTestCase.setUp(self)
cur = self.conn.cursor()
cur.execute('''
CREATE TEMPORARY TABLE table1 (
id int PRIMARY KEY
)''')
self.conn.commit()
@slow
@skip_before_postgres(8, 2)
def test_async_cancel(self):
async_conn = psycopg2.connect(dsn, async=True)
self.assertRaises(psycopg2.OperationalError, async_conn.cancel)
extras.wait_select(async_conn)
cur = async_conn.cursor()
cur.execute("select pg_sleep(10)")
time.sleep(1)
self.assertTrue(async_conn.isexecuting())
async_conn.cancel()
self.assertRaises(psycopg2.extensions.QueryCanceledError,
extras.wait_select, async_conn)
cur.execute("select 1")
extras.wait_select(async_conn)
self.assertEqual(cur.fetchall(), [(1, )])
def test_async_connection_cancel(self):
async_conn = psycopg2.connect(dsn, async=True)
async_conn.close()
self.assertTrue(async_conn.closed)
class ConnectTestCase(unittest.TestCase):
def setUp(self):
self.args = None
def connect_stub(dsn, connection_factory=None, async=False):
self.args = (dsn, connection_factory, async)
self._connect_orig = psycopg2._connect
psycopg2._connect = connect_stub
def tearDown(self):
psycopg2._connect = self._connect_orig
def test_there_has_to_be_something(self):
self.assertRaises(TypeError, psycopg2.connect)
self.assertRaises(TypeError, psycopg2.connect,
connection_factory=lambda dsn, async=False: None)
self.assertRaises(TypeError, psycopg2.connect,
async=True)
def test_factory(self):
def f(dsn, async=False):
pass
psycopg2.connect(database='foo', host='baz', connection_factory=f)
self.assertDsnEqual(self.args[0], 'dbname=foo host=baz')
self.assertEqual(self.args[1], f)
self.assertEqual(self.args[2], False)
psycopg2.connect("dbname=foo host=baz", connection_factory=f)
self.assertDsnEqual(self.args[0], 'dbname=foo host=baz')
self.assertEqual(self.args[1], f)
self.assertEqual(self.args[2], False)
def test_async(self):
psycopg2.connect(database='foo', host='baz', async=1)
self.assertDsnEqual(self.args[0], 'dbname=foo host=baz')
self.assertEqual(self.args[1], None)
self.assert_(self.args[2])
psycopg2.connect("dbname=foo host=baz", async=True)
self.assertDsnEqual(self.args[0], 'dbname=foo host=baz')
self.assertEqual(self.args[1], None)
self.assert_(self.args[2])
class AsyncReplicationTest(ReplicationTestCase):
@skip_before_postgres(9, 4) # slots require 9.4
@skip_repl_if_green
def test_async_replication(self):
conn = self.repl_connect(
connection_factory=LogicalReplicationConnection, async=1)
if conn is None:
return
cur = conn.cursor()
self.create_replication_slot(cur, output_plugin='test_decoding')
self.wait(cur)
cur.start_replication(self.slot)
self.wait(cur)
self.make_replication_events()
self.msg_count = 0
def consume(msg):
# just check the methods
"%s: %s" % (cur.io_timestamp, repr(msg))
"%s: %s" % (cur.feedback_timestamp, repr(msg))
self.msg_count += 1
if self.msg_count > 3:
cur.send_feedback(reply=True)
raise StopReplication()
cur.send_feedback(flush_lsn=msg.data_start)
# cannot be used in asynchronous mode
self.assertRaises(psycopg2.ProgrammingError, cur.consume_stream, consume)
def process_stream():
while True:
msg = cur.read_message()
if msg:
consume(msg)
else:
select([cur], [], [])
self.assertRaises(StopReplication, process_stream)
def test_suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == "__main__":
unittest.main()

View File

@ -1,5 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python
# -*- coding: utf-8 -*-
# test_cancel.py - unit test for query cancellation # test_cancel.py - unit test for query cancellation
# #

View File

@ -42,7 +42,7 @@ import psycopg2.extras
from psycopg2 import extensions as ext from psycopg2 import extensions as ext
from .testutils import ( from .testutils import (
PY2, unittest, skip_if_no_superuser, skip_before_postgres, unittest, skip_if_no_superuser, skip_before_postgres,
skip_after_postgres, skip_before_libpq, skip_after_libpq, skip_after_postgres, skip_before_libpq, skip_after_libpq,
ConnectingTestCase, skip_if_tpc_disabled, skip_if_windows, slow, ConnectingTestCase, skip_if_tpc_disabled, skip_if_windows, slow,
skip_if_crdb, crdb_version) skip_if_crdb, crdb_version)
@ -245,7 +245,7 @@ class ConnectionTests(ConnectingTestCase):
cur = self.conn.cursor() cur = self.conn.cursor()
ext.register_type(ext.UNICODE, cur) ext.register_type(ext.UNICODE, cur)
cur.execute("select 'foo'::text;") cur.execute("select 'foo'::text;")
self.assertEqual(cur.fetchone()[0], u'foo') self.assertEqual(cur.fetchone()[0], 'foo')
def test_connect_nonnormal_envvar(self): def test_connect_nonnormal_envvar(self):
# We must perform encoding normalization at connection time # We must perform encoding normalization at connection time
@ -302,7 +302,7 @@ class ConnectionTests(ConnectingTestCase):
# Stop the committer thread # Stop the committer thread
stop.append(True) stop.append(True)
self.assert_(not notices, "%d notices raised" % len(notices)) self.assert_(not notices, f"{len(notices)} notices raised")
def test_connect_cursor_factory(self): def test_connect_cursor_factory(self):
conn = self.connect(cursor_factory=psycopg2.extras.DictCursor) conn = self.connect(cursor_factory=psycopg2.extras.DictCursor)
@ -343,7 +343,7 @@ class ConnectionTests(ConnectingTestCase):
class SubConnection(ext.connection): class SubConnection(ext.connection):
def __init__(self, dsn): def __init__(self, dsn):
try: try:
super(SubConnection, self).__init__(dsn) super().__init__(dsn)
except Exception: except Exception:
pass pass
@ -383,12 +383,11 @@ class ConnectionTests(ConnectingTestCase):
dir = tempfile.mkdtemp() dir = tempfile.mkdtemp()
try: try:
with open(os.path.join(dir, "mptest.py"), 'w') as f: with open(os.path.join(dir, "mptest.py"), 'w') as f:
f.write("""\ f.write(f"""import time
import time
import psycopg2 import psycopg2
def thread(): def thread():
conn = psycopg2.connect(%(dsn)r) conn = psycopg2.connect({dsn!r})
curs = conn.cursor() curs = conn.cursor()
for i in range(10): for i in range(10):
curs.execute("select 1") curs.execute("select 1")
@ -396,11 +395,11 @@ def thread():
def process(): def process():
time.sleep(0.2) time.sleep(0.2)
""" % {'dsn': dsn}) """)
script = ("""\ script = ("""\
import sys import sys
sys.path.insert(0, %(dir)r) sys.path.insert(0, {dir!r})
import time import time
import threading import threading
import multiprocessing import multiprocessing
@ -411,7 +410,7 @@ t.start()
time.sleep(0.2) time.sleep(0.2)
multiprocessing.Process(target=mptest.process, name='myprocess').start() multiprocessing.Process(target=mptest.process, name='myprocess').start()
t.join() t.join()
""" % {'dir': dir}) """.format(dir=dir))
out = sp.check_output( out = sp.check_output(
[sys.executable, '-c', script], stderr=sp.STDOUT) [sys.executable, '-c', script], stderr=sp.STDOUT)
@ -464,15 +463,12 @@ class ParseDsnTestCase(ConnectingTestCase):
self.assertTrue(raised, "ProgrammingError raised due to invalid URI") self.assertTrue(raised, "ProgrammingError raised due to invalid URI")
def test_unicode_value(self): def test_unicode_value(self):
snowman = u"\u2603" snowman = "\u2603"
d = ext.parse_dsn('dbname=' + snowman) d = ext.parse_dsn('dbname=' + snowman)
if PY2:
self.assertEqual(d['dbname'], snowman.encode('utf8'))
else:
self.assertEqual(d['dbname'], snowman) self.assertEqual(d['dbname'], snowman)
def test_unicode_key(self): def test_unicode_key(self):
snowman = u"\u2603" snowman = "\u2603"
self.assertRaises(psycopg2.ProgrammingError, ext.parse_dsn, self.assertRaises(psycopg2.ProgrammingError, ext.parse_dsn,
snowman + '=' + snowman) snowman + '=' + snowman)
@ -1230,7 +1226,7 @@ class ConnectionTwoPhaseTests(ConnectingTestCase):
def test_xid_unicode(self): def test_xid_unicode(self):
cnn = self.connect() cnn = self.connect()
x1 = cnn.xid(10, u'uni', u'code') x1 = cnn.xid(10, 'uni', 'code')
cnn.tpc_begin(x1) cnn.tpc_begin(x1)
cnn.tpc_prepare() cnn.tpc_prepare()
cnn.reset() cnn.reset()
@ -1245,7 +1241,7 @@ class ConnectionTwoPhaseTests(ConnectingTestCase):
# Let's just check unicode is accepted as type. # Let's just check unicode is accepted as type.
cnn = self.connect() cnn = self.connect()
cnn.set_client_encoding('utf8') cnn.set_client_encoding('utf8')
cnn.tpc_begin(u"transaction-id") cnn.tpc_begin("transaction-id")
cnn.tpc_prepare() cnn.tpc_prepare()
cnn.reset() cnn.reset()
@ -1683,7 +1679,7 @@ class AutocommitTests(ConnectingTestCase):
class PasswordLeakTestCase(ConnectingTestCase): class PasswordLeakTestCase(ConnectingTestCase):
def setUp(self): def setUp(self):
super(PasswordLeakTestCase, self).setUp() super().setUp()
PasswordLeakTestCase.dsn = None PasswordLeakTestCase.dsn = None
class GrassingConnection(ext.connection): class GrassingConnection(ext.connection):
@ -1736,8 +1732,7 @@ class SignalTestCase(ConnectingTestCase):
""") """)
def _test_bug_551(self, query): def _test_bug_551(self, query):
script = ("""\ script = f"""import os
import os
import sys import sys
import time import time
import signal import signal
@ -1758,7 +1753,7 @@ def killer():
signal.signal(signal.SIGABRT, handle_sigabort) signal.signal(signal.SIGABRT, handle_sigabort)
conn = psycopg2.connect(%(dsn)r) conn = psycopg2.connect({dsn!r})
cur = conn.cursor() cur = conn.cursor()
@ -1769,8 +1764,8 @@ t.daemon = True
t.start() t.start()
while True: while True:
cur.execute(%(query)r, ("Hello, world!",)) cur.execute({query!r}, ("Hello, world!",))
""" % {'dsn': dsn, 'query': query}) """
proc = sp.Popen([sys.executable, '-c', script], proc = sp.Popen([sys.executable, '-c', script],
stdout=sp.PIPE, stderr=sp.PIPE) stdout=sp.PIPE, stderr=sp.PIPE)

View File

@ -34,7 +34,7 @@ from subprocess import Popen, PIPE
import psycopg2 import psycopg2
import psycopg2.extensions import psycopg2.extensions
from .testutils import skip_copy_if_green, PY2, TextIOBase from .testutils import skip_copy_if_green, TextIOBase
from .testconfig import dsn from .testconfig import dsn
@ -97,7 +97,7 @@ class CopyTests(ConnectingTestCase):
curs = self.conn.cursor() curs = self.conn.cursor()
f = StringIO() f = StringIO()
for i in range(10): for i in range(10):
f.write("%s\n" % (i,)) f.write(f"{i}\n")
f.seek(0) f.seek(0)
curs.copy_from(MinimalRead(f), "tcopy", columns=['id']) curs.copy_from(MinimalRead(f), "tcopy", columns=['id'])
@ -109,7 +109,7 @@ class CopyTests(ConnectingTestCase):
curs = self.conn.cursor() curs = self.conn.cursor()
f = StringIO() f = StringIO()
for i in range(10): for i in range(10):
f.write("%s\n" % (i,)) f.write(f"{i}\n")
f.seek(0) f.seek(0)
@ -133,11 +133,6 @@ class CopyTests(ConnectingTestCase):
self.conn.set_client_encoding('latin1') self.conn.set_client_encoding('latin1')
self._create_temp_table() # the above call closed the xn self._create_temp_table() # the above call closed the xn
if PY2:
abin = ''.join(map(chr, range(32, 127) + range(160, 256)))
about = abin.decode('latin1').replace('\\', '\\\\')
else:
abin = bytes(list(range(32, 127)) abin = bytes(list(range(32, 127))
+ list(range(160, 256))).decode('latin1') + list(range(160, 256))).decode('latin1')
about = abin.replace('\\', '\\\\') about = abin.replace('\\', '\\\\')
@ -155,10 +150,6 @@ class CopyTests(ConnectingTestCase):
self.conn.set_client_encoding('latin1') self.conn.set_client_encoding('latin1')
self._create_temp_table() # the above call closed the xn self._create_temp_table() # the above call closed the xn
if PY2:
abin = ''.join(map(chr, range(32, 127) + range(160, 255)))
about = abin.replace('\\', '\\\\')
else:
abin = bytes(list(range(32, 127)) abin = bytes(list(range(32, 127))
+ list(range(160, 255))).decode('latin1') + list(range(160, 255))).decode('latin1')
about = abin.replace('\\', '\\\\').encode('latin1') about = abin.replace('\\', '\\\\').encode('latin1')
@ -176,12 +167,6 @@ class CopyTests(ConnectingTestCase):
self.conn.set_client_encoding('latin1') self.conn.set_client_encoding('latin1')
self._create_temp_table() # the above call closed the xn self._create_temp_table() # the above call closed the xn
if PY2:
abin = ''.join(map(chr, range(32, 127) + range(160, 256)))
abin = abin.decode('latin1')
about = abin.replace('\\', '\\\\')
else:
abin = bytes(list(range(32, 127)) abin = bytes(list(range(32, 127))
+ list(range(160, 256))).decode('latin1') + list(range(160, 256))).decode('latin1')
about = abin.replace('\\', '\\\\') about = abin.replace('\\', '\\\\')
@ -224,7 +209,7 @@ class CopyTests(ConnectingTestCase):
f = StringIO() f = StringIO()
for i, c in zip(range(nrecs), cycle(string.ascii_letters)): for i, c in zip(range(nrecs), cycle(string.ascii_letters)):
l = c * srec l = c * srec
f.write("%s\t%s\n" % (i, l)) f.write(f"{i}\t{l}\n")
f.seek(0) f.seek(0)
curs.copy_from(MinimalRead(f), "tcopy", **copykw) curs.copy_from(MinimalRead(f), "tcopy", **copykw)
@ -252,7 +237,7 @@ class CopyTests(ConnectingTestCase):
self.assertEqual(ntests, len(string.ascii_letters)) self.assertEqual(ntests, len(string.ascii_letters))
def test_copy_expert_file_refcount(self): def test_copy_expert_file_refcount(self):
class Whatever(object): class Whatever:
pass pass
f = Whatever() f = Whatever()
@ -261,7 +246,7 @@ class CopyTests(ConnectingTestCase):
curs.copy_expert, 'COPY tcopy (data) FROM STDIN', f) curs.copy_expert, 'COPY tcopy (data) FROM STDIN', f)
def test_copy_no_column_limit(self): def test_copy_no_column_limit(self):
cols = ["c%050d" % i for i in range(200)] cols = [f"c{i:050}" for i in range(200)]
curs = self.conn.cursor() curs = self.conn.cursor()
curs.execute('CREATE TEMPORARY TABLE manycols (%s)' % ',\n'.join( curs.execute('CREATE TEMPORARY TABLE manycols (%s)' % ',\n'.join(
@ -332,9 +317,8 @@ class CopyTests(ConnectingTestCase):
@slow @slow
def test_copy_from_segfault(self): def test_copy_from_segfault(self):
# issue #219 # issue #219
script = ("""\ script = f"""import psycopg2
import psycopg2 conn = psycopg2.connect({dsn!r})
conn = psycopg2.connect(%(dsn)r)
curs = conn.cursor() curs = conn.cursor()
curs.execute("create table copy_segf (id int)") curs.execute("create table copy_segf (id int)")
try: try:
@ -342,7 +326,7 @@ try:
except psycopg2.ProgrammingError: except psycopg2.ProgrammingError:
pass pass
conn.close() conn.close()
""" % {'dsn': dsn}) """
proc = Popen([sys.executable, '-c', script]) proc = Popen([sys.executable, '-c', script])
proc.communicate() proc.communicate()
@ -351,9 +335,8 @@ conn.close()
@slow @slow
def test_copy_to_segfault(self): def test_copy_to_segfault(self):
# issue #219 # issue #219
script = ("""\ script = f"""import psycopg2
import psycopg2 conn = psycopg2.connect({dsn!r})
conn = psycopg2.connect(%(dsn)r)
curs = conn.cursor() curs = conn.cursor()
curs.execute("create table copy_segf (id int)") curs.execute("create table copy_segf (id int)")
try: try:
@ -361,7 +344,7 @@ try:
except psycopg2.ProgrammingError: except psycopg2.ProgrammingError:
pass pass
conn.close() conn.close()
""" % {'dsn': dsn}) """
proc = Popen([sys.executable, '-c', script], stdout=PIPE) proc = Popen([sys.executable, '-c', script], stdout=PIPE)
proc.communicate() proc.communicate()

View File

@ -39,7 +39,6 @@ from .testutils import (ConnectingTestCase, skip_before_postgres,
skip_if_windows, skip_if_crdb, crdb_version) skip_if_windows, skip_if_crdb, crdb_version)
import psycopg2.extras import psycopg2.extras
from psycopg2.compat import text_type
class CursorTests(ConnectingTestCase): class CursorTests(ConnectingTestCase):
@ -75,36 +74,36 @@ class CursorTests(ConnectingTestCase):
# test consistency between execute and mogrify. # test consistency between execute and mogrify.
# unicode query containing only ascii data # unicode query containing only ascii data
cur.execute(u"SELECT 'foo';") cur.execute("SELECT 'foo';")
self.assertEqual('foo', cur.fetchone()[0]) self.assertEqual('foo', cur.fetchone()[0])
self.assertEqual(b"SELECT 'foo';", cur.mogrify(u"SELECT 'foo';")) self.assertEqual(b"SELECT 'foo';", cur.mogrify("SELECT 'foo';"))
conn.set_client_encoding('UTF8') conn.set_client_encoding('UTF8')
snowman = u"\u2603" snowman = "\u2603"
def b(s): def b(s):
if isinstance(s, text_type): if isinstance(s, str):
return s.encode('utf8') return s.encode('utf8')
else: else:
return s return s
# unicode query with non-ascii data # unicode query with non-ascii data
cur.execute(u"SELECT '%s';" % snowman) cur.execute(f"SELECT '{snowman}';")
self.assertEqual(snowman.encode('utf8'), b(cur.fetchone()[0])) self.assertEqual(snowman.encode('utf8'), b(cur.fetchone()[0]))
self.assertQuotedEqual(("SELECT '%s';" % snowman).encode('utf8'), self.assertQuotedEqual(f"SELECT '{snowman}';".encode('utf8'),
cur.mogrify(u"SELECT '%s';" % snowman)) cur.mogrify(f"SELECT '{snowman}';"))
# unicode args # unicode args
cur.execute("SELECT %s;", (snowman,)) cur.execute("SELECT %s;", (snowman,))
self.assertEqual(snowman.encode("utf-8"), b(cur.fetchone()[0])) self.assertEqual(snowman.encode("utf-8"), b(cur.fetchone()[0]))
self.assertQuotedEqual(("SELECT '%s';" % snowman).encode('utf8'), self.assertQuotedEqual(f"SELECT '{snowman}';".encode('utf8'),
cur.mogrify("SELECT %s;", (snowman,))) cur.mogrify("SELECT %s;", (snowman,)))
# unicode query and args # unicode query and args
cur.execute(u"SELECT %s;", (snowman,)) cur.execute("SELECT %s;", (snowman,))
self.assertEqual(snowman.encode("utf-8"), b(cur.fetchone()[0])) self.assertEqual(snowman.encode("utf-8"), b(cur.fetchone()[0]))
self.assertQuotedEqual(("SELECT '%s';" % snowman).encode('utf8'), self.assertQuotedEqual(f"SELECT '{snowman}';".encode('utf8'),
cur.mogrify(u"SELECT %s;", (snowman,))) cur.mogrify("SELECT %s;", (snowman,)))
def test_mogrify_decimal_explodes(self): def test_mogrify_decimal_explodes(self):
conn = self.conn conn = self.conn
@ -293,12 +292,12 @@ class CursorTests(ConnectingTestCase):
cur = self.conn.cursor() cur = self.conn.cursor()
# Set up the temporary function # Set up the temporary function
cur.execute(''' cur.execute(f'''
CREATE FUNCTION %s(%s INT) CREATE FUNCTION {procname}({escaped_paramname} INT)
RETURNS INT AS RETURNS INT AS
'SELECT $1 * $1' 'SELECT $1 * $1'
LANGUAGE SQL LANGUAGE SQL
''' % (procname, escaped_paramname)) ''')
# Make sure callproc works right # Make sure callproc works right
cur.callproc(procname, {paramname: 2}) cur.callproc(procname, {paramname: 2})
@ -309,7 +308,7 @@ class CursorTests(ConnectingTestCase):
({paramname: 2, 'foo': 'bar'}, psycopg2.ProgrammingError), ({paramname: 2, 'foo': 'bar'}, psycopg2.ProgrammingError),
({paramname: '2'}, psycopg2.ProgrammingError), ({paramname: '2'}, psycopg2.ProgrammingError),
({paramname: 'two'}, psycopg2.ProgrammingError), ({paramname: 'two'}, psycopg2.ProgrammingError),
({u'bj\xc3rn': 2}, psycopg2.ProgrammingError), ({'bj\xc3rn': 2}, psycopg2.ProgrammingError),
({3: 2}, TypeError), ({3: 2}, TypeError),
({self: 2}, TypeError), ({self: 2}, TypeError),
] ]
@ -584,8 +583,7 @@ class NamedCursorTests(ConnectingTestCase):
time.sleep(0.2) time.sleep(0.2)
t2 = next(i)[0] t2 = next(i)[0]
self.assert_((t2 - t1).microseconds * 1e-6 < 0.1, self.assert_((t2 - t1).microseconds * 1e-6 < 0.1,
"named cursor records fetched in 2 roundtrips (delta: %s)" f"named cursor records fetched in 2 roundtrips (delta: {t2 - t1})")
% (t2 - t1))
@skip_before_postgres(8, 0) @skip_before_postgres(8, 0)
def test_iter_named_cursor_default_itersize(self): def test_iter_named_cursor_default_itersize(self):

View File

@ -379,7 +379,7 @@ class DatetimeTests(ConnectingTestCase, CommonDatetimeTestsMixin):
cur) cur)
def f(val): def f(val):
cur.execute("select '%s'::text" % val) cur.execute(f"select '{val}'::text")
return cur.fetchone()[0] return cur.fetchone()[0]
self.assertRaises(OverflowError, f, '100000000000000000:00:00') self.assertRaises(OverflowError, f, '100000000000000000:00:00')
@ -423,7 +423,7 @@ class DatetimeTests(ConnectingTestCase, CommonDatetimeTestsMixin):
]: ]:
cur.execute("select %s::text", (s,)) cur.execute("select %s::text", (s,))
r = cur.fetchone()[0] r = cur.fetchone()[0]
self.assertEqual(r, v, "%s -> %s != %s" % (s, r, v)) self.assertEqual(r, v, f"{s} -> {r} != {v}")
@skip_if_crdb("interval style") @skip_if_crdb("interval style")
@skip_before_postgres(8, 4) @skip_before_postgres(8, 4)

View File

@ -53,7 +53,7 @@ class ErrocodeTests(ConnectingTestCase):
if errs: if errs:
self.fail( self.fail(
"raised %s errors in %s cycles (first is %s %s)" % ( "raised {} errors in {} cycles (first is {} {})".format(
len(errs), MAX_CYCLES, len(errs), MAX_CYCLES,
errs[0].__class__.__name__, errs[0])) errs[0].__class__.__name__, errs[0]))

View File

@ -20,14 +20,14 @@ import time
import pickle import pickle
import unittest import unittest
from datetime import timedelta from datetime import timedelta
from functools import lru_cache
import psycopg2 import psycopg2
from psycopg2.compat import lru_cache
import psycopg2.extras import psycopg2.extras
from psycopg2.extras import NamedTupleConnection, NamedTupleCursor from psycopg2.extras import NamedTupleConnection, NamedTupleCursor
from .testutils import ConnectingTestCase, skip_before_postgres, \ from .testutils import ConnectingTestCase, skip_before_postgres, \
skip_before_python, skip_from_python, crdb_version, skip_if_crdb crdb_version, skip_if_crdb
class _DictCursorBase(ConnectingTestCase): class _DictCursorBase(ConnectingTestCase):
@ -179,27 +179,7 @@ class ExtrasDictCursorTests(_DictCursorBase):
self.assertEqual(len(rv3), 2) self.assertEqual(len(rv3), 2)
self.assertEqual(len(rv), 2) self.assertEqual(len(rv), 2)
@skip_from_python(3) def test_iter_methods(self):
def test_iter_methods_2(self):
curs = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
curs.execute("select 10 as a, 20 as b")
r = curs.fetchone()
self.assert_(isinstance(r.keys(), list))
self.assertEqual(len(r.keys()), 2)
self.assert_(isinstance(r.values(), tuple)) # sic?
self.assertEqual(len(r.values()), 2)
self.assert_(isinstance(r.items(), list))
self.assertEqual(len(r.items()), 2)
self.assert_(not isinstance(r.iterkeys(), list))
self.assertEqual(len(list(r.iterkeys())), 2)
self.assert_(not isinstance(r.itervalues(), list))
self.assertEqual(len(list(r.itervalues())), 2)
self.assert_(not isinstance(r.iteritems(), list))
self.assertEqual(len(list(r.iteritems())), 2)
@skip_before_python(3)
def test_iter_methods_3(self):
curs = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) curs = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
curs.execute("select 10 as a, 20 as b") curs.execute("select 10 as a, 20 as b")
r = curs.fetchone() r = curs.fetchone()
@ -226,21 +206,6 @@ class ExtrasDictCursorTests(_DictCursorBase):
self.assertEqual(list(r1.values()), list(r.values())) self.assertEqual(list(r1.values()), list(r.values()))
self.assertEqual(list(r1.items()), list(r.items())) self.assertEqual(list(r1.items()), list(r.items()))
@skip_from_python(3)
def test_order_iter(self):
curs = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
curs.execute("select 5 as foo, 4 as bar, 33 as baz, 2 as qux")
r = curs.fetchone()
self.assertEqual(list(r.iterkeys()), ['foo', 'bar', 'baz', 'qux'])
self.assertEqual(list(r.itervalues()), [5, 4, 33, 2])
self.assertEqual(list(r.iteritems()),
[('foo', 5), ('bar', 4), ('baz', 33), ('qux', 2)])
r1 = pickle.loads(pickle.dumps(r))
self.assertEqual(list(r1.iterkeys()), list(r.iterkeys()))
self.assertEqual(list(r1.itervalues()), list(r.itervalues()))
self.assertEqual(list(r1.iteritems()), list(r.iteritems()))
class ExtrasDictCursorRealTests(_DictCursorBase): class ExtrasDictCursorRealTests(_DictCursorBase):
def testRealMeansReal(self): def testRealMeansReal(self):
@ -340,27 +305,7 @@ class ExtrasDictCursorRealTests(_DictCursorBase):
row = getter(curs) row = getter(curs)
self.failUnless(row['foo'] == 'bar') self.failUnless(row['foo'] == 'bar')
@skip_from_python(3) def test_iter_methods(self):
def test_iter_methods_2(self):
curs = self.conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
curs.execute("select 10 as a, 20 as b")
r = curs.fetchone()
self.assert_(isinstance(r.keys(), list))
self.assertEqual(len(r.keys()), 2)
self.assert_(isinstance(r.values(), list))
self.assertEqual(len(r.values()), 2)
self.assert_(isinstance(r.items(), list))
self.assertEqual(len(r.items()), 2)
self.assert_(not isinstance(r.iterkeys(), list))
self.assertEqual(len(list(r.iterkeys())), 2)
self.assert_(not isinstance(r.itervalues(), list))
self.assertEqual(len(list(r.itervalues())), 2)
self.assert_(not isinstance(r.iteritems(), list))
self.assertEqual(len(list(r.iteritems())), 2)
@skip_before_python(3)
def test_iter_methods_3(self):
curs = self.conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) curs = self.conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
curs.execute("select 10 as a, 20 as b") curs.execute("select 10 as a, 20 as b")
r = curs.fetchone() r = curs.fetchone()
@ -387,21 +332,6 @@ class ExtrasDictCursorRealTests(_DictCursorBase):
self.assertEqual(list(r1.values()), list(r.values())) self.assertEqual(list(r1.values()), list(r.values()))
self.assertEqual(list(r1.items()), list(r.items())) self.assertEqual(list(r1.items()), list(r.items()))
@skip_from_python(3)
def test_order_iter(self):
curs = self.conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
curs.execute("select 5 as foo, 4 as bar, 33 as baz, 2 as qux")
r = curs.fetchone()
self.assertEqual(list(r.iterkeys()), ['foo', 'bar', 'baz', 'qux'])
self.assertEqual(list(r.itervalues()), [5, 4, 33, 2])
self.assertEqual(list(r.iteritems()),
[('foo', 5), ('bar', 4), ('baz', 33), ('qux', 2)])
r1 = pickle.loads(pickle.dumps(r))
self.assertEqual(list(r1.iterkeys()), list(r.iterkeys()))
self.assertEqual(list(r1.itervalues()), list(r.itervalues()))
self.assertEqual(list(r1.iteritems()), list(r.iteritems()))
def test_pop(self): def test_pop(self):
curs = self.conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) curs = self.conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
curs.execute("select 1 as a, 2 as b, 3 as c") curs.execute("select 1 as a, 2 as b, 3 as c")
@ -564,7 +494,6 @@ class NamedTupleCursorTest(ConnectingTestCase):
self.assertEqual(rv.f_column_, 2) self.assertEqual(rv.f_column_, 2)
self.assertEqual(rv.f3, 3) self.assertEqual(rv.f3, 3)
@skip_before_python(3)
@skip_before_postgres(8) @skip_before_postgres(8)
def test_nonascii_name(self): def test_nonascii_name(self):
curs = self.conn.cursor() curs = self.conn.cursor()
@ -692,7 +621,7 @@ class NamedTupleCursorTest(ConnectingTestCase):
recs = [] recs = []
curs = self.conn.cursor() curs = self.conn.cursor()
for i in range(10): for i in range(10):
curs.execute("select 1 as f%s" % i) curs.execute(f"select 1 as f{i}")
recs.append(curs.fetchone()) recs.append(curs.fetchone())
# Still in cache # Still in cache

View File

@ -43,9 +43,9 @@ class TestPaginate(unittest.TestCase):
[list(range(i * 100, (i + 1) * 100)) for i in range(10)]) [list(range(i * 100, (i + 1) * 100)) for i in range(10)])
class FastExecuteTestMixin(object): class FastExecuteTestMixin:
def setUp(self): def setUp(self):
super(FastExecuteTestMixin, self).setUp() super().setUp()
cur = self.conn.cursor() cur = self.conn.cursor()
cur.execute("""create table testfast ( cur.execute("""create table testfast (
id serial primary key, date date, val int, data text)""") id serial primary key, date date, val int, data text)""")
@ -102,7 +102,7 @@ class TestExecuteBatch(FastExecuteTestMixin, testutils.ConnectingTestCase):
page_size=10) page_size=10)
# last command was 5 statements # last command was 5 statements
self.assertEqual(sum(c == u';' for c in cur.query.decode('ascii')), 4) self.assertEqual(sum(c == ';' for c in cur.query.decode('ascii')), 4)
cur.execute("select id, val from testfast order by id") cur.execute("select id, val from testfast order by id")
self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(25)]) self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(25)])
@ -111,7 +111,7 @@ class TestExecuteBatch(FastExecuteTestMixin, testutils.ConnectingTestCase):
def test_unicode(self): def test_unicode(self):
cur = self.conn.cursor() cur = self.conn.cursor()
ext.register_type(ext.UNICODE, cur) ext.register_type(ext.UNICODE, cur)
snowman = u"\u2603" snowman = "\u2603"
# unicode in statement # unicode in statement
psycopg2.extras.execute_batch(cur, psycopg2.extras.execute_batch(cur,
@ -206,7 +206,7 @@ class TestExecuteValues(FastExecuteTestMixin, testutils.ConnectingTestCase):
def test_unicode(self): def test_unicode(self):
cur = self.conn.cursor() cur = self.conn.cursor()
ext.register_type(ext.UNICODE, cur) ext.register_type(ext.UNICODE, cur)
snowman = u"\u2603" snowman = "\u2603"
# unicode in statement # unicode in statement
psycopg2.extras.execute_values(cur, psycopg2.extras.execute_values(cur,

View File

@ -36,7 +36,7 @@ from .testutils import ConnectingTestCase, skip_before_postgres, slow
from .testutils import skip_if_crdb from .testutils import skip_if_crdb
class ConnectionStub(object): class ConnectionStub:
"""A `connection` wrapper allowing analysis of the `poll()` calls.""" """A `connection` wrapper allowing analysis of the `poll()` calls."""
def __init__(self, conn): def __init__(self, conn):
self.conn = conn self.conn = conn
@ -137,7 +137,7 @@ class GreenTestCase(ConnectingTestCase):
elif state == POLL_WRITE: elif state == POLL_WRITE:
select.select([], [conn.fileno()], [], 0.1) select.select([], [conn.fileno()], [], 0.1)
else: else:
raise conn.OperationalError("bad state from poll: %s" % state) raise conn.OperationalError(f"bad state from poll: {state}")
stub = self.set_stub_wait_callback(self.conn, wait) stub = self.set_stub_wait_callback(self.conn, wait)
cur = self.conn.cursor() cur = self.conn.cursor()
@ -182,7 +182,7 @@ class CallbackErrorTestCase(ConnectingTestCase):
elif state == POLL_WRITE: elif state == POLL_WRITE:
select.select([], [conn.fileno()], []) select.select([], [conn.fileno()], [])
else: else:
raise conn.OperationalError("bad state from poll: %s" % state) raise conn.OperationalError(f"bad state from poll: {state}")
except KeyboardInterrupt: except KeyboardInterrupt:
conn.cancel() conn.cancel()
# the loop will be broken by a server error # the loop will be broken by a server error

View File

@ -15,7 +15,6 @@
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details. # License for more details.
from __future__ import unicode_literals
from . import testutils from . import testutils
import unittest import unittest

View File

@ -185,15 +185,15 @@ class LargeObjectTests(LargeObjectTestCase):
def test_read_text(self): def test_read_text(self):
lo = self.conn.lobject() lo = self.conn.lobject()
snowman = u"\u2603" snowman = "\u2603"
lo.write(u"some data " + snowman) lo.write("some data " + snowman)
lo.close() lo.close()
lo = self.conn.lobject(lo.oid, "rt") lo = self.conn.lobject(lo.oid, "rt")
x = lo.read(4) x = lo.read(4)
self.assertEqual(type(x), type(u'')) self.assertEqual(type(x), type(''))
self.assertEqual(x, u"some") self.assertEqual(x, "some")
self.assertEqual(lo.read(), u" data " + snowman) self.assertEqual(lo.read(), " data " + snowman)
@slow @slow
def test_read_large(self): def test_read_large(self):
@ -207,7 +207,7 @@ class LargeObjectTests(LargeObjectTestCase):
data1 = lo.read() data1 = lo.read()
# avoid dumping megacraps in the console in case of error # avoid dumping megacraps in the console in case of error
self.assert_(data == data1, self.assert_(data == data1,
"%r... != %r..." % (data[:100], data1[:100])) f"{data[:100]!r}... != {data1[:100]!r}...")
def test_seek_tell(self): def test_seek_tell(self):
lo = self.conn.lobject() lo = self.conn.lobject()

View File

@ -130,7 +130,7 @@ class ConnectTestCase(unittest.TestCase):
def test_int_port_param(self): def test_int_port_param(self):
psycopg2.connect(database='sony', port=6543) psycopg2.connect(database='sony', port=6543)
dsn = " %s " % self.args[0] dsn = f" {self.args[0]} "
self.assert_(" dbname=sony " in dsn, dsn) self.assert_(" dbname=sony " in dsn, dsn)
self.assert_(" port=6543 " in dsn, dsn) self.assert_(" port=6543 " in dsn, dsn)
@ -338,12 +338,12 @@ class TestExtensionModule(unittest.TestCase):
pkgdir = os.path.dirname(psycopg2.__file__) pkgdir = os.path.dirname(psycopg2.__file__)
pardir = os.path.dirname(pkgdir) pardir = os.path.dirname(pkgdir)
self.assert_(pardir in sys.path) self.assert_(pardir in sys.path)
script = (""" script = f"""
import sys import sys
sys.path.remove(%r) sys.path.remove({pardir!r})
sys.path.insert(0, %r) sys.path.insert(0, {pkgdir!r})
import _psycopg import _psycopg
""" % (pardir, pkgdir)) """
proc = Popen([sys.executable, '-c', script]) proc = Popen([sys.executable, '-c', script])
proc.communicate() proc.communicate()

View File

@ -56,23 +56,23 @@ class NotifiesTests(ConnectingTestCase):
if payload is None: if payload is None:
payload = '' payload = ''
else: else:
payload = ", %r" % payload payload = f", {payload!r}"
script = ("""\ script = ("""\
import time import time
time.sleep(%(sec)s) time.sleep({sec})
import %(module)s as psycopg2 import {module} as psycopg2
import %(module)s.extensions as ext import {module}.extensions as ext
conn = psycopg2.connect(%(dsn)r) conn = psycopg2.connect({dsn!r})
conn.set_isolation_level(ext.ISOLATION_LEVEL_AUTOCOMMIT) conn.set_isolation_level(ext.ISOLATION_LEVEL_AUTOCOMMIT)
print(conn.info.backend_pid) print(conn.info.backend_pid)
curs = conn.cursor() curs = conn.cursor()
curs.execute("NOTIFY " %(name)r %(payload)r) curs.execute("NOTIFY " {name!r} {payload!r})
curs.close() curs.close()
conn.close() conn.close()
""" % { """.format(
'module': psycopg2.__name__, module=psycopg2.__name__,
'dsn': dsn, 'sec': sec, 'name': name, 'payload': payload}) dsn=dsn, sec=sec, name=name, payload=payload))
return Popen([sys.executable, '-c', script], stdout=PIPE) return Popen([sys.executable, '-c', script], stdout=PIPE)

View File

@ -25,7 +25,7 @@
from . import testutils from . import testutils
import unittest import unittest
from .testutils import ConnectingTestCase, skip_if_crdb, unichr, PY2 from .testutils import ConnectingTestCase, skip_if_crdb
import psycopg2 import psycopg2
import psycopg2.extensions import psycopg2.extensions
@ -79,16 +79,10 @@ class QuotingTestCase(ConnectingTestCase):
data = b"""some data with \000\013 binary data = b"""some data with \000\013 binary
stuff into, 'quotes' and \\ a backslash too. stuff into, 'quotes' and \\ a backslash too.
""" """
if PY2:
data += "".join(map(chr, range(256)))
else:
data += bytes(list(range(256))) data += bytes(list(range(256)))
curs = self.conn.cursor() curs = self.conn.cursor()
curs.execute("SELECT %s::bytea;", (psycopg2.Binary(data),)) curs.execute("SELECT %s::bytea;", (psycopg2.Binary(data),))
if PY2:
res = str(curs.fetchone()[0])
else:
res = curs.fetchone()[0].tobytes() res = curs.fetchone()[0].tobytes()
if res[0] in (b'x', ord(b'x')) and self.conn.info.server_version >= 90000: if res[0] in (b'x', ord(b'x')) and self.conn.info.server_version >= 90000:
@ -104,13 +98,12 @@ class QuotingTestCase(ConnectingTestCase):
server_encoding = curs.fetchone()[0] server_encoding = curs.fetchone()[0]
if server_encoding != "UTF8": if server_encoding != "UTF8":
return self.skipTest( return self.skipTest(
"Unicode test skipped since server encoding is %s" f"Unicode test skipped since server encoding is {server_encoding}")
% server_encoding)
data = u"""some data with \t chars data = """some data with \t chars
to escape into, 'quotes', \u20ac euro sign and \\ a backslash too. to escape into, 'quotes', \u20ac euro sign and \\ a backslash too.
""" """
data += u"".join(map(unichr, [u for u in range(1, 65536) data += "".join(map(chr, [u for u in range(1, 65536)
if not 0xD800 <= u <= 0xDFFF])) # surrogate area if not 0xD800 <= u <= 0xDFFF])) # surrogate area
self.conn.set_client_encoding('UNICODE') self.conn.set_client_encoding('UNICODE')
@ -125,9 +118,6 @@ class QuotingTestCase(ConnectingTestCase):
def test_latin1(self): def test_latin1(self):
self.conn.set_client_encoding('LATIN1') self.conn.set_client_encoding('LATIN1')
curs = self.conn.cursor() curs = self.conn.cursor()
if PY2:
data = ''.join(map(chr, range(32, 127) + range(160, 256)))
else:
data = bytes(list(range(32, 127)) data = bytes(list(range(32, 127))
+ list(range(160, 256))).decode('latin1') + list(range(160, 256))).decode('latin1')
@ -137,23 +127,11 @@ class QuotingTestCase(ConnectingTestCase):
self.assertEqual(res, data) self.assertEqual(res, data)
self.assert_(not self.conn.notices) self.assert_(not self.conn.notices)
# as unicode
if PY2:
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE, self.conn)
data = data.decode('latin1')
curs.execute("SELECT %s::text;", (data,))
res = curs.fetchone()[0]
self.assertEqual(res, data)
self.assert_(not self.conn.notices)
@skip_if_crdb("encoding") @skip_if_crdb("encoding")
def test_koi8(self): def test_koi8(self):
self.conn.set_client_encoding('KOI8') self.conn.set_client_encoding('KOI8')
curs = self.conn.cursor() curs = self.conn.cursor()
if PY2:
data = ''.join(map(chr, range(32, 127) + range(128, 256)))
else:
data = bytes(list(range(32, 127)) data = bytes(list(range(32, 127))
+ list(range(128, 256))).decode('koi8_r') + list(range(128, 256))).decode('koi8_r')
@ -163,18 +141,8 @@ class QuotingTestCase(ConnectingTestCase):
self.assertEqual(res, data) self.assertEqual(res, data)
self.assert_(not self.conn.notices) self.assert_(not self.conn.notices)
# as unicode
if PY2:
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE, self.conn)
data = data.decode('koi8_r')
curs.execute("SELECT %s::text;", (data,))
res = curs.fetchone()[0]
self.assertEqual(res, data)
self.assert_(not self.conn.notices)
def test_bytes(self): def test_bytes(self):
snowman = u"\u2603" snowman = "\u2603"
conn = self.connect() conn = self.connect()
conn.set_client_encoding('UNICODE') conn.set_client_encoding('UNICODE')
psycopg2.extensions.register_type(psycopg2.extensions.BYTES, conn) psycopg2.extensions.register_type(psycopg2.extensions.BYTES, conn)
@ -202,11 +170,8 @@ class TestQuotedIdentifier(ConnectingTestCase):
@testutils.skip_before_postgres(8, 0) @testutils.skip_before_postgres(8, 0)
def test_unicode_ident(self): def test_unicode_ident(self):
snowman = u"\u2603" snowman = "\u2603"
quoted = '"' + snowman + '"' quoted = '"' + snowman + '"'
if PY2:
self.assertEqual(quote_ident(snowman, self.conn), quoted.encode('utf8'))
else:
self.assertEqual(quote_ident(snowman, self.conn), quoted) self.assertEqual(quote_ident(snowman, self.conn), quoted)
@ -223,7 +188,7 @@ class TestStringAdapter(ConnectingTestCase):
# self.assertEqual(adapt(egrave).getquoted(), "'\xe8'") # self.assertEqual(adapt(egrave).getquoted(), "'\xe8'")
def test_encoding_error(self): def test_encoding_error(self):
snowman = u"\u2603" snowman = "\u2603"
a = adapt(snowman) a = adapt(snowman)
self.assertRaises(UnicodeEncodeError, a.getquoted) self.assertRaises(UnicodeEncodeError, a.getquoted)
@ -231,14 +196,14 @@ class TestStringAdapter(ConnectingTestCase):
# Note: this works-ish mostly in case when the standard db connection # Note: this works-ish mostly in case when the standard db connection
# we test with is utf8, otherwise the encoding chosen by PQescapeString # we test with is utf8, otherwise the encoding chosen by PQescapeString
# may give bad results. # may give bad results.
snowman = u"\u2603" snowman = "\u2603"
a = adapt(snowman) a = adapt(snowman)
a.encoding = 'utf8' a.encoding = 'utf8'
self.assertEqual(a.encoding, 'utf8') self.assertEqual(a.encoding, 'utf8')
self.assertEqual(a.getquoted(), b"'\xe2\x98\x83'") self.assertEqual(a.getquoted(), b"'\xe2\x98\x83'")
def test_connection_wins_anyway(self): def test_connection_wins_anyway(self):
snowman = u"\u2603" snowman = "\u2603"
a = adapt(snowman) a = adapt(snowman)
a.encoding = 'latin9' a.encoding = 'latin9'
@ -248,9 +213,8 @@ class TestStringAdapter(ConnectingTestCase):
self.assertEqual(a.encoding, 'utf_8') self.assertEqual(a.encoding, 'utf_8')
self.assertQuotedEqual(a.getquoted(), b"'\xe2\x98\x83'") self.assertQuotedEqual(a.getquoted(), b"'\xe2\x98\x83'")
@testutils.skip_before_python(3)
def test_adapt_bytes(self): def test_adapt_bytes(self):
snowman = u"\u2603" snowman = "\u2603"
self.conn.set_client_encoding('utf8') self.conn.set_client_encoding('utf8')
a = psycopg2.extensions.QuotedString(snowman.encode('utf8')) a = psycopg2.extensions.QuotedString(snowman.encode('utf8'))
a.prepare(self.conn) a.prepare(self.conn)

View File

@ -41,13 +41,13 @@ skip_repl_if_green = skip_if_green("replication not supported in green mode")
class ReplicationTestCase(ConnectingTestCase): class ReplicationTestCase(ConnectingTestCase):
def setUp(self): def setUp(self):
super(ReplicationTestCase, self).setUp() super().setUp()
self.slot = testconfig.repl_slot self.slot = testconfig.repl_slot
self._slots = [] self._slots = []
def tearDown(self): def tearDown(self):
# first close all connections, as they might keep the slot(s) active # first close all connections, as they might keep the slot(s) active
super(ReplicationTestCase, self).tearDown() super().tearDown()
time.sleep(0.025) # sometimes the slot is still active, wait a little time.sleep(0.025) # sometimes the slot is still active, wait a little
@ -244,9 +244,9 @@ class AsyncReplicationTest(ReplicationTestCase):
def consume(msg): def consume(msg):
# just check the methods # just check the methods
"%s: %s" % (cur.io_timestamp, repr(msg)) f"{cur.io_timestamp}: {repr(msg)}"
"%s: %s" % (cur.feedback_timestamp, repr(msg)) f"{cur.feedback_timestamp}: {repr(msg)}"
"%s: %s" % (cur.wal_end, repr(msg)) f"{cur.wal_end}: {repr(msg)}"
self.msg_count += 1 self.msg_count += 1
if self.msg_count > 3: if self.msg_count > 3:

View File

@ -31,7 +31,6 @@ from .testutils import (
import psycopg2 import psycopg2
from psycopg2 import sql from psycopg2 import sql
from psycopg2.compat import text_type
class SqlFormatTests(ConnectingTestCase): class SqlFormatTests(ConnectingTestCase):
@ -62,13 +61,6 @@ class SqlFormatTests(ConnectingTestCase):
self.assert_(isinstance(s1, str)) self.assert_(isinstance(s1, str))
self.assertEqual(s1, 'select "field" from "table"') self.assertEqual(s1, 'select "field" from "table"')
def test_unicode(self):
s = sql.SQL(u"select {0} from {1}").format(
sql.Identifier(u'field'), sql.Identifier('table'))
s1 = s.as_string(self.conn)
self.assert_(isinstance(s1, text_type))
self.assertEqual(s1, u'select "field" from "table"')
def test_compose_literal(self): def test_compose_literal(self):
s = sql.SQL("select {0};").format(sql.Literal(dt.date(2016, 12, 31))) s = sql.SQL("select {0};").format(sql.Literal(dt.date(2016, 12, 31)))
s1 = s.as_string(self.conn) s1 = s.as_string(self.conn)
@ -111,7 +103,7 @@ class SqlFormatTests(ConnectingTestCase):
self.assertRaises(ValueError, sql.SQL("select {a:<};").format, a=10) self.assertRaises(ValueError, sql.SQL("select {a:<};").format, a=10)
def test_must_be_adaptable(self): def test_must_be_adaptable(self):
class Foo(object): class Foo:
pass pass
self.assertRaises(psycopg2.ProgrammingError, self.assertRaises(psycopg2.ProgrammingError,
@ -182,7 +174,7 @@ class IdentifierTests(ConnectingTestCase):
def test_init(self): def test_init(self):
self.assert_(isinstance(sql.Identifier('foo'), sql.Identifier)) self.assert_(isinstance(sql.Identifier('foo'), sql.Identifier))
self.assert_(isinstance(sql.Identifier(u'foo'), sql.Identifier)) self.assert_(isinstance(sql.Identifier('foo'), sql.Identifier))
self.assert_(isinstance(sql.Identifier('foo', 'bar', 'baz'), sql.Identifier)) self.assert_(isinstance(sql.Identifier('foo', 'bar', 'baz'), sql.Identifier))
self.assertRaises(TypeError, sql.Identifier) self.assertRaises(TypeError, sql.Identifier)
self.assertRaises(TypeError, sql.Identifier, 10) self.assertRaises(TypeError, sql.Identifier, 10)
@ -231,7 +223,7 @@ class LiteralTests(ConnectingTestCase):
def test_init(self): def test_init(self):
self.assert_(isinstance(sql.Literal('foo'), sql.Literal)) self.assert_(isinstance(sql.Literal('foo'), sql.Literal))
self.assert_(isinstance(sql.Literal(u'foo'), sql.Literal)) self.assert_(isinstance(sql.Literal('foo'), sql.Literal))
self.assert_(isinstance(sql.Literal(b'foo'), sql.Literal)) self.assert_(isinstance(sql.Literal(b'foo'), sql.Literal))
self.assert_(isinstance(sql.Literal(42), sql.Literal)) self.assert_(isinstance(sql.Literal(42), sql.Literal))
self.assert_(isinstance( self.assert_(isinstance(
@ -256,7 +248,7 @@ class LiteralTests(ConnectingTestCase):
self.assert_(sql.Literal('foo') != sql.SQL('foo')) self.assert_(sql.Literal('foo') != sql.SQL('foo'))
def test_must_be_adaptable(self): def test_must_be_adaptable(self):
class Foo(object): class Foo:
pass pass
self.assertRaises(psycopg2.ProgrammingError, self.assertRaises(psycopg2.ProgrammingError,
@ -269,7 +261,7 @@ class SQLTests(ConnectingTestCase):
def test_init(self): def test_init(self):
self.assert_(isinstance(sql.SQL('foo'), sql.SQL)) self.assert_(isinstance(sql.SQL('foo'), sql.SQL))
self.assert_(isinstance(sql.SQL(u'foo'), sql.SQL)) self.assert_(isinstance(sql.SQL('foo'), sql.SQL))
self.assertRaises(TypeError, sql.SQL, 10) self.assertRaises(TypeError, sql.SQL, 10)
self.assertRaises(TypeError, sql.SQL, dt.date(2016, 12, 31)) self.assertRaises(TypeError, sql.SQL, dt.date(2016, 12, 31))

View File

@ -31,7 +31,7 @@ import platform
from . import testutils from . import testutils
import unittest import unittest
from .testutils import PY2, long, text_type, ConnectingTestCase, restore_types from .testutils import ConnectingTestCase, restore_types
from .testutils import skip_if_crdb from .testutils import skip_if_crdb
import psycopg2 import psycopg2
@ -52,15 +52,13 @@ class TypesBasicTests(ConnectingTestCase):
"wrong quoting: " + s) "wrong quoting: " + s)
def testUnicode(self): def testUnicode(self):
s = u"Quote'this\\! ''ok?''" s = "Quote'this\\! ''ok?''"
self.failUnless(self.execute("SELECT %s AS foo", (s,)) == s, self.failUnless(self.execute("SELECT %s AS foo", (s,)) == s,
"wrong unicode quoting: " + s) "wrong unicode quoting: " + s)
def testNumber(self): def testNumber(self):
s = self.execute("SELECT %s AS foo", (1971,)) s = self.execute("SELECT %s AS foo", (1971,))
self.failUnless(s == 1971, "wrong integer quoting: " + str(s)) self.failUnless(s == 1971, "wrong integer quoting: " + str(s))
s = self.execute("SELECT %s AS foo", (long(1971),))
self.failUnless(s == long(1971), "wrong integer quoting: " + str(s))
def testBoolean(self): def testBoolean(self):
x = self.execute("SELECT %s as foo", (False,)) x = self.execute("SELECT %s as foo", (False,))
@ -110,12 +108,6 @@ class TypesBasicTests(ConnectingTestCase):
self.failUnless(str(s) == "-inf", "wrong float quoting: " + str(s)) self.failUnless(str(s) == "-inf", "wrong float quoting: " + str(s))
def testBinary(self): def testBinary(self):
if PY2:
s = ''.join([chr(x) for x in range(256)])
b = psycopg2.Binary(s)
buf = self.execute("SELECT %s::bytea AS foo", (b,))
self.assertEqual(s, str(buf))
else:
s = bytes(range(256)) s = bytes(range(256))
b = psycopg2.Binary(s) b = psycopg2.Binary(s)
buf = self.execute("SELECT %s::bytea AS foo", (b,)) buf = self.execute("SELECT %s::bytea AS foo", (b,))
@ -128,22 +120,12 @@ class TypesBasicTests(ConnectingTestCase):
def testBinaryEmptyString(self): def testBinaryEmptyString(self):
# test to make sure an empty Binary is converted to an empty string # test to make sure an empty Binary is converted to an empty string
if PY2:
b = psycopg2.Binary('')
self.assertEqual(str(b), "''::bytea")
else:
b = psycopg2.Binary(bytes([])) b = psycopg2.Binary(bytes([]))
self.assertEqual(str(b), "''::bytea") self.assertEqual(str(b), "''::bytea")
def testBinaryRoundTrip(self): def testBinaryRoundTrip(self):
# test to make sure buffers returned by psycopg2 are # test to make sure buffers returned by psycopg2 are
# understood by execute: # understood by execute:
if PY2:
s = ''.join([chr(x) for x in range(256)])
buf = self.execute("SELECT %s::bytea AS foo", (psycopg2.Binary(s),))
buf2 = self.execute("SELECT %s::bytea AS foo", (buf,))
self.assertEqual(s, str(buf2))
else:
s = bytes(range(256)) s = bytes(range(256))
buf = self.execute("SELECT %s::bytea AS foo", (psycopg2.Binary(s),)) buf = self.execute("SELECT %s::bytea AS foo", (psycopg2.Binary(s),))
buf2 = self.execute("SELECT %s::bytea AS foo", (buf,)) buf2 = self.execute("SELECT %s::bytea AS foo", (buf,))
@ -227,8 +209,8 @@ class TypesBasicTests(ConnectingTestCase):
curs = self.conn.cursor() curs = self.conn.cursor()
curs.execute("select '{a,b,c}'::text[]") curs.execute("select '{a,b,c}'::text[]")
x = curs.fetchone()[0] x = curs.fetchone()[0]
self.assert_(isinstance(x[0], text_type)) self.assert_(isinstance(x[0], str))
self.assertEqual(x, [u'a', u'b', u'c']) self.assertEqual(x, ['a', 'b', 'c'])
def testBytesArray(self): def testBytesArray(self):
psycopg2.extensions.register_type( psycopg2.extensions.register_type(
@ -291,27 +273,6 @@ class TypesBasicTests(ConnectingTestCase):
curs.execute("select %s::int[]", (a,)) curs.execute("select %s::int[]", (a,))
self.assertEqual(curs.fetchone()[0], a) self.assertEqual(curs.fetchone()[0], a)
@testutils.skip_from_python(3)
def testTypeRoundtripBuffer(self):
o1 = buffer("".join(map(chr, range(256))))
o2 = self.execute("select %s;", (o1,))
self.assertEqual(type(o1), type(o2))
# Test with an empty buffer
o1 = buffer("")
o2 = self.execute("select %s;", (o1,))
self.assertEqual(type(o1), type(o2))
self.assertEqual(str(o1), str(o2))
@testutils.skip_from_python(3)
def testTypeRoundtripBufferArray(self):
o1 = buffer("".join(map(chr, range(256))))
o1 = [o1]
o2 = self.execute("select %s;", (o1,))
self.assertEqual(type(o1[0]), type(o2[0]))
self.assertEqual(str(o1[0]), str(o2[0]))
@testutils.skip_before_python(3)
def testTypeRoundtripBytes(self): def testTypeRoundtripBytes(self):
o1 = bytes(range(256)) o1 = bytes(range(256))
o2 = self.execute("select %s;", (o1,)) o2 = self.execute("select %s;", (o1,))
@ -322,7 +283,6 @@ class TypesBasicTests(ConnectingTestCase):
o2 = self.execute("select %s;", (o1,)) o2 = self.execute("select %s;", (o1,))
self.assertEqual(memoryview, type(o2)) self.assertEqual(memoryview, type(o2))
@testutils.skip_before_python(3)
def testTypeRoundtripBytesArray(self): def testTypeRoundtripBytesArray(self):
o1 = bytes(range(256)) o1 = bytes(range(256))
o1 = [o1] o1 = [o1]
@ -332,12 +292,7 @@ class TypesBasicTests(ConnectingTestCase):
def testAdaptBytearray(self): def testAdaptBytearray(self):
o1 = bytearray(range(256)) o1 = bytearray(range(256))
o2 = self.execute("select %s;", (o1,)) o2 = self.execute("select %s;", (o1,))
if PY2:
self.assertEqual(buffer, type(o2))
else:
self.assertEqual(memoryview, type(o2)) self.assertEqual(memoryview, type(o2))
self.assertEqual(len(o1), len(o2)) self.assertEqual(len(o1), len(o2))
for c1, c2 in zip(o1, o2): for c1, c2 in zip(o1, o2):
self.assertEqual(c1, ord(c2)) self.assertEqual(c1, ord(c2))
@ -345,27 +300,17 @@ class TypesBasicTests(ConnectingTestCase):
# Test with an empty buffer # Test with an empty buffer
o1 = bytearray([]) o1 = bytearray([])
o2 = self.execute("select %s;", (o1,)) o2 = self.execute("select %s;", (o1,))
self.assertEqual(len(o2), 0) self.assertEqual(len(o2), 0)
if PY2:
self.assertEqual(buffer, type(o2))
else:
self.assertEqual(memoryview, type(o2)) self.assertEqual(memoryview, type(o2))
def testAdaptMemoryview(self): def testAdaptMemoryview(self):
o1 = memoryview(bytearray(range(256))) o1 = memoryview(bytearray(range(256)))
o2 = self.execute("select %s;", (o1,)) o2 = self.execute("select %s;", (o1,))
if PY2:
self.assertEqual(buffer, type(o2))
else:
self.assertEqual(memoryview, type(o2)) self.assertEqual(memoryview, type(o2))
# Test with an empty buffer # Test with an empty buffer
o1 = memoryview(bytearray([])) o1 = memoryview(bytearray([]))
o2 = self.execute("select %s;", (o1,)) o2 = self.execute("select %s;", (o1,))
if PY2:
self.assertEqual(buffer, type(o2))
else:
self.assertEqual(memoryview, type(o2)) self.assertEqual(memoryview, type(o2))
def testByteaHexCheckFalsePositive(self): def testByteaHexCheckFalsePositive(self):
@ -382,8 +327,6 @@ class TypesBasicTests(ConnectingTestCase):
self.assertEqual(1, f1) self.assertEqual(1, f1)
i1 = self.execute("select -%s;", (-1,)) i1 = self.execute("select -%s;", (-1,))
self.assertEqual(1, i1) self.assertEqual(1, i1)
l1 = self.execute("select -%s;", (long(-1),))
self.assertEqual(1, l1)
def testGenericArray(self): def testGenericArray(self):
a = self.execute("select '{1, 2, 3}'::int4[]") a = self.execute("select '{1, 2, 3}'::int4[]")
@ -417,7 +360,6 @@ class TypesBasicTests(ConnectingTestCase):
a = self.execute("select '{10:20:30:40:50:60}'::macaddr[]") a = self.execute("select '{10:20:30:40:50:60}'::macaddr[]")
self.assertEqual(a, ['10:20:30:40:50:60']) self.assertEqual(a, ['10:20:30:40:50:60'])
@testutils.skip_before_python(3, 4)
def testIntEnum(self): def testIntEnum(self):
from enum import IntEnum from enum import IntEnum
@ -440,7 +382,7 @@ class AdaptSubclassTest(unittest.TestCase):
@restore_types @restore_types
def test_adapt_most_specific(self): def test_adapt_most_specific(self):
class A(object): class A:
pass pass
class B(A): class B(A):
@ -453,19 +395,6 @@ class AdaptSubclassTest(unittest.TestCase):
register_adapter(B, lambda b: AsIs("b")) register_adapter(B, lambda b: AsIs("b"))
self.assertEqual(b'b', adapt(C()).getquoted()) self.assertEqual(b'b', adapt(C()).getquoted())
@testutils.skip_from_python(3)
@restore_types
def test_no_mro_no_joy(self):
class A:
pass
class B(A):
pass
register_adapter(A, lambda a: AsIs("a"))
self.assertRaises(psycopg2.ProgrammingError, adapt, B())
@testutils.skip_before_python(3)
@restore_types @restore_types
def test_adapt_subtype_3(self): def test_adapt_subtype_3(self):
class A: class A:
@ -512,9 +441,6 @@ class ByteaParserTest(unittest.TestCase):
if rv is None: if rv is None:
return None return None
if PY2:
return str(rv)
else:
return rv.tobytes() return rv.tobytes()
def test_null(self): def test_null(self):
@ -536,9 +462,6 @@ class ByteaParserTest(unittest.TestCase):
buf = buf.upper() buf = buf.upper()
buf = '\\x' + buf buf = '\\x' + buf
rv = self.cast(buf.encode('utf8')) rv = self.cast(buf.encode('utf8'))
if PY2:
self.assertEqual(rv, ''.join(map(chr, range(256))))
else:
self.assertEqual(rv, bytes(range(256))) self.assertEqual(rv, bytes(range(256)))
def test_full_hex_upper(self): def test_full_hex_upper(self):
@ -547,9 +470,6 @@ class ByteaParserTest(unittest.TestCase):
def test_full_escaped_octal(self): def test_full_escaped_octal(self):
buf = ''.join(("\\%03o" % i) for i in range(256)) buf = ''.join(("\\%03o" % i) for i in range(256))
rv = self.cast(buf.encode('utf8')) rv = self.cast(buf.encode('utf8'))
if PY2:
self.assertEqual(rv, ''.join(map(chr, range(256))))
else:
self.assertEqual(rv, bytes(range(256))) self.assertEqual(rv, bytes(range(256)))
def test_escaped_mixed(self): def test_escaped_mixed(self):
@ -558,10 +478,6 @@ class ByteaParserTest(unittest.TestCase):
buf += ''.join('\\' + c for c in string.ascii_letters) buf += ''.join('\\' + c for c in string.ascii_letters)
buf += '\\\\' buf += '\\\\'
rv = self.cast(buf.encode('utf8')) rv = self.cast(buf.encode('utf8'))
if PY2:
tgt = ''.join(map(chr, range(32))) \
+ string.ascii_letters * 2 + '\\'
else:
tgt = bytes(range(32)) + \ tgt = bytes(range(32)) + \
(string.ascii_letters * 2 + '\\').encode('ascii') (string.ascii_letters * 2 + '\\').encode('ascii')

View File

@ -25,8 +25,8 @@ from functools import wraps
from pickle import dumps, loads from pickle import dumps, loads
import unittest import unittest
from .testutils import (PY2, text_type, skip_if_no_uuid, skip_before_postgres, from .testutils import (skip_if_no_uuid, skip_before_postgres,
ConnectingTestCase, py3_raises_typeerror, slow, skip_from_python, ConnectingTestCase, raises_typeerror, slow,
restore_types, skip_if_crdb, crdb_version) restore_types, skip_if_crdb, crdb_version)
import psycopg2 import psycopg2
@ -110,13 +110,13 @@ class TypesExtrasTests(ConnectingTestCase):
self.assertQuotedEqual(a.getquoted(), b"'192.168.1.0/24'::inet") self.assertQuotedEqual(a.getquoted(), b"'192.168.1.0/24'::inet")
# adapts ok with unicode too # adapts ok with unicode too
i = Inet(u"192.168.1.0/24") i = Inet("192.168.1.0/24")
a = psycopg2.extensions.adapt(i) a = psycopg2.extensions.adapt(i)
a.prepare(self.conn) a.prepare(self.conn)
self.assertQuotedEqual(a.getquoted(), b"'192.168.1.0/24'::inet") self.assertQuotedEqual(a.getquoted(), b"'192.168.1.0/24'::inet")
def test_adapt_fail(self): def test_adapt_fail(self):
class Foo(object): class Foo:
pass pass
self.assertRaises(psycopg2.ProgrammingError, self.assertRaises(psycopg2.ProgrammingError,
psycopg2.extensions.adapt, Foo(), ext.ISQLQuote, None) psycopg2.extensions.adapt, Foo(), ext.ISQLQuote, None)
@ -151,7 +151,7 @@ class HstoreTestCase(ConnectingTestCase):
o = {'a': '1', 'b': "'", 'c': None} o = {'a': '1', 'b': "'", 'c': None}
if self.conn.encoding == 'UTF8': if self.conn.encoding == 'UTF8':
o['d'] = u'\xe0' o['d'] = '\xe0'
a = HstoreAdapter(o) a = HstoreAdapter(o)
a.prepare(self.conn) a.prepare(self.conn)
@ -166,7 +166,7 @@ class HstoreTestCase(ConnectingTestCase):
self.assertQuotedEqual(ii[1], b"('b' => '''')") self.assertQuotedEqual(ii[1], b"('b' => '''')")
self.assertQuotedEqual(ii[2], b"('c' => NULL)") self.assertQuotedEqual(ii[2], b"('c' => NULL)")
if 'd' in o: if 'd' in o:
encc = u'\xe0'.encode(psycopg2.extensions.encodings[self.conn.encoding]) encc = '\xe0'.encode(psycopg2.extensions.encodings[self.conn.encoding])
self.assertQuotedEqual(ii[3], b"('d' => '" + encc + b"')") self.assertQuotedEqual(ii[3], b"('d' => '" + encc + b"')")
def test_adapt_9(self): def test_adapt_9(self):
@ -175,7 +175,7 @@ class HstoreTestCase(ConnectingTestCase):
o = {'a': '1', 'b': "'", 'c': None} o = {'a': '1', 'b': "'", 'c': None}
if self.conn.encoding == 'UTF8': if self.conn.encoding == 'UTF8':
o['d'] = u'\xe0' o['d'] = '\xe0'
a = HstoreAdapter(o) a = HstoreAdapter(o)
a.prepare(self.conn) a.prepare(self.conn)
@ -197,7 +197,7 @@ class HstoreTestCase(ConnectingTestCase):
self.assertQuotedEqual(ii[2][0], b"'c'") self.assertQuotedEqual(ii[2][0], b"'c'")
self.assertQuotedEqual(ii[2][1], b"NULL") self.assertQuotedEqual(ii[2][1], b"NULL")
if 'd' in o: if 'd' in o:
encc = u'\xe0'.encode(psycopg2.extensions.encodings[self.conn.encoding]) encc = '\xe0'.encode(psycopg2.extensions.encodings[self.conn.encoding])
self.assertQuotedEqual(ii[3][0], b"'d'") self.assertQuotedEqual(ii[3][0], b"'d'")
self.assertQuotedEqual(ii[3][1], b"'" + encc + b"'") self.assertQuotedEqual(ii[3][1], b"'" + encc + b"'")
@ -249,19 +249,6 @@ class HstoreTestCase(ConnectingTestCase):
self.assertEqual(t[1], {}) self.assertEqual(t[1], {})
self.assertEqual(t[2], {'a': 'b'}) self.assertEqual(t[2], {'a': 'b'})
@skip_if_no_hstore
@skip_from_python(3)
def test_register_unicode(self):
register_hstore(self.conn, unicode=True)
cur = self.conn.cursor()
cur.execute("select null::hstore, ''::hstore, 'a => b'::hstore")
t = cur.fetchone()
self.assert_(t[0] is None)
self.assertEqual(t[1], {})
self.assertEqual(t[2], {u'a': u'b'})
self.assert_(isinstance(t[2].keys()[0], unicode))
self.assert_(isinstance(t[2].values()[0], unicode))
@skip_if_no_hstore @skip_if_no_hstore
@restore_types @restore_types
def test_register_globally(self): def test_register_globally(self):
@ -297,37 +284,11 @@ class HstoreTestCase(ConnectingTestCase):
ok({''.join(ab): ''.join(ab)}) ok({''.join(ab): ''.join(ab)})
self.conn.set_client_encoding('latin1') self.conn.set_client_encoding('latin1')
if PY2:
ab = map(chr, range(32, 127) + range(160, 255))
else:
ab = bytes(list(range(32, 127)) + list(range(160, 255))).decode('latin1') ab = bytes(list(range(32, 127)) + list(range(160, 255))).decode('latin1')
ok({''.join(ab): ''.join(ab)}) ok({''.join(ab): ''.join(ab)})
ok(dict(zip(ab, ab))) ok(dict(zip(ab, ab)))
@skip_if_no_hstore
@skip_from_python(3)
def test_roundtrip_unicode(self):
register_hstore(self.conn, unicode=True)
cur = self.conn.cursor()
def ok(d):
cur.execute("select %s", (d,))
d1 = cur.fetchone()[0]
self.assertEqual(len(d), len(d1))
for k, v in d1.iteritems():
self.assert_(k in d, k)
self.assertEqual(d[k], v)
self.assert_(isinstance(k, unicode))
self.assert_(v is None or isinstance(v, unicode))
ok({})
ok({'a': 'b', 'c': None, 'd': u'\u20ac', u'\u2603': 'e'})
ab = map(unichr, range(1, 1024))
ok({u''.join(ab): u''.join(ab)})
ok(dict(zip(ab, ab)))
@skip_if_no_hstore @skip_if_no_hstore
@restore_types @restore_types
def test_oid(self): def test_oid(self):
@ -356,9 +317,6 @@ class HstoreTestCase(ConnectingTestCase):
ds.append({''.join(ab): ''.join(ab)}) ds.append({''.join(ab): ''.join(ab)})
self.conn.set_client_encoding('latin1') self.conn.set_client_encoding('latin1')
if PY2:
ab = map(chr, range(32, 127) + range(160, 255))
else:
ab = bytes(list(range(32, 127)) + list(range(160, 255))).decode('latin1') ab = bytes(list(range(32, 127)) + list(range(160, 255))).decode('latin1')
ds.append({''.join(ab): ''.join(ab)}) ds.append({''.join(ab): ''.join(ab)})
@ -443,7 +401,7 @@ class AdaptTypeTestCase(ConnectingTestCase):
def test_none_fast_path(self): def test_none_fast_path(self):
# the None adapter is not actually invoked in regular adaptation # the None adapter is not actually invoked in regular adaptation
class WonkyAdapter(object): class WonkyAdapter:
def __init__(self, obj): def __init__(self, obj):
pass pass
@ -753,11 +711,11 @@ class AdaptTypeTestCase(ConnectingTestCase):
def _create_type(self, name, fields): def _create_type(self, name, fields):
curs = self.conn.cursor() curs = self.conn.cursor()
try: try:
curs.execute("drop type %s cascade;" % name) curs.execute(f"drop type {name} cascade;")
except psycopg2.ProgrammingError: except psycopg2.ProgrammingError:
self.conn.rollback() self.conn.rollback()
curs.execute("create type %s as (%s);" % (name, curs.execute("create type {} as ({});".format(name,
", ".join(["%s %s" % p for p in fields]))) ", ".join(["%s %s" % p for p in fields])))
if '.' in name: if '.' in name:
schema, name = name.split('.') schema, name = name.split('.')
@ -792,7 +750,7 @@ def skip_if_no_json_type(f):
class JsonTestCase(ConnectingTestCase): class JsonTestCase(ConnectingTestCase):
def test_adapt(self): def test_adapt(self):
objs = [None, "te'xt", 123, 123.45, objs = [None, "te'xt", 123, 123.45,
u'\xe0\u20ac', ['a', 100], {'a': 100}] '\xe0\u20ac', ['a', 100], {'a': 100}]
curs = self.conn.cursor() curs = self.conn.cursor()
for obj in enumerate(objs): for obj in enumerate(objs):
@ -947,7 +905,7 @@ class JsonTestCase(ConnectingTestCase):
self.assertEqual(data['b'], None) self.assertEqual(data['b'], None)
def test_str(self): def test_str(self):
snowman = u"\u2603" snowman = "\u2603"
obj = {'a': [1, 2, snowman]} obj = {'a': [1, 2, snowman]}
j = psycopg2.extensions.adapt(psycopg2.extras.Json(obj)) j = psycopg2.extensions.adapt(psycopg2.extras.Json(obj))
s = str(j) s = str(j)
@ -1238,9 +1196,9 @@ class RangeTestCase(unittest.TestCase):
self.assert_(not Range() < Range()) self.assert_(not Range() < Range())
self.assert_(not Range(empty=True) < Range(empty=True)) self.assert_(not Range(empty=True) < Range(empty=True))
self.assert_(not Range(1, 2) < Range(1, 2)) self.assert_(not Range(1, 2) < Range(1, 2))
with py3_raises_typeerror(): with raises_typeerror():
self.assert_(1 < Range(1, 2)) self.assert_(1 < Range(1, 2))
with py3_raises_typeerror(): with raises_typeerror():
self.assert_(not Range(1, 2) < 1) self.assert_(not Range(1, 2) < 1)
def test_gt_ordering(self): def test_gt_ordering(self):
@ -1253,9 +1211,9 @@ class RangeTestCase(unittest.TestCase):
self.assert_(not Range() > Range()) self.assert_(not Range() > Range())
self.assert_(not Range(empty=True) > Range(empty=True)) self.assert_(not Range(empty=True) > Range(empty=True))
self.assert_(not Range(1, 2) > Range(1, 2)) self.assert_(not Range(1, 2) > Range(1, 2))
with py3_raises_typeerror(): with raises_typeerror():
self.assert_(not 1 > Range(1, 2)) self.assert_(not 1 > Range(1, 2))
with py3_raises_typeerror(): with raises_typeerror():
self.assert_(Range(1, 2) > 1) self.assert_(Range(1, 2) > 1)
def test_le_ordering(self): def test_le_ordering(self):
@ -1268,9 +1226,9 @@ class RangeTestCase(unittest.TestCase):
self.assert_(Range() <= Range()) self.assert_(Range() <= Range())
self.assert_(Range(empty=True) <= Range(empty=True)) self.assert_(Range(empty=True) <= Range(empty=True))
self.assert_(Range(1, 2) <= Range(1, 2)) self.assert_(Range(1, 2) <= Range(1, 2))
with py3_raises_typeerror(): with raises_typeerror():
self.assert_(1 <= Range(1, 2)) self.assert_(1 <= Range(1, 2))
with py3_raises_typeerror(): with raises_typeerror():
self.assert_(not Range(1, 2) <= 1) self.assert_(not Range(1, 2) <= 1)
def test_ge_ordering(self): def test_ge_ordering(self):
@ -1283,9 +1241,9 @@ class RangeTestCase(unittest.TestCase):
self.assert_(Range() >= Range()) self.assert_(Range() >= Range())
self.assert_(Range(empty=True) >= Range(empty=True)) self.assert_(Range(empty=True) >= Range(empty=True))
self.assert_(Range(1, 2) >= Range(1, 2)) self.assert_(Range(1, 2) >= Range(1, 2))
with py3_raises_typeerror(): with raises_typeerror():
self.assert_(not 1 >= Range(1, 2)) self.assert_(not 1 >= Range(1, 2))
with py3_raises_typeerror(): with raises_typeerror():
self.assert_(Range(1, 2) >= 1) self.assert_(Range(1, 2) >= 1)
def test_pickling(self): def test_pickling(self):
@ -1303,20 +1261,20 @@ class RangeTestCase(unittest.TestCase):
# Using the "u" prefix to make sure we have the proper return types in # Using the "u" prefix to make sure we have the proper return types in
# Python2 # Python2
expected = [ expected = [
u'(0, 4)', '(0, 4)',
u'[0, 4]', '[0, 4]',
u'(0, 4]', '(0, 4]',
u'[0, 4)', '[0, 4)',
u'empty', 'empty',
] ]
results = [] results = []
for bounds in ('()', '[]', '(]', '[)'): for bounds in ('()', '[]', '(]', '[)'):
r = Range(0, 4, bounds=bounds) r = Range(0, 4, bounds=bounds)
results.append(text_type(r)) results.append(str(r))
r = Range(empty=True) r = Range(empty=True)
results.append(text_type(r)) results.append(str(r))
self.assertEqual(results, expected) self.assertEqual(results, expected)
def test_str_datetime(self): def test_str_datetime(self):
@ -1327,8 +1285,8 @@ class RangeTestCase(unittest.TestCase):
tz = FixedOffsetTimezone(-5 * 60, "EST") tz = FixedOffsetTimezone(-5 * 60, "EST")
r = DateTimeTZRange(datetime(2010, 1, 1, tzinfo=tz), r = DateTimeTZRange(datetime(2010, 1, 1, tzinfo=tz),
datetime(2011, 1, 1, tzinfo=tz)) datetime(2011, 1, 1, tzinfo=tz))
expected = u'[2010-01-01 00:00:00-05:00, 2011-01-01 00:00:00-05:00)' expected = '[2010-01-01 00:00:00-05:00, 2011-01-01 00:00:00-05:00)'
result = text_type(r) result = str(r)
self.assertEqual(result, expected) self.assertEqual(result, expected)
@ -1342,14 +1300,14 @@ class RangeCasterTestCase(ConnectingTestCase):
def test_cast_null(self): def test_cast_null(self):
cur = self.conn.cursor() cur = self.conn.cursor()
for type in self.builtin_ranges: for type in self.builtin_ranges:
cur.execute("select NULL::%s" % type) cur.execute(f"select NULL::{type}")
r = cur.fetchone()[0] r = cur.fetchone()[0]
self.assertEqual(r, None) self.assertEqual(r, None)
def test_cast_empty(self): def test_cast_empty(self):
cur = self.conn.cursor() cur = self.conn.cursor()
for type in self.builtin_ranges: for type in self.builtin_ranges:
cur.execute("select 'empty'::%s" % type) cur.execute(f"select 'empty'::{type}")
r = cur.fetchone()[0] r = cur.fetchone()[0]
self.assert_(isinstance(r, Range), type) self.assert_(isinstance(r, Range), type)
self.assert_(r.isempty) self.assert_(r.isempty)
@ -1357,7 +1315,7 @@ class RangeCasterTestCase(ConnectingTestCase):
def test_cast_inf(self): def test_cast_inf(self):
cur = self.conn.cursor() cur = self.conn.cursor()
for type in self.builtin_ranges: for type in self.builtin_ranges:
cur.execute("select '(,)'::%s" % type) cur.execute(f"select '(,)'::{type}")
r = cur.fetchone()[0] r = cur.fetchone()[0]
self.assert_(isinstance(r, Range), type) self.assert_(isinstance(r, Range), type)
self.assert_(not r.isempty) self.assert_(not r.isempty)
@ -1367,7 +1325,7 @@ class RangeCasterTestCase(ConnectingTestCase):
def test_cast_numbers(self): def test_cast_numbers(self):
cur = self.conn.cursor() cur = self.conn.cursor()
for type in ('int4range', 'int8range'): for type in ('int4range', 'int8range'):
cur.execute("select '(10,20)'::%s" % type) cur.execute(f"select '(10,20)'::{type}")
r = cur.fetchone()[0] r = cur.fetchone()[0]
self.assert_(isinstance(r, NumericRange)) self.assert_(isinstance(r, NumericRange))
self.assert_(not r.isempty) self.assert_(not r.isempty)

View File

@ -117,7 +117,7 @@ class WithConnectionTestCase(WithTestCase):
class MyConn(ext.connection): class MyConn(ext.connection):
def commit(self): def commit(self):
commits.append(None) commits.append(None)
super(MyConn, self).commit() super().commit()
with self.connect(connection_factory=MyConn) as conn: with self.connect(connection_factory=MyConn) as conn:
curs = conn.cursor() curs = conn.cursor()
@ -136,7 +136,7 @@ class WithConnectionTestCase(WithTestCase):
class MyConn(ext.connection): class MyConn(ext.connection):
def rollback(self): def rollback(self):
rollbacks.append(None) rollbacks.append(None)
super(MyConn, self).rollback() super().rollback()
try: try:
with self.connect(connection_factory=MyConn) as conn: with self.connect(connection_factory=MyConn) as conn:
@ -195,7 +195,7 @@ class WithCursorTestCase(WithTestCase):
class MyCurs(ext.cursor): class MyCurs(ext.cursor):
def close(self): def close(self):
closes.append(None) closes.append(None)
super(MyCurs, self).close() super().close()
with self.conn.cursor(cursor_factory=MyCurs) as curs: with self.conn.cursor(cursor_factory=MyCurs) as curs:
self.assert_(isinstance(curs, MyCurs)) self.assert_(isinstance(curs, MyCurs))

View File

@ -23,15 +23,15 @@ if green:
psycopg2.extensions.set_wait_callback(wait_callback) psycopg2.extensions.set_wait_callback(wait_callback)
# Construct a DSN to connect to the test database: # Construct a DSN to connect to the test database:
dsn = 'dbname=%s' % dbname dsn = f'dbname={dbname}'
if dbhost is not None: if dbhost is not None:
dsn += ' host=%s' % dbhost dsn += f' host={dbhost}'
if dbport is not None: if dbport is not None:
dsn += ' port=%s' % dbport dsn += f' port={dbport}'
if dbuser is not None: if dbuser is not None:
dsn += ' user=%s' % dbuser dsn += f' user={dbuser}'
if dbpass is not None: if dbpass is not None:
dsn += ' password=%s' % dbpass dsn += f' password={dbpass}'
# Don't run replication tests if REPL_DSN is not set, default to normal DSN if # Don't run replication tests if REPL_DSN is not set, default to normal DSN if
# set to empty string. # set to empty string.

View File

@ -34,32 +34,16 @@ import platform
import unittest import unittest
from functools import wraps from functools import wraps
from ctypes.util import find_library from ctypes.util import find_library
from io import StringIO # noqa
from io import TextIOBase # noqa
from importlib import reload # noqa
import psycopg2 import psycopg2
import psycopg2.errors import psycopg2.errors
import psycopg2.extensions import psycopg2.extensions
from psycopg2.compat import PY2, PY3, string_types, text_type
from .testconfig import green, dsn, repl_dsn from .testconfig import green, dsn, repl_dsn
# Python 2/3 compatibility
if PY2:
# Python 2
from StringIO import StringIO
TextIOBase = object
long = long
reload = reload
unichr = unichr
else:
# Python 3
from io import StringIO # noqa
from io import TextIOBase # noqa
from importlib import reload # noqa
long = int
unichr = chr
# Silence warnings caused by the stubbornness of the Python unittest # Silence warnings caused by the stubbornness of the Python unittest
# maintainers # maintainers
@ -102,7 +86,7 @@ class ConnectingTestCase(unittest.TestCase):
def assertQuotedEqual(self, first, second, msg=None): def assertQuotedEqual(self, first, second, msg=None):
"""Compare two quoted strings disregarding eventual E'' quotes""" """Compare two quoted strings disregarding eventual E'' quotes"""
def f(s): def f(s):
if isinstance(s, text_type): if isinstance(s, str):
return re.sub(r"\bE'", "'", s) return re.sub(r"\bE'", "'", s)
elif isinstance(first, bytes): elif isinstance(first, bytes):
return re.sub(br"\bE'", b"'", s) return re.sub(br"\bE'", b"'", s)
@ -116,8 +100,7 @@ class ConnectingTestCase(unittest.TestCase):
self._conns self._conns
except AttributeError as e: except AttributeError as e:
raise AttributeError( raise AttributeError(
"%s (did you forget to call ConnectingTestCase.setUp()?)" f"{e} (did you forget to call ConnectingTestCase.setUp()?)")
% e)
if 'dsn' in kwargs: if 'dsn' in kwargs:
conninfo = kwargs.pop('dsn') conninfo = kwargs.pop('dsn')
@ -150,7 +133,7 @@ class ConnectingTestCase(unittest.TestCase):
# Otherwise we tried to run some bad operation in the connection # Otherwise we tried to run some bad operation in the connection
# (e.g. bug #482) and we'd rather know that. # (e.g. bug #482) and we'd rather know that.
if e.pgcode is None: if e.pgcode is None:
return self.skipTest("replication db not configured: %s" % e) return self.skipTest(f"replication db not configured: {e}")
else: else:
raise raise
@ -326,7 +309,7 @@ def skip_before_libpq(*ver):
v = libpq_version() v = libpq_version()
decorator = unittest.skipIf( decorator = unittest.skipIf(
v < int("%d%02d%02d" % ver), v < int("%d%02d%02d" % ver),
"skipped because libpq %d" % v, f"skipped because libpq {v}",
) )
return decorator(cls) return decorator(cls)
return skip_before_libpq_ return skip_before_libpq_
@ -340,7 +323,7 @@ def skip_after_libpq(*ver):
v = libpq_version() v = libpq_version()
decorator = unittest.skipIf( decorator = unittest.skipIf(
v >= int("%d%02d%02d" % ver), v >= int("%d%02d%02d" % ver),
"skipped because libpq %s" % v, f"skipped because libpq {v}",
) )
return decorator(cls) return decorator(cls)
return skip_after_libpq_ return skip_after_libpq_
@ -351,8 +334,7 @@ def skip_before_python(*ver):
def skip_before_python_(cls): def skip_before_python_(cls):
decorator = unittest.skipIf( decorator = unittest.skipIf(
sys.version_info[:len(ver)] < ver, sys.version_info[:len(ver)] < ver,
"skipped because Python %s" f"skipped because Python {'.'.join(map(str, sys.version_info[:len(ver)]))}",
% ".".join(map(str, sys.version_info[:len(ver)])),
) )
return decorator(cls) return decorator(cls)
return skip_before_python_ return skip_before_python_
@ -363,8 +345,7 @@ def skip_from_python(*ver):
def skip_from_python_(cls): def skip_from_python_(cls):
decorator = unittest.skipIf( decorator = unittest.skipIf(
sys.version_info[:len(ver)] >= ver, sys.version_info[:len(ver)] >= ver,
"skipped because Python %s" f"skipped because Python {'.'.join(map(str, sys.version_info[:len(ver)]))}",
% ".".join(map(str, sys.version_info[:len(ver)])),
) )
return decorator(cls) return decorator(cls)
return skip_from_python_ return skip_from_python_
@ -431,7 +412,7 @@ def crdb_version(conn, __crdb_version=[]):
m = re.search(r"\bv(\d+)\.(\d+)\.(\d+)", sver) m = re.search(r"\bv(\d+)\.(\d+)\.(\d+)", sver)
if not m: if not m:
raise ValueError( raise ValueError(
"can't parse CockroachDB version from %s" % sver) f"can't parse CockroachDB version from {sver}")
ver = int(m.group(1)) * 10000 + int(m.group(2)) * 100 + int(m.group(3)) ver = int(m.group(1)) * 10000 + int(m.group(2)) * 100 + int(m.group(3))
__crdb_version.append(ver) __crdb_version.append(ver)
@ -454,8 +435,8 @@ def skip_if_crdb(reason, conn=None, version=None):
"== 20.1.3": the test will be skipped only if the version matches. "== 20.1.3": the test will be skipped only if the version matches.
""" """
if not isinstance(reason, string_types): if not isinstance(reason, str):
raise TypeError("reason should be a string, got %r instead" % reason) raise TypeError(f"reason should be a string, got {reason!r} instead")
if conn is not None: if conn is not None:
ver = crdb_version(conn) ver = crdb_version(conn)
@ -465,7 +446,7 @@ def skip_if_crdb(reason, conn=None, version=None):
"%s (https://github.com/cockroachdb/cockroach/issues/%s)" "%s (https://github.com/cockroachdb/cockroach/issues/%s)"
% (reason, crdb_reasons[reason])) % (reason, crdb_reasons[reason]))
raise unittest.SkipTest( raise unittest.SkipTest(
"not supported on CockroachDB %s: %s" % (ver, reason)) f"not supported on CockroachDB {ver}: {reason}")
@decorate_all_tests @decorate_all_tests
def skip_if_crdb_(f): def skip_if_crdb_(f):
@ -519,12 +500,11 @@ def _crdb_match_version(version, pattern):
return op(version, ref) return op(version, ref)
class py3_raises_typeerror(object): class raises_typeerror:
def __enter__(self): def __enter__(self):
pass pass
def __exit__(self, type, exc, tb): def __exit__(self, type, exc, tb):
if PY3:
assert type is TypeError assert type is TypeError
return True return True

View File

@ -1,5 +1,5 @@
[tox] [tox]
envlist = py{27,36,37,38,39} envlist = py{36,37,38,39}
[testenv] [testenv]
commands = make check commands = make check