Merge branch 'master' into rm-2.7

This commit is contained in:
Daniele Varrazzo 2021-05-20 16:36:55 +02:00
commit 8f40c648af
16 changed files with 76 additions and 24 deletions

8
NEWS
View File

@@ -10,6 +10,14 @@ What's new in psycopg 2.9
`~psycopg2.errors.DatabaseError`) (:ticket:`#1148`).
What's new in psycopg 2.8.7
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Accept empty params as `~psycopg2.connect()` (:ticket:`#1250`).
- Fix attributes refcount in `Column` initialisation (:ticket:`#1252`).
- Allow re-initialisation of static variables in the C module (:ticket:`#1267`).
What's new in psycopg 2.8.6
^^^^^^^^^^^^^^^^^^^^^^^^^^^

View File

@@ -124,7 +124,7 @@ The ``cursor`` class
.. attribute:: name
Read-only attribute containing the name of the cursor if it was
creates as named cursor by `connection.cursor()`, or `!None` if
created as named cursor by `connection.cursor()`, or `!None` if
it is a client side cursor. See :ref:`server-side-cursors`.
.. extension::

View File

@@ -445,7 +445,9 @@ The individual messages in the replication stream are represented by
If the *reply* or *force* parameters are not set, this method will
just update internal structures without sending the feedback message
to the server. The library sends feedback message automatically
when *status_interval* timeout is reached.
when *status_interval* timeout is reached. For this to work, you must
call `send_feedback()` on the same Cursor that you called `start_replication()`
on (the one in `message.cursor`) or your feedback will be lost.
.. versionchanged:: 2.8.3
added the *force* parameter.

View File

@@ -180,7 +180,7 @@ Psycopg automatically converts PostgreSQL :sql:`json` data into Python objects.
Psycopg converts :sql:`json` values into Python objects but :sql:`jsonb` values are returned as strings. Can :sql:`jsonb` be converted automatically?
Automatic conversion of :sql:`jsonb` values is supported from Psycopg
release 2.5.4. For previous versions you can register the :sql:`json`
typecaster on the :sql:`jsonb` oids (which are known and not suppsed to
typecaster on the :sql:`jsonb` oids (which are known and not supposed to
change in future PostgreSQL versions)::
psycopg2.extras.register_json(oid=3802, array_oid=3807, globally=True)

View File

@@ -120,9 +120,6 @@ def connect(dsn=None, connection_factory=None, cursor_factory=None, **kwargs):
if 'async_' in kwargs:
kwasync['async_'] = kwargs.pop('async_')
if dsn is None and not kwargs:
raise TypeError('missing dsn and no parameters')
dsn = _ext.make_dsn(dsn, **kwargs)
conn = _connect(dsn, connection_factory=connection_factory, **kwasync)
if cursor_factory is not None:

View File

@@ -97,17 +97,36 @@ column_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
static int
column_init(columnObject *self, PyObject *args, PyObject *kwargs)
{
PyObject *name = NULL;
PyObject *type_code = NULL;
PyObject *display_size = NULL;
PyObject *internal_size = NULL;
PyObject *precision = NULL;
PyObject *scale = NULL;
PyObject *null_ok = NULL;
PyObject *table_oid = NULL;
PyObject *table_column = NULL;
static char *kwlist[] = {
"name", "type_code", "display_size", "internal_size",
"precision", "scale", "null_ok", "table_oid", "table_column", NULL};
if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|OOOOOOOOO", kwlist,
&self->name, &self->type_code, &self->display_size,
&self->internal_size, &self->precision, &self->scale,
&self->null_ok, &self->table_oid, &self->table_column)) {
&name, &type_code, &display_size, &internal_size, &precision,
&scale, &null_ok, &table_oid, &table_column)) {
return -1;
}
Py_XINCREF(name); self->name = name;
Py_XINCREF(type_code); self->type_code = type_code;
Py_XINCREF(display_size); self->display_size = display_size;
Py_XINCREF(internal_size); self->internal_size = internal_size;
Py_XINCREF(precision); self->precision = precision;
Py_XINCREF(scale); self->scale = scale;
Py_XINCREF(null_ok); self->null_ok = null_ok;
Py_XINCREF(table_oid); self->table_oid = table_oid;
Py_XINCREF(table_column); self->table_column = table_column;
return 0;
}

View File

@@ -768,7 +768,7 @@ curs_fetchone(cursorObject *self, PyObject *dummy)
Dprintf("curs_fetchone: rowcount = %ld", self->rowcount);
if (self->row >= self->rowcount) {
/* we exausted available data: return None */
/* we exhausted available data: return None */
Py_RETURN_NONE;
}

View File

@@ -592,6 +592,10 @@ encodings_init(PyObject *module)
int rv = -1;
Dprintf("psycopgmodule: initializing encodings table");
if (psycoEncodings) {
Dprintf("encodings_init(): already called");
return 0;
}
if (!(psycoEncodings = PyDict_New())) { goto exit; }
Py_INCREF(psycoEncodings);
@@ -754,9 +758,8 @@ sqlstate_errors_init(PyObject *module)
Dprintf("psycopgmodule: initializing sqlstate exceptions");
if (sqlstate_errors) {
PyErr_SetString(PyExc_SystemError,
"sqlstate_errors_init(): already called");
goto exit;
Dprintf("sqlstate_errors_init(): already called");
return 0;
}
if (!(errmodule = PyImport_ImportModule("psycopg2.errors"))) {
/* don't inject the exceptions into the errors module */

View File

@@ -598,7 +598,7 @@ xid_recover(PyObject *conn)
/* curs = conn.cursor()
* (sort of. Use the real cursor in case the connection returns
* somenthing non-dbapi -- see ticket #114) */
* something non-dbapi -- see ticket #114) */
if (!(curs = PyObject_CallFunctionObjArgs(
(PyObject *)&cursorType, conn, NULL))) { goto exit; }

View File

@@ -76,7 +76,7 @@ def parse_errors_txt(url):
m = re.match(r"(.....)\s+(?:E|W|S)\s+ERRCODE_(\S+)(?:\s+(\S+))?$", line)
if m:
errcode, macro, spec = m.groups()
# skip errcodes without specs as they are not publically visible
# skip errcodes without specs as they are not publicly visible
if not spec:
continue
errlabel = spec.upper()

View File

@@ -60,7 +60,7 @@ def parse_errors_txt(url):
m = re.match(r"(.....)\s+(?:E|W|S)\s+ERRCODE_(\S+)(?:\s+(\S+))?$", line)
if m:
errcode, macro, spec = m.groups()
# skip errcodes without specs as they are not publically visible
# skip errcodes without specs as they are not publicly visible
if not spec:
continue
errlabel = spec.upper()

View File

@@ -64,7 +64,7 @@ import sys
# - self.populate is now self._populate(), so if a driver stub
# overrides self.ddl1 this change propagates
# - VARCHAR columns now have a width, which will hopefully make the
# DDL even more portible (this will be reversed if it causes more problems)
# DDL even more portable (this will be reversed if it causes more problems)
# - cursor.rowcount being checked after various execute and fetchXXX methods
# - Check for fetchall and fetchmany returning empty lists after results
# are exhausted (already checking for empty lists if select retrieved
@@ -76,7 +76,7 @@ class DatabaseAPI20Test(unittest.TestCase):
''' Test a database self.driver for DB API 2.0 compatibility.
This implementation tests Gadfly, but the TestCase
is structured so that other self.drivers can subclass this
test case to ensure compiliance with the DB-API. It is
test case to ensure compliance with the DB-API. It is
expected that this TestCase may be expanded in the future
if ambiguities or edge conditions are discovered.

View File

@@ -1238,7 +1238,7 @@ class ConnectionTwoPhaseTests(ConnectingTestCase):
def test_xid_unicode_unparsed(self):
# We don't expect people shooting snowmen as transaction ids,
# so if something explodes in an encode error I don't mind.
# Let's just check uniconde is accepted as type.
# Let's just check unicode is accepted as type.
cnn = self.connect()
cnn.set_client_encoding('utf8')
cnn.tpc_begin("transaction-id")
@@ -1513,6 +1513,7 @@ class TestEncryptPassword(ConnectingTestCase):
scope=self.conn, algorithm='md5'),
'md594839d658c28a357126f105b9cb14cfc')
@skip_if_crdb("password_encryption")
@skip_before_libpq(10)
@skip_before_postgres(10)
def test_encrypt_server(self):

View File

@@ -249,6 +249,16 @@ class CursorTests(ConnectingTestCase):
self.assertEqual(description, unpickled)
def test_column_refcount(self):
# Reproduce crash describe in ticket #1252
from psycopg2.extensions import Column
def do_stuff():
_ = Column(name='my_column')
for _ in range(1000):
do_stuff()
def test_bad_subclass(self):
# check that we get an error message instead of a segfault
# for badly written subclasses.

View File

@@ -50,12 +50,22 @@ class ConnectTestCase(unittest.TestCase):
def tearDown(self):
psycopg2._connect = self._connect_orig
def test_there_has_to_be_something(self):
self.assertRaises(TypeError, psycopg2.connect)
self.assertRaises(TypeError, psycopg2.connect,
def test_there_might_be_nothing(self):
psycopg2.connect()
self.assertEqual(self.args[0], '')
self.assertEqual(self.args[1], None)
self.assertEqual(self.args[2], False)
psycopg2.connect(
connection_factory=lambda dsn, async_=False: None)
self.assertRaises(TypeError, psycopg2.connect,
async_=True)
self.assertEqual(self.args[0], '')
self.assertNotEqual(self.args[1], None)
self.assertEqual(self.args[2], False)
psycopg2.connect(async_=True)
self.assertEqual(self.args[0], '')
self.assertEqual(self.args[1], None)
self.assertEqual(self.args[2], True)
def test_no_keywords(self):
psycopg2.connect('')
@@ -280,6 +290,7 @@ class ExceptionsTestCase(ConnectingTestCase):
self.assertEqual(e.diag.constraint_name, "chk_eq1")
self.assertEqual(e.diag.datatype_name, None)
@skip_if_crdb("diagnostic")
@skip_before_postgres(9, 6)
def test_9_6_diagnostics(self):
cur = self.conn.cursor()

View File

@@ -478,6 +478,7 @@ crdb_reasons = {
"named cursor": 41412,
"nested array": 32552,
"notify": 41522,
"password_encryption": 42519,
"range": 41282,
"stored procedure": 1751,
}