From 077328c1a213474706038d02f0ecc886dc499316 Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Sun, 1 Jan 2017 16:01:09 +0100 Subject: [PATCH 01/21] Using the new name of the old doc template --- doc/src/_static/psycopg.css | 2 +- doc/src/conf.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/src/_static/psycopg.css b/doc/src/_static/psycopg.css index a5d5b3a6..7d7bf786 100644 --- a/doc/src/_static/psycopg.css +++ b/doc/src/_static/psycopg.css @@ -1,4 +1,4 @@ -@import url("default.css"); +@import url("classic.css"); blockquote { font-style: italic; diff --git a/doc/src/conf.py b/doc/src/conf.py index a918c08c..9e73308b 100644 --- a/doc/src/conf.py +++ b/doc/src/conf.py @@ -127,7 +127,7 @@ rst_epilog = """ # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. -html_theme = 'default' +html_theme = 'classic' # The stylesheet to use with HTML output: this will include the original one # adding a few classes. From 651f1b6c97af8b82945a65bb46de4e9f4faf2438 Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Tue, 3 Jan 2017 19:12:44 +0100 Subject: [PATCH 02/21] Consider Python 3.6 and Postgres 9.6 supported --- .travis.yml | 2 +- doc/src/install.rst | 4 ++-- setup.py | 1 + 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 10411637..4d558f17 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,7 +6,7 @@ language: python python: - 2.7 - - 3.6-dev + - 3.6 - 2.6 - 3.5 - 3.4 diff --git a/doc/src/install.rst b/doc/src/install.rst index 4611537e..b3645514 100644 --- a/doc/src/install.rst +++ b/doc/src/install.rst @@ -18,8 +18,8 @@ The current `!psycopg2` implementation supports: NOTE: keep consistent with setup.py and the /features/ page. - Python 2 versions from 2.6 to 2.7 -- Python 3 versions from 3.1 to 3.5 -- PostgreSQL server versions from 7.4 to 9.5 +- Python 3 versions from 3.1 to 3.6 +- PostgreSQL server versions from 7.4 to 9.6 - PostgreSQL client library version from 9.1 .. 
_PostgreSQL: http://www.postgresql.org/ diff --git a/setup.py b/setup.py index c1065258..1c5c8597 100644 --- a/setup.py +++ b/setup.py @@ -79,6 +79,7 @@ Programming Language :: Python :: 3.2 Programming Language :: Python :: 3.3 Programming Language :: Python :: 3.4 Programming Language :: Python :: 3.5 +Programming Language :: Python :: 3.6 Programming Language :: C Programming Language :: SQL Topic :: Database From 8341792c5bb8d47408f72afc5ae128c53e17fbf4 Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Wed, 4 Jan 2017 04:39:53 +0100 Subject: [PATCH 03/21] Added script to create manylinux1 wheels See issue #425 --- .gitignore | 1 + scripts/wheels-build.sh | 47 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+) create mode 100755 scripts/wheels-build.sh diff --git a/.gitignore b/.gitignore index a017eb3e..409bb3a7 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,4 @@ scripts/pypi_docs_upload.py env .tox /rel +/wheels diff --git a/scripts/wheels-build.sh b/scripts/wheels-build.sh new file mode 100755 index 00000000..ede3024b --- /dev/null +++ b/scripts/wheels-build.sh @@ -0,0 +1,47 @@ +#!/bin/bash + +# Create manylinux1 wheels for psycopg2 +# +# Run this script with something like: +# +# docker run -t --rm -v `pwd`:/psycopg quay.io/pypa/manylinux1_x86_64 /psycopg/scripts/wheels-build.sh +# docker run -t --rm -v `pwd`:/psycopg quay.io/pypa/manylinux1_i686 linux32 /psycopg/scripts/wheels-build.sh +# +# (Note: -t is requrired for sudo) + +set -e -x + +# Install postgres packages for build and testing +# This doesn't work: +# rpm -Uvh "http://yum.postgresql.org/9.5/redhat/rhel-5-x86_64/pgdg-redhat95-9.5-3.noarch.rpm" +wget -O "/tmp/pgdg.rpm" "https://download.postgresql.org/pub/repos/yum/9.5/redhat/rhel-5-x86_64/pgdg-centos95-9.5-3.noarch.rpm" +rpm -Uvh "/tmp/pgdg.rpm" +yum install -y postgresql95-devel postgresql95-server sudo + +# Make pg_config available +export PGPATH=/usr/pgsql-9.5/bin/ +export PATH="$PGPATH:$PATH" + +# Create the wheel packages +for PYBIN in /opt/python/*/bin; do + "${PYBIN}/pip" wheel /psycopg/ -w wheels/ +done + +# Bundle external shared libraries into the wheels +for WHL in wheels/*.whl; do + auditwheel repair "$WHL" -w /psycopg/wheels +done + +# Create a test cluster +/usr/bin/sudo -u postgres "$PGPATH/initdb" -D /var/lib/pgsql/9.5/data/ +/usr/bin/sudo -u postgres "$PGPATH/pg_ctl" -D /var/lib/pgsql/9.5/data/ start +sleep 5 # wait server started +/usr/bin/sudo -u postgres "$PGPATH/createdb" psycopg2_test + +export PSYCOPG2_TESTDB_USER=postgres + +# Install packages and test +for PYBIN in /opt/python/*/bin; do + "${PYBIN}/pip" install psycopg2 --no-index -f /psycopg/wheels + "${PYBIN}/python" -c "from psycopg2 import tests; tests.unittest.main(defaultTest='tests.test_suite')" +done From 9ffb61214cbb7b389cfb0bc2bcc6d0790f426672 Mon Sep 17 00:00:00 2001 From: NotSqrt Date: Wed, 4 Jan 2017 09:45:53 +0100 Subject: [PATCH 04/21] Fix DeprecationWarning: generator '__iter__' raised StopIteration Closes #498 --- lib/extras.py | 44 +++++++++++++++++++++++++------------------- 1 file changed, 25 insertions(+), 19 deletions(-) diff --git a/lib/extras.py b/lib/extras.py index b59a2c76..c1d15670 100644 --- a/lib/extras.py +++ b/lib/extras.py @@ -106,18 +106,21 @@ class DictCursorBase(_cursor): return res def __iter__(self): - if self._prefetch: - res = super(DictCursorBase, self).__iter__() - first = res.next() - if self._query_executed: - self._build_index() - if not self._prefetch: - res = super(DictCursorBase, self).__iter__() - first = 
res.next() + try: + if self._prefetch: + res = super(DictCursorBase, self).__iter__() + first = res.next() + if self._query_executed: + self._build_index() + if not self._prefetch: + res = super(DictCursorBase, self).__iter__() + first = res.next() - yield first - while 1: - yield res.next() + yield first + while 1: + yield res.next() + except StopIteration: + return class DictConnection(_connection): @@ -343,17 +346,20 @@ class NamedTupleCursor(_cursor): return map(nt._make, ts) def __iter__(self): - it = super(NamedTupleCursor, self).__iter__() - t = it.next() + try: + it = super(NamedTupleCursor, self).__iter__() + t = it.next() - nt = self.Record - if nt is None: - nt = self.Record = self._make_nt() + nt = self.Record + if nt is None: + nt = self.Record = self._make_nt() - yield nt._make(t) + yield nt._make(t) - while 1: - yield nt._make(it.next()) + while 1: + yield nt._make(it.next()) + except StopIteration: + return try: from collections import namedtuple From a95fd3df1abc0282f1c47fa2170191f037c3c8de Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Wed, 1 Feb 2017 01:59:47 +0000 Subject: [PATCH 05/21] Added execute_batch and execute_values functions --- NEWS | 6 ++ doc/src/cursor.rst | 17 +++-- doc/src/extras.rst | 25 ++++++++ lib/extras.py | 80 +++++++++++++++++++++++ tests/test_types_extras.py | 126 +++++++++++++++++++++++++++++++++++++ 5 files changed, 248 insertions(+), 6 deletions(-) diff --git a/NEWS b/NEWS index b4d11e64..a0f1810e 100644 --- a/NEWS +++ b/NEWS @@ -27,6 +27,12 @@ New features: - `~cursor.callproc()` now accepts a dictionary of parameters (:ticket:`#381`). - Using Python C API decoding functions and codecs caching for faster unicode encoding/decoding (:ticket:`#473`). +- `~cursor.executemany()` slowness addressed by + `~psycopg2.extras.execute_batch()` and `~psycopg2.extras.execute_values()` + (:ticket:`#491`). + +Bug fixes: + - Fixed error caused by missing decoding `~psycopg2.extras.LoggingConnection` (:ticket:`#483`). diff --git a/doc/src/cursor.rst b/doc/src/cursor.rst index aee6b465..4161f2a1 100644 --- a/doc/src/cursor.rst +++ b/doc/src/cursor.rst @@ -172,33 +172,38 @@ The ``cursor`` class .. method:: execute(operation [, parameters]) - + Prepare and execute a database operation (query or command). Parameters may be provided as sequence or mapping and will be bound to variables in the operation. Variables are specified either with positional (``%s``) or named (:samp:`%({name})s`) placeholders. See :ref:`query-parameters`. - + The method returns `!None`. If a query was executed, the returned values can be retrieved using |fetch*|_ methods. .. method:: executemany(operation, seq_of_parameters) - + Prepare a database operation (query or command) and then execute it against all parameter tuples or mappings found in the sequence `seq_of_parameters`. - + The function is mostly useful for commands that update the database: any result set returned by the query is discarded. - + Parameters are bounded to the query using the same rules described in the `~cursor.execute()` method. + .. warning:: + In its current implementation this method is not faster than + executing `~cursor.execute()` in a loop. For better performance + you can use the functions described in :ref:`fast-exec`. + .. method:: callproc(procname [, parameters]) - + Call a stored database procedure with the given name. The sequence of parameters must contain one entry for each argument that the procedure expects. Overloaded procedures are supported. 
Named parameters can be diff --git a/doc/src/extras.rst b/doc/src/extras.rst index d33b8eed..66be5902 100644 --- a/doc/src/extras.rst +++ b/doc/src/extras.rst @@ -974,6 +974,31 @@ converted into lists of strings. future versions. + +.. _fast-exec: + +Fast execution helpers +---------------------- + +The current implementation of `~cursor.executemany()` is (using an extremely +charitable understatement) not particularly performing. These functions can +be used to speed up the repeated execution of a statement againts a set of +parameters. By reducing the number of server roundtrips the performance can be +`orders of magnitude better`__ than using `!executemany()`. + +.. __: https://github.com/psycopg/psycopg2/issues/491#issuecomment-276551038 + + +.. autofunction:: execute_batch + + .. versionadded:: 2.7 + +.. autofunction:: execute_values + + .. versionadded:: 2.7 + + + .. index:: single: Time zones; Fractional diff --git a/lib/extras.py b/lib/extras.py index c1d15670..85963c9f 100644 --- a/lib/extras.py +++ b/lib/extras.py @@ -1141,3 +1141,83 @@ def register_composite(name, conn_or_curs, globally=False, factory=None): caster.array_typecaster, not globally and conn_or_curs or None) return caster + + +def _paginate(seq, page_size): + """Consume an iterable and return it in chunks. + + Every chunk is at most `page_size`. Never return an empty chunk. + """ + page = [] + it = iter(seq) + while 1: + try: + for i in xrange(page_size): + page.append(it.next()) + yield page + page = [] + except StopIteration: + if page: + yield page + return + + +def execute_batch(cur, sql, argslist, page_size=100): + """Execute groups of statements in fewer server roundtrips. + + Execute *sql* several times, against all parameters set (sequences or + mappings) found in *argslist*. + + The function is semantically similar to `~cursor.executemany()`, but has a + different implementation: Psycopg will join the statements into fewer + multi-statement commands, reducing the number of server roundtrips, + resulting in better performances. Every command contains at most + *page_size* statements. + + """ + for page in _paginate(argslist, page_size=page_size): + sqls = [cur.mogrify(sql, args) for args in page] + cur.execute(";".join(sqls)) + + +def execute_values(cur, sql, argslist, template=None, page_size=100): + '''Execute a statement using :sql:`VALUES` with a sequence of parameters. + + *sql* must contain a single ``%s`` placeholder, which will be replaced by a + `VALUES list`__. Every statement will contain at most *page_size* sets of + arguments. + + .. __: https://www.postgresql.org/docs/current/static/queries-values.html + + *template* is the part merged to the arguments, so it should be compatible + with the content of *argslist* (it should contain the right number of + arguments if *argslist* is a sequence of sequences, or compatible names if + *argslist* is a sequence of mappings). If not specified, assume the + arguments are sequence and use a simple positional template (i.e. + ``(%s, %s, ...)``). + + While :sql:`INSERT` is an obvious candidate for this function it is + possible to use it with other statements, for example:: + + >>> cur.execute( + ... "create table test (id int primary key, v1 int, v2 int)") + + >>> execute_values(cur, + ... "INSERT INTO test (id, v1, v2) VALUES %s", + ... [(1, 2, 3), (4, 5, 6), (7, 8, 9)]) + + >>> execute_values(cur, + ... """UPDATE test SET v1 = data.v1 FROM (VALUES %s) AS data (id, v1) + ... WHERE test.id = data.id""", + ... 
[(1, 20), (4, 50)]) + + >>> cur.execute("select * from test order by id") + >>> cur.fetchall() + [(1, 20, 3), (4, 50, 6), (7, 8, 9)]) + + ''' + for page in _paginate(argslist, page_size=page_size): + if template is None: + template = '(%s)' % ','.join(['%s'] * len(page[0])) + values = ",".join(cur.mogrify(template, args) for args in page) + cur.execute(sql % (values,)) diff --git a/tests/test_types_extras.py b/tests/test_types_extras.py index 8e615616..ab8e1707 100755 --- a/tests/test_types_extras.py +++ b/tests/test_types_extras.py @@ -1766,6 +1766,132 @@ class RangeCasterTestCase(ConnectingTestCase): decorate_all_tests(RangeCasterTestCase, skip_if_no_range) +class TestFastExecute(ConnectingTestCase): + def setUp(self): + super(TestFastExecute, self).setUp() + cur = self.conn.cursor() + cur.execute( + "create table testfast (id serial primary key, date date, val int)") + + def test_paginate(self): + def pag(seq): + return psycopg2.extras._paginate(seq, 100) + + self.assertEqual(list(pag([])), []) + self.assertEqual(list(pag([1])), [[1]]) + self.assertEqual(list(pag(range(99))), [list(range(99))]) + self.assertEqual(list(pag(range(100))), [list(range(100))]) + self.assertEqual(list(pag(range(101))), [list(range(100)), [100]]) + self.assertEqual( + list(pag(range(200))), [list(range(100)), list(range(100, 200))]) + self.assertEqual( + list(pag(range(1000))), + [list(range(i * 100, (i + 1) * 100)) for i in range(10)]) + + def test_execute_batch_empty(self): + cur = self.conn.cursor() + psycopg2.extras.execute_batch(cur, + "insert into testfast (id, val) values (%s, %s)", + []) + cur.execute("select * from testfast order by id") + self.assertEqual(cur.fetchall(), []) + + def test_execute_batch_one(self): + cur = self.conn.cursor() + psycopg2.extras.execute_batch(cur, + "insert into testfast (id, val) values (%s, %s)", + iter([(1, 10)])) + cur.execute("select id, val from testfast order by id") + self.assertEqual(cur.fetchall(), [(1, 10)]) + + def test_execute_batch_tuples(self): + cur = self.conn.cursor() + psycopg2.extras.execute_batch(cur, + "insert into testfast (id, date, val) values (%s, %s, %s)", + ((i, date(2017, 1, i + 1), i * 10) for i in range(10))) + cur.execute("select id, date, val from testfast order by id") + self.assertEqual(cur.fetchall(), + [(i, date(2017, 1, i + 1), i * 10) for i in range(10)]) + + def test_execute_batch_many(self): + cur = self.conn.cursor() + psycopg2.extras.execute_batch(cur, + "insert into testfast (id, val) values (%s, %s)", + ((i, i * 10) for i in range(1000))) + cur.execute("select id, val from testfast order by id") + self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(1000)]) + + def test_execute_batch_pages(self): + cur = self.conn.cursor() + psycopg2.extras.execute_batch(cur, + "insert into testfast (id, val) values (%s, %s)", + ((i, i * 10) for i in range(25)), + page_size=10) + + # last command was 5 statements + self.assertEqual(sum(c == ';' for c in cur.query), 4) + + cur.execute("select id, val from testfast order by id") + self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(25)]) + + def test_execute_values_empty(self): + cur = self.conn.cursor() + psycopg2.extras.execute_values(cur, + "insert into testfast (id, val) values %s", + []) + cur.execute("select * from testfast order by id") + self.assertEqual(cur.fetchall(), []) + + def test_execute_values_one(self): + cur = self.conn.cursor() + psycopg2.extras.execute_values(cur, + "insert into testfast (id, val) values %s", + iter([(1, 10)])) + cur.execute("select id, val 
from testfast order by id") + self.assertEqual(cur.fetchall(), [(1, 10)]) + + def test_execute_values_tuples(self): + cur = self.conn.cursor() + psycopg2.extras.execute_values(cur, + "insert into testfast (id, date, val) values %s", + ((i, date(2017, 1, i + 1), i * 10) for i in range(10))) + cur.execute("select id, date, val from testfast order by id") + self.assertEqual(cur.fetchall(), + [(i, date(2017, 1, i + 1), i * 10) for i in range(10)]) + + def test_execute_values_dicts(self): + cur = self.conn.cursor() + psycopg2.extras.execute_values(cur, + "insert into testfast (id, date, val) values %s", + (dict(id=i, date=date(2017, 1, i + 1), val=i * 10, foo="bar") + for i in range(10)), + template='(%(id)s, %(date)s, %(val)s)') + cur.execute("select id, date, val from testfast order by id") + self.assertEqual(cur.fetchall(), + [(i, date(2017, 1, i + 1), i * 10) for i in range(10)]) + + def test_execute_values_many(self): + cur = self.conn.cursor() + psycopg2.extras.execute_values(cur, + "insert into testfast (id, val) values %s", + ((i, i * 10) for i in range(1000))) + cur.execute("select id, val from testfast order by id") + self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(1000)]) + + def test_execute_values_pages(self): + cur = self.conn.cursor() + psycopg2.extras.execute_values(cur, + "insert into testfast (id, val) values %s", + ((i, i * 10) for i in range(25)), + page_size=10) + + # last statement was 5 tuples (one parens is for the fields list) + self.assertEqual(sum(c == '(' for c in cur.query), 6) + + cur.execute("select id, val from testfast order by id") + self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(25)]) + + def test_suite(): return unittest.TestLoader().loadTestsFromName(__name__) From 26952ecee421350fd234f9390f03285a966b0d46 Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Wed, 1 Feb 2017 02:00:20 +0000 Subject: [PATCH 06/21] Fixed sql docs style with newer Docutils versions --- doc/src/_static/psycopg.css | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/src/_static/psycopg.css b/doc/src/_static/psycopg.css index 7d7bf786..8f6567b3 100644 --- a/doc/src/_static/psycopg.css +++ b/doc/src/_static/psycopg.css @@ -14,11 +14,13 @@ div.dbapi-extension { border: 1px solid #aaf; } +code.sql, tt.sql { font-size: 1em; background-color: transparent; } +a > code.sql:hover, a > tt.sql:hover { text-decoration: underline; } From 2e2dcd536bb06c07747e28ad98f4fb8e3053748e Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Wed, 1 Feb 2017 02:36:54 +0000 Subject: [PATCH 07/21] Fixed fast execute functions with Python 3 --- lib/extras.py | 6 ++++-- tests/test_types_extras.py | 4 ++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/lib/extras.py b/lib/extras.py index 85963c9f..2636655a 100644 --- a/lib/extras.py +++ b/lib/extras.py @@ -1177,7 +1177,7 @@ def execute_batch(cur, sql, argslist, page_size=100): """ for page in _paginate(argslist, page_size=page_size): sqls = [cur.mogrify(sql, args) for args in page] - cur.execute(";".join(sqls)) + cur.execute(b";".join(sqls)) def execute_values(cur, sql, argslist, template=None, page_size=100): @@ -1219,5 +1219,7 @@ def execute_values(cur, sql, argslist, template=None, page_size=100): for page in _paginate(argslist, page_size=page_size): if template is None: template = '(%s)' % ','.join(['%s'] * len(page[0])) - values = ",".join(cur.mogrify(template, args) for args in page) + values = b",".join(cur.mogrify(template, args) for args in page) + if isinstance(values, bytes) and _sys.version_info[0] > 2: 
+ values = values.decode(_ext.encodings[cur.connection.encoding]) cur.execute(sql % (values,)) diff --git a/tests/test_types_extras.py b/tests/test_types_extras.py index ab8e1707..952208c5 100755 --- a/tests/test_types_extras.py +++ b/tests/test_types_extras.py @@ -1829,7 +1829,7 @@ class TestFastExecute(ConnectingTestCase): page_size=10) # last command was 5 statements - self.assertEqual(sum(c == ';' for c in cur.query), 4) + self.assertEqual(sum(c == u';' for c in cur.query.decode('ascii')), 4) cur.execute("select id, val from testfast order by id") self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(25)]) @@ -1886,7 +1886,7 @@ class TestFastExecute(ConnectingTestCase): page_size=10) # last statement was 5 tuples (one parens is for the fields list) - self.assertEqual(sum(c == '(' for c in cur.query), 6) + self.assertEqual(sum(c == '(' for c in cur.query.decode('ascii')), 6) cur.execute("select id, val from testfast order by id") self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(25)]) From 9bac37baf76dbd0831230e4d01d5269b6e4a9438 Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Wed, 1 Feb 2017 02:47:40 +0000 Subject: [PATCH 08/21] Fixed execute_values with unicode Also added unicode tests. --- lib/extras.py | 2 +- tests/test_types_extras.py | 56 ++++++++++++++++++++++++++++++++++++-- 2 files changed, 55 insertions(+), 3 deletions(-) diff --git a/lib/extras.py b/lib/extras.py index 2636655a..2d264025 100644 --- a/lib/extras.py +++ b/lib/extras.py @@ -1220,6 +1220,6 @@ def execute_values(cur, sql, argslist, template=None, page_size=100): if template is None: template = '(%s)' % ','.join(['%s'] * len(page[0])) values = b",".join(cur.mogrify(template, args) for args in page) - if isinstance(values, bytes) and _sys.version_info[0] > 2: + if isinstance(values, bytes): values = values.decode(_ext.encodings[cur.connection.encoding]) cur.execute(sql % (values,)) diff --git a/tests/test_types_extras.py b/tests/test_types_extras.py index 952208c5..a584c868 100755 --- a/tests/test_types_extras.py +++ b/tests/test_types_extras.py @@ -1770,8 +1770,8 @@ class TestFastExecute(ConnectingTestCase): def setUp(self): super(TestFastExecute, self).setUp() cur = self.conn.cursor() - cur.execute( - "create table testfast (id serial primary key, date date, val int)") + cur.execute("""create table testfast ( + id serial primary key, date date, val int, data text)""") def test_paginate(self): def pag(seq): @@ -1834,6 +1834,32 @@ class TestFastExecute(ConnectingTestCase): cur.execute("select id, val from testfast order by id") self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(25)]) + def test_execute_batch_unicode(self): + cur = self.conn.cursor() + ext.register_type(ext.UNICODE, cur) + snowman = u"\u2603" + + # unicode in statement + psycopg2.extras.execute_batch(cur, + "insert into testfast (id, data) values (%%s, %%s) -- %s" % snowman, + [(1, 'x')]) + cur.execute("select id, data from testfast where id = 1") + self.assertEqual(cur.fetchone(), (1, 'x')) + + # unicode in data + psycopg2.extras.execute_batch(cur, + "insert into testfast (id, data) values (%s, %s)", + [(2, snowman)]) + cur.execute("select id, data from testfast where id = 2") + self.assertEqual(cur.fetchone(), (2, snowman)) + + # unicode in both + psycopg2.extras.execute_batch(cur, + "insert into testfast (id, data) values (%%s, %%s) -- %s" % snowman, + [(3, snowman)]) + cur.execute("select id, data from testfast where id = 3") + self.assertEqual(cur.fetchone(), (3, snowman)) + def test_execute_values_empty(self): 
cur = self.conn.cursor() psycopg2.extras.execute_values(cur, @@ -1891,6 +1917,32 @@ class TestFastExecute(ConnectingTestCase): cur.execute("select id, val from testfast order by id") self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(25)]) + def test_execute_values_unicode(self): + cur = self.conn.cursor() + ext.register_type(ext.UNICODE, cur) + snowman = u"\u2603" + + # unicode in statement + psycopg2.extras.execute_values(cur, + "insert into testfast (id, data) values %%s -- %s" % snowman, + [(1, 'x')]) + cur.execute("select id, data from testfast where id = 1") + self.assertEqual(cur.fetchone(), (1, 'x')) + + # unicode in data + psycopg2.extras.execute_values(cur, + "insert into testfast (id, data) values %s", + [(2, snowman)]) + cur.execute("select id, data from testfast where id = 2") + self.assertEqual(cur.fetchone(), (2, snowman)) + + # unicode in both + psycopg2.extras.execute_values(cur, + "insert into testfast (id, data) values %%s -- %s" % snowman, + [(3, snowman)]) + cur.execute("select id, data from testfast where id = 3") + self.assertEqual(cur.fetchone(), (3, snowman)) + def test_suite(): return unittest.TestLoader().loadTestsFromName(__name__) From 815869375b6346add3b76d74dab2e8e177c0e0d0 Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Wed, 1 Feb 2017 17:05:47 +0000 Subject: [PATCH 09/21] Merge back manylinux build script from the psycopg2-wheels project --- .../{wheels-build.sh => build-manylinux.sh} | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) rename scripts/{wheels-build.sh => build-manylinux.sh} (66%) diff --git a/scripts/wheels-build.sh b/scripts/build-manylinux.sh similarity index 66% rename from scripts/wheels-build.sh rename to scripts/build-manylinux.sh index ede3024b..4ceb1330 100755 --- a/scripts/wheels-build.sh +++ b/scripts/build-manylinux.sh @@ -4,8 +4,8 @@ # # Run this script with something like: # -# docker run -t --rm -v `pwd`:/psycopg quay.io/pypa/manylinux1_x86_64 /psycopg/scripts/wheels-build.sh -# docker run -t --rm -v `pwd`:/psycopg quay.io/pypa/manylinux1_i686 linux32 /psycopg/scripts/wheels-build.sh +# docker run -t --rm -v `pwd`:/psycopg2 quay.io/pypa/manylinux1_x86_64 /psycopg2/scripts/build-manylinux.sh +# docker run -t --rm -v `pwd`:/psycopg2 quay.io/pypa/manylinux1_i686 linux32 /psycopg2/scripts/build-manylinux.sh # # (Note: -t is requrired for sudo) @@ -15,21 +15,25 @@ set -e -x # This doesn't work: # rpm -Uvh "http://yum.postgresql.org/9.5/redhat/rhel-5-x86_64/pgdg-redhat95-9.5-3.noarch.rpm" wget -O "/tmp/pgdg.rpm" "https://download.postgresql.org/pub/repos/yum/9.5/redhat/rhel-5-x86_64/pgdg-centos95-9.5-3.noarch.rpm" -rpm -Uvh "/tmp/pgdg.rpm" +rpm -Uv "/tmp/pgdg.rpm" yum install -y postgresql95-devel postgresql95-server sudo # Make pg_config available export PGPATH=/usr/pgsql-9.5/bin/ export PATH="$PGPATH:$PATH" +# Find psycopg version +export VERSION=$(grep -e ^PSYCOPG_VERSION /psycopg2/setup.py | sed "s/.*'\(.*\)'/\1/") +export WHEELSDIR="/psycopg2/wheels/psycopg2-$VERSION" + # Create the wheel packages for PYBIN in /opt/python/*/bin; do - "${PYBIN}/pip" wheel /psycopg/ -w wheels/ + "${PYBIN}/pip" wheel /psycopg2/ -w "$WHEELSDIR" done # Bundle external shared libraries into the wheels -for WHL in wheels/*.whl; do - auditwheel repair "$WHL" -w /psycopg/wheels +for WHL in "$WHEELSDIR"/*.whl; do + auditwheel repair "$WHL" -w "$WHEELSDIR" done # Create a test cluster @@ -42,6 +46,6 @@ export PSYCOPG2_TESTDB_USER=postgres # Install packages and test for PYBIN in /opt/python/*/bin; do - "${PYBIN}/pip" 
install psycopg2 --no-index -f /psycopg/wheels + "${PYBIN}/pip" install psycopg2 --no-index -f "$WHEELSDIR" "${PYBIN}/python" -c "from psycopg2 import tests; tests.unittest.main(defaultTest='tests.test_suite')" done From 9ca51e0ed921270b2c641959ffeeb2ca9f58809b Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Wed, 1 Feb 2017 17:33:12 +0000 Subject: [PATCH 10/21] Use the server on the host to test --- scripts/build-manylinux.sh | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/scripts/build-manylinux.sh b/scripts/build-manylinux.sh index 4ceb1330..e1bdd917 100755 --- a/scripts/build-manylinux.sh +++ b/scripts/build-manylinux.sh @@ -4,10 +4,11 @@ # # Run this script with something like: # -# docker run -t --rm -v `pwd`:/psycopg2 quay.io/pypa/manylinux1_x86_64 /psycopg2/scripts/build-manylinux.sh -# docker run -t --rm -v `pwd`:/psycopg2 quay.io/pypa/manylinux1_i686 linux32 /psycopg2/scripts/build-manylinux.sh +# docker run --rm -v `pwd`:/psycopg2 quay.io/pypa/manylinux1_x86_64 /psycopg2/scripts/build-manylinux.sh +# docker run --rm -v `pwd`:/psycopg2 quay.io/pypa/manylinux1_i686 linux32 /psycopg2/scripts/build-manylinux.sh # -# (Note: -t is requrired for sudo) +# Tests run against a postgres on the host. Use -e PSYCOPG_TESTDB_USER=... etc +# to configure tests run. set -e -x @@ -16,7 +17,7 @@ set -e -x # rpm -Uvh "http://yum.postgresql.org/9.5/redhat/rhel-5-x86_64/pgdg-redhat95-9.5-3.noarch.rpm" wget -O "/tmp/pgdg.rpm" "https://download.postgresql.org/pub/repos/yum/9.5/redhat/rhel-5-x86_64/pgdg-centos95-9.5-3.noarch.rpm" rpm -Uv "/tmp/pgdg.rpm" -yum install -y postgresql95-devel postgresql95-server sudo +yum install -y postgresql95-devel # Make pg_config available export PGPATH=/usr/pgsql-9.5/bin/ @@ -36,13 +37,11 @@ for WHL in "$WHEELSDIR"/*.whl; do auditwheel repair "$WHL" -w "$WHEELSDIR" done -# Create a test cluster -/usr/bin/sudo -u postgres "$PGPATH/initdb" -D /var/lib/pgsql/9.5/data/ -/usr/bin/sudo -u postgres "$PGPATH/pg_ctl" -D /var/lib/pgsql/9.5/data/ start -sleep 5 # wait server started -/usr/bin/sudo -u postgres "$PGPATH/createdb" psycopg2_test +# Make sure libpq is not in the system +yum remove -y postgresql95-devel -export PSYCOPG2_TESTDB_USER=postgres +# Connect to the host to test. Use 'docker -e' to pass other variables +export PSYCOPG2_TESTDB_HOST=$(ip route show | awk '/default/ {print $3}') # Install packages and test for PYBIN in /opt/python/*/bin; do From f24de0357ff4472aa8bb283980ea27b3ddd722e4 Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Thu, 2 Feb 2017 01:53:50 +0000 Subject: [PATCH 11/21] Allow skipping the slow test It's not so much about tests being slow: some just get stuck and timeout travis. Skipped all tests taking about more than 0.2s to run on my laptop. Fast testing takes about 8s instead of 24. --- tests/test_async.py | 3 ++- tests/test_cancel.py | 3 ++- tests/test_connection.py | 6 +++++- tests/test_copy.py | 7 +++++-- tests/test_cursor.py | 5 +++-- tests/test_errcodes.py | 3 ++- tests/test_green.py | 3 ++- tests/test_notify.py | 9 ++++++++- tests/test_transaction.py | 4 +++- tests/test_types_extras.py | 6 +++--- tests/testutils.py | 11 ++++++++++- 11 files changed, 45 insertions(+), 15 deletions(-) diff --git a/tests/test_async.py b/tests/test_async.py index 6f8fed58..b379baea 100755 --- a/tests/test_async.py +++ b/tests/test_async.py @@ -23,7 +23,7 @@ # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public # License for more details. 
-from testutils import unittest, skip_before_postgres +from testutils import unittest, skip_before_postgres, slow import psycopg2 from psycopg2 import extensions @@ -318,6 +318,7 @@ class AsyncTests(ConnectingTestCase): self.assert_(conn.async) conn.close() + @slow def test_flush_on_write(self): # a very large query requires a flush loop to be sent to the backend curs = self.conn.cursor() diff --git a/tests/test_cancel.py b/tests/test_cancel.py index a8eb7506..cb08e5f8 100755 --- a/tests/test_cancel.py +++ b/tests/test_cancel.py @@ -30,7 +30,7 @@ import psycopg2.extensions from psycopg2 import extras from testconfig import dsn -from testutils import unittest, ConnectingTestCase, skip_before_postgres +from testutils import unittest, ConnectingTestCase, skip_before_postgres, slow class CancelTests(ConnectingTestCase): @@ -48,6 +48,7 @@ class CancelTests(ConnectingTestCase): def test_empty_cancel(self): self.conn.cancel() + @slow @skip_before_postgres(8, 2) def test_cancel(self): errors = [] diff --git a/tests/test_connection.py b/tests/test_connection.py index 833751b9..a5b5c418 100755 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -35,7 +35,7 @@ from psycopg2 import extensions as ext from testutils import ( unittest, decorate_all_tests, skip_if_no_superuser, skip_before_postgres, skip_after_postgres, skip_before_libpq, - ConnectingTestCase, skip_if_tpc_disabled, skip_if_windows) + ConnectingTestCase, skip_if_tpc_disabled, skip_if_windows, slow) from testconfig import dsn, dbname @@ -196,6 +196,7 @@ class ConnectionTests(ConnectingTestCase): self.assertRaises(psycopg2.NotSupportedError, cnn.xid, 42, "foo", "bar") + @slow @skip_before_postgres(8, 2) def test_concurrent_execution(self): def slave(): @@ -246,6 +247,7 @@ class ConnectionTests(ConnectingTestCase): gc.collect() self.assert_(w() is None) + @slow def test_commit_concurrency(self): # The problem is the one reported in ticket #103. Because of bad # status check, we commit even when a commit is already on its way. 
@@ -899,6 +901,7 @@ class ConnectionTwoPhaseTests(ConnectingTestCase): (dbname,)) self.assertEqual('42_Z3RyaWQ=_YnF1YWw=', cur.fetchone()[0]) + @slow def test_xid_roundtrip(self): for fid, gtrid, bqual in [ (0, "", ""), @@ -921,6 +924,7 @@ class ConnectionTwoPhaseTests(ConnectingTestCase): cnn.tpc_rollback(xid) + @slow def test_unparsed_roundtrip(self): for tid in [ '', diff --git a/tests/test_copy.py b/tests/test_copy.py index ac42c980..8cbe4eaa 100755 --- a/tests/test_copy.py +++ b/tests/test_copy.py @@ -24,8 +24,8 @@ import sys import string -from testutils import unittest, ConnectingTestCase, decorate_all_tests -from testutils import skip_if_no_iobase, skip_before_postgres +from testutils import (unittest, ConnectingTestCase, decorate_all_tests, + skip_if_no_iobase, skip_before_postgres, slow) from cStringIO import StringIO from itertools import cycle, izip from subprocess import Popen, PIPE @@ -77,6 +77,7 @@ class CopyTests(ConnectingTestCase): data text )''') + @slow def test_copy_from(self): curs = self.conn.cursor() try: @@ -84,6 +85,7 @@ class CopyTests(ConnectingTestCase): finally: curs.close() + @slow def test_copy_from_insane_size(self): # Trying to trigger a "would block" error curs = self.conn.cursor() @@ -120,6 +122,7 @@ class CopyTests(ConnectingTestCase): self.assertRaises(ZeroDivisionError, curs.copy_from, MinimalRead(f), "tcopy", columns=cols()) + @slow def test_copy_to(self): curs = self.conn.cursor() try: diff --git a/tests/test_cursor.py b/tests/test_cursor.py index fc924c4b..a8fedccb 100755 --- a/tests/test_cursor.py +++ b/tests/test_cursor.py @@ -26,8 +26,8 @@ import time import pickle import psycopg2 import psycopg2.extensions -from testutils import unittest, ConnectingTestCase, skip_before_postgres -from testutils import skip_if_no_namedtuple, skip_if_no_getrefcount +from testutils import (unittest, ConnectingTestCase, skip_before_postgres, + skip_if_no_namedtuple, skip_if_no_getrefcount, slow) class CursorTests(ConnectingTestCase): @@ -331,6 +331,7 @@ class CursorTests(ConnectingTestCase): curs.scroll(2) self.assertRaises(psycopg2.OperationalError, curs.scroll, -1) + @slow @skip_before_postgres(8, 2) def test_iter_named_cursor_efficient(self): curs = self.conn.cursor('tmp') diff --git a/tests/test_errcodes.py b/tests/test_errcodes.py index 6865194f..accee565 100755 --- a/tests/test_errcodes.py +++ b/tests/test_errcodes.py @@ -22,7 +22,7 @@ # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public # License for more details. 
-from testutils import unittest, ConnectingTestCase +from testutils import unittest, ConnectingTestCase, slow try: reload @@ -34,6 +34,7 @@ from psycopg2 import errorcodes class ErrocodeTests(ConnectingTestCase): + @slow def test_lookup_threadsafe(self): # Increase if it does not fail with KeyError diff --git a/tests/test_green.py b/tests/test_green.py index 0424a2cc..6d1571d4 100755 --- a/tests/test_green.py +++ b/tests/test_green.py @@ -27,7 +27,7 @@ import psycopg2 import psycopg2.extensions import psycopg2.extras -from testutils import ConnectingTestCase +from testutils import ConnectingTestCase, slow class ConnectionStub(object): @@ -61,6 +61,7 @@ class GreenTestCase(ConnectingTestCase): lambda conn: psycopg2.extras.wait_select(stub)) return stub + @slow def test_flush_on_write(self): # a very large query requires a flush loop to be sent to the backend conn = self.conn diff --git a/tests/test_notify.py b/tests/test_notify.py index 1a0ac457..4e99a3e9 100755 --- a/tests/test_notify.py +++ b/tests/test_notify.py @@ -26,7 +26,7 @@ from testutils import unittest import psycopg2 from psycopg2 import extensions -from testutils import ConnectingTestCase, script_to_py3 +from testutils import ConnectingTestCase, script_to_py3, slow from testconfig import dsn import sys @@ -72,6 +72,7 @@ conn.close() return Popen([sys.executable, '-c', script_to_py3(script)], stdout=PIPE) + @slow def test_notifies_received_on_poll(self): self.autocommit(self.conn) self.listen('foo') @@ -90,6 +91,7 @@ conn.close() self.assertEqual(pid, self.conn.notifies[0][0]) self.assertEqual('foo', self.conn.notifies[0][1]) + @slow def test_many_notifies(self): self.autocommit(self.conn) for name in ['foo', 'bar', 'baz']: @@ -119,6 +121,7 @@ conn.close() self.assertEqual(pid, self.conn.notifies[0][0]) self.assertEqual('foo', self.conn.notifies[0][1]) + @slow def test_notify_object(self): self.autocommit(self.conn) self.listen('foo') @@ -128,6 +131,7 @@ conn.close() notify = self.conn.notifies[0] self.assert_(isinstance(notify, psycopg2.extensions.Notify)) + @slow def test_notify_attributes(self): self.autocommit(self.conn) self.listen('foo') @@ -140,6 +144,7 @@ conn.close() self.assertEqual('foo', notify.channel) self.assertEqual('', notify.payload) + @slow def test_notify_payload(self): if self.conn.server_version < 90000: return self.skipTest("server version %s doesn't support notify payload" @@ -155,6 +160,7 @@ conn.close() self.assertEqual('foo', notify.channel) self.assertEqual('Hello, world!', notify.payload) + @slow def test_notify_deque(self): from collections import deque self.autocommit(self.conn) @@ -167,6 +173,7 @@ conn.close() self.assert_(isinstance(notify, psycopg2.extensions.Notify)) self.assertEqual(len(self.conn.notifies), 0) + @slow def test_notify_noappend(self): self.autocommit(self.conn) self.conn.notifies = None diff --git a/tests/test_transaction.py b/tests/test_transaction.py index 2dc44ec5..36947dee 100755 --- a/tests/test_transaction.py +++ b/tests/test_transaction.py @@ -23,7 +23,7 @@ # License for more details. 
import threading -from testutils import unittest, ConnectingTestCase, skip_before_postgres +from testutils import unittest, ConnectingTestCase, skip_before_postgres, slow import psycopg2 from psycopg2.extensions import ( @@ -131,6 +131,7 @@ class DeadlockSerializationTests(ConnectingTestCase): ConnectingTestCase.tearDown(self) + @slow def test_deadlock(self): self.thread1_error = self.thread2_error = None step1 = threading.Event() @@ -178,6 +179,7 @@ class DeadlockSerializationTests(ConnectingTestCase): self.assertTrue(isinstance( error, psycopg2.extensions.TransactionRollbackError)) + @slow def test_serialisation_failure(self): self.thread1_error = self.thread2_error = None step1 = threading.Event() diff --git a/tests/test_types_extras.py b/tests/test_types_extras.py index 8e615616..f28c5c21 100755 --- a/tests/test_types_extras.py +++ b/tests/test_types_extras.py @@ -22,9 +22,8 @@ from datetime import date, datetime from functools import wraps from pickle import dumps, loads -from testutils import unittest, skip_if_no_uuid, skip_before_postgres -from testutils import ConnectingTestCase, decorate_all_tests -from testutils import py3_raises_typeerror +from testutils import (unittest, skip_if_no_uuid, skip_before_postgres, + ConnectingTestCase, decorate_all_tests, py3_raises_typeerror, slow) import psycopg2 import psycopg2.extras @@ -708,6 +707,7 @@ class AdaptTypeTestCase(ConnectingTestCase): curs.execute("select (1,2)::type_ii") self.assertRaises(psycopg2.DataError, curs.fetchone) + @slow @skip_if_no_composite @skip_before_postgres(8, 4) def test_from_tables(self): diff --git a/tests/testutils.py b/tests/testutils.py index 93477357..b34a5a87 100644 --- a/tests/testutils.py +++ b/tests/testutils.py @@ -447,7 +447,6 @@ def script_to_py3(script): class py3_raises_typeerror(object): - def __enter__(self): pass @@ -455,3 +454,13 @@ class py3_raises_typeerror(object): if sys.version_info[0] >= 3: assert type is TypeError return True + + +def slow(f): + """Decorator to mark slow tests we may want to skip""" + @wraps(f) + def slow_(self): + if os.environ.get('PSYCOPG2_TEST_FAST'): + return self.skipTest("slow test") + return f(self) + return slow_ From 88a21689cee0e23ea4b2a6e3b423aa0c6c29c36a Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Thu, 2 Feb 2017 02:09:59 +0000 Subject: [PATCH 12/21] Added note about finding slow tests --- tests/testutils.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/testutils.py b/tests/testutils.py index b34a5a87..c36f6749 100644 --- a/tests/testutils.py +++ b/tests/testutils.py @@ -457,7 +457,12 @@ class py3_raises_typeerror(object): def slow(f): - """Decorator to mark slow tests we may want to skip""" + """Decorator to mark slow tests we may want to skip + + Note: in order to find slow tests you can run: + + make check 2>&1 | ts -i "%.s" | sort -n + """ @wraps(f) def slow_(self): if os.environ.get('PSYCOPG2_TEST_FAST'): From 27652ed3b0afda8d175fa673ecd4e341b3a58c3c Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Thu, 2 Feb 2017 02:40:03 +0000 Subject: [PATCH 13/21] Added notes about using execute_batch with prepared statements --- doc/src/extras.rst | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/doc/src/extras.rst b/doc/src/extras.rst index 66be5902..5ef4223e 100644 --- a/doc/src/extras.rst +++ b/doc/src/extras.rst @@ -993,6 +993,38 @@ parameters. By reducing the number of server roundtrips the performance can be .. versionadded:: 2.7 +.. 
note:: + + `!execute_batch()` can be also used in conjunction with PostgreSQL + prepared statements using |PREPARE|_, |EXECUTE|_, |DEALLOCATE|_. + Instead of executing:: + + execute_batch(cur, + "big and complex SQL with %s %s params", + params_list) + + it is possible to execute something like:: + + cur.execute("PREPARE stmt AS big and complex SQL with $1 $2 params") + execute_batch(cur, "EXECUTE stmt (%s, %s)", params_list) + cur.execute("DEALLOCATE stmt") + + which may bring further performance benefits: if the operation to perform + is complex, every single execution will be faster as the query plan is + already cached; furthermore the amount of data to send on the server will + be lesser (one |EXECUTE| per param set instead of the whole, likely + longer, statement). + + .. |PREPARE| replace:: :sql:`PREPARE` + .. _PREPARE: https://www.postgresql.org/docs/current/static/sql-prepare.html + + .. |EXECUTE| replace:: :sql:`EXECUTE` + .. _EXECUTE: https://www.postgresql.org/docs/current/static/sql-execute.html + + .. |DEALLOCATE| replace:: :sql:`DEALLOCATE` + .. _DEALLOCATE: https://www.postgresql.org/docs/current/static/sql-deallocate.html + + .. autofunction:: execute_values .. versionadded:: 2.7 From d8b1fbd9052946e585967a3c711702a7417ea0e4 Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Thu, 2 Feb 2017 02:58:22 +0000 Subject: [PATCH 14/21] Further skipping of slow tests --- tests/test_async.py | 2 ++ tests/test_connection.py | 2 ++ tests/test_copy.py | 2 ++ tests/test_lobject.py | 5 +++-- tests/test_module.py | 5 +++-- tests/test_notify.py | 1 + 6 files changed, 13 insertions(+), 4 deletions(-) diff --git a/tests/test_async.py b/tests/test_async.py index b379baea..e7fc1a95 100755 --- a/tests/test_async.py +++ b/tests/test_async.py @@ -97,6 +97,7 @@ class AsyncTests(ConnectingTestCase): self.assertFalse(self.conn.isexecuting()) self.assertEquals(cur.fetchone()[0], "a") + @slow @skip_before_postgres(8, 2) def test_async_callproc(self): cur = self.conn.cursor() @@ -107,6 +108,7 @@ class AsyncTests(ConnectingTestCase): self.assertFalse(self.conn.isexecuting()) self.assertEquals(cur.fetchall()[0][0], '') + @slow def test_async_after_async(self): cur = self.conn.cursor() cur2 = self.conn.cursor() diff --git a/tests/test_connection.py b/tests/test_connection.py index a5b5c418..1ea00da3 100755 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -125,6 +125,7 @@ class ConnectionTests(ConnectingTestCase): self.assert_('table3' in conn.notices[2]) self.assert_('table4' in conn.notices[3]) + @slow def test_notices_limited(self): conn = self.conn cur = conn.cursor() @@ -138,6 +139,7 @@ class ConnectionTests(ConnectingTestCase): self.assertEqual(50, len(conn.notices)) self.assert_('table99' in conn.notices[-1], conn.notices[-1]) + @slow def test_notices_deque(self): from collections import deque diff --git a/tests/test_copy.py b/tests/test_copy.py index 8cbe4eaa..3aa509b5 100755 --- a/tests/test_copy.py +++ b/tests/test_copy.py @@ -312,6 +312,7 @@ class CopyTests(ConnectingTestCase): curs.copy_from, StringIO('aaa\nbbb\nccc\n'), 'tcopy') self.assertEqual(curs.rowcount, -1) + @slow def test_copy_from_segfault(self): # issue #219 script = ("""\ @@ -330,6 +331,7 @@ conn.close() proc.communicate() self.assertEqual(0, proc.returncode) + @slow def test_copy_to_segfault(self): # issue #219 script = ("""\ diff --git a/tests/test_lobject.py b/tests/test_lobject.py index 4da20e95..3379ec0c 100755 --- a/tests/test_lobject.py +++ b/tests/test_lobject.py @@ -29,8 +29,8 @@ from functools import 
wraps import psycopg2 import psycopg2.extensions -from testutils import unittest, decorate_all_tests, skip_if_tpc_disabled -from testutils import ConnectingTestCase, skip_if_green +from testutils import (unittest, decorate_all_tests, skip_if_tpc_disabled, + ConnectingTestCase, skip_if_green, slow) def skip_if_no_lo(f): @@ -191,6 +191,7 @@ class LargeObjectTests(LargeObjectTestCase): self.assertEqual(x, u"some") self.assertEqual(lo.read(), u" data " + snowman) + @slow def test_read_large(self): lo = self.conn.lobject() data = "data" * 1000000 diff --git a/tests/test_module.py b/tests/test_module.py index 6a1606d6..b166bac6 100755 --- a/tests/test_module.py +++ b/tests/test_module.py @@ -26,8 +26,8 @@ import os import sys from subprocess import Popen -from testutils import unittest, skip_before_python, skip_before_postgres -from testutils import ConnectingTestCase, skip_copy_if_green, script_to_py3 +from testutils import (unittest, skip_before_python, skip_before_postgres, + ConnectingTestCase, skip_copy_if_green, script_to_py3, slow) import psycopg2 @@ -311,6 +311,7 @@ class ExceptionsTestCase(ConnectingTestCase): class TestExtensionModule(unittest.TestCase): + @slow def test_import_internal(self): # check that the internal package can be imported "naked" # we may break this property if there is a compelling reason to do so, diff --git a/tests/test_notify.py b/tests/test_notify.py index 4e99a3e9..0e74e1d5 100755 --- a/tests/test_notify.py +++ b/tests/test_notify.py @@ -111,6 +111,7 @@ conn.close() self.assertEqual(pids[name], pid) names.pop(name) # raise if name found twice + @slow def test_notifies_received_on_execute(self): self.autocommit(self.conn) self.listen('foo') From d2fdc5ca9f6d5ac76ee39fc6b7db626345a6c84c Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Thu, 2 Feb 2017 16:02:33 +0000 Subject: [PATCH 15/21] Better docs for fast executemany functions. Issue #502. --- lib/extras.py | 46 +++++++++++++++++++++++++++++++--------------- 1 file changed, 31 insertions(+), 15 deletions(-) diff --git a/lib/extras.py b/lib/extras.py index 2d264025..1aad3d1d 100644 --- a/lib/extras.py +++ b/lib/extras.py @@ -1168,11 +1168,15 @@ def execute_batch(cur, sql, argslist, page_size=100): Execute *sql* several times, against all parameters set (sequences or mappings) found in *argslist*. - The function is semantically similar to `~cursor.executemany()`, but has a - different implementation: Psycopg will join the statements into fewer - multi-statement commands, reducing the number of server roundtrips, - resulting in better performances. Every command contains at most - *page_size* statements. + The function is semantically similar to + + .. parsed-literal:: + + *cur*\.\ `~cursor.executemany`\ (\ *sql*\ , *argslist*\ ) + + but has a different implementation: Psycopg will join the statements into + fewer multi-statement commands, each one containing at most *page_size* + statements, resulting in a reduced number of server roundtrips. """ for page in _paginate(argslist, page_size=page_size): @@ -1183,19 +1187,31 @@ def execute_batch(cur, sql, argslist, page_size=100): def execute_values(cur, sql, argslist, template=None, page_size=100): '''Execute a statement using :sql:`VALUES` with a sequence of parameters. - *sql* must contain a single ``%s`` placeholder, which will be replaced by a - `VALUES list`__. Every statement will contain at most *page_size* sets of - arguments. + :param cur: the cursor to use to execute the query. + + :param sql: the query to execute. 
It must contain a single ``%s`` + placeholder, which will be replaced by a `VALUES list`__. + Example: ``"INSERT INTO mytable (id, f1, f2) VALUES %s"``. + + :param argslist: sequence of sequences or dictionaries with the arguments + to send to the query. The type and content must be consistent with + *template*. + + :param template: the snippet to merge to every item in *argslist* to + compose the query. If *argslist* items are sequences it should contain + positional placeholders (e.g. ``"(%s, %s, %s)"``, or ``"(%s, %s, 42)``" + if there are constants value...); If *argslist* is items are mapping + it should contain named placeholders (e.g. ``"(%(id)s, %(f1)s, 42)"``). + If not specified, assume the arguments are sequence and use a simple + positional template (i.e. ``(%s, %s, ...)``), with the number of + placeholders sniffed by the first element in *argslist*. + + :param page_size: maximum number of *argslist* items to include in every + statement. If there are more items the function will execute more than + one statement. .. __: https://www.postgresql.org/docs/current/static/queries-values.html - *template* is the part merged to the arguments, so it should be compatible - with the content of *argslist* (it should contain the right number of - arguments if *argslist* is a sequence of sequences, or compatible names if - *argslist* is a sequence of mappings). If not specified, assume the - arguments are sequence and use a simple positional template (i.e. - ``(%s, %s, ...)``). - While :sql:`INSERT` is an obvious candidate for this function it is possible to use it with other statements, for example:: From dc1b4fff9001964c719e3f4471cc5a6fe6533e3a Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Thu, 2 Feb 2017 17:29:17 +0000 Subject: [PATCH 16/21] Avoid an useless encode/decode roundtrip in execute_values() Tests moved into a separate module. --- lib/extras.py | 48 ++++++- tests/__init__.py | 2 + tests/test_fast_executemany.py | 237 +++++++++++++++++++++++++++++++++ tests/test_types_extras.py | 178 ------------------------- 4 files changed, 283 insertions(+), 182 deletions(-) create mode 100755 tests/test_fast_executemany.py diff --git a/lib/extras.py b/lib/extras.py index 1aad3d1d..80034e6f 100644 --- a/lib/extras.py +++ b/lib/extras.py @@ -1232,10 +1232,50 @@ def execute_values(cur, sql, argslist, template=None, page_size=100): [(1, 20, 3), (4, 50, 6), (7, 8, 9)]) ''' + # we can't just use sql % vals because vals is bytes: if sql is bytes + # there will be some decoding error because of stupid codec used, and Py3 + # doesn't implement % on bytes. + if not isinstance(sql, bytes): + sql = sql.encode(_ext.encodings[cur.connection.encoding]) + pre, post = _split_sql(sql) + for page in _paginate(argslist, page_size=page_size): if template is None: template = '(%s)' % ','.join(['%s'] * len(page[0])) - values = b",".join(cur.mogrify(template, args) for args in page) - if isinstance(values, bytes): - values = values.decode(_ext.encodings[cur.connection.encoding]) - cur.execute(sql % (values,)) + parts = [pre] + for args in page: + parts.append(cur.mogrify(template, args)) + parts.append(b',') + parts[-1] = post + cur.execute(b''.join(parts)) + + +def _split_sql(sql): + """Split *sql* on a single ``%s`` placeholder. + + Return a (pre, post) pair around the ``%s``, with ``%%`` -> ``%`` replacement. 
+ """ + curr = pre = [] + post = [] + tokens = _re.split(br'(%.)', sql) + for token in tokens: + if len(token) != 2 or token[:1] != b'%': + curr.append(token) + continue + + if token[1:] == b's': + if curr is pre: + curr = post + else: + raise ValueError( + "the query contains more than one '%s' placeholder") + elif token[1:] == b'%': + curr.append(b'%') + else: + raise ValueError("unsupported format character: '%s'" + % token[1:].decode('ascii', 'replace')) + + if curr is pre: + raise ValueError("the query doesn't contain any '%s' placeholder") + + return b''.join(pre), b''.join(post) diff --git a/tests/__init__.py b/tests/__init__.py index 1a240994..35837e82 100755 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -37,6 +37,7 @@ import test_cursor import test_dates import test_errcodes import test_extras_dictcursor +import test_fast_executemany import test_green import test_ipaddress import test_lobject @@ -74,6 +75,7 @@ def test_suite(): suite.addTest(test_dates.test_suite()) suite.addTest(test_errcodes.test_suite()) suite.addTest(test_extras_dictcursor.test_suite()) + suite.addTest(test_fast_executemany.test_suite()) suite.addTest(test_green.test_suite()) suite.addTest(test_ipaddress.test_suite()) suite.addTest(test_lobject.test_suite()) diff --git a/tests/test_fast_executemany.py b/tests/test_fast_executemany.py new file mode 100755 index 00000000..92222748 --- /dev/null +++ b/tests/test_fast_executemany.py @@ -0,0 +1,237 @@ +#!/usr/bin/env python +# +# test_fast_executemany.py - tests for fast executemany implementations +# +# Copyright (C) 2017 Daniele Varrazzo +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. 
+ +import unittest +from datetime import date + +from testutils import ConnectingTestCase + +import psycopg2 +import psycopg2.extras +import psycopg2.extensions as ext + + +class TestPaginate(unittest.TestCase): + def test_paginate(self): + def pag(seq): + return psycopg2.extras._paginate(seq, 100) + + self.assertEqual(list(pag([])), []) + self.assertEqual(list(pag([1])), [[1]]) + self.assertEqual(list(pag(range(99))), [list(range(99))]) + self.assertEqual(list(pag(range(100))), [list(range(100))]) + self.assertEqual(list(pag(range(101))), [list(range(100)), [100]]) + self.assertEqual( + list(pag(range(200))), [list(range(100)), list(range(100, 200))]) + self.assertEqual( + list(pag(range(1000))), + [list(range(i * 100, (i + 1) * 100)) for i in range(10)]) + + +class FastExecuteTestMixin(object): + def setUp(self): + super(FastExecuteTestMixin, self).setUp() + cur = self.conn.cursor() + cur.execute("""create table testfast ( + id serial primary key, date date, val int, data text)""") + + +class TestExecuteBatch(FastExecuteTestMixin, ConnectingTestCase): + def test_empty(self): + cur = self.conn.cursor() + psycopg2.extras.execute_batch(cur, + "insert into testfast (id, val) values (%s, %s)", + []) + cur.execute("select * from testfast order by id") + self.assertEqual(cur.fetchall(), []) + + def test_one(self): + cur = self.conn.cursor() + psycopg2.extras.execute_batch(cur, + "insert into testfast (id, val) values (%s, %s)", + iter([(1, 10)])) + cur.execute("select id, val from testfast order by id") + self.assertEqual(cur.fetchall(), [(1, 10)]) + + def test_tuples(self): + cur = self.conn.cursor() + psycopg2.extras.execute_batch(cur, + "insert into testfast (id, date, val) values (%s, %s, %s)", + ((i, date(2017, 1, i + 1), i * 10) for i in range(10))) + cur.execute("select id, date, val from testfast order by id") + self.assertEqual(cur.fetchall(), + [(i, date(2017, 1, i + 1), i * 10) for i in range(10)]) + + def test_many(self): + cur = self.conn.cursor() + psycopg2.extras.execute_batch(cur, + "insert into testfast (id, val) values (%s, %s)", + ((i, i * 10) for i in range(1000))) + cur.execute("select id, val from testfast order by id") + self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(1000)]) + + def test_pages(self): + cur = self.conn.cursor() + psycopg2.extras.execute_batch(cur, + "insert into testfast (id, val) values (%s, %s)", + ((i, i * 10) for i in range(25)), + page_size=10) + + # last command was 5 statements + self.assertEqual(sum(c == u';' for c in cur.query.decode('ascii')), 4) + + cur.execute("select id, val from testfast order by id") + self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(25)]) + + def test_unicode(self): + cur = self.conn.cursor() + ext.register_type(ext.UNICODE, cur) + snowman = u"\u2603" + + # unicode in statement + psycopg2.extras.execute_batch(cur, + "insert into testfast (id, data) values (%%s, %%s) -- %s" % snowman, + [(1, 'x')]) + cur.execute("select id, data from testfast where id = 1") + self.assertEqual(cur.fetchone(), (1, 'x')) + + # unicode in data + psycopg2.extras.execute_batch(cur, + "insert into testfast (id, data) values (%s, %s)", + [(2, snowman)]) + cur.execute("select id, data from testfast where id = 2") + self.assertEqual(cur.fetchone(), (2, snowman)) + + # unicode in both + psycopg2.extras.execute_batch(cur, + "insert into testfast (id, data) values (%%s, %%s) -- %s" % snowman, + [(3, snowman)]) + cur.execute("select id, data from testfast where id = 3") + self.assertEqual(cur.fetchone(), (3, snowman)) + + +class 
TestExecuteValues(FastExecuteTestMixin, ConnectingTestCase):
+    def test_empty(self):
+        cur = self.conn.cursor()
+        psycopg2.extras.execute_values(cur,
+            "insert into testfast (id, val) values %s",
+            [])
+        cur.execute("select * from testfast order by id")
+        self.assertEqual(cur.fetchall(), [])
+
+    def test_one(self):
+        cur = self.conn.cursor()
+        psycopg2.extras.execute_values(cur,
+            "insert into testfast (id, val) values %s",
+            iter([(1, 10)]))
+        cur.execute("select id, val from testfast order by id")
+        self.assertEqual(cur.fetchall(), [(1, 10)])
+
+    def test_tuples(self):
+        cur = self.conn.cursor()
+        psycopg2.extras.execute_values(cur,
+            "insert into testfast (id, date, val) values %s",
+            ((i, date(2017, 1, i + 1), i * 10) for i in range(10)))
+        cur.execute("select id, date, val from testfast order by id")
+        self.assertEqual(cur.fetchall(),
+            [(i, date(2017, 1, i + 1), i * 10) for i in range(10)])
+
+    def test_dicts(self):
+        cur = self.conn.cursor()
+        psycopg2.extras.execute_values(cur,
+            "insert into testfast (id, date, val) values %s",
+            (dict(id=i, date=date(2017, 1, i + 1), val=i * 10, foo="bar")
+                for i in range(10)),
+            template='(%(id)s, %(date)s, %(val)s)')
+        cur.execute("select id, date, val from testfast order by id")
+        self.assertEqual(cur.fetchall(),
+            [(i, date(2017, 1, i + 1), i * 10) for i in range(10)])
+
+    def test_many(self):
+        cur = self.conn.cursor()
+        psycopg2.extras.execute_values(cur,
+            "insert into testfast (id, val) values %s",
+            ((i, i * 10) for i in range(1000)))
+        cur.execute("select id, val from testfast order by id")
+        self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(1000)])
+
+    def test_pages(self):
+        cur = self.conn.cursor()
+        psycopg2.extras.execute_values(cur,
+            "insert into testfast (id, val) values %s",
+            ((i, i * 10) for i in range(25)),
+            page_size=10)
+
+        # last statement was 5 tuples (one parens is for the fields list)
+        self.assertEqual(sum(c == '(' for c in cur.query.decode('ascii')), 6)
+
+        cur.execute("select id, val from testfast order by id")
+        self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(25)])
+
+    def test_unicode(self):
+        cur = self.conn.cursor()
+        ext.register_type(ext.UNICODE, cur)
+        snowman = u"\u2603"
+
+        # unicode in statement
+        psycopg2.extras.execute_values(cur,
+            "insert into testfast (id, data) values %%s -- %s" % snowman,
+            [(1, 'x')])
+        cur.execute("select id, data from testfast where id = 1")
+        self.assertEqual(cur.fetchone(), (1, 'x'))
+
+        # unicode in data
+        psycopg2.extras.execute_values(cur,
+            "insert into testfast (id, data) values %s",
+            [(2, snowman)])
+        cur.execute("select id, data from testfast where id = 2")
+        self.assertEqual(cur.fetchone(), (2, snowman))
+
+        # unicode in both
+        psycopg2.extras.execute_values(cur,
+            "insert into testfast (id, data) values %%s -- %s" % snowman,
+            [(3, snowman)])
+        cur.execute("select id, data from testfast where id = 3")
+        self.assertEqual(cur.fetchone(), (3, snowman))
+
+    def test_invalid_sql(self):
+        cur = self.conn.cursor()
+        self.assertRaises(ValueError, psycopg2.extras.execute_values, cur,
+            "insert", [])
+        self.assertRaises(ValueError, psycopg2.extras.execute_values, cur,
+            "insert %s and %s", [])
+        self.assertRaises(ValueError, psycopg2.extras.execute_values, cur,
+            "insert %f", [])
+        self.assertRaises(ValueError, psycopg2.extras.execute_values, cur,
+            "insert %f %s", [])
+
+    def test_percent_escape(self):
+        cur = self.conn.cursor()
+        psycopg2.extras.execute_values(cur,
+            "insert into testfast (id, data) values %s -- a%%b",
+            [(1, 'hi')])
+
self.assert_(b'a%%b' not in cur.query) + self.assert_(b'a%b' in cur.query) + + cur.execute("select id, data from testfast") + self.assertEqual(cur.fetchall(), [(1, 'hi')]) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_types_extras.py b/tests/test_types_extras.py index a584c868..8e615616 100755 --- a/tests/test_types_extras.py +++ b/tests/test_types_extras.py @@ -1766,184 +1766,6 @@ class RangeCasterTestCase(ConnectingTestCase): decorate_all_tests(RangeCasterTestCase, skip_if_no_range) -class TestFastExecute(ConnectingTestCase): - def setUp(self): - super(TestFastExecute, self).setUp() - cur = self.conn.cursor() - cur.execute("""create table testfast ( - id serial primary key, date date, val int, data text)""") - - def test_paginate(self): - def pag(seq): - return psycopg2.extras._paginate(seq, 100) - - self.assertEqual(list(pag([])), []) - self.assertEqual(list(pag([1])), [[1]]) - self.assertEqual(list(pag(range(99))), [list(range(99))]) - self.assertEqual(list(pag(range(100))), [list(range(100))]) - self.assertEqual(list(pag(range(101))), [list(range(100)), [100]]) - self.assertEqual( - list(pag(range(200))), [list(range(100)), list(range(100, 200))]) - self.assertEqual( - list(pag(range(1000))), - [list(range(i * 100, (i + 1) * 100)) for i in range(10)]) - - def test_execute_batch_empty(self): - cur = self.conn.cursor() - psycopg2.extras.execute_batch(cur, - "insert into testfast (id, val) values (%s, %s)", - []) - cur.execute("select * from testfast order by id") - self.assertEqual(cur.fetchall(), []) - - def test_execute_batch_one(self): - cur = self.conn.cursor() - psycopg2.extras.execute_batch(cur, - "insert into testfast (id, val) values (%s, %s)", - iter([(1, 10)])) - cur.execute("select id, val from testfast order by id") - self.assertEqual(cur.fetchall(), [(1, 10)]) - - def test_execute_batch_tuples(self): - cur = self.conn.cursor() - psycopg2.extras.execute_batch(cur, - "insert into testfast (id, date, val) values (%s, %s, %s)", - ((i, date(2017, 1, i + 1), i * 10) for i in range(10))) - cur.execute("select id, date, val from testfast order by id") - self.assertEqual(cur.fetchall(), - [(i, date(2017, 1, i + 1), i * 10) for i in range(10)]) - - def test_execute_batch_many(self): - cur = self.conn.cursor() - psycopg2.extras.execute_batch(cur, - "insert into testfast (id, val) values (%s, %s)", - ((i, i * 10) for i in range(1000))) - cur.execute("select id, val from testfast order by id") - self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(1000)]) - - def test_execute_batch_pages(self): - cur = self.conn.cursor() - psycopg2.extras.execute_batch(cur, - "insert into testfast (id, val) values (%s, %s)", - ((i, i * 10) for i in range(25)), - page_size=10) - - # last command was 5 statements - self.assertEqual(sum(c == u';' for c in cur.query.decode('ascii')), 4) - - cur.execute("select id, val from testfast order by id") - self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(25)]) - - def test_execute_batch_unicode(self): - cur = self.conn.cursor() - ext.register_type(ext.UNICODE, cur) - snowman = u"\u2603" - - # unicode in statement - psycopg2.extras.execute_batch(cur, - "insert into testfast (id, data) values (%%s, %%s) -- %s" % snowman, - [(1, 'x')]) - cur.execute("select id, data from testfast where id = 1") - self.assertEqual(cur.fetchone(), (1, 'x')) - - # unicode in data - psycopg2.extras.execute_batch(cur, - "insert into testfast (id, data) values (%s, 
%s)", - [(2, snowman)]) - cur.execute("select id, data from testfast where id = 2") - self.assertEqual(cur.fetchone(), (2, snowman)) - - # unicode in both - psycopg2.extras.execute_batch(cur, - "insert into testfast (id, data) values (%%s, %%s) -- %s" % snowman, - [(3, snowman)]) - cur.execute("select id, data from testfast where id = 3") - self.assertEqual(cur.fetchone(), (3, snowman)) - - def test_execute_values_empty(self): - cur = self.conn.cursor() - psycopg2.extras.execute_values(cur, - "insert into testfast (id, val) values %s", - []) - cur.execute("select * from testfast order by id") - self.assertEqual(cur.fetchall(), []) - - def test_execute_values_one(self): - cur = self.conn.cursor() - psycopg2.extras.execute_values(cur, - "insert into testfast (id, val) values %s", - iter([(1, 10)])) - cur.execute("select id, val from testfast order by id") - self.assertEqual(cur.fetchall(), [(1, 10)]) - - def test_execute_values_tuples(self): - cur = self.conn.cursor() - psycopg2.extras.execute_values(cur, - "insert into testfast (id, date, val) values %s", - ((i, date(2017, 1, i + 1), i * 10) for i in range(10))) - cur.execute("select id, date, val from testfast order by id") - self.assertEqual(cur.fetchall(), - [(i, date(2017, 1, i + 1), i * 10) for i in range(10)]) - - def test_execute_values_dicts(self): - cur = self.conn.cursor() - psycopg2.extras.execute_values(cur, - "insert into testfast (id, date, val) values %s", - (dict(id=i, date=date(2017, 1, i + 1), val=i * 10, foo="bar") - for i in range(10)), - template='(%(id)s, %(date)s, %(val)s)') - cur.execute("select id, date, val from testfast order by id") - self.assertEqual(cur.fetchall(), - [(i, date(2017, 1, i + 1), i * 10) for i in range(10)]) - - def test_execute_values_many(self): - cur = self.conn.cursor() - psycopg2.extras.execute_values(cur, - "insert into testfast (id, val) values %s", - ((i, i * 10) for i in range(1000))) - cur.execute("select id, val from testfast order by id") - self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(1000)]) - - def test_execute_values_pages(self): - cur = self.conn.cursor() - psycopg2.extras.execute_values(cur, - "insert into testfast (id, val) values %s", - ((i, i * 10) for i in range(25)), - page_size=10) - - # last statement was 5 tuples (one parens is for the fields list) - self.assertEqual(sum(c == '(' for c in cur.query.decode('ascii')), 6) - - cur.execute("select id, val from testfast order by id") - self.assertEqual(cur.fetchall(), [(i, i * 10) for i in range(25)]) - - def test_execute_values_unicode(self): - cur = self.conn.cursor() - ext.register_type(ext.UNICODE, cur) - snowman = u"\u2603" - - # unicode in statement - psycopg2.extras.execute_values(cur, - "insert into testfast (id, data) values %%s -- %s" % snowman, - [(1, 'x')]) - cur.execute("select id, data from testfast where id = 1") - self.assertEqual(cur.fetchone(), (1, 'x')) - - # unicode in data - psycopg2.extras.execute_values(cur, - "insert into testfast (id, data) values %s", - [(2, snowman)]) - cur.execute("select id, data from testfast where id = 2") - self.assertEqual(cur.fetchone(), (2, snowman)) - - # unicode in both - psycopg2.extras.execute_values(cur, - "insert into testfast (id, data) values %%s -- %s" % snowman, - [(3, snowman)]) - cur.execute("select id, data from testfast where id = 3") - self.assertEqual(cur.fetchone(), (3, snowman)) - - def test_suite(): return unittest.TestLoader().loadTestsFromName(__name__) From 95226baa9b3e9ccff50de053a93b2ea53bb1e25c Mon Sep 17 00:00:00 2001 From: Daniele 
Varrazzo Date: Thu, 2 Feb 2017 17:42:06 +0000 Subject: [PATCH 17/21] Further minimal performance tweaks to execute_values --- lib/extras.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/lib/extras.py b/lib/extras.py index 80034e6f..38ca17a9 100644 --- a/lib/extras.py +++ b/lib/extras.py @@ -1241,19 +1241,20 @@ def execute_values(cur, sql, argslist, template=None, page_size=100): for page in _paginate(argslist, page_size=page_size): if template is None: - template = '(%s)' % ','.join(['%s'] * len(page[0])) - parts = [pre] + template = b'(' + b','.join([b'%s'] * len(page[0])) + b')' + parts = pre[:] for args in page: parts.append(cur.mogrify(template, args)) parts.append(b',') - parts[-1] = post + parts[-1:] = post cur.execute(b''.join(parts)) def _split_sql(sql): """Split *sql* on a single ``%s`` placeholder. - Return a (pre, post) pair around the ``%s``, with ``%%`` -> ``%`` replacement. + Split on the %s, perform %% replacement and return pre, post lists of + snippets. """ curr = pre = [] post = [] @@ -1278,4 +1279,4 @@ def _split_sql(sql): if curr is pre: raise ValueError("the query doesn't contain any '%s' placeholder") - return b''.join(pre), b''.join(post) + return pre, post From 8baf6aa37293c75e8ffbf150f6504faa43a08045 Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Thu, 2 Feb 2017 19:03:22 +0000 Subject: [PATCH 18/21] Convert warnings into errors on test --- setup.py | 6 +++++- tests/__init__.py | 5 +++++ tests/test_errcodes.py | 5 ++++- tests/test_types_extras.py | 11 +++++++++-- tests/testutils.py | 8 ++++++-- 5 files changed, 29 insertions(+), 6 deletions(-) diff --git a/setup.py b/setup.py index 1c5c8597..d251f988 100644 --- a/setup.py +++ b/setup.py @@ -50,7 +50,11 @@ else: # workaround subclass for ticket #153 pass - sys.path.insert(0, 'scripts') + # Configure distutils to run our custom 2to3 fixers as well + from lib2to3.refactor import get_fixers_from_package + build_py.fixer_names = [f for f in get_fixers_from_package('lib2to3.fixes') + # creates a pending deprecation warning on py 3.4 + if not f.endswith('.fix_reload')] try: import configparser diff --git a/tests/__init__.py b/tests/__init__.py index 1a240994..5382afe8 100755 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -22,6 +22,11 @@ # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public # License for more details. +# Convert warnings into errors here. We can't do it with -W because on +# Travis importing site raises a warning. 
+import warnings +warnings.simplefilter('error') # noqa + import sys from testconfig import dsn from testutils import unittest diff --git a/tests/test_errcodes.py b/tests/test_errcodes.py index 6865194f..3e7ed81a 100755 --- a/tests/test_errcodes.py +++ b/tests/test_errcodes.py @@ -27,7 +27,10 @@ from testutils import unittest, ConnectingTestCase try: reload except NameError: - from imp import reload + try: + from importlib import reload + except ImportError: + from imp import reload from threading import Thread from psycopg2 import errorcodes diff --git a/tests/test_types_extras.py b/tests/test_types_extras.py index 8e615616..7249ae04 100755 --- a/tests/test_types_extras.py +++ b/tests/test_types_extras.py @@ -17,6 +17,7 @@ from __future__ import with_statement import re import sys +import warnings from decimal import Decimal from datetime import date, datetime from functools import wraps @@ -77,7 +78,10 @@ class TypesExtrasTests(ConnectingTestCase): self.failUnless(type(s) == list and len(s) == 0) def testINET(self): - psycopg2.extras.register_inet() + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + psycopg2.extras.register_inet() + i = psycopg2.extras.Inet("192.168.1.0/24") s = self.execute("SELECT %s AS foo", (i,)) self.failUnless(i.addr == s.addr) @@ -86,7 +90,10 @@ class TypesExtrasTests(ConnectingTestCase): self.failUnless(s is None) def testINETARRAY(self): - psycopg2.extras.register_inet() + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + psycopg2.extras.register_inet() + i = psycopg2.extras.Inet("192.168.1.0/24") s = self.execute("SELECT %s AS foo", ([i],)) self.failUnless(i.addr == s[0].addr) diff --git a/tests/testutils.py b/tests/testutils.py index 93477357..d63dc00b 100644 --- a/tests/testutils.py +++ b/tests/testutils.py @@ -50,7 +50,9 @@ else: @wraps(f) def skipIf__(self): if cond: - warnings.warn(msg) + with warnings.catch_warnings(): + warnings.simplefilter('always', UserWarning) + warnings.warn(msg) return else: return f(self) @@ -61,7 +63,9 @@ else: return skipIf(True, msg) def skipTest(self, msg): - warnings.warn(msg) + with warnings.catch_warnings(): + warnings.simplefilter('always', UserWarning) + warnings.warn(msg) return unittest.TestCase.skipTest = skipTest From ce9be69615c700a0472098c5db171d33091c0b5a Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Fri, 3 Feb 2017 04:28:27 +0000 Subject: [PATCH 19/21] Added async_ as an alias for async Added in argument for psycopg2.connect() and connection.__init__, and for the connection.async attribute. --- lib/__init__.py | 14 ++- psycopg/connection_type.c | 10 +- psycopg/psycopgmodule.c | 10 +- tests/__init__.py | 5 + tests/test_async.py | 16 +-- tests/test_async_keyword.py | 217 ++++++++++++++++++++++++++++++++++++ tests/test_cancel.py | 4 +- tests/test_connection.py | 13 +-- tests/test_module.py | 35 +++--- tests/test_replication.py | 2 +- tests/testutils.py | 6 +- 11 files changed, 282 insertions(+), 50 deletions(-) create mode 100755 tests/test_async_keyword.py diff --git a/lib/__init__.py b/lib/__init__.py index fb22b4c0..492b924d 100644 --- a/lib/__init__.py +++ b/lib/__init__.py @@ -82,8 +82,7 @@ else: del Decimal, Adapter -def connect(dsn=None, connection_factory=None, cursor_factory=None, - async=False, **kwargs): +def connect(dsn=None, connection_factory=None, cursor_factory=None, **kwargs): """ Create a new database connection. 
@@ -111,17 +110,24 @@ def connect(dsn=None, connection_factory=None, cursor_factory=None, Using the *cursor_factory* parameter, a new default cursor factory will be used by cursor(). - Using *async*=True an asynchronous connection will be created. + Using *async*=True an asynchronous connection will be created. *async_* is + a valid alias (for Python versions where ``async`` is a keyword). Any other keyword parameter will be passed to the underlying client library: the list of supported parameters depends on the library version. """ + kwasync = {} + if 'async' in kwargs: + kwasync['async'] = kwargs.pop('async') + if 'async_' in kwargs: + kwasync['async_'] = kwargs.pop('async_') + if dsn is None and not kwargs: raise TypeError('missing dsn and no parameters') dsn = _ext.make_dsn(dsn, **kwargs) - conn = _connect(dsn, connection_factory=connection_factory, async=async) + conn = _connect(dsn, connection_factory=connection_factory, **kwasync) if cursor_factory is not None: conn.cursor_factory = cursor_factory diff --git a/psycopg/connection_type.c b/psycopg/connection_type.c index 7401bc14..2066579e 100644 --- a/psycopg/connection_type.c +++ b/psycopg/connection_type.c @@ -1040,6 +1040,8 @@ static struct PyMemberDef connectionObject_members[] = { "The current connection string."}, {"async", T_LONG, offsetof(connectionObject, async), READONLY, "True if the connection is asynchronous."}, + {"async_", T_LONG, offsetof(connectionObject, async), READONLY, + "True if the connection is asynchronous."}, {"status", T_INT, offsetof(connectionObject, status), READONLY, "The current transaction status."}, @@ -1186,12 +1188,14 @@ static int connection_init(PyObject *obj, PyObject *args, PyObject *kwds) { const char *dsn; - long int async = 0; - static char *kwlist[] = {"dsn", "async", NULL}; + long int async = 0, async_ = 0; + static char *kwlist[] = {"dsn", "async", "async_", NULL}; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|l", kwlist, &dsn, &async)) + if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|ll", kwlist, + &dsn, &async, &async_)) return -1; + if (async_) { async = async_; } return connection_setup((connectionObject *)obj, dsn, async); } diff --git a/psycopg/psycopgmodule.c b/psycopg/psycopgmodule.c index c4d1517a..6c95bd69 100644 --- a/psycopg/psycopgmodule.c +++ b/psycopg/psycopgmodule.c @@ -82,15 +82,17 @@ psyco_connect(PyObject *self, PyObject *args, PyObject *keywds) PyObject *conn = NULL; PyObject *factory = NULL; const char *dsn = NULL; - int async = 0; + int async = 0, async_ = 0; - static char *kwlist[] = {"dsn", "connection_factory", "async", NULL}; + static char *kwlist[] = {"dsn", "connection_factory", "async", "async_", NULL}; - if (!PyArg_ParseTupleAndKeywords(args, keywds, "s|Oi", kwlist, - &dsn, &factory, &async)) { + if (!PyArg_ParseTupleAndKeywords(args, keywds, "s|Oii", kwlist, + &dsn, &factory, &async, &async_)) { return NULL; } + if (async_) { async = async_; } + Dprintf("psyco_connect: dsn = '%s', async = %d", dsn, async); /* allocate connection, fill with errors and return it */ diff --git a/tests/__init__.py b/tests/__init__.py index 5382afe8..2b3a6a4d 100755 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -54,6 +54,9 @@ import test_types_basic import test_types_extras import test_with +if sys.version_info[:2] < (3, 6): + import test_async_keyword + def test_suite(): # If connection to test db fails, bail out early. 
@@ -69,6 +72,8 @@ def test_suite(): suite = unittest.TestSuite() suite.addTest(test_async.test_suite()) + if sys.version_info[:2] < (3, 6): + suite.addTest(test_async_keyword.test_suite()) suite.addTest(test_bugX000.test_suite()) suite.addTest(test_bug_gc.test_suite()) suite.addTest(test_cancel.test_suite()) diff --git a/tests/test_async.py b/tests/test_async.py index 6f8fed58..cdf17225 100755 --- a/tests/test_async.py +++ b/tests/test_async.py @@ -55,7 +55,7 @@ class AsyncTests(ConnectingTestCase): ConnectingTestCase.setUp(self) self.sync_conn = self.conn - self.conn = self.connect(async=True) + self.conn = self.connect(async_=True) self.wait(self.conn) @@ -71,8 +71,8 @@ class AsyncTests(ConnectingTestCase): sync_cur = self.sync_conn.cursor() del cur, sync_cur - self.assert_(self.conn.async) - self.assert_(not self.sync_conn.async) + self.assert_(self.conn.async_) + self.assert_(not self.sync_conn.async_) # the async connection should be in isolevel 0 self.assertEquals(self.conn.isolation_level, 0) @@ -310,12 +310,12 @@ class AsyncTests(ConnectingTestCase): def test_async_subclass(self): class MyConn(psycopg2.extensions.connection): - def __init__(self, dsn, async=0): - psycopg2.extensions.connection.__init__(self, dsn, async=async) + def __init__(self, dsn, async_=0): + psycopg2.extensions.connection.__init__(self, dsn, async_=async_) - conn = self.connect(connection_factory=MyConn, async=True) + conn = self.connect(connection_factory=MyConn, async_=True) self.assert_(isinstance(conn, MyConn)) - self.assert_(conn.async) + self.assert_(conn.async_) conn.close() def test_flush_on_write(self): @@ -438,7 +438,7 @@ class AsyncTests(ConnectingTestCase): def test_async_connection_error_message(self): try: - cnn = psycopg2.connect('dbname=thisdatabasedoesntexist', async=True) + cnn = psycopg2.connect('dbname=thisdatabasedoesntexist', async_=True) self.wait(cnn) except psycopg2.Error, e: self.assertNotEqual(str(e), "asynchronous connection failed", diff --git a/tests/test_async_keyword.py b/tests/test_async_keyword.py new file mode 100755 index 00000000..3b20e8d6 --- /dev/null +++ b/tests/test_async_keyword.py @@ -0,0 +1,217 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# test_async_keyword.py - test for objects using 'async' as attribute/param +# +# Copyright (C) 2017 Daniele Varrazzo +# +# psycopg2 is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# In addition, as a special exception, the copyright holders give +# permission to link this program with the OpenSSL library (or with +# modified versions of OpenSSL that use the same license as OpenSSL), +# and distribute linked combinations including the two. +# +# You must obey the GNU Lesser General Public License in all respects for +# all of the code used other than OpenSSL. +# +# psycopg2 is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. 
+ +import psycopg2 +from psycopg2 import extras + +from testconfig import dsn +from testutils import (ConnectingTestCase, unittest, skip_before_postgres, + assertDsnEqual) +from test_replication import ReplicationTestCase, skip_repl_if_green +from psycopg2.extras import LogicalReplicationConnection, StopReplication + + +class AsyncTests(ConnectingTestCase): + def setUp(self): + ConnectingTestCase.setUp(self) + + self.sync_conn = self.conn + self.conn = self.connect(async=True) + + self.wait(self.conn) + + curs = self.conn.cursor() + curs.execute(''' + CREATE TEMPORARY TABLE table1 ( + id int PRIMARY KEY + )''') + self.wait(curs) + + def test_connection_setup(self): + cur = self.conn.cursor() + sync_cur = self.sync_conn.cursor() + del cur, sync_cur + + self.assert_(self.conn.async) + self.assert_(not self.sync_conn.async) + + # the async connection should be in isolevel 0 + self.assertEquals(self.conn.isolation_level, 0) + + # check other properties to be found on the connection + self.assert_(self.conn.server_version) + self.assert_(self.conn.protocol_version in (2, 3)) + self.assert_(self.conn.encoding in psycopg2.extensions.encodings) + + def test_async_subclass(self): + class MyConn(psycopg2.extensions.connection): + def __init__(self, dsn, async=0): + psycopg2.extensions.connection.__init__(self, dsn, async=async) + + conn = self.connect(connection_factory=MyConn, async=True) + self.assert_(isinstance(conn, MyConn)) + self.assert_(conn.async) + conn.close() + + def test_async_connection_error_message(self): + try: + cnn = psycopg2.connect('dbname=thisdatabasedoesntexist', async=True) + self.wait(cnn) + except psycopg2.Error, e: + self.assertNotEqual(str(e), "asynchronous connection failed", + "connection error reason lost") + else: + self.fail("no exception raised") + + +class CancelTests(ConnectingTestCase): + def setUp(self): + ConnectingTestCase.setUp(self) + + cur = self.conn.cursor() + cur.execute(''' + CREATE TEMPORARY TABLE table1 ( + id int PRIMARY KEY + )''') + self.conn.commit() + + @skip_before_postgres(8, 2) + def test_async_cancel(self): + async_conn = psycopg2.connect(dsn, async=True) + self.assertRaises(psycopg2.OperationalError, async_conn.cancel) + extras.wait_select(async_conn) + cur = async_conn.cursor() + cur.execute("select pg_sleep(10000)") + self.assertTrue(async_conn.isexecuting()) + async_conn.cancel() + self.assertRaises(psycopg2.extensions.QueryCanceledError, + extras.wait_select, async_conn) + cur.execute("select 1") + extras.wait_select(async_conn) + self.assertEqual(cur.fetchall(), [(1, )]) + + def test_async_connection_cancel(self): + async_conn = psycopg2.connect(dsn, async=True) + async_conn.close() + self.assertTrue(async_conn.closed) + + +class ConnectTestCase(unittest.TestCase): + def setUp(self): + self.args = None + + def connect_stub(dsn, connection_factory=None, async=False): + self.args = (dsn, connection_factory, async) + + self._connect_orig = psycopg2._connect + psycopg2._connect = connect_stub + + def tearDown(self): + psycopg2._connect = self._connect_orig + + def test_there_has_to_be_something(self): + self.assertRaises(TypeError, psycopg2.connect) + self.assertRaises(TypeError, psycopg2.connect, + connection_factory=lambda dsn, async=False: None) + self.assertRaises(TypeError, psycopg2.connect, + async=True) + + def test_factory(self): + def f(dsn, async=False): + pass + + psycopg2.connect(database='foo', host='baz', connection_factory=f) + assertDsnEqual(self, self.args[0], 'dbname=foo host=baz') + self.assertEqual(self.args[1], f) + 
self.assertEqual(self.args[2], False) + + psycopg2.connect("dbname=foo host=baz", connection_factory=f) + assertDsnEqual(self, self.args[0], 'dbname=foo host=baz') + self.assertEqual(self.args[1], f) + self.assertEqual(self.args[2], False) + + def test_async(self): + psycopg2.connect(database='foo', host='baz', async=1) + assertDsnEqual(self, self.args[0], 'dbname=foo host=baz') + self.assertEqual(self.args[1], None) + self.assert_(self.args[2]) + + psycopg2.connect("dbname=foo host=baz", async=True) + assertDsnEqual(self, self.args[0], 'dbname=foo host=baz') + self.assertEqual(self.args[1], None) + self.assert_(self.args[2]) + + +class AsyncReplicationTest(ReplicationTestCase): + @skip_before_postgres(9, 4) # slots require 9.4 + @skip_repl_if_green + def test_async_replication(self): + conn = self.repl_connect( + connection_factory=LogicalReplicationConnection, async=1) + if conn is None: + return + + cur = conn.cursor() + + self.create_replication_slot(cur, output_plugin='test_decoding') + self.wait(cur) + + cur.start_replication(self.slot) + self.wait(cur) + + self.make_replication_events() + + self.msg_count = 0 + + def consume(msg): + # just check the methods + "%s: %s" % (cur.io_timestamp, repr(msg)) + + self.msg_count += 1 + if self.msg_count > 3: + cur.send_feedback(reply=True) + raise StopReplication() + + cur.send_feedback(flush_lsn=msg.data_start) + + # cannot be used in asynchronous mode + self.assertRaises(psycopg2.ProgrammingError, cur.consume_stream, consume) + + def process_stream(): + from select import select + while True: + msg = cur.read_message() + if msg: + consume(msg) + else: + select([cur], [], []) + self.assertRaises(StopReplication, process_stream) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromName(__name__) + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_cancel.py b/tests/test_cancel.py index a8eb7506..6f66ef77 100755 --- a/tests/test_cancel.py +++ b/tests/test_cancel.py @@ -87,7 +87,7 @@ class CancelTests(ConnectingTestCase): @skip_before_postgres(8, 2) def test_async_cancel(self): - async_conn = psycopg2.connect(dsn, async=True) + async_conn = psycopg2.connect(dsn, async_=True) self.assertRaises(psycopg2.OperationalError, async_conn.cancel) extras.wait_select(async_conn) cur = async_conn.cursor() @@ -101,7 +101,7 @@ class CancelTests(ConnectingTestCase): self.assertEqual(cur.fetchall(), [(1, )]) def test_async_connection_cancel(self): - async_conn = psycopg2.connect(dsn, async=True) + async_conn = psycopg2.connect(dsn, async_=True) async_conn.close() self.assertTrue(async_conn.closed) diff --git a/tests/test_connection.py b/tests/test_connection.py index 833751b9..68e4da8c 100755 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -33,7 +33,7 @@ import psycopg2.errorcodes from psycopg2 import extensions as ext from testutils import ( - unittest, decorate_all_tests, skip_if_no_superuser, + unittest, assertDsnEqual, decorate_all_tests, skip_if_no_superuser, skip_before_postgres, skip_after_postgres, skip_before_libpq, ConnectingTestCase, skip_if_tpc_disabled, skip_if_windows) @@ -392,9 +392,6 @@ class ParseDsnTestCase(ConnectingTestCase): class MakeDsnTestCase(ConnectingTestCase): - def assertDsnEqual(self, dsn1, dsn2): - self.assertEqual(set(dsn1.split()), set(dsn2.split())) - def test_empty_arguments(self): self.assertEqual(ext.make_dsn(), '') @@ -412,7 +409,7 @@ class MakeDsnTestCase(ConnectingTestCase): def test_empty_param(self): dsn = ext.make_dsn(dbname='sony', password='') - 
self.assertDsnEqual(dsn, "dbname=sony password=''") + assertDsnEqual(self, dsn, "dbname=sony password=''") def test_escape(self): dsn = ext.make_dsn(dbname='hello world') @@ -435,10 +432,10 @@ class MakeDsnTestCase(ConnectingTestCase): def test_params_merging(self): dsn = ext.make_dsn('dbname=foo host=bar', host='baz') - self.assertDsnEqual(dsn, 'dbname=foo host=baz') + assertDsnEqual(self, dsn, 'dbname=foo host=baz') dsn = ext.make_dsn('dbname=foo', user='postgres') - self.assertDsnEqual(dsn, 'dbname=foo user=postgres') + assertDsnEqual(self, dsn, 'dbname=foo user=postgres') def test_no_dsn_munging(self): dsnin = 'dbname=a host=b user=c password=d' @@ -452,7 +449,7 @@ class MakeDsnTestCase(ConnectingTestCase): self.assertEqual(dsn, url) dsn = ext.make_dsn(url, application_name='woot') - self.assertDsnEqual(dsn, + assertDsnEqual(self, dsn, 'dbname=test user=tester password=secret application_name=woot') self.assertRaises(psycopg2.ProgrammingError, diff --git a/tests/test_module.py b/tests/test_module.py index 6a1606d6..a5d36e65 100755 --- a/tests/test_module.py +++ b/tests/test_module.py @@ -26,8 +26,8 @@ import os import sys from subprocess import Popen -from testutils import unittest, skip_before_python, skip_before_postgres -from testutils import ConnectingTestCase, skip_copy_if_green, script_to_py3 +from testutils import (unittest, skip_before_python, skip_before_postgres, + ConnectingTestCase, skip_copy_if_green, script_to_py3, assertDsnEqual) import psycopg2 @@ -36,24 +36,21 @@ class ConnectTestCase(unittest.TestCase): def setUp(self): self.args = None - def conect_stub(dsn, connection_factory=None, async=False): - self.args = (dsn, connection_factory, async) + def connect_stub(dsn, connection_factory=None, async_=False): + self.args = (dsn, connection_factory, async_) self._connect_orig = psycopg2._connect - psycopg2._connect = conect_stub + psycopg2._connect = connect_stub def tearDown(self): psycopg2._connect = self._connect_orig - def assertDsnEqual(self, dsn1, dsn2): - self.assertEqual(set(dsn1.split()), set(dsn2.split())) - def test_there_has_to_be_something(self): self.assertRaises(TypeError, psycopg2.connect) self.assertRaises(TypeError, psycopg2.connect, - connection_factory=lambda dsn, async=False: None) + connection_factory=lambda dsn, async_=False: None) self.assertRaises(TypeError, psycopg2.connect, - async=True) + async_=True) def test_no_keywords(self): psycopg2.connect('') @@ -92,27 +89,27 @@ class ConnectTestCase(unittest.TestCase): self.assertEqual(self.args[0], 'options=stuff') def test_factory(self): - def f(dsn, async=False): + def f(dsn, async_=False): pass psycopg2.connect(database='foo', host='baz', connection_factory=f) - self.assertDsnEqual(self.args[0], 'dbname=foo host=baz') + assertDsnEqual(self, self.args[0], 'dbname=foo host=baz') self.assertEqual(self.args[1], f) self.assertEqual(self.args[2], False) psycopg2.connect("dbname=foo host=baz", connection_factory=f) - self.assertDsnEqual(self.args[0], 'dbname=foo host=baz') + assertDsnEqual(self, self.args[0], 'dbname=foo host=baz') self.assertEqual(self.args[1], f) self.assertEqual(self.args[2], False) def test_async(self): - psycopg2.connect(database='foo', host='baz', async=1) - self.assertDsnEqual(self.args[0], 'dbname=foo host=baz') + psycopg2.connect(database='foo', host='baz', async_=1) + assertDsnEqual(self, self.args[0], 'dbname=foo host=baz') self.assertEqual(self.args[1], None) self.assert_(self.args[2]) - psycopg2.connect("dbname=foo host=baz", async=True) - self.assertDsnEqual(self.args[0], 
'dbname=foo host=baz') + psycopg2.connect("dbname=foo host=baz", async_=True) + assertDsnEqual(self, self.args[0], 'dbname=foo host=baz') self.assertEqual(self.args[1], None) self.assert_(self.args[2]) @@ -124,7 +121,7 @@ class ConnectTestCase(unittest.TestCase): def test_empty_param(self): psycopg2.connect(database='sony', password='') - self.assertDsnEqual(self.args[0], "dbname=sony password=''") + assertDsnEqual(self, self.args[0], "dbname=sony password=''") def test_escape(self): psycopg2.connect(database='hello world') @@ -147,7 +144,7 @@ class ConnectTestCase(unittest.TestCase): self.assertEqual(self.args[0], 'dbname=bar') psycopg2.connect('dbname=foo', user='postgres') - self.assertDsnEqual(self.args[0], 'dbname=foo user=postgres') + assertDsnEqual(self, self.args[0], 'dbname=foo user=postgres') class ExceptionsTestCase(ConnectingTestCase): diff --git a/tests/test_replication.py b/tests/test_replication.py index 79d1295d..0aed578f 100755 --- a/tests/test_replication.py +++ b/tests/test_replication.py @@ -183,7 +183,7 @@ class AsyncReplicationTest(ReplicationTestCase): @skip_repl_if_green def test_async_replication(self): conn = self.repl_connect( - connection_factory=LogicalReplicationConnection, async=1) + connection_factory=LogicalReplicationConnection, async_=1) if conn is None: return diff --git a/tests/testutils.py b/tests/testutils.py index d63dc00b..69dfe85c 100644 --- a/tests/testutils.py +++ b/tests/testutils.py @@ -134,7 +134,7 @@ class ConnectingTestCase(unittest.TestCase): import psycopg2 try: conn = self.connect(**kwargs) - if conn.async == 1: + if conn.async_ == 1: self.wait(conn) except psycopg2.OperationalError, e: # If pgcode is not set it is a genuine connection error @@ -459,3 +459,7 @@ class py3_raises_typeerror(object): if sys.version_info[0] >= 3: assert type is TypeError return True + + +def assertDsnEqual(testsuite, dsn1, dsn2): + testsuite.assertEqual(set(dsn1.split()), set(dsn2.split())) From 44c3b776917444ca98667f5d6f4470c767f820bf Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Fri, 3 Feb 2017 04:39:20 +0000 Subject: [PATCH 20/21] Added docs for async_ alias --- NEWS | 2 ++ doc/src/connection.rst | 4 ++++ doc/src/extensions.rst | 2 ++ doc/src/module.rst | 6 +++++- 4 files changed, 13 insertions(+), 1 deletion(-) diff --git a/NEWS b/NEWS index b4d11e64..e51b706c 100644 --- a/NEWS +++ b/NEWS @@ -29,6 +29,8 @@ New features: unicode encoding/decoding (:ticket:`#473`). - Fixed error caused by missing decoding `~psycopg2.extras.LoggingConnection` (:ticket:`#483`). +- Added ``async_`` as an alias for ``async`` to support Python 3.7 where + ``async`` will become a keyword (:ticket:`#495`). Other changes: diff --git a/doc/src/connection.rst b/doc/src/connection.rst index c99c8bd8..0bc584ca 100644 --- a/doc/src/connection.rst +++ b/doc/src/connection.rst @@ -706,9 +706,13 @@ The ``connection`` class .. attribute:: async + async_ Read only attribute: 1 if the connection is asynchronous, 0 otherwise. + .. versionchanged:: 2.7 added the `!async_` alias for Python versions + where `!async` is a keyword. + .. method:: poll() diff --git a/doc/src/extensions.rst b/doc/src/extensions.rst index 9c5a8538..8d70ba38 100644 --- a/doc/src/extensions.rst +++ b/doc/src/extensions.rst @@ -29,6 +29,8 @@ introspection etc. For a complete description of the class, see `connection`. + .. versionchanged:: 2.7 + *async_* can be used as alias for *async*. .. 
class:: cursor(conn, name=None) diff --git a/doc/src/module.rst b/doc/src/module.rst index 97fbdf19..2bd259e3 100644 --- a/doc/src/module.rst +++ b/doc/src/module.rst @@ -64,7 +64,8 @@ The module interface respects the standard defined in the |DBAPI|_. cursors you can use this parameter instead of subclassing a connection. Using *async*\=\ `!True` an asynchronous connection will be created: see - :ref:`async-support` to know about advantages and limitations. + :ref:`async-support` to know about advantages and limitations. *async_* is + a valid alias for the Python version where ``async`` is a keyword. .. versionchanged:: 2.4.3 any keyword argument is passed to the connection. Previously only the @@ -76,6 +77,9 @@ The module interface respects the standard defined in the |DBAPI|_. .. versionchanged:: 2.7 both *dsn* and keyword arguments can be specified. + .. versionchanged:: 2.7 + added *async_* alias. + .. seealso:: - `~psycopg2.extensions.parse_dsn` From ca42306d7916647448184907e03c77ff54ebd4f9 Mon Sep 17 00:00:00 2001 From: Daniele Varrazzo Date: Fri, 3 Feb 2017 04:52:49 +0000 Subject: [PATCH 21/21] Misplaced NEWS entry fixed. --- NEWS | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/NEWS b/NEWS index 7fa5d816..c9cbb4c5 100644 --- a/NEWS +++ b/NEWS @@ -30,13 +30,13 @@ New features: - `~cursor.executemany()` slowness addressed by `~psycopg2.extras.execute_batch()` and `~psycopg2.extras.execute_values()` (:ticket:`#491`). +- Added ``async_`` as an alias for ``async`` to support Python 3.7 where + ``async`` will become a keyword (:ticket:`#495`). Bug fixes: - Fixed error caused by missing decoding `~psycopg2.extras.LoggingConnection` (:ticket:`#483`). -- Added ``async_`` as an alias for ``async`` to support Python 3.7 where - ``async`` will become a keyword (:ticket:`#495`). Other changes:
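
For reference, here is a short usage sketch of the helpers and the keyword alias touched by the patches above. It is illustrative only and not part of the patch series: the connection string and the "demo" table are placeholders, and it assumes a psycopg2 build (2.7 or later) that contains these changes:

    import psycopg2
    import psycopg2.extras

    # Placeholder DSN: point this at any scratch database.
    conn = psycopg2.connect("dbname=psycopg2_test")
    cur = conn.cursor()
    cur.execute("create table if not exists demo (id int primary key, val int)")

    rows = [(i, i * 10) for i in range(1000)]

    # execute_batch(): the query uses one placeholder per column; the rows are
    # mogrified and sent to the server in pages of page_size statements joined
    # by ';', reducing the number of round trips compared to executemany().
    psycopg2.extras.execute_batch(
        cur, "insert into demo (id, val) values (%s, %s)", rows, page_size=100)

    cur.execute("truncate demo")

    # execute_values(): the query contains a single %s placeholder, which is
    # replaced by a VALUES list of mogrified tuples, one page at a time.
    psycopg2.extras.execute_values(
        cur, "insert into demo (id, val) values %s", rows, page_size=100)

    conn.commit()
    conn.close()

    # The async_ parameter is an alias for async, usable on Python versions
    # where "async" is (or will become) a reserved word. An asynchronous
    # connection must be polled until ready before use.
    aconn = psycopg2.connect("dbname=psycopg2_test", async_=True)
    psycopg2.extras.wait_select(aconn)

Both helpers trade one network round trip per page of rows for the one-round-trip-per-row cost of executemany(), which is what the test_pages tests above verify by counting the statements and tuples in the last query sent.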