Merge remote-tracking branch 'psycopg2/master'

This commit is contained in:
Ashesh Vashi 2017-09-11 18:26:34 +05:30
commit 6e0edf7779
12 changed files with 42 additions and 194 deletions

14
NEWS
View File

@ -1,6 +1,20 @@
Current release
---------------
What's new in psycopg 2.7.3.1
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Dropped libresolv from wheel package to avoid incompatibility with
glibc 2.26 (wheels ticket #2)
What's new in psycopg 2.7.3
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Restored default :sql:`timestamptz[]` typecasting to Python `!datetime`.
Regression introduced in Psycopg 2.7.2 (:ticket:`#578`).
What's new in psycopg 2.7.2
^^^^^^^^^^^^^^^^^^^^^^^^^^^

View File

@ -30,7 +30,7 @@ upload:
# this command requires ssh configured to the proper target
tar czf - -C html . | ssh psycoweb tar xzvf - -C docs/current
# this command requires a .pypirc with the right privileges
python src/tools/pypi_docs_upload.py psycopg2 $$(pwd)/html
# python src/tools/pypi_docs_upload.py psycopg2 $$(pwd)/html
clean:
$(MAKE) $(SPHOPTS) -C src $@

View File

@ -753,7 +753,7 @@ The ``connection`` class
`psycopg2.extensions`: see :ref:`connection-status-constants`
for the available values.
The status is undefined for `closed` connectons.
The status is undefined for `closed` connections.
.. method:: lobject([oid [, mode [, new_oid [, new_file [, lobject_factory]]]]])

View File

@ -1,166 +0,0 @@
# -*- coding: utf-8 -*-
"""
Standalone script to upload a project docs on PyPI
Hacked together from the following distutils extension, available from
https://bitbucket.org/jezdez/sphinx-pypi-upload/overview (ver. 0.2.1)
sphinx_pypi_upload
~~~~~~~~~~~~~~~~~~
setuptools command for uploading Sphinx documentation to PyPI
:author: Jannis Leidel
:contact: jannis@leidel.info
:copyright: Copyright 2009, Jannis Leidel.
:license: BSD, see LICENSE for details.
"""
import os
import sys
import socket
import zipfile
import httplib
import base64
import urlparse
import tempfile
import cStringIO as StringIO
from ConfigParser import ConfigParser
from distutils import log
from distutils.command.upload import upload
from distutils.errors import DistutilsOptionError
class UploadDoc(object):
    """Upload a Sphinx documentation tree to PyPI's ``doc_upload`` endpoint.

    Ported to Python 3: the original used Python 2-only syntax
    (``print`` statements, ``raise E, msg``, ``except E, e``) and
    modules that were removed or renamed (``httplib``, ``urlparse``,
    ``cStringIO``, ``ConfigParser``, ``distutils``), so it could not
    run on a modern interpreter.
    """

    # Former distutils ``upload.DEFAULT_REPOSITORY``; hardcoded because
    # distutils was removed from the standard library (Python 3.12).
    DEFAULT_REPOSITORY = "https://upload.pypi.org/legacy/"

    def __init__(self, name, upload_dir, repository=None):
        """Read PyPI credentials from ``~/.pypirc`` and remember the target.

        :param name: PyPI project name.
        :param upload_dir: directory containing the built HTML docs.
        :param repository: optional repository URL (default: PyPI).
        """
        from configparser import ConfigParser

        self.name = name
        self.upload_dir = upload_dir
        parser = ConfigParser()
        parser.read(os.path.expanduser('~/.pypirc'))
        self.username = parser.get('pypi', 'username')
        self.password = parser.get('pypi', 'password')
        self.show_response = False
        self.repository = repository or self.DEFAULT_REPOSITORY

    def create_zipfile(self):
        """Zip *upload_dir*'s contents into a temp file; return its path.

        :raises ValueError: if a walked directory contains no files.
            (The original raised ``DistutilsOptionError``, which no
            longer exists; no caller in this script catches it.)
        """
        tmp_dir = tempfile.mkdtemp()
        tmp_file = os.path.join(tmp_dir, "%s.zip" % self.name)
        zip_file = zipfile.ZipFile(tmp_file, "w")
        try:
            for root, dirs, files in os.walk(self.upload_dir):
                if not files:
                    # NOTE(review): like the original, this aborts on ANY
                    # file-less subdirectory, not only an empty top level.
                    raise ValueError(
                        "no files found in upload directory '%s'"
                        % self.upload_dir)
                # renamed loop var: the original reused ``name``, shadowing
                # the zip base name computed above
                for fname in files:
                    full = os.path.join(root, fname)
                    relative = root[len(self.upload_dir):].lstrip(os.path.sep)
                    dest = os.path.join(relative, fname)
                    zip_file.write(full, dest)
        finally:
            # the original leaked the handle if the walk raised
            zip_file.close()
        return tmp_file

    def upload_file(self, filename):
        """POST *filename* to the repository as a ``doc_upload`` request.

        The multipart/form-data body is built by hand (as in the
        original) because the Basic auth header must go out with the
        very first request, which urllib does not guarantee.
        """
        import io
        import http.client
        import urllib.parse

        with open(filename, 'rb') as f:
            content = f.read()
        data = {
            ':action': 'doc_upload',
            'name': self.name,
            'content': (os.path.basename(filename), content),
        }

        # Preemptive Basic auth (b64encode replaces Py2 encodestring,
        # which appended a newline that had to be stripped).
        credentials = ('%s:%s' % (self.username, self.password)).encode('utf-8')
        auth = "Basic " + base64.b64encode(credentials).decode('ascii')

        # Build up the MIME payload for the POST data; in Python 3 the
        # HTTP body must be bytes, so everything is encoded on write.
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = b'\n--' + boundary.encode('ascii')
        end_boundary = sep_boundary + b'--'
        body = io.BytesIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for item in value:
                if isinstance(item, tuple):
                    fn = ';filename="%s"' % item[0]
                    item = item[1]
                else:
                    fn = ""
                if not isinstance(item, bytes):
                    item = str(item).encode('utf-8')
                body.write(sep_boundary)
                body.write(('\nContent-Disposition: form-data; name="%s"' % key)
                           .encode('ascii'))
                body.write(fn.encode('ascii'))
                body.write(b"\n\n")
                body.write(item)
                if item and item.endswith(b'\r'):
                    body.write(b'\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write(b"\n")
        body = body.getvalue()

        self.announce("Submitting documentation to %s" % self.repository,
                      "INFO")

        # Build the request by hand: we can't use urllib.request since
        # we need to send the Basic auth right with the first request.
        schema, netloc, url, params, query, fragments = \
            urllib.parse.urlparse(self.repository)
        assert not params and not query and not fragments
        if schema == 'http':
            conn = http.client.HTTPConnection(netloc)
        elif schema == 'https':
            conn = http.client.HTTPSConnection(netloc)
        else:
            raise AssertionError("unsupported schema " + schema)

        try:
            conn.connect()
            conn.putrequest("POST", url)
            conn.putheader('Content-type',
                           'multipart/form-data; boundary=%s' % boundary)
            conn.putheader('Content-length', str(len(body)))
            conn.putheader('Authorization', auth)
            conn.endheaders()
            conn.send(body)
        except socket.error as e:
            self.announce(str(e), "ERROR")
            return

        response = conn.getresponse()
        if response.status == 200:
            self.announce('Server response (%s): %s'
                          % (response.status, response.reason), "INFO")
        elif response.status == 301:
            location = response.getheader('Location')
            if location is None:
                location = 'http://packages.python.org/%s/' % self.name
            self.announce('Upload successful. Visit %s' % location, "INFO")
        else:
            self.announce('Upload failed (%s): %s'
                          % (response.status, response.reason), "ERROR")
        if self.show_response:
            print('-' * 75, response.read(), '-' * 75)

    def run(self):
        """Zip the docs, upload the archive, then delete the temp zip."""
        zip_file = self.create_zipfile()
        self.upload_file(zip_file)
        os.remove(zip_file)

    def announce(self, msg, *args, **kwargs):
        """Print a progress message; extra log-level args are ignored."""
        print(msg)
if __name__ == '__main__':
    # CLI entry point: upload the docs found in UPLOAD_DIR for PROJECT.
    if len(sys.argv) != 3:
        # Python 3 form of the original Py2 "print >>sys.stderr".
        print("usage: %s PROJECT UPLOAD_DIR" % sys.argv[0], file=sys.stderr)
        sys.exit(2)
    project, upload_dir = sys.argv[1:]
    up = UploadDoc(project, upload_dir=upload_dir)
    up.run()

View File

@ -1178,7 +1178,7 @@ def execute_batch(cur, sql, argslist, page_size=100):
fewer multi-statement commands, each one containing at most *page_size*
statements, resulting in a reduced number of server roundtrips.
After the execution of the functtion the `cursor.rowcount` property will
After the execution of the function the `cursor.rowcount` property will
**not** contain a total result.
"""
@ -1201,10 +1201,15 @@ def execute_values(cur, sql, argslist, template=None, page_size=100):
*template*.
:param template: the snippet to merge to every item in *argslist* to
compose the query. If *argslist* items are sequences it should contain
positional placeholders (e.g. ``"(%s, %s, %s)"``, or ``"(%s, %s, 42)``"
if there are constants value...); If *argslist* is items are mapping
it should contain named placeholders (e.g. ``"(%(id)s, %(f1)s, 42)"``).
compose the query.
- If the *argslist* items are sequences it should contain positional
placeholders (e.g. ``"(%s, %s, %s)"``, or ``"(%s, %s, 42)"`` if there
are constant values...).
- If the *argslist* items are mappings it should contain named
placeholders (e.g. ``"(%(id)s, %(f1)s, 42)"``).
If not specified, assume the arguments are sequence and use a simple
positional template (i.e. ``(%s, %s, ...)``), with the number of
placeholders sniffed by the first element in *argslist*.
@ -1215,7 +1220,7 @@ def execute_values(cur, sql, argslist, template=None, page_size=100):
.. __: https://www.postgresql.org/docs/current/static/queries-values.html
After the execution of the functtion the `cursor.rowcount` property will
After the execution of the function the `cursor.rowcount` property will
**not** contain a total result.
While :sql:`INSERT` is an obvious candidate for this function it is

View File

@ -288,6 +288,7 @@ typecast_GENERIC_ARRAY_cast(const char *str, Py_ssize_t len, PyObject *curs)
#define typecast_UNICODEARRAY_cast typecast_GENERIC_ARRAY_cast
#define typecast_BOOLEANARRAY_cast typecast_GENERIC_ARRAY_cast
#define typecast_DATETIMEARRAY_cast typecast_GENERIC_ARRAY_cast
#define typecast_DATETIMETZARRAY_cast typecast_GENERIC_ARRAY_cast
#define typecast_DATEARRAY_cast typecast_GENERIC_ARRAY_cast
#define typecast_TIMEARRAY_cast typecast_GENERIC_ARRAY_cast
#define typecast_INTERVALARRAY_cast typecast_GENERIC_ARRAY_cast

View File

@ -57,6 +57,7 @@ static typecastObject_initlist typecast_builtins[] = {
{"STRINGARRAY", typecast_STRINGARRAY_types, typecast_STRINGARRAY_cast, "STRING"},
{"BOOLEANARRAY", typecast_BOOLEANARRAY_types, typecast_BOOLEANARRAY_cast, "BOOLEAN"},
{"DATETIMEARRAY", typecast_DATETIMEARRAY_types, typecast_DATETIMEARRAY_cast, "DATETIME"},
{"DATETIMETZARRAY", typecast_DATETIMETZARRAY_types, typecast_DATETIMETZARRAY_cast, "DATETIMETZ"},
{"TIMEARRAY", typecast_TIMEARRAY_types, typecast_TIMEARRAY_cast, "TIME"},
{"DATEARRAY", typecast_DATEARRAY_types, typecast_DATEARRAY_cast, "DATE"},
{"INTERVALARRAY", typecast_INTERVALARRAY_types, typecast_INTERVALARRAY_cast, "INTERVAL"},

View File

@ -1,15 +0,0 @@
#!/bin/bash
# Publish the rendered Sphinx HTML docs: first to the project web server
# over ssh, then to PyPI's documentation hosting via a helper script
# fetched on demand.

# Absolute directory containing this script (works from any CWD).
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
DOCDIR="$DIR/../doc"
# this command requires ssh configured to the proper target
tar czf - -C "$DOCDIR/html" . | ssh psycoweb tar xzvf - -C docs/current
# download the script to upload the docs to PyPI
test -e "$DIR/pypi_docs_upload.py" \
|| wget -O "$DIR/pypi_docs_upload.py" \
https://gist.githubusercontent.com/dvarrazzo/dac46237070d69dbc075/raw
# this command requires a ~/.pypirc with the right privileges
python "$DIR/pypi_docs_upload.py" psycopg2 "$DOCDIR/html"

View File

@ -64,7 +64,7 @@ except ImportError:
# Take a look at http://www.python.org/dev/peps/pep-0440/
# for a consistent versioning pattern.
PSYCOPG_VERSION = '2.7.2.dev1'
PSYCOPG_VERSION = '2.7.4.dev0'
# note: if you are changing the list of supported Python version please fix

View File

@ -1568,6 +1568,7 @@ class PasswordLeakTestCase(ConnectingTestCase):
class SignalTestCase(ConnectingTestCase):
@slow
@skip_before_postgres(8, 2)
def test_bug_551_returning(self):
# Raise an exception trying to decode 'id'
self._test_bug_551(query="""

View File

@ -317,6 +317,11 @@ class DatetimeTests(ConnectingTestCase, CommonDatetimeTestsMixin):
from datetime import datetime
self._test_type_roundtrip_array(datetime(2010, 5, 3, 10, 20, 30))
def test_type_roundtrip_datetimetz_array(self):
from datetime import datetime
self._test_type_roundtrip_array(
datetime(2010, 5, 3, 10, 20, 30, tzinfo=FixedOffsetTimezone(0)))
def test_type_roundtrip_time_array(self):
from datetime import time
self._test_type_roundtrip_array(time(10, 20, 30))

View File

@ -886,7 +886,7 @@ class JsonTestCase(ConnectingTestCase):
curs = self.conn.cursor()
for obj in enumerate(objs):
self.assertEqual(curs.mogrify("%s", (Json(obj),)),
self.assertQuotedEqual(curs.mogrify("%s", (Json(obj),)),
psycopg2.extensions.QuotedString(json.dumps(obj)).getquoted())
@skip_if_no_json_module
@ -904,7 +904,7 @@ class JsonTestCase(ConnectingTestCase):
def dumps(obj):
return json.dumps(obj, cls=DecimalEncoder)
self.assertEqual(curs.mogrify("%s", (Json(obj, dumps=dumps),)),
self.assertQuotedEqual(curs.mogrify("%s", (Json(obj, dumps=dumps),)),
b"'123.45'")
@skip_if_no_json_module
@ -923,7 +923,7 @@ class JsonTestCase(ConnectingTestCase):
curs = self.conn.cursor()
obj = Decimal('123.45')
self.assertEqual(curs.mogrify("%s", (MyJson(obj),)), b"'123.45'")
self.assertQuotedEqual(curs.mogrify("%s", (MyJson(obj),)), b"'123.45'")
@skip_if_no_json_module
def test_register_on_dict(self):
@ -933,7 +933,8 @@ class JsonTestCase(ConnectingTestCase):
try:
curs = self.conn.cursor()
obj = {'a': 123}
self.assertEqual(curs.mogrify("%s", (obj,)), b"""'{"a": 123}'""")
self.assertQuotedEqual(
curs.mogrify("%s", (obj,)), b"""'{"a": 123}'""")
finally:
del psycopg2.extensions.adapters[dict, ext.ISQLQuote]
@ -1085,6 +1086,7 @@ class JsonTestCase(ConnectingTestCase):
self.assert_(s.endswith("'"))
@skip_if_no_json_module
@skip_before_postgres(8, 2)
def test_scs(self):
cnn_on = self.connect(options="-c standard_conforming_strings=on")
cur_on = cnn_on.cursor()