diff --git a/doc/src/faq.rst b/doc/src/faq.rst index a7374312..9d1dbeb3 100644 --- a/doc/src/faq.rst +++ b/doc/src/faq.rst @@ -180,7 +180,7 @@ Psycopg automatically converts PostgreSQL :sql:`json` data into Python objects. Psycopg converts :sql:`json` values into Python objects but :sql:`jsonb` values are returned as strings. Can :sql:`jsonb` be converted automatically? Automatic conversion of :sql:`jsonb` values is supported from Psycopg release 2.5.4. For previous versions you can register the :sql:`json` - typecaster on the :sql:`jsonb` oids (which are known and not suppsed to + typecaster on the :sql:`jsonb` oids (which are known and not supposed to change in future PostgreSQL versions):: psycopg2.extras.register_json(oid=3802, array_oid=3807, globally=True) diff --git a/psycopg/cursor_type.c b/psycopg/cursor_type.c index c290c715..199c8a30 100644 --- a/psycopg/cursor_type.c +++ b/psycopg/cursor_type.c @@ -768,7 +768,7 @@ curs_fetchone(cursorObject *self, PyObject *dummy) Dprintf("curs_fetchone: rowcount = %ld", self->rowcount); if (self->row >= self->rowcount) { - /* we exausted available data: return None */ + /* we exhausted available data: return None */ Py_RETURN_NONE; } diff --git a/psycopg/xid_type.c b/psycopg/xid_type.c index a4e05cb6..2ac36ed8 100644 --- a/psycopg/xid_type.c +++ b/psycopg/xid_type.c @@ -598,7 +598,7 @@ xid_recover(PyObject *conn) /* curs = conn.cursor() * (sort of. 
Use the real cursor in case the connection returns - * somenthing non-dbapi -- see ticket #114) */ + * something non-dbapi -- see ticket #114) */ if (!(curs = PyObject_CallFunctionObjArgs( (PyObject *)&cursorType, conn, NULL))) { goto exit; } diff --git a/scripts/make_errorcodes.py b/scripts/make_errorcodes.py index e0800771..91c98330 100755 --- a/scripts/make_errorcodes.py +++ b/scripts/make_errorcodes.py @@ -77,7 +77,7 @@ def parse_errors_txt(url): m = re.match(r"(.....)\s+(?:E|W|S)\s+ERRCODE_(\S+)(?:\s+(\S+))?$", line) if m: errcode, macro, spec = m.groups() - # skip errcodes without specs as they are not publically visible + # skip errcodes without specs as they are not publicly visible if not spec: continue errlabel = spec.upper() diff --git a/scripts/make_errors.py b/scripts/make_errors.py index 0d0a3a81..897f80b4 100755 --- a/scripts/make_errors.py +++ b/scripts/make_errors.py @@ -61,7 +61,7 @@ def parse_errors_txt(url): m = re.match(r"(.....)\s+(?:E|W|S)\s+ERRCODE_(\S+)(?:\s+(\S+))?$", line) if m: errcode, macro, spec = m.groups() - # skip errcodes without specs as they are not publically visible + # skip errcodes without specs as they are not publicly visible if not spec: continue errlabel = spec.upper() diff --git a/tests/dbapi20.py b/tests/dbapi20.py index fe89bb0e..9a8a9b2c 100644 --- a/tests/dbapi20.py +++ b/tests/dbapi20.py @@ -64,7 +64,7 @@ import sys # - self.populate is now self._populate(), so if a driver stub # overrides self.ddl1 this change propagates # - VARCHAR columns now have a width, which will hopefully make the -# DDL even more portible (this will be reversed if it causes more problems) +# DDL even more portable (this will be reversed if it causes more problems) # - cursor.rowcount being checked after various execute and fetchXXX methods # - Check for fetchall and fetchmany returning empty lists after results # are exhausted (already checking for empty lists if select retrieved @@ -76,7 +76,7 @@ class 
DatabaseAPI20Test(unittest.TestCase): ''' Test a database self.driver for DB API 2.0 compatibility. This implementation tests Gadfly, but the TestCase is structured so that other self.drivers can subclass this - test case to ensure compiliance with the DB-API. It is + test case to ensure compliance with the DB-API. It is expected that this TestCase may be expanded in the future if ambiguities or edge conditions are discovered. diff --git a/tests/test_connection.py b/tests/test_connection.py index d8226892..e1cbb649 100755 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -1242,7 +1242,7 @@ class ConnectionTwoPhaseTests(ConnectingTestCase): def test_xid_unicode_unparsed(self): # We don't expect people shooting snowmen as transaction ids, # so if something explodes in an encode error I don't mind. - # Let's just check uniconde is accepted as type. + # Let's just check unicode is accepted as a type. cnn = self.connect() cnn.set_client_encoding('utf8') cnn.tpc_begin(u"transaction-id")