Mirror of https://github.com/psycopg/psycopg2.git (synced 2025-07-30 18:10:01 +03:00)

Compare commits: 2_8_BETA_1...master (561 commits)

.appveyor.yml (deleted, 247 lines)
@@ -1,247 +0,0 @@
version : 2.x.{build}

clone_folder: C:\Project

environment:
  global:
    # MSVC Express 2008's setenv.cmd failes if /E:ON and /V:ON are not
    # enabled in the batch script interpreter
    #
    # WITH_COMPILER: "cmd /E:ON /V:ON /C .\\appveyor\\run_with_compiler.cmd"
    CMD_IN_ENV: cmd /E:ON /V:ON /C .\appveyor\run_with_env.cmd

  matrix:
    # For Python versions available on Appveyor, see
    # https://www.appveyor.com/docs/build-environment/
    - {PYVER: "27", PYTHON_ARCH: "32"}
    - {PYVER: "27", PYTHON_ARCH: "64"}
    - {PYVER: "34", PYTHON_ARCH: "32"}
    - {PYVER: "34", PYTHON_ARCH: "64"}
    - {PYVER: "35", PYTHON_ARCH: "32"}
    - {PYVER: "35", PYTHON_ARCH: "64"}
    - {PYVER: "36", PYTHON_ARCH: "32"}
    - {PYVER: "36", PYTHON_ARCH: "64"}
    - {PYVER: "37", PYTHON_ARCH: "32"}
    - {PYVER: "37", PYTHON_ARCH: "64"}

  OPENSSL_VERSION: "1_0_2n"
  POSTGRES_VERSION: "10_1"

  PSYCOPG2_TESTDB: psycopg2_test
  PSYCOPG2_TESTDB_USER: postgres
  PSYCOPG2_TESTDB_PASSWORD: Password12!
  PSYCOPG2_TESTDB_HOST: localhost
  PSYCOPG2_TESTDB_PORT: 5432

  PGUSER: postgres
  PGPASSWORD: Password12!

matrix:
  fast_finish: false

services:
  # Note: if you change this service also change the paths to match
  # (see where Program Files\Postgres\9.6 is used)
  - postgresql96

cache:
  # Rebuild cache if following file changes
  # (See the file to zap the cache manually)
  - C:\Others -> scripts\appveyor.cache_rebuild

# Script called before repo cloning
init:
  # Uncomment next line to get RDP access during the build.
  #- ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))

  # Set env variable according to the build environment
  - SET PYTHON=C:\Python%PYVER%
  - IF "%PYTHON_ARCH%"=="64" SET PYTHON=%PYTHON%-x64

  # Py 2.7 = VS Ver. 9.0 (VS 2008)
  # Py 3.4 = VS Ver. 10.0 (VS 2010)
  # Py 3.5, 3.6, 3.7 = VS Ver. 14.0 (VS 2015)
  - IF "%PYVER%"=="27" SET VS_VER=9.0
  - IF "%PYVER%"=="34" SET VS_VER=10.0
  - IF "%PYVER%"=="35" SET VS_VER=14.0
  - IF "%PYVER%"=="36" SET VS_VER=14.0
  - IF "%PYVER%"=="37" SET VS_VER=14.0

  - IF "%VS_VER%"=="10.0" IF "%PYTHON_ARCH%"=="64" SET DISTUTILS_USE_SDK=1

  # Set Python to the path
  - SET PATH=%PYTHON%;%PYTHON%\Scripts;C:\Program Files\Git\mingw64\bin;%PATH%

  # Verify Python version and architecture
  - ECHO *******************************************************************
  - ECHO Python Information
  - ECHO *******************************************************************
  - "%PYTHON%\\python --version"
  - "%PYTHON%\\python -c \"import sys; print('64bit: ' + str(sys.maxsize > 2**32))\""

  # Get & Install NASM
  #- curl -L -o nasminst.exe https://www.nasm.us/pub/nasm/releasebuilds/2.12.02/win64/nasm-2.12.02-installer-x64.exe && start /wait nasminst.exe /S
  #- SET PATH="C:\Program Files (x86)\nasm;%PATH%"

  # Fix problem with VS2008 Express and 64bit builds
  - ECHO Fixing VS2008 Express and 64bit builds
  - COPY "C:\\Program Files (x86)\\Microsoft Visual Studio 9.0\\VC\\bin\\vcvars64.bat" "C:\\Program Files (x86)\\Microsoft Visual Studio 9.0\\VC\\bin\\amd64\\vcvarsamd64.bat"

  # Fix problem with VS2010 Express 64bit missing vcvars64.bat
  # Note: repository not cloned at this point, so need to fetch
  # file another way
  - ECHO Fixing VS2010 Express and 64bit builds
  - curl -fsSL -o "C:\\Program Files (x86)\\Microsoft Visual Studio 10.0\\VC\\bin\\amd64\\vcvars64.bat" https://raw.githubusercontent.com/psycopg/psycopg2/master/scripts/vcvars64-vs2010.bat

  # Setup the compiler based upon version and architecture
  - ECHO Configuring Compiler
  - IF "%PYTHON_ARCH%"=="32" (CALL "C:\\Program Files (x86)\\Microsoft Visual Studio %VS_VER%\\VC\\vcvarsall.bat" x86)
  - IF "%PYTHON_ARCH%"=="64" (CALL "C:\\Program Files (x86)\\Microsoft Visual Studio %VS_VER%\\VC\\vcvarsall.bat" amd64)

  # The program rc.exe on 64bit with some versions look in the wrong path
  # location when building postgresql. This cheats by copying the x64 bit
  # files to that location.
  - IF "%PYTHON_ARCH%"=="64" (COPY /Y "C:\\Program Files\\Microsoft SDKs\\Windows\\v7.0\\Bin\\x64\\rc*" "C:\\Program Files (x86)\\Microsoft SDKs\\Windows\\v7.0A\\Bin")

  # Change PostgreSQL config before service starts to allow > 1 prepared
  # transactions for test cases
  - ECHO max_prepared_transactions = 10 >> "C:\\Program Files\\PostgreSQL\\9.6\\data\\postgresql.conf"

# Repository gets cloned, Cache is restored
install:
  # We start off CD'ed to cloned folder
  - SET BASE_DIR=C:\Others\%PYTHON_ARCH%\%VS_VER%
  - SET BUILD_DIR=%BASE_DIR%\Builds
  - IF NOT EXIST %BUILD_DIR% MKDIR %BUILD_DIR%

  - ECHO *******************************************************************
  - ECHO Initialized variables specific for this build
  - ECHO *******************************************************************
  - ECHO %BASE_DIR%
  - ECHO %BUILD_DIR%
  - ECHO *******************************************************************

  # Setup directories for building OpenSSL libraries
  - ECHO *******************************************************************
  - ECHO Preparing for building OpenSSL
  - ECHO *******************************************************************
  - SET OPENSSLTOP=%BASE_DIR%\openssl
  - IF NOT EXIST %OPENSSLTOP%\include\openssl MKDIR %OPENSSLTOP%\include\openssl
  - IF NOT EXIST %OPENSSLTOP%\lib MKDIR %OPENSSLTOP%\lib

  # Setup OpenSSL Environment Variables based on processor architecture
  - ps: >-
      If ($env:PYTHON_ARCH -Match "32" ) {
        $env:VCVARS_PLATFORM="x86"
        $env:TARGET="VC-WIN32"
        $env:DO="do_ms"
      } Else {
        $env:VCVARS_PLATFORM="amd64"
        $env:TARGET="VC-WIN64A"
        $env:DO="do_win64a"
        $env:CPU="AMD64"
      }

  # Download OpenSSL source
  - CD C:\Others
  - IF NOT EXIST OpenSSL_%OPENSSL_VERSION%.zip (
      curl -fsSL -o OpenSSL_%OPENSSL_VERSION%.zip https://github.com/openssl/openssl/archive/OpenSSL_%OPENSSL_VERSION%.zip
    )

  # To use OpenSSL >= 1.1.0, both libpq and psycopg build environments have
  # to support the new library names. Below are commands to build OpenSSL
  # 1.1.0:
  # - mkdir _build
  # - cd _build
  # - perl ..\Configure %TARGET% no-asm no-shared --prefix=%BASE_DIR%\openssl --openssldir=%BASE_DIR%\openssl
  # - nmake build_libs install_dev
  - IF NOT EXIST %OPENSSLTOP%\lib\ssleay32.lib (
      CD %BUILD_DIR% &&
      7z x C:\Others\OpenSSL_%OPENSSL_VERSION%.zip &&
      CD openssl-OpenSSL_%OPENSSL_VERSION% &&
      perl Configure %TARGET% no-asm no-shared no-zlib --prefix=%OPENSSLTOP% --openssldir=%OPENSSLTOP% &&
      CALL ms\%DO% &&
      nmake -f ms\nt.mak init headers lib &&
      COPY inc32\openssl\*.h %OPENSSLTOP%\include\openssl &&
      COPY out32\*.lib %OPENSSLTOP%\lib &&
      CD %BASE_DIR% &&
      RMDIR /S /Q %BUILD_DIR%\openssl-OpenSSL_%OPENSSL_VERSION%
    )

  # Setup directories for building PostgreSQL librarires
  - ECHO *******************************************************************
  - ECHO Preparing for building PostgreSQL libraries
  - ECHO *******************************************************************
  - SET PGTOP=%BASE_DIR%\postgresql
  - IF NOT EXIST %PGTOP%\include MKDIR %PGTOP%\include
  - IF NOT EXIST %PGTOP%\lib MKDIR %PGTOP%\lib
  - IF NOT EXIST %PGTOP%\bin MKDIR %PGTOP%\bin

  # Download PostgreSQL source
  - CD C:\Others
  - IF NOT EXIST postgres-REL_%POSTGRES_VERSION%.zip (
      curl -fsSL -o postgres-REL_%POSTGRES_VERSION%.zip https://github.com/postgres/postgres/archive/REL_%POSTGRES_VERSION%.zip
    )

  # Setup build config file (config.pl)
  # Hack the Mkvcbuild.pm file so we build the lib version of libpq
  # Build libpgport, libpgcommon, libpq
  # Install includes
  # Copy over built libraries
  # Prepare local include directory for building from
  # Build pg_config in place
  # NOTE: Cannot set and use the same variable inside an IF
  - SET PGBUILD=%BUILD_DIR%\postgres-REL_%POSTGRES_VERSION%
  - IF NOT EXIST %PGTOP%\lib\libpq.lib (
      CD %BUILD_DIR% &&
      7z x C:\Others\postgres-REL_%POSTGRES_VERSION%.zip &&
      CD postgres-REL_%POSTGRES_VERSION%\src\tools\msvc &&
      ECHO $config-^>{ldap} = 0; > config.pl &&
      ECHO $config-^>{openssl} = "%OPENSSLTOP:\=\\%"; >> config.pl &&
      ECHO.>> config.pl &&
      ECHO 1;>> config.pl &&
      perl -pi.bak -e "s/'libpq', 'dll'/'libpq', 'lib'/g" Mkvcbuild.pm &&
      build libpgport &&
      build libpgcommon &&
      build libpq &&
      ECHO "" > %PGBUILD%\src\backend\parser\gram.h &&
      perl -pi.bak -e "s/qw\(Install\)/qw\(Install CopyIncludeFiles\)/g" Install.pm &&
      perl -MInstall=CopyIncludeFiles -e"chdir('../../..'); CopyIncludeFiles('%PGTOP%')" &&
      COPY %PGBUILD%\Release\libpgport\libpgport.lib %PGTOP%\lib &&
      COPY %PGBUILD%\Release\libpgcommon\libpgcommon.lib %PGTOP%\lib &&
      COPY %PGBUILD%\Release\libpq\libpq.lib %PGTOP%\lib &&
      XCOPY /Y /S %PGBUILD%\src\include\port\win32\* %PGBUILD%\src\include &&
      XCOPY /Y /S %PGBUILD%\src\include\port\win32_msvc\* %PGBUILD%\src\include &&
      CD %PGBUILD%\src\bin\pg_config &&
      cl pg_config.c /MT /nologo /I%PGBUILD%\src\include /link /LIBPATH:%PGTOP%\lib libpgcommon.lib libpgport.lib advapi32.lib /NODEFAULTLIB:libcmt.lib /OUT:%PGTOP%\bin\pg_config.exe &&
      CD %BASE_DIR% &&
      RMDIR /S /Q %PGBUILD%
    )

build: off

#before_build:

build_script:
  # Add PostgreSQL binaries to the path
  - PATH=C:\Program Files\PostgreSQL\9.6\bin\;%PATH%
  - CD C:\Project
  - "%PYTHON%\\python.exe setup.py build_ext --have-ssl --pg-config %PGTOP%\\bin\\pg_config.exe -l libpgcommon -l libpgport -L %OPENSSLTOP%\\lib -I %OPENSSLTOP%\\include"
  - "%PYTHON%\\python.exe setup.py build"
  - "%PYTHON%\\python.exe setup.py install"
  - RD /S /Q psycopg2.egg-info

#after_build:

before_test:
  # Create and setup PostgreSQL database for the tests
  - createdb %PSYCOPG2_TESTDB%
  - psql -d %PSYCOPG2_TESTDB% -c "CREATE EXTENSION HSTORE;"

test_script:
  # Print psycopg and libpq versions
  - "%PYTHON%\\python.exe -c \"import psycopg2; print(psycopg2.__version__)\""
  - "%PYTHON%\\python.exe -c \"import psycopg2; print(psycopg2.__libpq_version__)\""
  - "%PYTHON%\\python.exe -c \"import psycopg2; print(psycopg2.extensions.libpq_version())\""
  - "%PYTHON%\\python.exe -c \"import tests; tests.unittest.main(defaultTest='tests.test_suite')\" --verbose"

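The test_script step above amounts to a few introspection calls on the installed module. A minimal local equivalent (a sketch, assuming psycopg2 has already been built and installed for the interpreter in use) is::

    import psycopg2
    import psycopg2.extensions

    # Version of the psycopg2 package itself
    print(psycopg2.__version__)
    # libpq version psycopg2 was compiled against
    print(psycopg2.__libpq_version__)
    # libpq version actually loaded at runtime
    print(psycopg2.extensions.libpq_version())
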
.github/FUNDING.yml (new file, vendored, 4 lines)
@@ -0,0 +1,4 @@
github:
  - dvarrazzo
custom:
  - "https://www.paypal.me/dvarrazzo"

.github/ISSUE_TEMPLATE/problem-installing-psycopg2.md (new file, vendored, 23 lines)
@@ -0,0 +1,23 @@
---
name: Problem installing psycopg2
about: Report a case in which psycopg2 failed to install on your platform
title: ''
labels: ''
assignees: ''

---

**This is a bug tracker**
If you have a question, such has "how do you do X with Python/PostgreSQL/psycopg2" please [write to the mailing list](https://lists.postgresql.org/manage/) or [open a question](https://github.com/psycopg/psycopg2/discussions) instead.

**Before opening this ticket, please confirm that:**
- [ ] I am running the latest version of pip, i.e. typing ``pip --version`` you get [this version](https://pypi.org/project/pip/).
- [ ] I have read the [installation documentation](https://www.psycopg.org/docs/install.html) and the [frequently asked questions](https://www.psycopg.org/docs/faq.html)
- [ ] If install failed, I typed `pg_config` on the command line and I obtained an output instead of an error.

**Please complete the following information:**
- OS:
- Psycopg version:
- Python version:
- PostgreSQL version:
- pip version

.github/ISSUE_TEMPLATE/problem-using-psycopg2.md (new file, vendored, 27 lines)
@@ -0,0 +1,27 @@
---
name: Problem using psycopg2
about: Report a case in which psycopg2 is not working as expected
title: ''
labels: ''
assignees: ''

---

**This is a bug tracker**
If you have a question, such has "how do you do X with Python/PostgreSQL/psycopg2" please [write to the mailing list](https://lists.postgresql.org/manage/) or [open a question](https://github.com/psycopg/psycopg2/discussions) instead.

**Please complete the following information:**
- OS:
- Psycopg version:
- Python version:
- PostgreSQL version:
- pip version

**Describe the bug**
Please let us know:

1: what you did
2: what you expected to happen
3: what happened instead

If possible, provide a script reproducing the issue.

.github/dependabot.yml (new file, vendored, 6 lines)
@@ -0,0 +1,6 @@
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "monthly"

.github/workflows/docs.yml (new file, vendored, 18 lines)
@@ -0,0 +1,18 @@
name: Build documentation

on:
  push:
    branches:
      # This should match the DOC_BRANCH value in the psycopg-website Makefile
      - master

jobs:
  docs:
    runs-on: ubuntu-latest
    steps:
      - name: Trigger docs build
        uses: peter-evans/repository-dispatch@v3
        with:
          repository: psycopg/psycopg-website
          event-type: psycopg2-commit
          token: ${{ secrets.ACCESS_TOKEN }}

.github/workflows/packages.yml (new file, vendored, 266 lines)
@@ -0,0 +1,266 @@
---
name: Build packages
on:
  - workflow_dispatch

env:
  PIP_BREAK_SYSTEM_PACKAGES: "1"
  LIBPQ_VERSION: "16.0"
  OPENSSL_VERSION: "1.1.1w"

jobs:
  sdist:  # {{{
    if: true
    strategy:
      fail-fast: false
      matrix:
        include:
          - package_name: psycopg2
          - package_name: psycopg2-binary

    runs-on: ubuntu-latest
    steps:
      - name: Checkout repos
        uses: actions/checkout@v4

      - name: Build sdist
        run: ./scripts/build/build_sdist.sh
        env:
          PACKAGE_NAME: ${{ matrix.package_name }}

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: sdist-${{ matrix.package_name }}
          path: |
            dist/*.tar.gz

    env:
      PSYCOPG2_TESTDB: postgres
      PSYCOPG2_TESTDB_HOST: 172.17.0.1
      PSYCOPG2_TESTDB_USER: postgres
      PSYCOPG2_TESTDB_PASSWORD: password
      PSYCOPG2_TEST_FAST: 1

    services:
      postgresql:
        image: postgres:16
        env:
          POSTGRES_PASSWORD: password
        ports:
          - 5432:5432
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

  # }}}

  linux:  # {{{
    if: true

    strategy:
      fail-fast: false
      matrix:
        platform: [manylinux, musllinux]
        arch: [x86_64, i686, aarch64, ppc64le]
        pyver: [cp38, cp39, cp310, cp311, cp312, cp313]

    runs-on: ubuntu-latest
    steps:
      - name: Checkout repos
        uses: actions/checkout@v4

      - name: Set up QEMU for multi-arch build
        uses: docker/setup-qemu-action@v3

      - name: Cache libpq build
        uses: actions/cache@v4
        with:
          path: /tmp/libpq.build
          key: libpq-${{ env.LIBPQ_VERSION }}-${{ matrix.platform }}-${{ matrix.arch }}

      - name: Build wheels
        uses: pypa/cibuildwheel@v2.23.3
        env:
          CIBW_MANYLINUX_X86_64_IMAGE: manylinux2014
          CIBW_MANYLINUX_I686_IMAGE: manylinux2014
          CIBW_MANYLINUX_AARCH64_IMAGE: manylinux2014
          CIBW_MANYLINUX_PPC64LE_IMAGE: manylinux2014
          CIBW_BUILD: ${{matrix.pyver}}-${{matrix.platform}}_${{matrix.arch}}
          CIBW_ARCHS_LINUX: auto aarch64 ppc64le
          CIBW_BEFORE_ALL_LINUX: ./scripts/build/wheel_linux_before_all.sh
          CIBW_REPAIR_WHEEL_COMMAND: >-
            ./scripts/build/strip_wheel.sh {wheel}
            && auditwheel repair -w {dest_dir} {wheel}
          CIBW_TEST_COMMAND: >-
            export PYTHONPATH={project} &&
            python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')"
          CIBW_ENVIRONMENT_PASS_LINUX: LIBPQ_VERSION OPENSSL_VERSION
          CIBW_ENVIRONMENT: >-
            PACKAGE_NAME=psycopg2-binary
            LIBPQ_BUILD_PREFIX=/host/tmp/libpq.build
            PATH="$LIBPQ_BUILD_PREFIX/bin:$PATH"
            LD_LIBRARY_PATH="$LIBPQ_BUILD_PREFIX/lib:$LIBPQ_BUILD_PREFIX/lib64"
            PSYCOPG2_TESTDB=postgres
            PSYCOPG2_TESTDB_HOST=172.17.0.1
            PSYCOPG2_TESTDB_USER=postgres
            PSYCOPG2_TESTDB_PASSWORD=password
            PSYCOPG2_TEST_FAST=1

      - uses: actions/upload-artifact@v4
        with:
          name: linux-${{matrix.pyver}}-${{matrix.platform}}_${{matrix.arch}}
          path: ./wheelhouse/*.whl

    services:
      postgresql:
        image: postgres:16
        env:
          POSTGRES_PASSWORD: password
        ports:
          - 5432:5432
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

  # }}}

  macos:  # {{{
    runs-on: macos-latest
    if: true

    strategy:
      fail-fast: false
      matrix:
        # These archs require an Apple M1 runner: [arm64, universal2]
        arch: [x86_64, arm64]
        pyver: [cp39, cp310, cp311, cp312, cp313]

    steps:
      - name: Checkout repos
        uses: actions/checkout@v4

      - name: Cache libpq build
        uses: actions/cache@v4
        with:
          path: /tmp/libpq.build
          key: libpq-${{ env.LIBPQ_VERSION }}-macos-${{ matrix.arch }}

      - name: Build wheels
        uses: pypa/cibuildwheel@v2.23.3
        env:
          CIBW_BUILD: ${{matrix.pyver}}-macosx_${{matrix.arch}}
          CIBW_ARCHS_MACOS: ${{matrix.arch}}
          MACOSX_ARCHITECTURE: ${{matrix.arch}}
          CIBW_BEFORE_ALL_MACOS: ./scripts/build/wheel_macos_before_all.sh
          CIBW_TEST_COMMAND: >-
            export PYTHONPATH={project} &&
            python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')"
          CIBW_ENVIRONMENT: >-
            PG_VERSION=16
            PACKAGE_NAME=psycopg2-binary
            PSYCOPG2_TESTDB=postgres
            PATH="/tmp/libpq.build/bin:$PATH"

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: macos-${{matrix.pyver}}-macos-${{matrix.arch}}
          path: ./wheelhouse/*.whl

  # }}}

  windows:  # {{{
    runs-on: windows-latest
    if: true

    strategy:
      fail-fast: false
      matrix:
        arch: [win_amd64]
        pyver: [cp38, cp39, cp310, cp311, cp312, cp313]
        package_name: [psycopg2, psycopg2-binary]

    defaults:
      run:
        shell: bash

    steps:
      # there are some other libpq in PATH
      - name: Drop spurious libpq in the path
        run: rm -rf c:/tools/php C:/Strawberry/c/bin

      - name: Checkout repo
        uses: actions/checkout@v4

      - name: Start PostgreSQL service for test
        run: |
          $PgSvc = Get-Service "postgresql*"
          Set-Service $PgSvc.Name -StartupType manual
          $PgSvc.Start()
        shell: powershell

      - name: Export GitHub Actions cache environment variables
        uses: actions/github-script@v7
        with:
          script: |
            const path = require('path')
            core.exportVariable('ACTIONS_CACHE_URL', process.env.ACTIONS_CACHE_URL || '');
            core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
            core.addPath(path.join(process.env.VCPKG_INSTALLATION_ROOT, 'installed/x64-windows-release/lib'));
            core.addPath(path.join(process.env.VCPKG_INSTALLATION_ROOT, 'installed/x64-windows-release/bin'));

      - name: Create the binary package source tree
        run: >-
          sed -i 's/^setup(name="psycopg2"/setup(name="${{matrix.package_name}}"/'
          setup.py
        if: ${{ matrix.package_name != 'psycopg2' }}

      - name: Build wheels
        uses: pypa/cibuildwheel@v2.23.3
        env:
          VCPKG_BINARY_SOURCES: "clear;x-gha,readwrite"  # cache vcpkg
          CIBW_BUILD: ${{matrix.pyver}}-${{matrix.arch}}
          CIBW_ARCHS_WINDOWS: AMD64 x86
          CIBW_BEFORE_BUILD_WINDOWS: '.\scripts\build\wheel_win32_before_build.bat'
          CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: >-
            delvewheel repair -w {dest_dir}
            --no-mangle "libiconv-2.dll;libwinpthread-1.dll" {wheel}
          CIBW_TEST_COMMAND: >-
            set PYTHONPATH={project} &&
            python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')"
          # Note: no fast test because we don't run Windows tests
          CIBW_ENVIRONMENT_WINDOWS: >-
            PSYCOPG2_TESTDB=postgres
            PSYCOPG2_TESTDB_USER=postgres
            PSYCOPG2_TESTDB_HOST=localhost

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: windows-${{ matrix.package_name }}-${{matrix.pyver}}-${{matrix.arch}}
          path: ./wheelhouse/*.whl

  # }}}

  merge:  # {{{
    runs-on: ubuntu-latest
    needs:
      - sdist
      - linux
      - macos
      - windows
    steps:
      - name: Merge Artifacts
        uses: actions/upload-artifact/merge@v4
        with:
          name: psycopg2-artifacts
          delete-merged: true

  # }}}

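The CIBW_TEST_COMMAND used by the jobs above runs psycopg2's bundled test suite against the PostgreSQL service container. A rough local equivalent in Python (a sketch; it assumes a reachable PostgreSQL server and a source checkout on the path providing the ``tests`` package) is::

    import os

    # Same settings the workflow passes through CIBW_ENVIRONMENT
    os.environ.setdefault("PSYCOPG2_TESTDB", "postgres")
    os.environ.setdefault("PSYCOPG2_TESTDB_HOST", "172.17.0.1")
    os.environ.setdefault("PSYCOPG2_TESTDB_USER", "postgres")
    os.environ.setdefault("PSYCOPG2_TESTDB_PASSWORD", "password")
    os.environ.setdefault("PSYCOPG2_TEST_FAST", "1")

    # Import after the environment is set, as in the workflow command
    import tests

    tests.unittest.main(defaultTest="tests.test_suite")
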
.github/workflows/tests.yml (new file, vendored, 79 lines)
@@ -0,0 +1,79 @@
name: Tests

env:
  PIP_BREAK_SYSTEM_PACKAGES: "1"

on:
  push:
  pull_request:

jobs:
  linux:
    runs-on: ubuntu-latest
    if: true

    strategy:
      fail-fast: false
      matrix:
        include:
          - {python: "3.8", postgres: "12"}
          - {python: "3.9", postgres: "13"}
          - {python: "3.10", postgres: "14"}
          - {python: "3.11", postgres: "15"}
          - {python: "3.12", postgres: "16"}
          - {python: "3.13", postgres: "17"}

          # Opposite extremes of the supported Py/PG range, other architecture
          - {python: "3.8", postgres: "17", architecture: "x86"}
          - {python: "3.9", postgres: "16", architecture: "x86"}
          - {python: "3.10", postgres: "15", architecture: "x86"}
          - {python: "3.11", postgres: "14", architecture: "x86"}
          - {python: "3.12", postgres: "13", architecture: "x86"}
          - {python: "3.13", postgres: "12", architecture: "x86"}

    env:
      PSYCOPG2_TESTDB: postgres
      PSYCOPG2_TESTDB_HOST: 127.0.0.1
      PSYCOPG2_TESTDB_USER: postgres
      PSYCOPG2_TESTDB_PASSWORD: password

    services:
      postgresql:
        image: postgres:${{ matrix.postgres }}
        env:
          POSTGRES_PASSWORD: password
        ports:
          - 5432:5432
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - uses: actions/checkout@v4

      # Can enable to test an unreleased libpq version.
      - name: install libpq 16
        if: false
        run: |
          set -x
          rel=$(lsb_release -c -s)
          echo "deb http://apt.postgresql.org/pub/repos/apt ${rel}-pgdg main 16" \
            | sudo tee -a /etc/apt/sources.list.d/pgdg.list
          sudo apt-get -qq update
          pqver=$(apt-cache show libpq5 | grep ^Version: | head -1 \
            | awk '{print $2}')
          sudo apt-get -qq -y install "libpq-dev=${pqver}" "libpq5=${pqver}"

      - name: Install tox
        run: pip install "tox < 4"
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python }}
      - name: Run tests
        env:
          MATRIX_PYTHON: ${{ matrix.python }}
        run: tox -e ${MATRIX_PYTHON%-dev}
        timeout-minutes: 5

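Outside of tox, the quickest check that the service container configured above is reachable is a one-off connection using the same settings. This is a sketch with placeholder credentials taken from the workflow environment, not a file in the repository::

    import psycopg2

    conn = psycopg2.connect(
        host="127.0.0.1",      # PSYCOPG2_TESTDB_HOST
        dbname="postgres",     # PSYCOPG2_TESTDB
        user="postgres",       # PSYCOPG2_TESTDB_USER
        password="password",   # PSYCOPG2_TESTDB_PASSWORD
    )
    with conn, conn.cursor() as cur:
        cur.execute("SELECT version()")
        print(cur.fetchone()[0])   # e.g. the PostgreSQL version under test
    conn.close()
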
.gitignore (vendored, 8 changed lines)
@@ -6,11 +6,7 @@ MANIFEST
*.sw[po]
*.egg-info/
dist/*
build/*
doc/src/_build/*
doc/html/*
doc/psycopg2.txt
scripts/pypi_docs_upload.py
/build
env
env?
.idea
@@ -18,3 +14,5 @@ env?
.vscode/
/rel
/wheels
/packages
/wheelhouse

.travis.yml (deleted, 26 lines)
@@ -1,26 +0,0 @@
# Travis CI configuration file for psycopg2

dist: xenial
sudo: required
language: python

matrix:
  include:
    - python: 2.7
    - python: 3.7
    - python: 3.6
    - python: 3.5
    - python: 3.4
      dist: trusty

install:
  - pip install -U pip setuptools wheel
  - pip install .
  - rm -rf psycopg2.egg-info
  - sudo scripts/travis_prepare.sh

script:
  - scripts/travis_test.sh

notifications:
  email: false

INSTALL (2 changed lines)
@@ -1,4 +1,4 @@
Installation instructions are included in the docs.

Please check the 'doc/src/install.rst' file or online at
<http://initd.org/psycopg/docs/install.html>.
<https://www.psycopg.org/docs/install.html>.

Makefile (9 changed lines)
@@ -42,7 +42,7 @@ endif
VERSION := $(shell grep PSYCOPG_VERSION setup.py | head -1 | sed -e "s/.*'\(.*\)'/\1/")
SDIST := dist/psycopg2-$(VERSION).tar.gz

.PHONY: env check clean
.PHONY: check clean

default: package

@@ -50,12 +50,10 @@ all: package sdist

package: $(PLATLIB) $(PURELIB)

docs: docs-html docs-txt
docs: docs-html

docs-html: doc/html/genindex.html

docs-txt: doc/psycopg2.txt

# for PyPI documentation
docs-zip: doc/docs.zip

@@ -98,9 +96,6 @@ $(SDIST): $(SOURCE)
doc/html/genindex.html: $(PLATLIB) $(PURELIB) $(SOURCE_DOC)
        $(MAKE) -C doc html

doc/psycopg2.txt: $(PLATLIB) $(PURELIB) $(SOURCE_DOC)
        $(MAKE) -C doc text

doc/docs.zip: doc/html/genindex.html
        (cd doc/html && zip -r ../docs.zip *)

NEWS (237 changed lines)
@@ -1,6 +1,204 @@
Current release
---------------

What's new in psycopg 2.9.10
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Add support for Python 3.13.
- Receive notifications on commit (:ticket:`#1728`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
  PostgreSQL 17.
- Drop support for Python 3.7.


What's new in psycopg 2.9.9
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Add support for Python 3.12.
- Drop support for Python 3.6.


What's new in psycopg 2.9.8
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Wheel package bundled with PostgreSQL 16 libpq in order to add support for
  recent features, such as ``sslcertmode``.


What's new in psycopg 2.9.7
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Fix propagation of exceptions raised during module initialization
  (:ticket:`#1598`).
- Fix building when pg_config returns an empty string (:ticket:`#1599`).
- Wheel package bundled with OpenSSL 1.1.1v.


What's new in psycopg 2.9.6
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Package manylinux 2014 for aarch64 and ppc64le platforms, in order to
  include libpq 15 in the binary package (:ticket:`#1396`).
- Wheel package bundled with OpenSSL 1.1.1t.


What's new in psycopg 2.9.5
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Add support for Python 3.11.
- Add support for rowcount in MERGE statements in binary packages
  (:ticket:`#1497`).
- Wheel package bundled with OpenSSL 1.1.1r and PostgreSQL 15 libpq.


What's new in psycopg 2.9.4
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Fix `~psycopg2.extras.register_composite()`,
  `~psycopg2.extras.register_range()` with customized :sql:`search_path`
  (:ticket:`#1487`).
- Handle correctly composite types with names or in schemas requiring escape.
- Find ``pg_service.conf`` file in the ``/etc/postgresql-common`` directory in
  binary packages (:ticket:`#1365`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
  PostgreSQL 15.
- Wheel package bundled with OpenSSL 1.1.1q and PostgreSQL 14.4 libpq.


What's new in psycopg 2.9.3
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Alpine (musl) wheels now available (:ticket:`#1392`).
- macOS arm64 (Apple M1) wheels now available (:ticket:`1482`).


What's new in psycopg 2.9.2
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Raise `ValueError` for dates >= Y10k (:ticket:`#1307`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
  PostgreSQL 14.
- Add preliminary support for Python 3.11 (:tickets:`#1376, #1386`).
- Wheel package bundled with OpenSSL 1.1.1l and PostgreSQL 14.1 libpq
  (:ticket:`#1388`).


What's new in psycopg 2.9.1
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Fix regression with named `~psycopg2.sql.Placeholder` (:ticket:`#1291`).


What's new in psycopg 2.9
-------------------------

- ``with connection`` starts a transaction on autocommit transactions too
  (:ticket:`#941`).
- Timezones with fractional minutes are supported on Python 3.7 and following
  (:ticket:`#1272`).
- Escape table and column names in `~cursor.copy_from()` and
  `~cursor.copy_to()`.
- Connection exceptions with sqlstate ``08XXX`` reclassified as
  `~psycopg2.OperationalError` (a subclass of the previously used
  `~psycopg2.DatabaseError`) (:ticket:`#1148`).
- Include library dirs required from libpq to work around MacOS build problems
  (:ticket:`#1200`).

Other changes:

- Dropped support for Python 2.7, 3.4, 3.5 (:tickets:`#1198, #1000, #1197`).
- Dropped support for mx.DateTime.
- Use `datetime.timezone` objects by default in datetime objects instead of
  `~psycopg2.tz.FixedOffsetTimezone`.
- The `psycopg2.tz` module is deprecated and scheduled to be dropped in the
  next major release.
- Provide :pep:`599` wheels packages (manylinux2014 tag) for i686 and x86_64
  platforms.
- Provide :pep:`600` wheels packages (manylinux_2_24 tag) for aarch64 and
  ppc64le platforms.
- Wheel package bundled with OpenSSL 1.1.1k and PostgreSQL 13.3 libpq.
- Build system for Linux/MacOS binary packages moved to GitHub Actions.


What's new in psycopg 2.8.7
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Accept empty params as `~psycopg2.connect()` (:ticket:`#1250`).
- Fix attributes refcount in `Column` initialisation (:ticket:`#1252`).
- Allow re-initialisation of static variables in the C module (:ticket:`#1267`).


What's new in psycopg 2.8.6
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Fixed memory leak changing connection encoding to the current one
  (:ticket:`#1101`).
- Fixed search of mxDateTime headers in virtualenvs (:ticket:`#996`).
- Added missing values from errorcodes (:ticket:`#1133`).
- `cursor.query` reports the query of the last :sql:`COPY` operation too
  (:ticket:`#1141`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
  PostgreSQL 13.
- Added wheel packages for ARM architecture (:ticket:`#1125`).
- Wheel package bundled with OpenSSL 1.1.1g.


What's new in psycopg 2.8.5
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Fixed use of `!connection_factory` and `!cursor_factory` together
  (:ticket:`#1019`).
- Added support for `~logging.LoggerAdapter` in
  `~psycopg2.extras.LoggingConnection` (:ticket:`#1026`).
- `~psycopg2.extensions.Column` objects in `cursor.description` can be sliced
  (:ticket:`#1034`).
- Added AIX support (:ticket:`#1061`).
- Fixed `~copy.copy()` of `~psycopg2.extras.DictCursor` rows (:ticket:`#1073`).


What's new in psycopg 2.8.4
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Fixed building with Python 3.8 (:ticket:`#854`).
- Don't swallow keyboard interrupts on connect when a password is specified
  in the connection string (:ticket:`#898`).
- Don't advance replication cursor when the message wasn't confirmed
  (:ticket:`#940`).
- Fixed inclusion of ``time.h`` on linux (:ticket:`#951`).
- Fixed int overflow for large values in `~psycopg2.extensions.Column.table_oid`
  and `~psycopg2.extensions.Column.type_code` (:ticket:`#961`).
- `~psycopg2.errorcodes` map and `~psycopg2.errors` classes updated to
  PostgreSQL 12.
- Wheel package bundled with OpenSSL 1.1.1d and PostgreSQL at least 11.4.


What's new in psycopg 2.8.3
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Added *interval_status* parameter to
  `~psycopg2.extras.ReplicationCursor.start_replication()` method and other
  facilities to send automatic replication keepalives at periodic intervals
  (:ticket:`#913`).
- Fixed namedtuples caching introduced in 2.8 (:ticket:`#928`).


What's new in psycopg 2.8.2
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Fixed `~psycopg2.extras.RealDictCursor` when there are repeated columns
  (:ticket:`#884`).
- Binary packages built with openssl 1.1.1b. Should fix concurrency problems
  (:tickets:`#543, #836`).


What's new in psycopg 2.8.1
^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Fixed `~psycopg2.extras.RealDictRow` modifiability (:ticket:`#886`).
- Fixed "there's no async cursor" error polling a connection with no cursor
  (:ticket:`#887`).


What's new in psycopg 2.8
-------------------------

@@ -17,13 +215,15 @@ New features:
- Added `connection.info` object to retrieve various PostgreSQL connection
  information (:ticket:`#726`).
- Added `~connection.get_native_connection()` to expose the raw ``PGconn``
  structure (:ticket:`#782`).
  structure to C extensions via Capsule (:ticket:`#782`).
- Added `~connection.pgconn_ptr` and `~cursor.pgresult_ptr` to expose raw
  C structures to Python and interact with libpq via ctypes (:ticket:`#782`).
- `~psycopg2.sql.Identifier` can represent qualified names in SQL composition
  (:ticket:`#732`).
- Added `!ReplicationCursor`.\ `~psycopg2.extras.ReplicationCursor.wal_end`
  attribute (:ticket:`#800`).
- Added *fetch* parameter to `~psycopg2.extras.execute_values()` function
  (:ticket:`#813`).
- Fixed adaptation of numeric subclasses such as `~enum.IntEnum`
  (:ticket:`#591`).
- `!str()` on `~psycopg2.extras.Range` produces a human-readable representation
  (:ticket:`#773`).
- `~psycopg2.extras.DictCursor` and `~psycopg2.extras.RealDictCursor` rows

@@ -32,6 +232,15 @@ New features:
  the `~psycopg2.extensions.Diagnostics` object (:ticket:`#783`).
- More efficient `~psycopg2.extras.NamedTupleCursor` (:ticket:`#838`).

Bug fixes:

- Fixed connections occasionally broken by the unrelated use of the
  multiprocessing module (:ticket:`#829`).
- Fixed async communication blocking if results are returned in different
  chunks, e.g. with notices interspersed to the results (:ticket:`#856`).
- Fixed adaptation of numeric subclasses such as `~enum.IntEnum`
  (:ticket:`#591`).

Other changes:

- Dropped support for Python 2.6, 3.2, 3.3.

@@ -39,11 +248,17 @@ Other changes:
- Dropped deprecated `!register_tstz_w_secs()` (was previously a no-op).
- Dropped deprecated `!PersistentConnectionPool`. This pool class was mostly
  designed to interact with Zope. Use `!ZPsycopgDA.pool` instead.
- Binary packages no longer installed by default. The 'psycopg2-binary'
  package must be used explicitly.
- Dropped `!PSYCOPG_DISPLAY_SIZE` build parameter.
- No longer use 2to3 during installation for Python 2 & 3 compatability. All
- Dropped support for mxDateTime as the default date and time adapter.
  mxDatetime support continues to be available as an alternative to Python's
  builtin datetime.
- No longer use 2to3 during installation for Python 2 & 3 compatibility. All
  source files are now compatible with Python 2 & 3 as is.
- The `!psycopg2.test` package is no longer installed by ``python setup.py
  install``.
- Wheel package bundled with OpenSSL 1.0.2r and PostgreSQL 11.2 libpq.


What's new in psycopg 2.7.7

@@ -51,14 +266,14 @@ What's new in psycopg 2.7.7

- Cleanup of the cursor results assignment code, which might have solved
  double free and inconsistencies in concurrent usage (:tickets:`#346, #384`).
- Wheel package compiled against OpenSSL 1.0.2q.
- Wheel package bundled with OpenSSL 1.0.2q.


What's new in psycopg 2.7.6.1
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Fixed binary package broken on OS X 10.12 (:ticket:`#807`).
- Wheel package compiled against PostgreSQL 11.1 libpq.
- Wheel package bundled with PostgreSQL 11.1 libpq.


What's new in psycopg 2.7.6

@@ -75,7 +290,7 @@ What's new in psycopg 2.7.6
- `~psycopg2.extras.execute_values()` accepts `~psycopg2.sql.Composable`
  objects (:ticket:`#794`).
- `~psycopg2.errorcodes` map updated to PostgreSQL 11.
- Wheel package compiled against PostgreSQL 10.5 libpq and OpenSSL 1.0.2p.
- Wheel package bundled with PostgreSQL 10.5 libpq and OpenSSL 1.0.2p.


What's new in psycopg 2.7.5

@@ -89,7 +304,7 @@ What's new in psycopg 2.7.5
- Maybe fixed building on MSYS2 (as reported in :ticket:`#658`).
- Allow string subclasses in connection and other places (:ticket:`#679`).
- Don't raise an exception closing an unused named cursor (:ticket:`#716`).
- Wheel package compiled against PostgreSQL 10.4 libpq and OpenSSL 1.0.2o.
- Wheel package bundled with PostgreSQL 10.4 libpq and OpenSSL 1.0.2o.


What's new in psycopg 2.7.4

@@ -111,7 +326,7 @@ What's new in psycopg 2.7.4
- Fixed `~cursor.rowcount` after `~cursor.executemany()` with :sql:`RETURNING`
  statements (:ticket:`#633`).
- Fixed compatibility problem with pypy3 (:ticket:`#649`).
- Wheel packages compiled against PostgreSQL 10.1 libpq and OpenSSL 1.0.2n.
- Wheel packages bundled with PostgreSQL 10.1 libpq and OpenSSL 1.0.2n.
- Wheel packages for Python 2.6 no more available (support dropped from
  wheel building infrastructure).


@@ -119,7 +334,7 @@ What's new in psycopg 2.7.4
What's new in psycopg 2.7.3.2
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Wheel package compiled against PostgreSQL 10.0 libpq and OpenSSL 1.0.2l
- Wheel package bundled with PostgreSQL 10.0 libpq and OpenSSL 1.0.2l
  (:tickets:`#601, #602`).


@@ -192,7 +407,7 @@ New features:
  them together.
- Added `~psycopg2.__libpq_version__` and
  `~psycopg2.extensions.libpq_version()` to inspect the version of the
  ``libpq`` library the module was compiled/loaded with
  ``libpq`` library the module was bundled with
  (:tickets:`#35, #323`).
- The attributes `~connection.notices` and `~connection.notifies` can be
  customized replacing them with any object exposing an `!append()` method

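Several of the 2.8 entries above describe API additions that are easiest to read as code. The sketch below is not part of the NEWS file; the schema and table names are hypothetical. It shows qualified names with `psycopg2.sql.Identifier`, the *fetch* parameter of `execute_values()`, and the `connection.info` object::

    import psycopg2
    from psycopg2 import sql
    from psycopg2.extras import execute_values

    conn = psycopg2.connect("dbname=psycopg2_test")
    cur = conn.cursor()

    # Identifier accepts several strings and renders "myschema"."mytable"
    # (hypothetical table with columns id and num)
    stmt = sql.SQL("INSERT INTO {} (num) VALUES %s RETURNING id").format(
        sql.Identifier("myschema", "mytable"))

    # fetch=True returns the rows produced by the RETURNING clause
    ids = execute_values(cur, stmt, [(10,), (20,)], fetch=True)
    print(ids)

    # connection.info exposes libpq-level connection details
    print(conn.info.server_version)

    conn.commit()
    conn.close()
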
README.rst (36 changed lines)
@@ -17,17 +17,31 @@ flexible objects adaptation system.

Psycopg 2 is both Unicode and Python 3 friendly.

.. Note::

    The psycopg2 package is still widely used and actively maintained, but it
    is not expected to receive new features.

    `Psycopg 3`__ is the evolution of psycopg2 and is where `new features are
    being developed`__: if you are starting a new project you should probably
    start from 3!

    .. __: https://pypi.org/project/psycopg/
    .. __: https://www.psycopg.org/psycopg3/docs/index.html


Documentation
-------------

Documentation is included in the ``doc`` directory and is `available online`__.

.. __: http://initd.org/psycopg/docs/
.. __: https://www.psycopg.org/docs/

For any other resource (source code repository, bug tracker, mailing list)
please check the `project homepage`__.

.. __: https://psycopg.org/


Installation
------------

@@ -56,19 +70,11 @@ production it is advised to use the package built from sources.

.. _PyPI: https://pypi.org/project/psycopg2/
.. _psycopg2-binary: https://pypi.org/project/psycopg2-binary/
.. _install: http://initd.org/psycopg/docs/install.html#install-from-source
.. _faq: http://initd.org/psycopg/docs/faq.html#faq-compile
.. _install: https://www.psycopg.org/docs/install.html#install-from-source
.. _faq: https://www.psycopg.org/docs/faq.html#faq-compile

.. __: http://initd.org/psycopg/
:Build status: |gh-actions|

:Linux/OSX: |travis|
:Windows: |appveyor|

.. |travis| image:: https://travis-ci.org/psycopg/psycopg2.svg?branch=master
    :target: https://travis-ci.org/psycopg/psycopg2
    :alt: Linux and OSX build status

.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/github/psycopg/psycopg2?branch=master&svg=true
    :target: https://ci.appveyor.com/project/psycopg/psycopg2/branch/master
    :alt: Windows build status
.. |gh-actions| image:: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml/badge.svg
    :target: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml
    :alt: Build status

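As a hint of what the Note added to the README means in practice, basic usage is similar in the two libraries. The Psycopg 3 lines below are a sketch based on its documented API, not code from this repository, and the connection string is a placeholder::

    # psycopg2 (this project)
    import psycopg2
    conn = psycopg2.connect("dbname=test")
    cur = conn.cursor()
    cur.execute("SELECT 1")
    print(cur.fetchone())

    # Psycopg 3 (recommended starting point for new projects)
    import psycopg
    with psycopg.connect("dbname=test") as conn:
        print(conn.execute("SELECT 1").fetchone())
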
8
doc/.gitignore
vendored
Normal file
8
doc/.gitignore
vendored
Normal file
|
@ -0,0 +1,8 @@
|
|||
env
|
||||
src/_build/*
|
||||
html/*
|
||||
psycopg2.txt
|
||||
src/sqlstate_errors.rst
|
||||
|
||||
# Added by psycopg-website to customize published docs
|
||||
src/_templates/layout.html
|
|
@ -1,7 +1,7 @@
|
|||
GNU LESSER GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
|
|
29
doc/Makefile
29
doc/Makefile
|
@ -1,6 +1,6 @@
|
|||
.PHONY: env help clean html text doctest
|
||||
.PHONY: env help clean html package doctest
|
||||
|
||||
docs: html text
|
||||
docs: html
|
||||
|
||||
check: doctest
|
||||
|
||||
|
@ -8,33 +8,32 @@ check: doctest
# It is not clean by 'make clean'

PYTHON := python$(PYTHON_VERSION)
PYTHON_VERSION ?= $(shell $(PYTHON) -c 'import sys; print ("%d.%d" % sys.version_info[:2])')
PYTHON_VERSION ?= $(shell $(PYTHON) -c 'import sys; print("%d.%d" % sys.version_info[:2])')
BUILD_DIR = $(shell pwd)/../build/lib.$(PYTHON_VERSION)

SPHINXBUILD ?= $$(pwd)/env/bin/sphinx-build
SPHOPTS = PYTHONPATH=$$(pwd)/../build/lib.$(PYTHON_VERSION)/ SPHINXBUILD=$(SPHINXBUILD)
SPHOPTS = SPHINXBUILD=$(SPHINXBUILD)

html:
	$(MAKE) PYTHON=$(PYTHON) -C .. package
html: package src/sqlstate_errors.rst
	$(MAKE) $(SPHOPTS) -C src $@
	cp -r src/_build/html .

text:
src/sqlstate_errors.rst: ../psycopg/sqlstate_errors.h $(BUILD_DIR)
	./env/bin/python src/tools/make_sqlstate_docs.py $< > $@

$(BUILD_DIR):
	$(MAKE) PYTHON=$(PYTHON) -C .. package
	$(MAKE) $(SPHOPTS) -C src $@
	cd src && tools/stitch_text.py index.rst _build/text > ../psycopg2.txt

doctest:
	$(MAKE) PYTHON=$(PYTHON) -C .. package
	$(MAKE) $(SPHOPTS) -C src $@

upload:
	# this command requires ssh configured to the proper target
	tar czf - -C html . | ssh psycoweb tar xzvf - -C docs/current

clean:
	$(MAKE) $(SPHOPTS) -C src $@
	rm -rf html psycopg2.txt
	rm -rf html src/sqlstate_errors.rst

env: requirements.txt
	virtualenv env
	$(PYTHON) -m venv env
	./env/bin/pip install -r requirements.txt
	echo "$$(pwd)/../build/lib.$(PYTHON_VERSION)" \
		> env/lib/python$(PYTHON_VERSION)/site-packages/psycopg.pth
@ -6,7 +6,7 @@ introspection, so you will need the same prerequisites_. The only extra
prerequisite is virtualenv_: the packages needed to build the docs will be
installed when building the env.

.. _prerequisites: http://initd.org/psycopg/docs/install.html#install-from-source
.. _prerequisites: https://www.psycopg.org/docs/install.html#install-from-source
.. _virtualenv: https://virtualenv.pypa.io/en/latest/

Build the env once with::

@ -17,10 +17,4 @@ Then you can build the documentation with::

    make

Or the single targets::

    make html
    make text

You should find the rendered documentation in the ``html`` dir and the text
file ``psycopg2.txt``.
You should find the rendered documentation in the ``html`` directory.
@ -13,82 +13,46 @@ How to make a psycopg2 release
|
|||
In the rest of this document we assume you have exported the version number
|
||||
into an environment variable, e.g.::
|
||||
|
||||
$ export VERSION=2.7
|
||||
$ export VERSION=2.8.4
|
||||
|
||||
- In the `Travis settings`__ you may want to be sure that the variables
|
||||
``TEST_PAST`` and ``TEST_FUTURE`` are set to a nonzero string to check all
|
||||
the supported postgres version.
|
||||
- Push psycopg2 to master or to the maint branch. Make sure tests on `GitHub
  Actions`__ pass.
|
||||
|
||||
.. __: https://travis-ci.org/psycopg/psycopg2/settings
|
||||
|
||||
- Push psycopg2 to master or to the maint branch. Make sure tests on Travis__
|
||||
and AppVeyor__ pass.
|
||||
|
||||
.. __: https://travis-ci.org/psycopg/psycopg2
|
||||
.. __: https://ci.appveyor.com/project/psycopg/psycopg2
|
||||
|
||||
- For an extra test merge or rebase the `test_i686`__ branch on the commit to
|
||||
release and push it too: this will test with Python 32 bits and debug
|
||||
versions.
|
||||
|
||||
.. __: https://github.com/psycopg/psycopg2/tree/test_i686
|
||||
.. __: https://github.com/psycopg/psycopg2/actions/workflows/tests.yml
|
||||
|
||||
- Create a signed tag with the content of the relevant NEWS bit and push it.
|
||||
E.g.::
|
||||
|
||||
$ git tag -a -s 2_7
|
||||
# Tag name will be 2_8_4
|
||||
$ git tag -a -s ${VERSION//\./_}
|
||||
|
||||
Psycopg 2.7 released
|
||||
Psycopg 2.8.4 released
|
||||
|
||||
What's new in psycopg 2.7
|
||||
-------------------------
|
||||
What's new in psycopg 2.8.4
|
||||
---------------------------
|
||||
|
||||
New features:
|
||||
|
||||
- Added `~psycopg2.sql` module to generate SQL dynamically (:ticket:`#308`).
|
||||
- Fixed bug blah (:ticket:`#42`).
|
||||
...
|
||||
|
||||
- Update the `psycopg2-wheels`_ submodule to the tag version and push. This
|
||||
will build the packages on `Travis CI`__ and `AppVeyor`__ and upload them to
|
||||
the `initd.org upload`__ dir.
|
||||
- Create the packages:
|
||||
|
||||
.. _psycopg2-wheels: https://github.com/psycopg/psycopg2-wheels
|
||||
.. __: https://travis-ci.org/psycopg/psycopg2-wheels
|
||||
.. __: https://ci.appveyor.com/project/psycopg/psycopg2-wheels
|
||||
.. __: http://initd.org/psycopg/upload/
|
||||
- On GitHub Actions run manually a `package build workflow`__.
|
||||
|
||||
- Download the packages generated (this assumes ssh configured properly)::
|
||||
.. __: https://github.com/psycopg/psycopg2/actions/workflows/packages.yml
|
||||
|
||||
$ rsync -arv initd-upload:psycopg2-${VERSION} .
|
||||
- When the workflows have finished download the packages from the job
|
||||
artifacts.
|
||||
|
||||
- Sign the packages and upload the signatures back::
|
||||
- Only for stable packages: upload the signed packages on PyPI::
|
||||
|
||||
$ for f in psycopg2-${VERSION}/*.{exe,tar.gz,whl}; do \
|
||||
gpg --armor --detach-sign $f;
|
||||
done
|
||||
|
||||
$ rsync -arv psycopg2-${VERSION} initd-upload:
|
||||
|
||||
- Run the ``copy-tarball.sh`` script on the server to copy the uploaded files
|
||||
in the `tarballs`__ dir::
|
||||
|
||||
$ ssh psycoweb@initd.org copy-tarball.sh ${VERSION}
|
||||
|
||||
.. __: http://initd.org/psycopg/tarballs/
|
||||
|
||||
- Remove the ``.exe`` from the dir, because we don't want to upload them on
|
||||
PyPI::
|
||||
|
||||
$ rm -v psycopg2-${VERSION}/*.exe{,.asc}
|
||||
|
||||
- Only for stable packages: upload the packages and signatures on PyPI::
|
||||
|
||||
$ twine upload psycopg2-${VERSION}/*
|
||||
$ twine upload -s wheelhouse/psycopg2-${VERSION}/*
|
||||
|
||||
- Create a release and release notes in the psycopg website, announce to
|
||||
psycopg and pgsql-announce mailing lists.
|
||||
|
||||
- Edit ``setup.py`` changing the version again (e.g. go to ``2.7.1.dev0``).
|
||||
- Edit ``setup.py`` changing the version again (e.g. go to ``2.8.5.dev0``).
|
||||
|
||||
|
||||
Releasing test packages
|
||||
|
@ -96,7 +60,7 @@ Releasing test packages
|
|||
|
||||
Test packages may be uploaded on the `PyPI testing site`__ using::
|
||||
|
||||
$ twine upload -r testpypi psycopg2-${VERSION}/*
|
||||
$ twine upload -s -r testpypi wheelhouse/psycopg2-${VERSION}/*
|
||||
|
||||
assuming `proper configuration`__ of ``~/.pypirc``.
|
||||
|
||||
|
|
2 doc/requirements.in Normal file

@ -0,0 +1,2 @@
Sphinx
sphinx-better-theme
@ -1,3 +1,50 @@
|
|||
# Packages only needed to build the docs
|
||||
Pygments>=2.2,<2.3
|
||||
Sphinx>=1.6,<=1.7
|
||||
#
|
||||
# This file is autogenerated by pip-compile with Python 3.10
|
||||
# by the following command:
|
||||
#
|
||||
# pip-compile requirements.in
|
||||
#
|
||||
alabaster==0.7.13
|
||||
# via sphinx
|
||||
babel==2.12.1
|
||||
# via sphinx
|
||||
certifi>=2023.7.22
|
||||
# via requests
|
||||
charset-normalizer==3.1.0
|
||||
# via requests
|
||||
docutils==0.19
|
||||
# via sphinx
|
||||
idna==3.4
|
||||
# via requests
|
||||
imagesize==1.4.1
|
||||
# via sphinx
|
||||
jinja2==3.1.2
|
||||
# via sphinx
|
||||
markupsafe==2.1.2
|
||||
# via jinja2
|
||||
packaging==23.1
|
||||
# via sphinx
|
||||
pygments==2.15.0
|
||||
# via sphinx
|
||||
requests==2.31.0
|
||||
# via sphinx
|
||||
snowballstemmer==2.2.0
|
||||
# via sphinx
|
||||
sphinx==6.1.3
|
||||
# via -r requirements.in
|
||||
sphinx-better-theme==0.1.5
|
||||
# via -r requirements.in
|
||||
sphinxcontrib-applehelp==1.0.4
|
||||
# via sphinx
|
||||
sphinxcontrib-devhelp==1.0.2
|
||||
# via sphinx
|
||||
sphinxcontrib-htmlhelp==2.0.1
|
||||
# via sphinx
|
||||
sphinxcontrib-jsmath==1.0.1
|
||||
# via sphinx
|
||||
sphinxcontrib-qthelp==1.0.3
|
||||
# via sphinx
|
||||
sphinxcontrib-serializinghtml==1.1.5
|
||||
# via sphinx
|
||||
urllib3==1.26.17
|
||||
# via requests
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
@import url("classic.css");
|
||||
|
||||
blockquote {
|
||||
font-style: italic;
|
||||
}
|
||||
|
@ -37,3 +35,102 @@ dl.faq dt {
|
|||
table.data-types div.line-block {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
|
||||
/* better theme customisation */
|
||||
|
||||
body {
|
||||
background-color: #216464;
|
||||
}
|
||||
|
||||
header, .related, .document, footer {
|
||||
background-color: white;
|
||||
}
|
||||
|
||||
header h1 {
|
||||
font-size: 150%;
|
||||
margin-bottom: 0;
|
||||
padding: 0.5rem 10px 0.5rem 10px;
|
||||
}
|
||||
|
||||
h1, h2, h3 {
|
||||
font-weight: normal;
|
||||
}
|
||||
|
||||
.body h1, .body h2, .body h3 {
|
||||
color: #074848;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 200%;
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-size: 160%;
|
||||
}
|
||||
|
||||
h3 {
|
||||
font-size: 140%;
|
||||
}
|
||||
|
||||
footer#pagefooter {
|
||||
margin-bottom: 1rem;
|
||||
font-size: 85%;
|
||||
color: #444;
|
||||
}
|
||||
|
||||
#rellinks, #breadcrumbs {
|
||||
padding-right: 10px;
|
||||
padding-left: 10px;
|
||||
}
|
||||
|
||||
.sphinxsidebar {
|
||||
padding-left: 10px;
|
||||
}
|
||||
|
||||
.bodywrapper {
|
||||
padding-right: 10px;
|
||||
}
|
||||
|
||||
div.body h1, div.body h2, div.body h3 {
|
||||
background-color: #f2f2f2;
|
||||
border-bottom: 1px solid #d0d0d0;
|
||||
}
|
||||
|
||||
div.body p.rubric {
|
||||
border-bottom: 1px solid #d0d0d0;
|
||||
}
|
||||
|
||||
body .sphinxsidebar .search {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
html pre {
|
||||
background-color: #efc;
|
||||
border: 1px solid #ac9;
|
||||
border-left: none;
|
||||
border-right: none;
|
||||
}
|
||||
|
||||
a, a:visited {
|
||||
color: #0b6868;
|
||||
}
|
||||
|
||||
th {
|
||||
background-color: #ede;
|
||||
}
|
||||
|
||||
code.xref, a code {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
code.descname {
|
||||
font-weight: bold;
|
||||
font-size: 120%;
|
||||
}
|
||||
|
||||
@media (max-width: 820px) {
|
||||
body {
|
||||
background-color: white;
|
||||
}
|
||||
}
|
||||
|
|
6 doc/src/_templates/searchbox.html Normal file

@ -0,0 +1,6 @@
{# Add a title over the search box #}

{%- if pagename != "search" %}
<h3>Quick search</h3>
{%- include "!searchbox.html" %}
{%- endif %}
@ -12,7 +12,7 @@ More advanced topics
|
|||
conn.commit()
|
||||
|
||||
def wait(conn):
|
||||
while 1:
|
||||
while True:
|
||||
state = conn.poll()
|
||||
if state == psycopg2.extensions.POLL_OK:
|
||||
break
|
||||
|
@ -226,7 +226,7 @@ read:
|
|||
|
||||
>>> cur.execute("SELECT '(10.2,20.3)'::point")
|
||||
>>> point = cur.fetchone()[0]
|
||||
>>> print type(point), point.x, point.y
|
||||
>>> print(type(point), point.x, point.y)
|
||||
<class 'Point'> 10.2 20.3
|
||||
|
||||
A typecaster created by `!new_type()` can be also used with
|
||||
|
@ -284,15 +284,15 @@ something to read::
|
|||
curs = conn.cursor()
|
||||
curs.execute("LISTEN test;")
|
||||
|
||||
print "Waiting for notifications on channel 'test'"
|
||||
while 1:
|
||||
print("Waiting for notifications on channel 'test'")
|
||||
while True:
|
||||
if select.select([conn],[],[],5) == ([],[],[]):
|
||||
print "Timeout"
|
||||
print("Timeout")
|
||||
else:
|
||||
conn.poll()
|
||||
while conn.notifies:
|
||||
notify = conn.notifies.pop(0)
|
||||
print "Got NOTIFY:", notify.pid, notify.channel, notify.payload
|
||||
print("Got NOTIFY:", notify.pid, notify.channel, notify.payload)
|
||||
|
||||
Running the script and executing a command such as :sql:`NOTIFY test, 'hello'`
|
||||
in a separate :program:`psql` shell, the output may look similar to:
|
||||
|
@ -328,7 +328,7 @@ received from a previous version server will have the
|
|||
Asynchronous support
|
||||
--------------------
|
||||
|
||||
.. versionadded:: 2.2.0
|
||||
.. versionadded:: 2.2
|
||||
|
||||
Psycopg can issue asynchronous queries to a PostgreSQL database. An asynchronous
|
||||
communication style is established passing the parameter *async*\=1 to the
|
||||
|
@ -347,7 +347,7 @@ together with the Python :py:func:`~select.select` function in order to carry on
|
|||
asynchronous operations with Psycopg::
|
||||
|
||||
def wait(conn):
|
||||
while 1:
|
||||
while True:
|
||||
state = conn.poll()
|
||||
if state == psycopg2.extensions.POLL_OK:
|
||||
break
|
||||
|
@ -468,7 +468,7 @@ example callback (using `!select()` to block) is provided as
|
|||
`psycopg2.extras.wait_select()`: it boils down to something similar to::
|
||||
|
||||
def wait_select(conn):
|
||||
while 1:
|
||||
while True:
|
||||
state = conn.poll()
|
||||
if state == extensions.POLL_OK:
|
||||
break
|
||||
|
@ -490,7 +490,7 @@ resources about the topic.
|
|||
.. _Eventlet: https://eventlet.net/
|
||||
.. _gevent: http://www.gevent.org/
|
||||
.. _SQLAlchemy: https://www.sqlalchemy.org/
|
||||
.. _psycogreen: http://bitbucket.org/dvarrazzo/psycogreen/
|
||||
.. _psycogreen: https://github.com/psycopg/psycogreen/
|
||||
.. __: https://www.postgresql.org/docs/current/static/libpq-async.html
|
||||
|
||||
.. warning::
|
||||
|
@ -552,8 +552,7 @@ value greater than zero in ``postgresql.conf`` (these changes require a server
|
|||
restart). Create a database ``psycopg2_test``.
|
||||
|
||||
Then run the following code to quickly try the replication support out. This
|
||||
is not production code -- it has no error handling, it sends feedback too
|
||||
often, etc. -- and it's only intended as a simple demo of logical
|
||||
is not production code -- it's only intended as a simple demo of logical
|
||||
replication::
|
||||
|
||||
from __future__ import print_function
|
||||
|
|
120 doc/src/conf.py
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Psycopg documentation build configuration file, created by
|
||||
# sphinx-quickstart on Sun Feb 7 13:48:41 2010.
|
||||
|
@ -11,7 +10,9 @@
|
|||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
import sys, os
|
||||
import os
|
||||
import sys
|
||||
from better import better_theme_path
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
|
@ -22,11 +23,16 @@ sys.path.append(os.path.abspath('tools/lib'))
|
|||
|
||||
# Add any Sphinx extension module names here, as strings. They can be extensions
|
||||
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.ifconfig',
|
||||
'sphinx.ext.doctest', 'sphinx.ext.intersphinx' ]
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
'sphinx.ext.todo',
|
||||
'sphinx.ext.ifconfig',
|
||||
'sphinx.ext.doctest',
|
||||
'sphinx.ext.intersphinx',
|
||||
]
|
||||
|
||||
# Specific extensions for Psycopg documentation.
|
||||
extensions += [ 'dbapi_extension', 'sql_role', 'ticket_role' ]
|
||||
extensions += ['dbapi_extension', 'sql_role', 'ticket_role']
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
@ -35,14 +41,16 @@ templates_path = ['_templates']
|
|||
source_suffix = '.rst'
|
||||
|
||||
# The encoding of source files.
|
||||
#source_encoding = 'utf-8'
|
||||
# source_encoding = 'utf-8'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'Psycopg'
|
||||
copyright = u'2001-2016, Federico Di Gregorio, Daniele Varrazzo'
|
||||
project = 'Psycopg'
|
||||
copyright = (
|
||||
'2001-2021, Federico Di Gregorio, Daniele Varrazzo, The Psycopg Team'
|
||||
)
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
|
@ -54,15 +62,14 @@ version = '2.0'
|
|||
# The full version, including alpha/beta/rc tags.
|
||||
try:
|
||||
import psycopg2
|
||||
release = psycopg2.__version__.split()[0]
|
||||
version = '.'.join(release.split('.')[:2])
|
||||
except ImportError:
|
||||
print("WARNING: couldn't import psycopg to read version.")
|
||||
release = version
|
||||
else:
|
||||
release = psycopg2.__version__.split()[0]
|
||||
version = '.'.join(release.split('.')[:2])
|
||||
|
||||
intersphinx_mapping = {
|
||||
'py': ('https://docs.python.org/3', None),
|
||||
}
|
||||
intersphinx_mapping = {'py': ('https://docs.python.org/3', None)}
|
||||
|
||||
# Pattern to generate links to the bug tracker
|
||||
ticket_url = 'https://github.com/psycopg/psycopg2/issues/%s'
|
||||
|
@ -71,16 +78,16 @@ ticket_remap_offset = 230
|
|||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#language = None
|
||||
# language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
#today = ''
|
||||
# today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#today_fmt = '%B %d, %Y'
|
||||
# today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of documents that shouldn't be included in the build.
|
||||
#unused_docs = []
|
||||
# unused_docs = []
|
||||
|
||||
# List of directories, relative to source directory, that shouldn't be searched
|
||||
# for source files.
|
||||
|
@ -90,15 +97,15 @@ exclude_trees = ['_build', 'html']
|
|||
default_role = 'obj'
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
#add_function_parentheses = True
|
||||
# add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
#add_module_names = True
|
||||
# add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
#show_authors = False
|
||||
# show_authors = False
|
||||
|
||||
# Using 'python' instead of the default gives warnings if parsing an example
|
||||
# fails, instead of defaulting to none
|
||||
|
@ -108,7 +115,7 @@ highlight_language = 'python'
|
|||
pygments_style = 'sphinx'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
#modindex_common_prefix = []
|
||||
# modindex_common_prefix = []
|
||||
|
||||
# Include TODO items in the documentation
|
||||
todo_include_todos = False
|
||||
|
@ -121,8 +128,6 @@ rst_epilog = """
|
|||
.. _transaction isolation level:
|
||||
https://www.postgresql.org/docs/current/static/transaction-iso.html
|
||||
|
||||
.. _mx.DateTime: https://www.egenix.com/products/python/mxBase/mxDateTime/
|
||||
|
||||
.. |MVCC| replace:: :abbr:`MVCC (Multiversion concurrency control)`
|
||||
"""
|
||||
|
||||
|
@ -130,35 +135,41 @@ rst_epilog = """
|
|||
|
||||
# The theme to use for HTML and HTML Help pages. Major themes that come with
|
||||
# Sphinx are currently 'default' and 'sphinxdoc'.
|
||||
html_theme = 'classic'
|
||||
html_theme = 'better'
|
||||
|
||||
# The stylesheet to use with HTML output: this will include the original one
|
||||
# adding a few classes.
|
||||
html_style = 'psycopg.css'
|
||||
# html_style = 'psycopg.css'
|
||||
|
||||
# Hide the sphinx footer
|
||||
html_show_sphinx = False
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#html_theme_options = {}
|
||||
html_theme_options = {
|
||||
'linktotheme': False,
|
||||
'cssfiles': ['_static/psycopg.css'],
|
||||
}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
#html_theme_path = []
|
||||
html_theme_path = [better_theme_path]
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
#html_title = None
|
||||
# html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#html_short_title = None
|
||||
html_short_title = 'Home'
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#html_logo = None
|
||||
# html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#html_favicon = None
|
||||
# html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
|
@ -167,38 +178,41 @@ html_static_path = ['_static']
|
|||
|
||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||
# using the given strftime format.
|
||||
#html_last_updated_fmt = '%b %d, %Y'
|
||||
# html_last_updated_fmt = '%b %d, %Y'
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
#html_use_smartypants = True
|
||||
# html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
#html_sidebars = {}
|
||||
# no need for the prev/next topic link using better theme: they are on top
|
||||
html_sidebars = {
|
||||
'**': ['localtoc.html', 'searchbox.html'],
|
||||
}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
#html_additional_pages = {}
|
||||
# html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
#html_use_modindex = True
|
||||
# html_use_modindex = True
|
||||
|
||||
# If false, no index is generated.
|
||||
#html_use_index = True
|
||||
# html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
#html_split_index = False
|
||||
# html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#html_show_sourcelink = True
|
||||
# html_show_sourcelink = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#html_use_opensearch = ''
|
||||
# html_use_opensearch = ''
|
||||
|
||||
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
#html_file_suffix = ''
|
||||
# html_file_suffix = ''
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'psycopgdoc'
|
||||
|
@ -207,35 +221,41 @@ htmlhelp_basename = 'psycopgdoc'
|
|||
# -- Options for LaTeX output --------------------------------------------------
|
||||
|
||||
# The paper size ('letter' or 'a4').
|
||||
#latex_paper_size = 'letter'
|
||||
# latex_paper_size = 'letter'
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#latex_font_size = '10pt'
|
||||
# latex_font_size = '10pt'
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title, author, documentclass [howto/manual]).
|
||||
latex_documents = [
|
||||
('index', 'psycopg.tex', u'Psycopg Documentation',
|
||||
u'Federico Di Gregorio', 'manual'),
|
||||
(
|
||||
'index',
|
||||
'psycopg.tex',
|
||||
'Psycopg Documentation',
|
||||
'Federico Di Gregorio',
|
||||
'manual',
|
||||
)
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
#latex_logo = None
|
||||
# latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
#latex_use_parts = False
|
||||
# latex_use_parts = False
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#latex_preamble = ''
|
||||
# latex_preamble = ''
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#latex_appendices = []
|
||||
# latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#latex_use_modindex = True
|
||||
# latex_use_modindex = True
|
||||
|
||||
toc_object_entries = False
|
||||
|
||||
doctest_global_setup = """
|
||||
|
||||
|
|
|
@ -21,6 +21,28 @@ The ``connection`` class
|
|||
Connections are thread safe and can be shared among many threads. See
|
||||
:ref:`thread-safety` for details.
|
||||
|
||||
Connections can be used as context managers. Note that a context wraps a
|
||||
transaction: if the context exits with success the transaction is
|
||||
committed, if it exits with an exception the transaction is rolled back.
|
||||
Note that the connection is not closed by the context and it can be used
|
||||
for several contexts.
|
||||
|
||||
.. code:: python
|
||||
|
||||
conn = psycopg2.connect(DSN)
|
||||
|
||||
with conn:
|
||||
with conn.cursor() as curs:
|
||||
curs.execute(SQL1)
|
||||
|
||||
with conn:
|
||||
with conn.cursor() as curs:
|
||||
curs.execute(SQL2)
|
||||
|
||||
# leaving contexts doesn't close the connection
|
||||
conn.close()
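
The example above only shows the success path. A hedged sketch of the
rollback side (not part of the diff; it assumes a ``test`` table and reuses
the ``DSN`` placeholder): an exception leaving the block rolls the
transaction back, and the connection remains usable afterwards.

.. code:: python

    import psycopg2

    conn = psycopg2.connect(DSN)

    try:
        with conn:
            with conn.cursor() as curs:
                curs.execute("INSERT INTO test (num) VALUES (%s)", (42,))
                raise RuntimeError("boom")   # leaving with an exception...
    except RuntimeError:
        pass                                 # ...rolls the INSERT back

    # the connection is still usable after the failed block
    with conn:
        with conn.cursor() as curs:
            curs.execute("SELECT count(*) FROM test")
            print(curs.fetchone()[0])        # the row above was not committed

    conn.close()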
|
||||
|
||||
|
||||
.. method:: cursor(name=None, cursor_factory=None, scrollable=None, withhold=False)
|
||||
|
||||
Return a new `cursor` object using the connection.
|
||||
|
@ -117,7 +139,7 @@ The ``connection`` class
|
|||
with a `~connection.commit()`/`~connection.rollback()` before
|
||||
closing.
|
||||
|
||||
.. _PgBouncer: http://pgbouncer.projects.postgresql.org/
|
||||
.. _PgBouncer: http://www.pgbouncer.org/
|
||||
|
||||
|
||||
.. index::
|
||||
|
@ -610,30 +632,7 @@ The ``connection`` class
|
|||
A `~psycopg2.extensions.ConnectionInfo` object exposing information
|
||||
about the native libpq connection.
|
||||
|
||||
.. versionadded:: 2.8.0
|
||||
|
||||
|
||||
.. index::
|
||||
pair: Connection; Parameters
|
||||
|
||||
.. method:: get_dsn_parameters()
|
||||
|
||||
Get the effective dsn parameters for the connection as a dictionary.
|
||||
|
||||
The *password* parameter is removed from the result.
|
||||
|
||||
Example::
|
||||
|
||||
>>> conn.get_dsn_parameters()
|
||||
{'dbname': 'test', 'user': 'postgres', 'port': '5432', 'sslmode': 'prefer'}
|
||||
|
||||
Requires libpq >= 9.3.
|
||||
|
||||
.. seealso:: libpq docs for `PQconninfo()`__ for details.
|
||||
|
||||
.. __: https://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-PQCONNINFO
|
||||
|
||||
.. versionadded:: 2.7
|
||||
.. versionadded:: 2.8
|
||||
|
||||
|
||||
.. index::
|
||||
|
@ -662,7 +661,7 @@ The ``connection`` class
|
|||
:param new_oid: Create a new object using the specified OID. The
|
||||
function raises `~psycopg2.OperationalError` if the OID is already
|
||||
in use. Default is 0, meaning assign a new one automatically.
|
||||
:param new_file: The name of a file to be imported in the the database
|
||||
:param new_file: The name of a file to be imported in the database
|
||||
(using the |lo_import|_ function)
|
||||
:param lobject_factory: Subclass of
|
||||
`~psycopg2.extensions.lobject` to be instantiated.
|
||||
|
@ -695,7 +694,7 @@ The ``connection`` class
|
|||
|
||||
.. rubric:: Methods related to asynchronous support
|
||||
|
||||
.. versionadded:: 2.2.0
|
||||
.. versionadded:: 2.2
|
||||
|
||||
.. seealso:: :ref:`async-support` and :ref:`green-support`.
|
||||
|
||||
|
@ -738,11 +737,28 @@ The ``connection`` class
|
|||
|
||||
Return `!True` if the connection is executing an asynchronous operation.
|
||||
|
||||
|
||||
.. rubric:: Interoperation with other C API modules
|
||||
|
||||
.. attribute:: pgconn_ptr
|
||||
|
||||
Return the internal `!PGconn*` as integer. Useful to pass the libpq
|
||||
raw connection structure to C functions, e.g. via `ctypes`::
|
||||
|
||||
>>> import ctypes
|
||||
>>> import ctypes.util
|
||||
>>> libpq = ctypes.pydll.LoadLibrary(ctypes.util.find_library('pq'))
|
||||
>>> libpq.PQserverVersion.argtypes = [ctypes.c_void_p]
|
||||
>>> libpq.PQserverVersion.restype = ctypes.c_int
|
||||
>>> libpq.PQserverVersion(conn.pgconn_ptr)
|
||||
90611
|
||||
|
||||
.. versionadded:: 2.8
|
||||
|
||||
|
||||
.. method:: get_native_connection()
|
||||
|
||||
Return the internal `PGconn*` wrapped in a PyCapsule object. This is
|
||||
Return the internal `!PGconn*` wrapped in a PyCapsule object. This is
|
||||
only useful for passing the `libpq` raw connection associated to this
|
||||
connection object to other C-level modules that may have a use for it.
|
||||
|
||||
|
@ -756,7 +772,7 @@ The ``connection`` class
|
|||
|
||||
.. rubric:: informative methods of the native connection
|
||||
|
||||
.. note::
|
||||
.. note::
|
||||
|
||||
These methods are better accessed using the `~connection.info`
|
||||
attributes and may be dropped in future versions.
|
||||
|
@ -829,8 +845,10 @@ The ``connection`` class
|
|||
Also available as `~connection.info`\ `!.`\
|
||||
`~psycopg2.extensions.ConnectionInfo.backend_pid`.
|
||||
|
||||
Returns the process ID (PID) of the backend server process handling
|
||||
this connection.
|
||||
Returns the process ID (PID) of the backend server process *you
|
||||
connected to*. Note that if you use a connection pool service such as
|
||||
PgBouncer_ this value will not be updated if your connection is
|
||||
switched to a different backend.
|
||||
|
||||
Note that the PID belongs to a process executing on the database
|
||||
server host, not the local host!
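
A small, hedged sketch (not part of the diff; ``conn`` and ``cur`` are an
open connection and cursor) showing the classic call, its newer
`connection.info` equivalent, and a typical use of the value:

.. code:: python

    # classic method
    pid = conn.get_backend_pid()

    # equivalent, through the newer info attribute
    pid = conn.info.backend_pid

    # the PID identifies the server-side process, e.g. in pg_stat_activity
    cur.execute("SELECT query FROM pg_stat_activity WHERE pid = %s", (pid,))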
|
||||
|
@ -866,6 +884,32 @@ The ``connection`` class
|
|||
.. versionadded:: 2.0.12
|
||||
|
||||
|
||||
.. index::
|
||||
pair: Connection; Parameters
|
||||
|
||||
.. method:: get_dsn_parameters()
|
||||
|
||||
Also available as `~connection.info`\ `!.`\
|
||||
`~psycopg2.extensions.ConnectionInfo.dsn_parameters`.
|
||||
|
||||
Get the effective dsn parameters for the connection as a dictionary.
|
||||
|
||||
The *password* parameter is removed from the result.
|
||||
|
||||
Example::
|
||||
|
||||
>>> conn.get_dsn_parameters()
|
||||
{'dbname': 'test', 'user': 'postgres', 'port': '5432', 'sslmode': 'prefer'}
|
||||
|
||||
Requires libpq >= 9.3.
|
||||
|
||||
.. seealso:: libpq docs for `PQconninfo()`__ for details.
|
||||
|
||||
.. __: https://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-PQCONNINFO
|
||||
|
||||
.. versionadded:: 2.7
|
||||
|
||||
|
||||
.. testcode::
|
||||
:hide:
|
||||
|
||||
|
|
|
@ -34,6 +34,16 @@ The ``cursor`` class
|
|||
many cursors from the same connection and should use each cursor from
|
||||
a single thread. See :ref:`thread-safety` for details.
|
||||
|
||||
Cursors can be used as context managers: leaving the context will close
|
||||
the cursor.
|
||||
|
||||
.. code:: python
|
||||
|
||||
with conn.cursor() as curs:
|
||||
curs.execute(SQL)
|
||||
|
||||
# the cursor is now closed
|
||||
|
||||
|
||||
.. attribute:: description
|
||||
|
||||
|
@ -114,7 +124,7 @@ The ``cursor`` class
|
|||
.. attribute:: name
|
||||
|
||||
Read-only attribute containing the name of the cursor if it was
|
||||
creates as named cursor by `connection.cursor()`, or `!None` if
|
||||
created as named cursor by `connection.cursor()`, or `!None` if
|
||||
it is a client side cursor. See :ref:`server-side-cursors`.
|
||||
|
||||
.. extension::
|
||||
|
@ -198,6 +208,14 @@ The ``cursor`` class
|
|||
Parameters are bounded to the query using the same rules described in
|
||||
the `~cursor.execute()` method.
|
||||
|
||||
.. code:: python
|
||||
|
||||
>>> nums = ((1,), (5,), (10,))
|
||||
>>> cur.executemany("INSERT INTO test (num) VALUES (%s)", nums)
|
||||
|
||||
>>> tuples = ((123, "foo"), (42, "bar"), (23, "baz"))
|
||||
>>> cur.executemany("INSERT INTO test (num, data) VALUES (%s, %s)", tuples)
|
||||
|
||||
.. warning::
|
||||
In its current implementation this method is not faster than
|
||||
executing `~cursor.execute()` in a loop. For better performance
|
||||
|
@ -222,6 +240,16 @@ The ``cursor`` class
|
|||
.. versionchanged:: 2.7
|
||||
added support for named arguments.
|
||||
|
||||
.. note::
|
||||
|
||||
`!callproc()` can only be used with PostgreSQL functions__, not
|
||||
with the procedures__ introduced in PostgreSQL 11, which require
|
||||
the :sql:`CALL` statement to run. Please use a normal
|
||||
`execute()` to run them.
|
||||
|
||||
.. __: https://www.postgresql.org/docs/current/sql-createfunction.html
|
||||
.. __: https://www.postgresql.org/docs/current/sql-createprocedure.html
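
As a hedged illustration of this note (the function and procedure names
below are made up), a server-side function can be run through
`!callproc()`, while a procedure must be run with :sql:`CALL` via
`execute()`:

.. code:: python

    # PostgreSQL function: fine with callproc()
    cur.callproc("my_function", (42,))
    result = cur.fetchone()

    # PostgreSQL 11+ procedure: run it with CALL through execute() instead
    cur.execute("CALL my_procedure(%s)", (42,))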
|
||||
|
||||
.. method:: mogrify(operation [, parameters])
|
||||
|
||||
Return a query string after arguments binding. The string returned is
|
||||
|
@ -264,7 +292,7 @@ The ``cursor`` class
|
|||
|
||||
>>> cur.execute("SELECT * FROM test;")
|
||||
>>> for record in cur:
|
||||
... print record
|
||||
... print(record)
|
||||
...
|
||||
(1, 100, "abc'def")
|
||||
(2, None, 'dada')
|
||||
|
@ -488,8 +516,10 @@ The ``cursor`` class
|
|||
|
||||
The time zone factory used to handle data types such as
|
||||
:sql:`TIMESTAMP WITH TIME ZONE`. It should be a `~datetime.tzinfo`
|
||||
object. A few implementations are available in the `psycopg2.tz`
|
||||
module.
|
||||
object. Default is `datetime.timezone`.
|
||||
|
||||
.. versionchanged:: 2.9
|
||||
previously the default factory was `psycopg2.tz.FixedOffsetTimezone`.
|
||||
|
||||
|
||||
.. method:: nextset()
|
||||
|
@ -540,13 +570,6 @@ The ``cursor`` class
|
|||
>>> cur.fetchall()
|
||||
[(6, 42, 'foo'), (7, 74, 'bar')]
|
||||
|
||||
.. note:: the name of the table is not quoted: if the table name
|
||||
contains uppercase letters or special characters it must be quoted
|
||||
with double quotes::
|
||||
|
||||
cur.copy_from(f, '"TABLE"')
|
||||
|
||||
|
||||
.. versionchanged:: 2.0.6
|
||||
added the *columns* parameter.
|
||||
|
||||
|
@ -555,6 +578,11 @@ The ``cursor`` class
|
|||
are encoded in the connection `~connection.encoding` when sent to
|
||||
the backend.
|
||||
|
||||
.. versionchanged:: 2.9
|
||||
the table and fields names are now quoted. If you need to specify
|
||||
a schema-qualified table please use `copy_expert()`.
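
A hedged sketch of the suggested workaround (the schema-qualified name is
made up; ``f`` is a file-like object as in the examples above):

.. code:: python

    # copy_from()/copy_to() now quote the table name, so "myschema.mytable"
    # would be treated as a single identifier; use copy_expert() instead
    cur.copy_expert("COPY myschema.mytable FROM STDIN", f)

    # and in the other direction
    cur.copy_expert("COPY myschema.mytable TO STDOUT", f)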
|
||||
|
||||
|
||||
.. method:: copy_to(file, table, sep='\\t', null='\\\\N', columns=None)
|
||||
|
||||
Write the content of the table named *table* *to* the file-like
|
||||
|
@ -576,12 +604,6 @@ The ``cursor`` class
|
|||
2|\N|dada
|
||||
...
|
||||
|
||||
.. note:: the name of the table is not quoted: if the table name
|
||||
contains uppercase letters or special characters it must be quoted
|
||||
with double quotes::
|
||||
|
||||
cur.copy_to(f, '"TABLE"')
|
||||
|
||||
.. versionchanged:: 2.0.6
|
||||
added the *columns* parameter.
|
||||
|
||||
|
@ -590,6 +612,10 @@ The ``cursor`` class
|
|||
are decoded in the connection `~connection.encoding` when read
|
||||
from the backend.
|
||||
|
||||
.. versionchanged:: 2.9
|
||||
the table and fields names are now quoted. If you need to specify
|
||||
a schema-qualified table please use `copy_expert()`.
|
||||
|
||||
|
||||
.. method:: copy_expert(sql, file, size=8192)
|
||||
|
||||
|
@ -632,6 +658,24 @@ The ``cursor`` class
|
|||
using Unicode data instead of bytes.
|
||||
|
||||
|
||||
.. rubric:: Interoperation with other C API modules
|
||||
|
||||
.. attribute:: pgresult_ptr
|
||||
|
||||
Return the cursor's internal `!PGresult*` as integer. Useful to pass
|
||||
the libpq raw result structure to C functions, e.g. via `ctypes`::
|
||||
|
||||
>>> import ctypes
|
||||
>>> libpq = ctypes.pydll.LoadLibrary(ctypes.util.find_library('pq'))
|
||||
>>> libpq.PQcmdStatus.argtypes = [ctypes.c_void_p]
|
||||
>>> libpq.PQcmdStatus.restype = ctypes.c_char_p
|
||||
|
||||
>>> curs.execute("select 'x'")
|
||||
>>> libpq.PQcmdStatus(curs.pgresult_ptr)
|
||||
b'SELECT 1'
|
||||
|
||||
.. versionadded:: 2.8
|
||||
|
||||
.. testcode::
|
||||
:hide:
|
||||
|
||||
|
|
|
@ -50,7 +50,7 @@ An example of the available constants defined in the module:
|
|||
'42P01'
|
||||
|
||||
Constants representing all the error values defined by PostgreSQL versions
|
||||
between 8.1 and 11 are included in the module.
|
||||
between 8.1 and 15 are included in the module.
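
As a hedged illustration (not part of the diff above; it assumes an open
cursor ``cur``), the constants can be compared against
`~psycopg2.Error.pgcode`, and `lookup()` maps a code back to its symbolic
name:

.. code:: python

    >>> import psycopg2.errorcodes
    >>> psycopg2.errorcodes.UNDEFINED_TABLE
    '42P01'
    >>> psycopg2.errorcodes.lookup('42P01')
    'UNDEFINED_TABLE'
    >>> try:
    ...     cur.execute("SELECT * FROM nosuchtable")
    ... except psycopg2.ProgrammingError as e:
    ...     print(e.pgcode == psycopg2.errorcodes.UNDEFINED_TABLE)
    True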
|
||||
|
||||
|
||||
.. autofunction:: lookup(code)
|
||||
|
|
|
@ -10,11 +10,21 @@
|
|||
|
||||
.. versionadded:: 2.8
|
||||
|
||||
.. versionchanged:: 2.8.4 added errors introduced in PostgreSQL 12
|
||||
|
||||
.. versionchanged:: 2.8.6 added errors introduced in PostgreSQL 13
|
||||
|
||||
.. versionchanged:: 2.9.2 added errors introduced in PostgreSQL 14
|
||||
|
||||
.. versionchanged:: 2.9.4 added errors introduced in PostgreSQL 15
|
||||
|
||||
.. versionchanged:: 2.9.10 added errors introduced in PostgreSQL 17
|
||||
|
||||
This module exposes the classes psycopg raises upon receiving an error from
|
||||
the database with a :sql:`SQLSTATE` value attached (available in the
|
||||
`~psycopg2.Error.pgcode` attribute). The content of the module is generated
|
||||
from the PostgreSQL source code and includes classes for every error defined
|
||||
by PostgreSQL in versions between 9.1 and 11.
|
||||
by PostgreSQL in versions between 9.1 and 15.
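
As a hedged example (not from the diff; it assumes an open cursor ``cur``),
the class raised for SQLSTATE 22012 carries that code in its
`~psycopg2.Error.pgcode` attribute:

.. code:: python

    >>> try:
    ...     cur.execute("SELECT 1 / 0")
    ... except psycopg2.errors.DivisionByZero as e:
    ...     print(e.pgcode)
    22012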
|
||||
|
||||
Every class in the module is named after what is referred to as "condition name" `in
the documentation`__, converted to CamelCase: e.g. the error 22012,
|
||||
|
@ -54,10 +64,6 @@ idiomatic error handler:
|
|||
except psycopg2.errors.LockNotAvailable:
|
||||
locked = True
|
||||
|
||||
For completeness, the module also exposes all the :ref:`DB-API-defined
|
||||
exceptions <dbapi-exceptions>` and :ref:`a few psycopg-specific ones
|
||||
<extension-exceptions>` exposed by the `!extensions` module. One stop shop
|
||||
for all your mistakes...
|
||||
|
||||
.. autofunction:: lookup
|
||||
|
||||
|
@ -67,3 +73,17 @@ for all your mistakes...
|
|||
cur.execute("LOCK TABLE mytable IN ACCESS EXCLUSIVE MODE NOWAIT")
|
||||
except psycopg2.errors.lookup("55P03"):
|
||||
locked = True
|
||||
|
||||
|
||||
SQLSTATE exception classes
|
||||
--------------------------
|
||||
|
||||
The following table contains the list of all the SQLSTATE classes exposed by
|
||||
the module.
|
||||
|
||||
Note that, for completeness, the module also exposes all the
|
||||
:ref:`DB-API-defined exceptions <dbapi-exceptions>` and :ref:`a few
|
||||
psycopg-specific ones <extension-exceptions>` exposed by the `!extensions`
|
||||
module, which are not listed here.
|
||||
|
||||
.. include:: sqlstate_errors.rst
|
||||
|
|
|
@ -101,7 +101,7 @@ introspection etc.
|
|||
|
||||
Set the lobject current position.
|
||||
|
||||
.. versionchanged:: 2.6.0
|
||||
.. versionchanged:: 2.6
|
||||
added support for *offset* > 2GB.
|
||||
|
||||
|
||||
|
@ -109,9 +109,9 @@ introspection etc.
|
|||
|
||||
Return the lobject current position.
|
||||
|
||||
.. versionadded:: 2.2.0
|
||||
.. versionadded:: 2.2
|
||||
|
||||
.. versionchanged:: 2.6.0
|
||||
.. versionchanged:: 2.6
|
||||
added support for return value > 2GB.
|
||||
|
||||
|
||||
|
@ -127,9 +127,9 @@ introspection etc.
|
|||
.. |lo_truncate| replace:: `!lo_truncate()`
|
||||
.. _lo_truncate: https://www.postgresql.org/docs/current/static/lo-interfaces.html#LO-TRUNCATE
|
||||
|
||||
.. versionadded:: 2.2.0
|
||||
.. versionadded:: 2.2
|
||||
|
||||
.. versionchanged:: 2.6.0
|
||||
.. versionchanged:: 2.6
|
||||
added support for *len* > 2GB.
|
||||
|
||||
.. warning::
|
||||
|
@ -165,6 +165,15 @@ introspection etc.
|
|||
.. autoattribute:: host
|
||||
.. autoattribute:: port
|
||||
.. autoattribute:: options
|
||||
.. autoattribute:: dsn_parameters
|
||||
|
||||
Example::
|
||||
|
||||
>>> conn.info.dsn_parameters
|
||||
{'dbname': 'test', 'user': 'postgres', 'port': '5432', 'sslmode': 'prefer'}
|
||||
|
||||
Requires libpq >= 9.3.
|
||||
|
||||
.. autoattribute:: status
|
||||
.. autoattribute:: transaction_status
|
||||
.. automethod:: parameter_status(name)
|
||||
|
@ -404,9 +413,9 @@ deal with Python objects adaptation:
|
|||
|
||||
.. method:: getquoted()
|
||||
|
||||
Return the string enclosed in single quotes. Any single quote
|
||||
appearing in the the string is escaped by doubling it according to SQL
|
||||
string constants syntax. Backslashes are escaped too.
|
||||
Return the string enclosed in single quotes. Any single quote appearing
|
||||
in the string is escaped by doubling it according to SQL string
|
||||
constants syntax. Backslashes are escaped too.
|
||||
|
||||
>>> QuotedString(r"O'Reilly").getquoted()
|
||||
"'O''Reilly'"
|
||||
|
@ -444,13 +453,6 @@ deal with Python objects adaptation:
|
|||
|
||||
Specialized adapters for Python datetime objects.
|
||||
|
||||
.. class:: DateFromMx
|
||||
TimeFromMx
|
||||
TimestampFromMx
|
||||
IntervalFromMx
|
||||
|
||||
Specialized adapters for `mx.DateTime`_ objects.
|
||||
|
||||
.. data:: adapters
|
||||
|
||||
Dictionary of the currently registered object adapters. Use
|
||||
|
@ -592,7 +594,7 @@ Coroutines support functions
|
|||
These functions are used to set and retrieve the callback function for
|
||||
:ref:`cooperation with coroutine libraries <green-support>`.
|
||||
|
||||
.. versionadded:: 2.2.0
|
||||
.. versionadded:: 2.2
|
||||
|
||||
.. autofunction:: set_wait_callback(f)
|
||||
|
||||
|
@ -749,8 +751,8 @@ methods. The level can be set to one of the following constants:
|
|||
|
||||
.. data:: ISOLATION_LEVEL_READ_COMMITTED
|
||||
|
||||
This is usually the the default PostgreSQL value, but a different default
|
||||
may be set in the database configuration.
|
||||
This is usually the default PostgreSQL value, but a different default may
|
||||
be set in the database configuration.
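
A minimal, hedged sketch (not part of the diff; ``conn`` is an open
connection) of selecting this level explicitly:

.. code:: python

    from psycopg2 import extensions

    conn.set_isolation_level(extensions.ISOLATION_LEVEL_READ_COMMITTED)
    # or, equivalently, through the session parameters
    conn.set_session(isolation_level=extensions.ISOLATION_LEVEL_READ_COMMITTED)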
|
||||
|
||||
A new transaction is started at the first `~cursor.execute()` command on a
|
||||
cursor and at each new `!execute()` after a `~connection.commit()` or a
|
||||
|
@ -900,7 +902,7 @@ internal usage and Python code should not rely on them.
|
|||
Poll constants
|
||||
--------------
|
||||
|
||||
.. versionadded:: 2.2.0
|
||||
.. versionadded:: 2.2
|
||||
|
||||
These values can be returned by `connection.poll()` during asynchronous
|
||||
connection and communication. They match the values in the libpq enum
|
||||
|
@ -995,21 +997,7 @@ from the database. See :ref:`unicode-handling` for details.
|
|||
Typecasters to convert time-related data types to Python `!datetime`
|
||||
objects.
|
||||
|
||||
.. data:: MXDATE
|
||||
MXDATETIME
|
||||
MXDATETIMETZ
|
||||
MXINTERVAL
|
||||
MXTIME
|
||||
MXDATEARRAY
|
||||
MXDATETIMEARRAY
|
||||
MXDATETIMETZARRAY
|
||||
MXINTERVALARRAY
|
||||
MXTIMEARRAY
|
||||
|
||||
Typecasters to convert time-related data types to `mx.DateTime`_ objects.
|
||||
Only available if Psycopg was compiled with `!mx` support.
|
||||
|
||||
.. versionchanged:: 2.2.0
|
||||
.. versionchanged:: 2.2
|
||||
previously the `DECIMAL` typecaster and the specific time-related
|
||||
typecasters (`!PY*` and `!MX*`) were not exposed by the `extensions`
|
||||
module. In older versions they can be imported from the implementation
|
||||
|
|
|
@ -41,8 +41,8 @@ If you want to use a `!connection` subclass you can pass it as the
|
|||
Dictionary-like cursor
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The dict cursors allow to access to the retrieved records using an interface
|
||||
similar to the Python dictionaries instead of the tuples.
|
||||
The dict cursors allow access to the attributes of retrieved records
using an interface similar to Python dictionaries instead of tuples.
|
||||
|
||||
>>> dict_cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
|
||||
>>> dict_cur.execute("INSERT INTO test (num, data) VALUES(%s, %s)",
|
||||
|
@ -136,8 +136,8 @@ Logging cursor
|
|||
|
||||
.. _replication-objects:
|
||||
|
||||
Replication connection and cursor classes
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Replication support objects
|
||||
---------------------------
|
||||
|
||||
See :ref:`replication-support` for an introduction to the topic.
|
||||
|
||||
|
@ -270,7 +270,7 @@ The individual messages in the replication stream are represented by
|
|||
Replication slots are a feature of PostgreSQL server starting with
|
||||
version 9.4.
|
||||
|
||||
.. method:: start_replication(slot_name=None, slot_type=None, start_lsn=0, timeline=0, options=None, decode=False)
|
||||
.. method:: start_replication(slot_name=None, slot_type=None, start_lsn=0, timeline=0, options=None, decode=False, status_interval=10)
|
||||
|
||||
Start replication on the connection.
|
||||
|
||||
|
@ -288,6 +288,7 @@ The individual messages in the replication stream are represented by
|
|||
slot (not allowed with physical replication)
|
||||
:param decode: a flag indicating that unicode conversion should be
|
||||
performed on messages received from the server
|
||||
:param status_interval: time between feedback packets sent to the server
|
||||
|
||||
If a *slot_name* is specified, the slot must exist on the server and
|
||||
its type must match the replication type used.
|
||||
|
@ -328,6 +329,14 @@ The individual messages in the replication stream are represented by
|
|||
*This parameter should not be set with physical replication or with
|
||||
logical replication plugins that produce binary output.*
|
||||
|
||||
Replication stream should periodically send feedback to the database
|
||||
to prevent disconnect via timeout. Feedback is automatically sent when
|
||||
`read_message()` is called or during run of the `consume_stream()`.
|
||||
To specify the feedback interval use *status_interval* parameter.
|
||||
The value of this parameter must be set to at least 1 second, but
|
||||
it can have a fractional part.
|
||||
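
A hedged sketch of starting a logical stream with an explicit feedback
interval (not part of the diff; the ``DSN`` and slot name are made up and a
`~psycopg2.extras.LogicalReplicationConnection` is assumed):

.. code:: python

    import psycopg2
    import psycopg2.extras

    conn = psycopg2.connect(
        DSN, connection_factory=psycopg2.extras.LogicalReplicationConnection)
    cur = conn.cursor()
    cur.create_replication_slot('pytest', output_plugin='test_decoding')

    # ask for feedback packets to be sent to the server every 5 seconds
    cur.start_replication(slot_name='pytest', decode=True, status_interval=5.0)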
|
||||
|
||||
This function constructs a |START_REPLICATION|_ command and calls
|
||||
`start_replication_expert()` internally.
|
||||
|
||||
|
@ -336,10 +345,13 @@ The individual messages in the replication stream are represented by
|
|||
`read_message()` in case of :ref:`asynchronous connection
|
||||
<async-support>`.
|
||||
|
||||
.. versionchanged:: 2.8.3
|
||||
added the *status_interval* parameter.
|
||||
|
||||
.. |START_REPLICATION| replace:: :sql:`START_REPLICATION`
|
||||
.. _START_REPLICATION: https://www.postgresql.org/docs/current/static/protocol-replication.html
|
||||
|
||||
.. method:: start_replication_expert(command, decode=False)
|
||||
.. method:: start_replication_expert(command, decode=False, status_interval=10)
|
||||
|
||||
Start replication on the connection using provided
|
||||
|START_REPLICATION|_ command.
|
||||
|
@ -348,9 +360,13 @@ The individual messages in the replication stream are represented by
|
|||
`~psycopg2.sql.Composable` instance for dynamic generation.
|
||||
:param decode: a flag indicating that unicode conversion should be
|
||||
performed on messages received from the server.
|
||||
:param status_interval: time between feedback packets sent to the server
|
||||
|
||||
.. versionchanged:: 2.8.3
|
||||
added the *status_interval* parameter.
|
||||
|
||||
|
||||
.. method:: consume_stream(consume, keepalive_interval=10)
|
||||
.. method:: consume_stream(consume, keepalive_interval=None)
|
||||
|
||||
:param consume: a callable object with signature :samp:`consume({msg})`
|
||||
:param keepalive_interval: interval (in seconds) to send keepalive
|
||||
|
@ -373,14 +389,15 @@ The individual messages in the replication stream are represented by
|
|||
`ReplicationMessage` class. See `read_message()` for details about
|
||||
message decoding.
|
||||
|
||||
This method also sends keepalive messages to the server in case there
|
||||
were no new data from the server for the duration of
|
||||
*keepalive_interval* (in seconds). The value of this parameter must
|
||||
This method also sends feedback messages to the server every
|
||||
*keepalive_interval* (in seconds). The value of this parameter must
|
||||
be set to at least 1 second, but it can have a fractional part.
|
||||
If the *keepalive_interval* is not specified, the value of
|
||||
*status_interval* specified in the `start_replication()` or
|
||||
`start_replication_expert()` will be used.
|
||||
|
||||
After processing certain amount of messages the client should send a
|
||||
confirmation message to the server. This should be done by calling
|
||||
`send_feedback()` method on the corresponding replication cursor. A
|
||||
The client must confirm every processed message by calling
|
||||
`send_feedback()` method on the corresponding replication cursor. A
|
||||
reference to the cursor is provided in the `ReplicationMessage` as an
|
||||
attribute.
|
||||
|
||||
|
@ -393,9 +410,7 @@ The individual messages in the replication stream are represented by
|
|||
|
||||
def __call__(self, msg):
|
||||
self.process_message(msg.payload)
|
||||
|
||||
if self.should_send_feedback(msg):
|
||||
msg.cursor.send_feedback(flush_lsn=msg.data_start)
|
||||
msg.cursor.send_feedback(flush_lsn=msg.data_start)
|
||||
|
||||
consumer = LogicalStreamConsumer()
|
||||
cur.consume_stream(consumer)
|
||||
|
@ -408,12 +423,10 @@ The individual messages in the replication stream are represented by
|
|||
retains all the WAL segments that might be needed to stream the
|
||||
changes via all of the currently open replication slots.
|
||||
|
||||
On the other hand, it is not recommended to send confirmation
|
||||
after *every* processed message, since that will put an
|
||||
unnecessary load on network and the server. A possible strategy
|
||||
is to confirm after every COMMIT message.
|
||||
.. versionchanged:: 2.8.3
|
||||
changed the default value of the *keepalive_interval* parameter to `!None`.
|
||||
|
||||
.. method:: send_feedback(write_lsn=0, flush_lsn=0, apply_lsn=0, reply=False)
|
||||
.. method:: send_feedback(write_lsn=0, flush_lsn=0, apply_lsn=0, reply=False, force=False)
|
||||
|
||||
:param write_lsn: a LSN position up to which the client has written the data locally
|
||||
:param flush_lsn: a LSN position up to which the client has processed the
|
||||
|
@ -423,13 +436,21 @@ The individual messages in the replication stream are represented by
|
|||
has applied the changes (physical replication
|
||||
master-slave protocol only)
|
||||
:param reply: request the server to send back a keepalive message immediately
|
||||
:param force: force sending a feedback message regardless of status_interval timeout
|
||||
|
||||
Use this method to report to the server that all messages up to a
|
||||
certain LSN position have been processed on the client and may be
|
||||
discarded on the server.
|
||||
|
||||
This method can also be called with all default parameters' values to
|
||||
just send a keepalive message to the server.
|
||||
If the *reply* or *force* parameters are not set, this method will
|
||||
just update internal structures without sending the feedback message
|
||||
to the server. The library sends feedback message automatically
|
||||
when *status_interval* timeout is reached. For this to work, you must
|
||||
call `send_feedback()` on the same Cursor that you called `start_replication()`
|
||||
on (the one in `message.cursor`) or your feedback will be lost.
|
||||
|
||||
.. versionchanged:: 2.8.3
|
||||
added the *force* parameter.
|
||||
|
||||
Low-level replication cursor methods for :ref:`asynchronous connection
|
||||
<async-support>` operation.
|
||||
|
@ -463,9 +484,9 @@ The individual messages in the replication stream are represented by
|
|||
corresponding connection to block the process until there is more data
|
||||
from the server.
|
||||
|
||||
The server can send keepalive messages to the client periodically.
|
||||
Such messages are silently consumed by this method and are never
|
||||
reported to the caller.
|
||||
Last, but not least, this method sends feedback messages when
|
||||
*status_interval* timeout is reached or when keepalive message with
|
||||
reply request arrived from the server.
|
||||
|
||||
.. method:: fileno()
|
||||
|
||||
|
@ -481,6 +502,21 @@ The individual messages in the replication stream are represented by
|
|||
communication with the server (a data or keepalive message in either
|
||||
direction).
|
||||
|
||||
.. attribute:: feedback_timestamp
|
||||
|
||||
A `~datetime` object representing the timestamp at the moment when
|
||||
the last feedback message sent to the server.
|
||||
|
||||
.. versionadded:: 2.8.3
|
||||
|
||||
.. attribute:: wal_end
|
||||
|
||||
LSN position of the current end of WAL on the server at the
|
||||
moment of last data or keepalive message received from the
|
||||
server.
|
||||
|
||||
.. versionadded:: 2.8
|
||||
|
||||
An actual example of asynchronous operation might look like this::
|
||||
|
||||
from select import select
|
||||
|
@ -488,19 +524,18 @@ The individual messages in the replication stream are represented by
|
|||
|
||||
def consume(msg):
|
||||
# ...
|
||||
msg.cursor.send_feedback(flush_lsn=msg.data_start)
|
||||
|
||||
keepalive_interval = 10.0
|
||||
status_interval = 10.0
|
||||
while True:
|
||||
msg = cur.read_message()
|
||||
if msg:
|
||||
consume(msg)
|
||||
else:
|
||||
now = datetime.now()
|
||||
timeout = keepalive_interval - (now - cur.io_timestamp).total_seconds()
|
||||
timeout = status_interval - (now - cur.feedback_timestamp).total_seconds()
|
||||
try:
|
||||
sel = select([cur], [], [], max(0, timeout))
|
||||
if not any(sel):
|
||||
cur.send_feedback() # timed out, send keepalive message
|
||||
except InterruptedError:
|
||||
pass # recalculate timeout and continue
|
||||
|
||||
|
@ -531,8 +566,8 @@ JSON_ adaptation
|
|||
added |jsonb| support. In previous versions |jsonb| values are returned
|
||||
as strings. See :ref:`the FAQ <faq-jsonb-adapt>` for a workaround.
|
||||
|
||||
Psycopg can adapt Python objects to and from the PostgreSQL |pgjson|_ and
|
||||
|jsonb| types. With PostgreSQL 9.2 and following versions adaptation is
|
||||
Psycopg can adapt Python objects to and from the PostgreSQL |jsons|_
|
||||
types. With PostgreSQL 9.2 and following versions adaptation is
|
||||
available out-of-the-box. To use JSON data with previous database versions
|
||||
(either with the `9.1 json extension`__, but even if you want to convert text
|
||||
fields to JSON) you can use the `register_json()` function.
|
||||
|
@ -543,9 +578,10 @@ The Python :py:mod:`json` module is used by default to convert Python objects
|
|||
to JSON and to parse data from the database.
|
||||
|
||||
.. _JSON: https://www.json.org/
|
||||
.. |pgjson| replace:: :sql:`json`
|
||||
.. |json| replace:: :sql:`json`
|
||||
.. |jsonb| replace:: :sql:`jsonb`
|
||||
.. _pgjson: https://www.postgresql.org/docs/current/static/datatype-json.html
|
||||
.. |jsons| replace:: |json| and |jsonb|
|
||||
.. _jsons: https://www.postgresql.org/docs/current/static/datatype-json.html
|
||||
|
||||
In order to pass a Python object to the database as query argument you can use
|
||||
the `Json` adapter::
|
||||
|
@ -553,7 +589,7 @@ the `Json` adapter::
|
|||
curs.execute("insert into mytable (jsondata) values (%s)",
|
||||
[Json({'a': 100})])
|
||||
|
||||
Reading from the database, |pgjson| and |jsonb| values will be automatically
|
||||
Reading from the database, |json| and |jsonb| values will be automatically
|
||||
converted to Python objects.
|
||||
|
||||
.. note::
|
||||
|
@ -601,7 +637,7 @@ or you can subclass it overriding the `~Json.dumps()` method::
|
|||
|
||||
Customizing the conversion from PostgreSQL to Python can be done passing a
|
||||
custom `!loads()` function to `register_json()`. For the builtin data types
|
||||
(|pgjson| from PostgreSQL 9.2, |jsonb| from PostgreSQL 9.4) use
|
||||
(|json| from PostgreSQL 9.2, |jsonb| from PostgreSQL 9.4) use
|
||||
`register_default_json()` and `register_default_jsonb()`. For example, if you
|
||||
want to convert the float values from :sql:`json` into
|
||||
:py:class:`~decimal.Decimal` you can use::
|
||||
|
@ -609,6 +645,13 @@ want to convert the float values from :sql:`json` into
|
|||
loads = lambda x: json.loads(x, parse_float=Decimal)
|
||||
psycopg2.extras.register_json(conn, loads=loads)
|
||||
|
||||
Or, if you want to use an alternative JSON module implementation, such as the
|
||||
faster UltraJSON_, you can use::
|
||||
|
||||
psycopg2.extras.register_default_json(loads=ujson.loads, globally=True)
|
||||
psycopg2.extras.register_default_jsonb(loads=ujson.loads, globally=True)
|
||||
|
||||
.. _UltraJSON: https://pypi.org/project/ujson/
|
||||
|
||||
|
||||
.. autoclass:: Json
|
||||
|
@ -977,7 +1020,7 @@ Fast execution helpers
|
|||
|
||||
The current implementation of `~cursor.executemany()` is (using an extremely
|
||||
charitable understatement) not particularly performing. These functions can
|
||||
be used to speed up the repeated execution of a statement againts a set of
|
||||
be used to speed up the repeated execution of a statement against a set of
|
||||
parameters. By reducing the number of server roundtrips the performance can be
|
||||
`orders of magnitude better`__ than using `!executemany()`.
|
||||
|
||||
|
@ -986,6 +1029,14 @@ parameters. By reducing the number of server roundtrips the performance can be
|
|||
|
||||
.. autofunction:: execute_batch
|
||||
|
||||
.. code:: python
|
||||
|
||||
>>> nums = ((1,), (5,), (10,))
|
||||
>>> execute_batch(cur, "INSERT INTO test (num) VALUES (%s)", nums)
|
||||
|
||||
>>> tuples = ((123, "foo"), (42, "bar"), (23, "baz"))
|
||||
>>> execute_batch(cur, "INSERT INTO test (num, data) VALUES (%s, %s)", tuples)
|
||||
|
||||
.. versionadded:: 2.7
|
||||
|
||||
.. note::
|
||||
|
|
|
@ -7,6 +7,30 @@ Here are a few gotchas you may encounter using `psycopg2`. Feel free to
|
|||
suggest new entries!
|
||||
|
||||
|
||||
Meta
|
||||
----
|
||||
|
||||
.. _faq-question:
|
||||
.. cssclass:: faq
|
||||
|
||||
How do I ask a question?
|
||||
- Have you first checked if your question is answered already in the
|
||||
documentation?
|
||||
|
||||
- If your question is about installing psycopg, have you checked the
|
||||
:ref:`install FAQ <faq-compile>` and the :ref:`install docs
|
||||
<installation>`?
|
||||
|
||||
- Have you googled for your error message?
|
||||
|
||||
- If you haven't found an answer yet, please write to the `Mailing List`_.
|
||||
|
||||
- If you haven't found a bug, DO NOT write to the bug tracker to ask
|
||||
questions. You will only get piro grumpy.
|
||||
|
||||
.. _mailing list: https://www.postgresql.org/list/psycopg/
|
||||
|
||||
|
||||
.. _faq-transactions:
|
||||
|
||||
Problems with transactions handling
|
||||
|
@ -156,7 +180,7 @@ Psycopg automatically converts PostgreSQL :sql:`json` data into Python objects.
|
|||
Psycopg converts :sql:`json` values into Python objects but :sql:`jsonb` values are returned as strings. Can :sql:`jsonb` be converted automatically?
|
||||
Automatic conversion of :sql:`jsonb` values is supported from Psycopg
|
||||
release 2.5.4. For previous versions you can register the :sql:`json`
|
||||
typecaster on the :sql:`jsonb` oids (which are known and not suppsed to
|
||||
typecaster on the :sql:`jsonb` oids (which are known and not supposed to
|
||||
change in future PostgreSQL versions)::
|
||||
|
||||
psycopg2.extras.register_json(oid=3802, array_oid=3807, globally=True)
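After that registration, a hypothetical :sql:`jsonb` value would be returned
already parsed::

    >>> cur.execute("""select '{"a": 1}'::jsonb""")
    >>> cur.fetchone()[0]
    {'a': 1}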
|
||||
|
@ -247,7 +271,7 @@ When should I save and re-use a connection as opposed to creating a new one as n
|
|||
|
||||
What are the advantages or disadvantages of using named cursors?
|
||||
The only disadvantage is that they use up resources on the server and
|
||||
that there is a little overhead because a at least two queries (one to
|
||||
that there is a little overhead because at least two queries (one to
|
||||
create the cursor and one to fetch the initial result set) are issued to
|
||||
the backend. The advantage is that data is fetched one chunk at a time:
|
||||
using small `~cursor.fetchmany()` values it is possible to use very
|
||||
|
@ -268,7 +292,7 @@ How do I interrupt a long-running query in an interactive shell?
|
|||
can handle a :kbd:`Ctrl-C` correctly. For previous versions, you can use
|
||||
`this implementation`__.
|
||||
|
||||
.. __: http://initd.org/psycopg/articles/2014/07/20/cancelling-postgresql-statements-python/
|
||||
.. __: https://www.psycopg.org/articles/2014/07/20/cancelling-postgresql-statements-python/
|
||||
|
||||
.. code-block:: pycon
|
||||
|
||||
|
@ -287,15 +311,24 @@ How do I interrupt a long-running query in an interactive shell?
|
|||
|
||||
.. _faq-compile:
|
||||
|
||||
Problems compiling and deploying psycopg2
|
||||
-----------------------------------------
|
||||
Problems compiling and installing psycopg2
|
||||
------------------------------------------
|
||||
|
||||
.. _faq-wheels:
|
||||
.. cssclass:: faq
|
||||
|
||||
Psycopg 2.8 fails to install, Psycopg 2.7 was working fine.
|
||||
With Psycopg 2.7 you were installing binary packages, but they have proven
|
||||
unreliable so now you have to install them explicitly using the
|
||||
``psycopg2-binary`` package. See :ref:`binary-packages` for all the
|
||||
details.
|
||||
|
||||
.. _faq-python-h:
|
||||
.. cssclass:: faq
|
||||
|
||||
I can't compile `!psycopg2`: the compiler says *error: Python.h: No such file or directory*. What am I missing?
|
||||
You need to install a Python development package: it is usually called
|
||||
``python-dev``.
|
||||
``python-dev`` or ``python3-dev`` according to your Python version.
|
||||
|
||||
|
||||
.. _faq-libpq-fe-h:
|
||||
|
|
|
@ -23,7 +23,7 @@ extended and customized thanks to a flexible :ref:`objects adaptation system
|
|||
Psycopg 2 is both Unicode and Python 3 friendly.
|
||||
|
||||
|
||||
.. _Psycopg: http://initd.org/psycopg/
|
||||
.. _Psycopg: https://psycopg.org/
|
||||
.. _PostgreSQL: https://www.postgresql.org/
|
||||
.. _Python: https://www.python.org/
|
||||
.. _libpq: https://www.postgresql.org/docs/current/static/libpq.html
|
||||
|
@ -57,6 +57,7 @@ Psycopg 2 is both Unicode and Python 3 friendly.
|
|||
.. rubric:: Indices and tables
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
||||
* :ref:`search`
|
||||
|
||||
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
Introduction
|
||||
.. _installation:
|
||||
|
||||
Installation
|
||||
============
|
||||
|
||||
.. sectionauthor:: Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
|
@ -6,20 +8,117 @@ Introduction
|
|||
Psycopg is a PostgreSQL_ adapter for the Python_ programming language. It is a
|
||||
wrapper for the libpq_, the official PostgreSQL client library.
|
||||
|
||||
The `psycopg2` package is the current mature implementation of the adapter: it
|
||||
is a C extension and as such it is only compatible with CPython_. If you want
|
||||
to use Psycopg on a different Python implementation (PyPy, Jython, IronPython)
|
||||
there is an experimental `porting of Psycopg for Ctypes`__, but it is not as
|
||||
mature as the C implementation yet.
|
||||
|
||||
.. _PostgreSQL: https://www.postgresql.org/
|
||||
.. _Python: https://www.python.org/
|
||||
.. _libpq: https://www.postgresql.org/docs/current/static/libpq.html
|
||||
.. _CPython: https://en.wikipedia.org/wiki/CPython
|
||||
.. _Ctypes: https://docs.python.org/library/ctypes.html
|
||||
.. __: https://github.com/mvantellingen/psycopg2-ctypes
|
||||
|
||||
|
||||
.. index::
|
||||
single: Install; from PyPI
|
||||
single: Install; wheel
|
||||
single: Wheel
|
||||
|
||||
.. _binary-packages:
|
||||
|
||||
Quick Install
|
||||
-------------
|
||||
|
||||
For most operating systems, the quickest way to install Psycopg is using the
|
||||
wheel_ package available on PyPI_:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ pip install psycopg2-binary
|
||||
|
||||
This will install a pre-compiled binary version of the module which does not
|
||||
require the build or runtime prerequisites described below. Make sure to use
|
||||
an up-to-date version of :program:`pip` (you can upgrade it using something
|
||||
like ``pip install -U pip``).
|
||||
|
||||
You may then import the ``psycopg2`` package, as usual:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
import psycopg2
|
||||
|
||||
# Connect to your postgres DB
|
||||
conn = psycopg2.connect("dbname=test user=postgres")
|
||||
|
||||
# Open a cursor to perform database operations
|
||||
cur = conn.cursor()
|
||||
|
||||
# Execute a query
|
||||
cur.execute("SELECT * FROM my_data")
|
||||
|
||||
# Retrieve query results
|
||||
records = cur.fetchall()
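# When you modify data, make the changes persistent with conn.commit();
# in any case release the resources once done (a sketch, error handling omitted)
conn.commit()
cur.close()
conn.close()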
|
||||
|
||||
.. _PyPI: https://pypi.org/project/psycopg2-binary/
|
||||
.. _wheel: https://pythonwheels.com/
|
||||
|
||||
|
||||
psycopg vs psycopg-binary
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The ``psycopg2-binary`` package is meant for beginners to start playing
|
||||
with Python and PostgreSQL without the need to meet the build
|
||||
requirements.
|
||||
|
||||
If you are the maintainer of a published package depending on `!psycopg2`
|
||||
you shouldn't use ``psycopg2-binary`` as a module dependency. **For
|
||||
production use you are advised to use the source distribution.**
|
||||
|
||||
The binary packages come with their own versions of a few C libraries,
|
||||
among which ``libpq`` and ``libssl``, which will be used regardless of other
|
||||
libraries available on the client: upgrading the system libraries will not
|
||||
upgrade the libraries used by `!psycopg2`. Please build `!psycopg2` from
|
||||
source if you want to maintain binary upgradeability.
|
||||
|
||||
.. warning::
|
||||
|
||||
The `!psycopg2` wheel package comes packaged, among others, with its
|
||||
own ``libssl`` binary. This may create conflicts with other extension
|
||||
modules binding with ``libssl`` as well, for instance with the Python
|
||||
`ssl` module: in some cases, under concurrency, the interaction between
|
||||
the two libraries may result in a segfault. In case of doubts you are
|
||||
advised to use a package built from source.
|
||||
|
||||
|
||||
.. index::
|
||||
single: Install; disable wheel
|
||||
single: Wheel; disable
|
||||
|
||||
.. _disable-wheel:
|
||||
|
||||
Change in binary packages between Psycopg 2.7 and 2.8
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
In version 2.7.x, :command:`pip install psycopg2` would have tried to install
|
||||
automatically the binary package of Psycopg. Because of concurrency problems
|
||||
binary packages have displayed, ``psycopg2-binary`` has become a separate
|
||||
package, and from 2.8 it has become the only way to install the binary
|
||||
package.
|
||||
|
||||
If you are using Psycopg 2.7 and you want to disable the use of wheel binary
|
||||
packages, relying on the system libraries available on your client, you
|
||||
can use the :command:`pip` |--no-binary option|__, e.g.:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ pip install --no-binary :all: psycopg2
|
||||
|
||||
.. |--no-binary option| replace:: ``--no-binary`` option
|
||||
.. __: https://pip.pypa.io/en/stable/reference/pip_install/#install-no-binary
|
||||
|
||||
which can be specified in your :file:`requirements.txt` files too, e.g. use:
|
||||
|
||||
.. code-block:: none
|
||||
|
||||
psycopg2>=2.7,<2.8 --no-binary psycopg2
|
||||
|
||||
to use the last bugfix release of the `!psycopg2` 2.7 package, specifying to
|
||||
always compile it from source. Of course in this case you will have to meet
|
||||
the :ref:`build prerequisites <build-prerequisites>`.
|
||||
|
||||
|
||||
.. index::
|
||||
single: Prerequisites
|
||||
|
@ -32,11 +131,17 @@ The current `!psycopg2` implementation supports:
|
|||
..
|
||||
NOTE: keep consistent with setup.py and the /features/ page.
|
||||
|
||||
- Python version 2.7
|
||||
- Python 3 versions from 3.4 to 3.7
|
||||
- PostgreSQL server versions from 7.4 to 11
|
||||
- Python versions from 3.8 to 3.13
|
||||
- PostgreSQL server versions from 7.4 to 17
|
||||
- PostgreSQL client library version from 9.1
|
||||
|
||||
.. note::
|
||||
|
||||
Not all psycopg2 versions support all the Python versions listed above.
|
||||
|
||||
Please see the :ref:`release notes <news>` to verify when the support for
|
||||
a new Python version was added and when the support for an old Python
|
||||
version was removed.
|
||||
|
||||
|
||||
.. _build-prerequisites:
|
||||
|
@ -55,8 +160,9 @@ it from sources you will need:
|
|||
- A C compiler.
|
||||
|
||||
- The Python header files. They are usually installed in a package such as
|
||||
**python-dev**. A message such as *error: Python.h: No such file or
|
||||
directory* is an indication that the Python headers are missing.
|
||||
**python-dev** or **python3-dev**. A message such as *error: Python.h: No
|
||||
such file or directory* is an indication that the Python headers are
|
||||
missing.
|
||||
|
||||
- The libpq header files. They are usually installed in a package such as
|
||||
**libpq-dev**. If you get an *error: libpq-fe.h: No such file or directory*
|
||||
|
@ -99,7 +205,7 @@ self-contained wheel package, it will need the libpq_ library at runtime
|
|||
(usually distributed in a ``libpq.so`` or ``libpq.dll`` file). `!psycopg2`
|
||||
relies on the host OS to find the library: if the library is installed in a
|
||||
standard location there is usually no problem; if the library is in a
|
||||
non-standard location you will have to tell somehow Psycopg how to find it,
|
||||
non-standard location you will have to tell Psycopg how to find it,
|
||||
which is OS-dependent (for instance setting a suitable
|
||||
:envvar:`LD_LIBRARY_PATH` on Linux).
|
||||
|
||||
|
@ -120,96 +226,6 @@ which is OS-dependent (for instance setting a suitable
|
|||
to connect to.
|
||||
|
||||
|
||||
|
||||
.. index::
|
||||
single: Install; from PyPI
|
||||
single: Install; wheel
|
||||
single: Wheel
|
||||
|
||||
Binary install from PyPI
|
||||
------------------------
|
||||
|
||||
`!psycopg2` is also `available on PyPI`__ in the form of wheel_ packages for
|
||||
the most common platform (Linux, OSX, Windows): this should make you able to
|
||||
install a binary version of the module, not requiring the above build or
|
||||
runtime prerequisites.
|
||||
|
||||
.. note::
|
||||
|
||||
The ``-binary`` package is meant for beginners to start playing with
|
||||
Python and PostgreSQL without the need to meet the build requirements.
|
||||
If you are the maintainer of a publish package depending on `!psycopg2`
|
||||
you shouldn't use ``psycopg2-binary`` as a module dependency. For
|
||||
production use you are advised to use the source distribution.
|
||||
|
||||
|
||||
Make sure to use an up-to-date version of :program:`pip` (you can upgrade it
|
||||
using something like ``pip install -U pip``), then you can run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ pip install psycopg2-binary
|
||||
|
||||
.. __: PyPI-binary_
|
||||
.. _PyPI-binary: https://pypi.org/project/psycopg2-binary/
|
||||
.. _wheel: https://pythonwheels.com/
|
||||
|
||||
.. note::
|
||||
|
||||
The binary packages come with their own versions of a few C libraries,
|
||||
among which ``libpq`` and ``libssl``, which will be used regardless of other
|
||||
libraries available on the client: upgrading the system libraries will not
|
||||
upgrade the libraries used by `!psycopg2`. Please build `!psycopg2` from
|
||||
source if you want to maintain binary upgradeability.
|
||||
|
||||
.. warning::
|
||||
|
||||
The `!psycopg2` wheel package comes packaged, among the others, with its
|
||||
own ``libssl`` binary. This may create conflicts with other extension
|
||||
modules binding with ``libssl`` as well, for instance with the Python
|
||||
`ssl` module: in some cases, under concurrency, the interaction between
|
||||
the two libraries may result in a segfault. In case of doubts you are
|
||||
advised to use a package built from source.
|
||||
|
||||
|
||||
|
||||
.. index::
|
||||
single: Install; disable wheel
|
||||
single: Wheel; disable
|
||||
|
||||
.. _disable-wheel:
|
||||
|
||||
Disabling wheel packages for Psycopg 2.7
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
In version 2.7.x, `pip install psycopg2` would have tried to install the wheel
|
||||
binary package of Psycopg. Because of the problems the wheel package have
|
||||
displayed, `psycopg2-binary` has become a separate package, and from 2.8 it
|
||||
has become the only way to install the binary package.
|
||||
|
||||
If you are using psycopg 2.7 and you want to disable the use of wheel binary
|
||||
packages, relying on the system libraries available on your client, you
|
||||
can use the :command:`pip` |--no-binary option|__, e.g.:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ pip install --no-binary :all: psycopg2
|
||||
|
||||
.. |--no-binary option| replace:: ``--no-binary`` option
|
||||
.. __: https://pip.pypa.io/en/stable/reference/pip_install/#install-no-binary
|
||||
|
||||
which can be specified in your :file:`requirements.txt` files too, e.g. use:
|
||||
|
||||
.. code-block:: none
|
||||
|
||||
psycopg2>=2.7,<2.8 --no-binary psycopg2
|
||||
|
||||
to use the last bugfix release of the `!psycopg2` 2.7 package, specifying to
|
||||
always compile it from source. Of course in this case you will have to meet
|
||||
the :ref:`build prerequisites <build-prerequisites>`.
|
||||
|
||||
|
||||
|
||||
.. index::
|
||||
single: setup.py
|
||||
single: setup.cfg
|
||||
|
@ -221,7 +237,6 @@ If you have less standard requirements such as:
|
|||
|
||||
- creating a :ref:`debug build <debug-build>`,
|
||||
- using :program:`pg_config` not in the :envvar:`PATH`,
|
||||
- supporting ``mx.DateTime``,
|
||||
|
||||
then take a look at the ``setup.cfg`` file.
|
||||
|
||||
|
@ -250,7 +265,8 @@ In case of problems, Psycopg can be configured to emit detailed debug
|
|||
messages, which can be very useful for diagnostics and to report a bug. In
|
||||
order to create a debug package:
|
||||
|
||||
- `Download`__ and unpack the Psycopg source package.
|
||||
- `Download`__ and unpack the Psycopg *source package* (the ``.tar.gz``
|
||||
package).
|
||||
|
||||
- Edit the ``setup.cfg`` file adding the ``PSYCOPG_DEBUG`` flag to the
|
||||
``define`` option.
|
||||
|
@ -267,9 +283,32 @@ order to create a debug package:
|
|||
one you just compiled and not e.g. the system one): you will have a copious
|
||||
stream of informations printed on stderr.
|
||||
|
||||
.. __: http://initd.org/psycopg/download/
|
||||
.. __: https://pypi.org/project/psycopg2/#files
|
||||
|
||||
|
||||
Non-standard Python Implementation
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The `psycopg2` package is the current mature implementation of the adapter: it
|
||||
is a C extension and as such it is only compatible with CPython_. If you want
|
||||
to use Psycopg on a different Python implementation (PyPy, Jython, IronPython)
|
||||
there are a couple of alternatives:
|
||||
|
||||
- a `Ctypes port`__, but it is not as mature as the C implementation yet
|
||||
and it is not as feature-complete;
|
||||
|
||||
- a `CFFI port`__ which is currently more used and reported more efficient on
|
||||
PyPy, but please be careful of its version numbers because they are not
|
||||
aligned to the official psycopg2 ones and some features may differ.
|
||||
|
||||
.. _PostgreSQL: https://www.postgresql.org/
|
||||
.. _Python: https://www.python.org/
|
||||
.. _libpq: https://www.postgresql.org/docs/current/static/libpq.html
|
||||
.. _CPython: https://en.wikipedia.org/wiki/CPython
|
||||
.. _Ctypes: https://docs.python.org/library/ctypes.html
|
||||
.. __: https://github.com/mvantellingen/psycopg2-ctypes
|
||||
.. __: https://github.com/chtd/psycopg2cffi
|
||||
|
||||
|
||||
.. index::
|
||||
single: tests
|
||||
|
@ -298,7 +337,6 @@ setting the environment variables:
|
|||
The database should already exist before running the tests.
|
||||
|
||||
|
||||
|
||||
.. _other-problems:
|
||||
|
||||
If you still have problems
|
||||
|
@ -322,5 +360,5 @@ Try the following. *In order:*
|
|||
ever and about the quality time you have wasted figuring out the correct
|
||||
:envvar:`ARCHFLAGS`. Especially useful from the Starbucks near you.
|
||||
|
||||
.. _mailing list: https://lists.postgresql.org/mj/mj_wwwusr?func=lists-long-full&extra=psycopg
|
||||
.. _mailing list: https://www.postgresql.org/list/psycopg/
|
||||
.. _bug tracker: https://github.com/psycopg/psycopg2/issues
|
||||
|
|
|
@ -168,7 +168,7 @@ available through the following exceptions:
|
|||
|
||||
>>> e.pgcode
|
||||
'42P01'
|
||||
>>> print e.pgerror
|
||||
>>> print(e.pgerror)
|
||||
ERROR: relation "barf" does not exist
|
||||
LINE 1: SELECT * FROM barf
|
||||
^
|
||||
|
@ -184,7 +184,7 @@ available through the following exceptions:
|
|||
|
||||
>>> try:
|
||||
... cur.execute("SELECT * FROM barf")
|
||||
... except psycopg2.Error, e:
|
||||
... except psycopg2.Error as e:
|
||||
... pass
|
||||
|
||||
>>> e.diag.severity
|
||||
|
|
|
@ -2,6 +2,8 @@
|
|||
single: Release notes
|
||||
single: News
|
||||
|
||||
.. _news:
|
||||
|
||||
Release notes
|
||||
=============
|
||||
|
||||
|
|
|
@ -33,7 +33,7 @@ name should be escaped using `~psycopg2.extensions.quote_ident()`::
|
|||
# This works, but it is not optimal
|
||||
table_name = 'my_table'
|
||||
cur.execute(
|
||||
"insert into %s values (%%s, %%s)" % ext.quote_ident(table_name),
|
||||
"insert into %s values (%%s, %%s)" % ext.quote_ident(table_name, cur),
|
||||
[10, 20])
|
||||
|
||||
This is now safe, but it is somewhat ad hoc. In case, for some reason, it is
|
||||
|
@ -55,10 +55,53 @@ from the query parameters::
|
|||
.format(sql.Identifier('my_table')),
|
||||
[10, 20])
|
||||
|
||||
The objects exposed by the `!sql` module can be used to compose a query as a
|
||||
Python string (using the `~Composable.as_string()` method) or passed directly
|
||||
to cursor methods such as `~cursor.execute()`, `~cursor.executemany()`,
|
||||
`~cursor.copy_expert()`.
|
||||
|
||||
Module usage
|
||||
------------
|
||||
|
||||
Usually you should express the template of your query as an `SQL` instance
|
||||
with `{}`\-style placeholders and use `~SQL.format()` to merge the variable
|
||||
parts into them, all of which must be `Composable` subclasses. You can still
|
||||
have `%s`\ -style placeholders in your query and pass values to
|
||||
`~cursor.execute()`: such value placeholders will be untouched by
|
||||
`!format()`::
|
||||
|
||||
query = sql.SQL("select {field} from {table} where {pkey} = %s").format(
|
||||
field=sql.Identifier('my_name'),
|
||||
table=sql.Identifier('some_table'),
|
||||
pkey=sql.Identifier('id'))
|
||||
|
||||
The resulting object is meant to be passed directly to cursor methods such as
|
||||
`~cursor.execute()`, `~cursor.executemany()`, `~cursor.copy_expert()`, but can
|
||||
also be used to compose a query as a Python string, using the
|
||||
`~Composable.as_string()` method::
|
||||
|
||||
cur.execute(query, (42,))
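The same object can also be rendered as a string for inspection or logging,
passing a connection or cursor to provide the quoting context (the output
below is indicative)::

    >>> print(query.as_string(conn))
    select "my_name" from "some_table" where "id" = %s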
|
||||
|
||||
If part of your query is a variable sequence of arguments, such as a
|
||||
comma-separated list of field names, you can use the `SQL.join()` method to
|
||||
pass them to the query::
|
||||
|
||||
query = sql.SQL("select {fields} from {table}").format(
|
||||
fields=sql.SQL(',').join([
|
||||
sql.Identifier('field1'),
|
||||
sql.Identifier('field2'),
|
||||
sql.Identifier('field3'),
|
||||
]),
|
||||
table=sql.Identifier('some_table'))
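Rendered with `!as_string()`, such a query would produce something like::

    >>> print(query.as_string(conn))
    select "field1","field2","field3" from "some_table"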
|
||||
|
||||
|
||||
`!sql` objects
|
||||
--------------
|
||||
|
||||
The `!sql` objects are in the following inheritance hierarchy:
|
||||
|
||||
| `Composable`: the base class exposing the common interface
|
||||
| ``|__`` `SQL`: a literal snippet of an SQL query
|
||||
| ``|__`` `Identifier`: a PostgreSQL identifier or dot-separated sequence of identifiers
|
||||
| ``|__`` `Literal`: a value hardcoded into a query
|
||||
| ``|__`` `Placeholder`: a `%s`\ -style placeholder whose value will be added later e.g. by `~cursor.execute()`
|
||||
| ``|__`` `Composed`: a sequence of `!Composable` instances.
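For example, `!Placeholder` together with `!SQL.join()` can generate a dynamic
:sql:`INSERT` statement; a sketch (table and field names are made up)::

    names = ['foo', 'bar', 'baz']

    query = sql.SQL("insert into my_table ({}) values ({})").format(
        sql.SQL(', ').join(map(sql.Identifier, names)),
        sql.SQL(', ').join(sql.Placeholder() * len(names)))

    cur.execute(query, [10, 20, 30])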
|
||||
|
||||
|
||||
.. autoclass:: Composable
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
extension
|
||||
~~~~~~~~~
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
sql role
|
||||
~~~~~~~~
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
ticket role
|
||||
~~~~~~~~~~~
|
||||
|
|
57
doc/src/tools/make_sqlstate_docs.py
Normal file
|
@ -0,0 +1,57 @@
|
|||
#!/usr/bin/env python
|
||||
"""Create the docs table of the sqlstate errors.
|
||||
"""
|
||||
|
||||
|
||||
import re
|
||||
import sys
|
||||
from collections import namedtuple
|
||||
|
||||
from psycopg2._psycopg import sqlstate_errors
|
||||
|
||||
|
||||
def main():
|
||||
sqlclasses = {}
|
||||
clsfile = sys.argv[1]
|
||||
with open(clsfile) as f:
|
||||
for l in f:
|
||||
m = re.match(r'/\* Class (..) - (.+) \*/', l)
|
||||
if m is not None:
|
||||
sqlclasses[m.group(1)] = m.group(2)
|
||||
|
||||
Line = namedtuple('Line', 'colstate colexc colbase sqlstate')
|
||||
|
||||
lines = [Line('SQLSTATE', 'Exception', 'Base exception', None)]
|
||||
for k in sorted(sqlstate_errors):
|
||||
exc = sqlstate_errors[k]
|
||||
lines.append(Line(
|
||||
f"``{k}``", f"`!{exc.__name__}`",
|
||||
f"`!{get_base_exception(exc).__name__}`", k))
|
||||
|
||||
widths = [max(len(l[c]) for l in lines) for c in range(3)]
|
||||
h = Line(*(['=' * w for w in widths] + [None]))
|
||||
lines.insert(0, h)
|
||||
lines.insert(2, h)
|
||||
lines.append(h)
|
||||
|
||||
h1 = '-' * (sum(widths) + len(widths) - 1)
|
||||
sqlclass = None
|
||||
for l in lines:
|
||||
cls = l.sqlstate[:2] if l.sqlstate else None
|
||||
if cls and cls != sqlclass:
|
||||
print(f"**Class {cls}**: {sqlclasses[cls]}")
|
||||
print(h1)
|
||||
sqlclass = cls
|
||||
|
||||
print("%-*s %-*s %-*s" % (
|
||||
widths[0], l.colstate, widths[1], l.colexc, widths[2], l.colbase))
|
||||
|
||||
|
||||
def get_base_exception(exc):
|
||||
for cls in exc.__mro__:
|
||||
if cls.__module__ == 'psycopg2':
|
||||
return cls
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
|
@ -1,61 +0,0 @@
|
|||
#! /usr/bin/env python
|
||||
"""A script to stitch together the generated text files in the correct order.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def main():
|
||||
if len(sys.argv) != 3:
|
||||
sys.stderr.write("usage: %s index.rst text-dir\n")
|
||||
return 2
|
||||
|
||||
_, index, txt_dir = sys.argv
|
||||
|
||||
for fb in iter_file_base(index):
|
||||
emit(fb, txt_dir)
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def iter_file_base(fn):
|
||||
f = open(fn)
|
||||
|
||||
while not next(f).startswith('.. toctree'):
|
||||
pass
|
||||
while next(f).strip().startswith(':'):
|
||||
pass
|
||||
|
||||
yield os.path.splitext(os.path.basename(fn))[0]
|
||||
|
||||
n = 0
|
||||
while True:
|
||||
line = next(f)
|
||||
if line.isspace():
|
||||
continue
|
||||
if line.startswith(".."):
|
||||
break
|
||||
n += 1
|
||||
yield line.strip()
|
||||
|
||||
f.close()
|
||||
|
||||
if n < 5:
|
||||
# maybe format changed?
|
||||
raise Exception("Not enough files found. Format change in index.rst?")
|
||||
|
||||
|
||||
def emit(basename, txt_dir):
|
||||
f = open(os.path.join(txt_dir, basename + ".txt"))
|
||||
for line in f:
|
||||
line = line.replace("``", "'")
|
||||
sys.stdout.write(line)
|
||||
f.close()
|
||||
|
||||
# some space between sections
|
||||
sys.stdout.write("\n\n")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
|
@ -5,6 +5,10 @@
|
|||
|
||||
.. module:: psycopg2.tz
|
||||
|
||||
.. deprecated:: 2.9
|
||||
The module will be dropped in psycopg 2.10. Use `datetime.timezone`
|
||||
instead.
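For example, a fixed UTC+1 offset previously built as
`!FixedOffsetTimezone(offset=60)` can be expressed with the standard library
alone (a sketch)::

    import datetime

    tz = datetime.timezone(datetime.timedelta(minutes=60))
    dt = datetime.datetime(2010, 1, 1, 10, 30, 45, tzinfo=tz)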
|
||||
|
||||
This module holds two different tzinfo implementations that can be used as the
|
||||
`tzinfo` argument to `~datetime.datetime` constructors, directly passed to
|
||||
Psycopg functions or used to set the `cursor.tzinfo_factory` attribute in
|
||||
|
|
|
@ -407,7 +407,7 @@ defined on the database connection (the `PostgreSQL encoding`__, available in
|
|||
`connection.encoding`, is translated into a `Python encoding`__ using the
|
||||
`~psycopg2.extensions.encodings` mapping)::
|
||||
|
||||
>>> print u, type(u)
|
||||
>>> print(u, type(u))
|
||||
àèìòù€ <type 'unicode'>
|
||||
|
||||
>>> cur.execute("INSERT INTO test (num, data) VALUES (%s,%s);", (74, u))
|
||||
|
@ -418,19 +418,19 @@ defined on the database connection (the `PostgreSQL encoding`__, available in
|
|||
When reading data from the database, in Python 2 the strings returned are
|
||||
usually 8 bit `!str` objects encoded in the database client encoding::
|
||||
|
||||
>>> print conn.encoding
|
||||
>>> print(conn.encoding)
|
||||
UTF8
|
||||
|
||||
>>> cur.execute("SELECT data FROM test WHERE num = 74")
|
||||
>>> x = cur.fetchone()[0]
|
||||
>>> print x, type(x), repr(x)
|
||||
>>> print(x, type(x), repr(x))
|
||||
àèìòù€ <type 'str'> '\xc3\xa0\xc3\xa8\xc3\xac\xc3\xb2\xc3\xb9\xe2\x82\xac'
|
||||
|
||||
>>> conn.set_client_encoding('LATIN9')
|
||||
|
||||
>>> cur.execute("SELECT data FROM test WHERE num = 74")
|
||||
>>> x = cur.fetchone()[0]
|
||||
>>> print type(x), repr(x)
|
||||
>>> print(type(x), repr(x))
|
||||
<type 'str'> '\xe0\xe8\xec\xf2\xf9\xa4'
|
||||
|
||||
In Python 3 instead the strings are automatically *decoded* in the connection
|
||||
|
@ -442,7 +442,7 @@ In Python 2 you must register a :ref:`typecaster
|
|||
|
||||
>>> cur.execute("SELECT data FROM test WHERE num = 74")
|
||||
>>> x = cur.fetchone()[0]
|
||||
>>> print x, type(x), repr(x)
|
||||
>>> print(x, type(x), repr(x))
|
||||
àèìòù€ <type 'unicode'> u'\xe0\xe8\xec\xf2\xf9\u20ac'
|
||||
|
||||
In the above example, the `~psycopg2.extensions.UNICODE` typecaster is
|
||||
|
@ -540,7 +540,6 @@ or `!memoryview` (in Python 3).
|
|||
single: Date objects; Adaptation
|
||||
single: Time objects; Adaptation
|
||||
single: Interval objects; Adaptation
|
||||
single: mx.DateTime; Adaptation
|
||||
|
||||
.. _adapt-date:
|
||||
|
||||
|
@ -550,8 +549,7 @@ Date/Time objects adaptation
|
|||
Python builtin `~datetime.datetime`, `~datetime.date`,
|
||||
`~datetime.time`, `~datetime.timedelta` are converted into PostgreSQL's
|
||||
:sql:`timestamp[tz]`, :sql:`date`, :sql:`time[tz]`, :sql:`interval` data types.
|
||||
Time zones are supported too. The Egenix `mx.DateTime`_ objects are adapted
|
||||
the same way::
|
||||
Time zones are supported too.
|
||||
|
||||
>>> dt = datetime.datetime.now()
|
||||
>>> dt
|
||||
|
@ -576,29 +574,39 @@ Time zones handling
|
|||
'''''''''''''''''''
|
||||
|
||||
The PostgreSQL type :sql:`timestamp with time zone` (a.k.a.
|
||||
:sql:`timestamptz`) is converted into Python `~datetime.datetime` objects with
|
||||
a `~datetime.datetime.tzinfo` attribute set to a
|
||||
`~psycopg2.tz.FixedOffsetTimezone` instance.
|
||||
:sql:`timestamptz`) is converted into Python `~datetime.datetime` objects.
|
||||
|
||||
>>> cur.execute("SET TIME ZONE 'Europe/Rome';") # UTC + 1 hour
|
||||
>>> cur.execute("SELECT '2010-01-01 10:30:45'::timestamptz;")
|
||||
>>> cur.fetchone()[0].tzinfo
|
||||
psycopg2.tz.FixedOffsetTimezone(offset=60, name=None)
|
||||
>>> cur.execute("SET TIME ZONE 'Europe/Rome'") # UTC + 1 hour
|
||||
>>> cur.execute("SELECT '2010-01-01 10:30:45'::timestamptz")
|
||||
>>> cur.fetchone()[0]
|
||||
datetime.datetime(2010, 1, 1, 10, 30, 45,
|
||||
tzinfo=datetime.timezone(datetime.timedelta(seconds=3600)))
|
||||
|
||||
Note that only time zones with an integer number of minutes are supported:
|
||||
this is a limitation of the Python `datetime` module. A few historical time
|
||||
zones had seconds in the UTC offset: these time zones will have the offset
|
||||
rounded to the nearest minute, with an error of up to 30 seconds.
|
||||
.. note::
|
||||
|
||||
>>> cur.execute("SET TIME ZONE 'Asia/Calcutta';") # offset was +5:53:20
|
||||
>>> cur.execute("SELECT '1930-01-01 10:30:45'::timestamptz;")
|
||||
>>> cur.fetchone()[0].tzinfo
|
||||
psycopg2.tz.FixedOffsetTimezone(offset=353, name=None)
|
||||
Before Python 3.7, the `datetime` module only supported timezones with an
|
||||
integer number of minutes. A few historical time zones had seconds in the
|
||||
UTC offset: these time zones will have the offset rounded to the nearest
|
||||
minute, with an error of up to 30 seconds, on Python versions before 3.7.
|
||||
|
||||
>>> cur.execute("SET TIME ZONE 'Asia/Calcutta'") # offset was +5:21:10
|
||||
>>> cur.execute("SELECT '1900-01-01 10:30:45'::timestamptz")
|
||||
>>> cur.fetchone()[0].tzinfo
|
||||
# On Python 3.6: 5h, 21m
|
||||
datetime.timezone(datetime.timedelta(0, 19260))
|
||||
# On Python 3.7 and following: 5h, 21m, 10s
|
||||
datetime.timezone(datetime.timedelta(seconds=19270))
|
||||
|
||||
.. versionchanged:: 2.2.2
|
||||
timezones with seconds are supported (with rounding). Previously such
|
||||
timezones raised an error.
|
||||
|
||||
.. versionchanged:: 2.9
|
||||
timezones with seconds are supported without rounding.
|
||||
|
||||
.. versionchanged:: 2.9
|
||||
use `datetime.timezone` as default tzinfo object instead of
|
||||
`~psycopg2.tz.FixedOffsetTimezone`.
|
||||
|
||||
.. index::
|
||||
double: Date objects; Infinite
|
||||
|
@ -634,6 +642,29 @@ Of course it will not be possible to write the value of `date.max` in the
|
|||
database anymore: :sql:`infinity` will be stored instead.
|
||||
|
||||
|
||||
.. _time-handling:
|
||||
|
||||
Time handling
|
||||
'''''''''''''
|
||||
|
||||
The PostgreSQL :sql:`time` and Python `~datetime.time` types are not
|
||||
fully bidirectional.
|
||||
|
||||
Within PostgreSQL, the :sql:`time` type's maximum value of ``24:00:00`` is
|
||||
treated as 24-hours later than the minimum value of ``00:00:00``.
|
||||
|
||||
>>> cur.execute("SELECT '24:00:00'::time - '00:00:00'::time")
|
||||
>>> cur.fetchone()[0]
|
||||
datetime.timedelta(days=1)
|
||||
|
||||
However, Python's `!time` only supports times until ``23:59:59``.
|
||||
Retrieving a value of ``24:00:00`` results in a `!time` of ``00:00:00``.
|
||||
|
||||
>>> cur.execute("SELECT '24:00:00'::time, '00:00:00'::time")
|
||||
>>> cur.fetchone()
|
||||
(datetime.time(0, 0), datetime.time(0, 0))
|
||||
|
||||
|
||||
.. _adapt-list:
|
||||
|
||||
Lists adaptation
|
||||
|
@ -727,18 +758,25 @@ until a call to the `~connection.rollback()` method.
|
|||
|
||||
The connection is responsible for terminating its transaction, calling either
|
||||
the `~connection.commit()` or `~connection.rollback()` method. Committed
|
||||
changes are immediately made persistent into the database. Closing the
|
||||
connection using the `~connection.close()` method or destroying the
|
||||
connection object (using `!del` or letting it fall out of scope)
|
||||
will result in an implicit rollback.
|
||||
changes are immediately made persistent in the database. If the connection
|
||||
is closed (using the `~connection.close()` method) or destroyed (using `!del`
|
||||
or by letting it fall out of scope) while a transaction is in progress, the
|
||||
server will discard the transaction. However doing so is not advisable:
|
||||
middleware such as PgBouncer_ may see the connection closed uncleanly and
|
||||
dispose of it.
|
||||
|
||||
.. _PgBouncer: http://www.pgbouncer.org/
|
||||
|
||||
It is possible to set the connection in *autocommit* mode: this way all the
|
||||
commands executed will be immediately committed and no rollback is possible. A
|
||||
few commands (e.g. :sql:`CREATE DATABASE`, :sql:`VACUUM`...) require to be run
|
||||
few commands (e.g. :sql:`CREATE DATABASE`, :sql:`VACUUM`, :sql:`CALL` on
|
||||
`stored procedures`__ using transaction control...) require to be run
|
||||
outside any transaction: in order to be able to run these commands from
|
||||
Psycopg, the connection must be in autocommit mode: you can use the
|
||||
`~connection.autocommit` property.
|
||||
|
||||
.. __: https://www.postgresql.org/docs/current/xproc.html
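A minimal sketch (the database name is made up)::

    conn = psycopg2.connect("dbname=postgres")
    conn.autocommit = True      # CREATE DATABASE cannot run inside a transaction
    cur = conn.cursor()
    cur.execute("CREATE DATABASE mytestdb")
    conn.autocommit = False     # restore the normal transactional behaviour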
|
||||
|
||||
.. warning::
|
||||
|
||||
By default even a simple :sql:`SELECT` will start a transaction: in
|
||||
|
@ -757,6 +795,8 @@ the details.
|
|||
.. index::
|
||||
single: with statement
|
||||
|
||||
.. _with:
|
||||
|
||||
``with`` statement
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
|
@ -774,9 +814,7 @@ is rolled back.
|
|||
When a cursor exits the ``with`` block it is closed, releasing any resource
|
||||
eventually associated with it. The state of the transaction is not affected.
|
||||
|
||||
Note that, unlike file objects or other resources, exiting the connection's
|
||||
``with`` block *doesn't close the connection* but only the transaction
|
||||
associated with it: a connection can be used in more than a ``with`` statement
|
||||
A connection can be used in more than one ``with`` statement
|
||||
and each ``with`` block is effectively wrapped in a separate transaction::
|
||||
|
||||
conn = psycopg2.connect(DSN)
|
||||
|
@ -791,6 +829,21 @@ and each ``with`` block is effectively wrapped in a separate transaction::
|
|||
|
||||
conn.close()
|
||||
|
||||
.. warning::
|
||||
|
||||
Unlike file objects or other resources, exiting the connection's
|
||||
``with`` block **doesn't close the connection**, but only the transaction
|
||||
associated to it. If you want to make sure the connection is closed after
|
||||
a certain point, you should still use a try/finally block::
|
||||
|
||||
conn = psycopg2.connect(DSN)
|
||||
try:
|
||||
# connection usage
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
.. versionchanged:: 2.9
|
||||
``with connection`` starts a transaction also on autocommit connections.
|
||||
|
||||
|
||||
.. index::
|
||||
|
@ -807,7 +860,7 @@ Server side cursors
|
|||
|
||||
When a database query is executed, the Psycopg `cursor` usually fetches
|
||||
all the records returned by the backend, transferring them to the client
|
||||
process. If the query returned an huge amount of data, a proportionally large
|
||||
process. If the query returns a huge amount of data, a proportionally large
|
||||
amount of memory will be allocated by the client.
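A server-side (*named*) cursor, described below, avoids this by keeping the
result set on the backend and transferring it in chunks; a sketch (table name
and sizes are made up)::

    cur = conn.cursor(name='fetch_large_result')   # named cursor = server side
    cur.itersize = 2000            # rows transferred per network round trip
    cur.execute("SELECT * FROM huge_table")
    for row in cur:                # rows are streamed, not loaded all at once
        process(row)               # hypothetical per-row handler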
|
||||
|
||||
If the dataset is too large to be practically handled on the client side, it is
|
||||
|
@ -990,7 +1043,7 @@ using the |lo_import|_ and |lo_export|_ libpq functions.
|
|||
.. _lo_export: https://www.postgresql.org/docs/current/static/lo-interfaces.html#LO-EXPORT
|
||||
|
||||
.. versionchanged:: 2.6
|
||||
added support for large objects greated than 2GB. Note that the support is
|
||||
added support for large objects greater than 2GB. Note that the support is
|
||||
enabled only if all the following conditions are verified:
|
||||
|
||||
- the Python build is 64 bits;
|
||||
|
@ -999,8 +1052,8 @@ using the |lo_import|_ and |lo_export|_ libpq functions.
|
|||
(`~connection.server_version` must be >= ``90300``).
|
||||
|
||||
If Psycopg was built with 64 bits large objects support (i.e. the first
|
||||
two contidions above are verified), the `psycopg2.__version__` constant
|
||||
will contain the ``lo64`` flag. If any of the contition is not met
|
||||
two conditions above are verified), the `psycopg2.__version__` constant
|
||||
will contain the ``lo64`` flag. If any of the condition is not met
|
||||
several `!lobject` methods will fail if the arguments exceed 2GB.
|
||||
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@ provide new-style classes for connection and cursor objects and other sweet
|
|||
candies. Like the original, psycopg 2 was written with the aim of being very
|
||||
small and fast, and stable as a rock.
|
||||
|
||||
Homepage: http://initd.org/projects/psycopg2
|
||||
Homepage: https://psycopg.org/
|
||||
|
||||
.. _PostgreSQL: https://www.postgresql.org/
|
||||
.. _Python: https://www.python.org/
|
||||
|
@ -18,7 +18,8 @@ Homepage: http://initd.org/projects/psycopg2
|
|||
"""
|
||||
# psycopg/__init__.py - initialization of the psycopg module
|
||||
#
|
||||
# Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
# Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -60,8 +61,6 @@ from psycopg2._psycopg import ( # noqa
|
|||
__version__, __libpq_version__,
|
||||
)
|
||||
|
||||
from psycopg2 import tz # noqa
|
||||
|
||||
|
||||
# Register default adapters.
|
||||
|
||||
|
@ -119,9 +118,6 @@ def connect(dsn=None, connection_factory=None, cursor_factory=None, **kwargs):
|
|||
if 'async_' in kwargs:
|
||||
kwasync['async_'] = kwargs.pop('async_')
|
||||
|
||||
if dsn is None and not kwargs:
|
||||
raise TypeError('missing dsn and no parameters')
|
||||
|
||||
dsn = _ext.make_dsn(dsn, **kwargs)
|
||||
conn = _connect(dsn, connection_factory=connection_factory, **kwasync)
|
||||
if cursor_factory is not None:
|
||||
|
|
|
@ -3,7 +3,8 @@
|
|||
|
||||
# psycopg/_ipaddress.py - Ipaddres-based network types adaptation
|
||||
#
|
||||
# Copyright (C) 2016 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
# Copyright (C) 2016-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -25,7 +26,6 @@
|
|||
|
||||
from psycopg2.extensions import (
|
||||
new_type, new_array_type, register_type, register_adapter, QuotedString)
|
||||
from psycopg2.compat import text_type
|
||||
|
||||
# The module is imported on register_ipaddress
|
||||
ipaddress = None
|
||||
|
@ -77,13 +77,13 @@ def cast_interface(s, cur=None):
|
|||
if s is None:
|
||||
return None
|
||||
# Py2 version force the use of unicode. meh.
|
||||
return ipaddress.ip_interface(text_type(s))
|
||||
return ipaddress.ip_interface(str(s))
|
||||
|
||||
|
||||
def cast_network(s, cur=None):
|
||||
if s is None:
|
||||
return None
|
||||
return ipaddress.ip_network(text_type(s))
|
||||
return ipaddress.ip_network(str(s))
|
||||
|
||||
|
||||
def adapt_ipaddress(obj):
|
||||
|
|
20
lib/_json.py
|
@ -7,7 +7,8 @@ extensions importing register_json from extras.
|
|||
|
||||
# psycopg/_json.py - Implementation of the JSON adaptation objects
|
||||
#
|
||||
# Copyright (C) 2012 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
# Copyright (C) 2012-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -28,7 +29,6 @@ extensions importing register_json from extras.
|
|||
# License for more details.
|
||||
|
||||
import json
|
||||
import sys
|
||||
|
||||
from psycopg2._psycopg import ISQLQuote, QuotedString
|
||||
from psycopg2._psycopg import new_type, new_array_type, register_type
|
||||
|
@ -43,7 +43,7 @@ JSONB_OID = 3802
|
|||
JSONBARRAY_OID = 3807
|
||||
|
||||
|
||||
class Json(object):
|
||||
class Json:
|
||||
"""
|
||||
An `~psycopg2.extensions.ISQLQuote` wrapper to adapt a Python object to
|
||||
:sql:`json` data type.
|
||||
|
@ -81,13 +81,9 @@ class Json(object):
|
|||
qs.prepare(self._conn)
|
||||
return qs.getquoted()
|
||||
|
||||
if sys.version_info < (3,):
|
||||
def __str__(self):
|
||||
return self.getquoted()
|
||||
else:
|
||||
def __str__(self):
|
||||
# getquoted is binary in Py3
|
||||
return self.getquoted().decode('ascii', 'replace')
|
||||
def __str__(self):
|
||||
# getquoted is binary
|
||||
return self.getquoted().decode('ascii', 'replace')
|
||||
|
||||
|
||||
def register_json(conn_or_curs=None, globally=False, loads=None,
|
||||
|
@ -167,7 +163,7 @@ def _create_json_typecasters(oid, array_oid, loads=None, name='JSON'):
|
|||
|
||||
JSON = new_type((oid, ), name, typecast_json)
|
||||
if array_oid is not None:
|
||||
JSONARRAY = new_array_type((array_oid, ), "%sARRAY" % name, JSON)
|
||||
JSONARRAY = new_array_type((array_oid, ), f"{name}ARRAY", JSON)
|
||||
else:
|
||||
JSONARRAY = None
|
||||
|
||||
|
@ -198,6 +194,6 @@ def _get_json_oids(conn_or_curs, name='json'):
|
|||
conn.rollback()
|
||||
|
||||
if not r:
|
||||
raise conn.ProgrammingError("%s data type not found" % name)
|
||||
raise conn.ProgrammingError(f"{name} data type not found")
|
||||
|
||||
return r
|
||||
|
|
|
@ -1,104 +0,0 @@
|
|||
"""
|
||||
LRU cache implementation for Python 2.7
|
||||
|
||||
Ported from http://code.activestate.com/recipes/578078/ and simplified for our
|
||||
use (only support maxsize > 0 and positional arguments).
|
||||
"""
|
||||
|
||||
from collections import namedtuple
|
||||
from functools import update_wrapper
|
||||
from threading import RLock
|
||||
|
||||
_CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])
|
||||
|
||||
|
||||
def lru_cache(maxsize=100):
|
||||
"""Least-recently-used cache decorator.
|
||||
|
||||
Arguments to the cached function must be hashable.
|
||||
|
||||
See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
|
||||
|
||||
"""
|
||||
def decorating_function(user_function):
|
||||
|
||||
cache = dict()
|
||||
stats = [0, 0] # make statistics updateable non-locally
|
||||
HITS, MISSES = 0, 1 # names for the stats fields
|
||||
cache_get = cache.get # bound method to lookup key or return None
|
||||
_len = len # localize the global len() function
|
||||
lock = RLock() # linkedlist updates aren't threadsafe
|
||||
root = [] # root of the circular doubly linked list
|
||||
root[:] = [root, root, None, None] # initialize by pointing to self
|
||||
nonlocal_root = [root] # make updateable non-locally
|
||||
PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
|
||||
|
||||
assert maxsize and maxsize > 0, "maxsize %s not supported" % maxsize
|
||||
|
||||
def wrapper(*args):
|
||||
# size limited caching that tracks accesses by recency
|
||||
key = args
|
||||
with lock:
|
||||
link = cache_get(key)
|
||||
if link is not None:
|
||||
# record recent use of the key by moving it to the
|
||||
# front of the list
|
||||
root, = nonlocal_root
|
||||
link_prev, link_next, key, result = link
|
||||
link_prev[NEXT] = link_next
|
||||
link_next[PREV] = link_prev
|
||||
last = root[PREV]
|
||||
last[NEXT] = root[PREV] = link
|
||||
link[PREV] = last
|
||||
link[NEXT] = root
|
||||
stats[HITS] += 1
|
||||
return result
|
||||
result = user_function(*args)
|
||||
with lock:
|
||||
root, = nonlocal_root
|
||||
if key in cache:
|
||||
# getting here means that this same key was added to the
|
||||
# cache while the lock was released. since the link
|
||||
# update is already done, we need only return the
|
||||
# computed result and update the count of misses.
|
||||
pass
|
||||
elif _len(cache) >= maxsize:
|
||||
# use the old root to store the new key and result
|
||||
oldroot = root
|
||||
oldroot[KEY] = key
|
||||
oldroot[RESULT] = result
|
||||
# empty the oldest link and make it the new root
|
||||
root = nonlocal_root[0] = oldroot[NEXT]
|
||||
oldkey = root[KEY]
|
||||
# oldvalue = root[RESULT]
|
||||
root[KEY] = root[RESULT] = None
|
||||
# now update the cache dictionary for the new links
|
||||
del cache[oldkey]
|
||||
cache[key] = oldroot
|
||||
else:
|
||||
# put result in a new link at the front of the list
|
||||
last = root[PREV]
|
||||
link = [last, root, key, result]
|
||||
last[NEXT] = root[PREV] = cache[key] = link
|
||||
stats[MISSES] += 1
|
||||
return result
|
||||
|
||||
def cache_info():
|
||||
"""Report cache statistics"""
|
||||
with lock:
|
||||
return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))
|
||||
|
||||
def cache_clear():
|
||||
"""Clear the cache and cache statistics"""
|
||||
with lock:
|
||||
cache.clear()
|
||||
root = nonlocal_root[0]
|
||||
root[:] = [root, root, None, None]
|
||||
stats[:] = [0, 0]
|
||||
|
||||
wrapper.__wrapped__ = user_function
|
||||
wrapper.cache_info = cache_info
|
||||
wrapper.cache_clear = cache_clear
|
||||
return update_wrapper(wrapper, user_function)
|
||||
|
||||
return decorating_function
|
|
@ -4,7 +4,8 @@
|
|||
|
||||
# psycopg/_range.py - Implementation of the Range type and adaptation
|
||||
#
|
||||
# Copyright (C) 2012 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
# Copyright (C) 2012-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -29,10 +30,9 @@ import re
|
|||
from psycopg2._psycopg import ProgrammingError, InterfaceError
|
||||
from psycopg2.extensions import ISQLQuote, adapt, register_adapter
|
||||
from psycopg2.extensions import new_type, new_array_type, register_type
|
||||
from psycopg2.compat import string_types
|
||||
|
||||
|
||||
class Range(object):
|
||||
class Range:
|
||||
"""Python representation for a PostgreSQL |range|_ type.
|
||||
|
||||
:param lower: lower bound for the range. `!None` means unbound
|
||||
|
@ -47,7 +47,7 @@ class Range(object):
|
|||
def __init__(self, lower=None, upper=None, bounds='[)', empty=False):
|
||||
if not empty:
|
||||
if bounds not in ('[)', '(]', '()', '[]'):
|
||||
raise ValueError("bound flags not valid: %r" % bounds)
|
||||
raise ValueError(f"bound flags not valid: {bounds!r}")
|
||||
|
||||
self._lower = lower
|
||||
self._upper = upper
|
||||
|
@ -57,9 +57,9 @@ class Range(object):
|
|||
|
||||
def __repr__(self):
|
||||
if self._bounds is None:
|
||||
return "%s(empty=True)" % self.__class__.__name__
|
||||
return f"{self.__class__.__name__}(empty=True)"
|
||||
else:
|
||||
return "%s(%r, %r, %r)" % (self.__class__.__name__,
|
||||
return "{}({!r}, {!r}, {!r})".format(self.__class__.__name__,
|
||||
self._lower, self._upper, self._bounds)
|
||||
|
||||
def __str__(self):
|
||||
|
@ -143,10 +143,6 @@ class Range(object):
|
|||
def __bool__(self):
|
||||
return self._bounds is not None
|
||||
|
||||
def __nonzero__(self):
|
||||
# Python 2 compatibility
|
||||
return type(self).__bool__(self)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, Range):
|
||||
return False
|
||||
|
@ -238,7 +234,7 @@ def register_range(pgrange, pyrange, conn_or_curs, globally=False):
|
|||
return caster
|
||||
|
||||
|
||||
class RangeAdapter(object):
|
||||
class RangeAdapter:
|
||||
"""`ISQLQuote` adapter for `Range` subclasses.
|
||||
|
||||
This is an abstract class: concrete classes must set a `name` class
|
||||
|
@ -286,7 +282,7 @@ class RangeAdapter(object):
|
|||
+ b", '" + r._bounds.encode('utf8') + b"')"
|
||||
|
||||
|
||||
class RangeCaster(object):
|
||||
class RangeCaster:
|
||||
"""Helper class to convert between `Range` and PostgreSQL range types.
|
||||
|
||||
Objects of this class are usually created by `register_range()`. Manual
|
||||
|
@ -314,7 +310,7 @@ class RangeCaster(object):
|
|||
# an implementation detail and is not documented. It is currently used
|
||||
# for the numeric ranges.
|
||||
self.adapter = None
|
||||
if isinstance(pgrange, string_types):
|
||||
if isinstance(pgrange, str):
|
||||
self.adapter = type(pgrange, (RangeAdapter,), {})
|
||||
self.adapter.name = pgrange
|
||||
else:
|
||||
|
@ -331,7 +327,7 @@ class RangeCaster(object):
|
|||
|
||||
self.range = None
|
||||
try:
|
||||
if isinstance(pyrange, string_types):
|
||||
if isinstance(pyrange, str):
|
||||
self.range = type(pyrange, (Range,), {})
|
||||
if issubclass(pyrange, Range) and pyrange is not Range:
|
||||
self.range = pyrange
|
||||
|
@ -367,33 +363,54 @@ class RangeCaster(object):
|
|||
schema = 'public'
|
||||
|
||||
# get the type oid and attributes
|
||||
try:
|
||||
curs.execute("""\
|
||||
select rngtypid, rngsubtype,
|
||||
(select typarray from pg_type where oid = rngtypid)
|
||||
curs.execute("""\
|
||||
select rngtypid, rngsubtype, typarray
|
||||
from pg_range r
|
||||
join pg_type t on t.oid = rngtypid
|
||||
join pg_namespace ns on ns.oid = typnamespace
|
||||
where typname = %s and ns.nspname = %s;
|
||||
""", (tname, schema))
|
||||
rec = curs.fetchone()
|
||||
|
||||
except ProgrammingError:
|
||||
if not conn.autocommit:
|
||||
conn.rollback()
|
||||
raise
|
||||
else:
|
||||
rec = curs.fetchone()
|
||||
if not rec:
|
||||
# The above algorithm doesn't work for customized search_path
|
||||
# (#1487) The implementation below works better, but, to guarantee
|
||||
# backwards compatibility, use it only if the original one failed.
|
||||
try:
|
||||
savepoint = False
|
||||
# Because we executed statements earlier, we are either INTRANS
|
||||
# or we are IDLE only if the transaction is autocommit, in
|
||||
# which case we don't need the savepoint anyway.
|
||||
if conn.status == STATUS_IN_TRANSACTION:
|
||||
curs.execute("SAVEPOINT register_type")
|
||||
savepoint = True
|
||||
|
||||
# revert the status of the connection as before the command
|
||||
if (conn_status != STATUS_IN_TRANSACTION
|
||||
and not conn.autocommit):
|
||||
conn.rollback()
|
||||
curs.execute("""\
|
||||
SELECT rngtypid, rngsubtype, typarray, typname, nspname
|
||||
from pg_range r
|
||||
join pg_type t on t.oid = rngtypid
|
||||
join pg_namespace ns on ns.oid = typnamespace
|
||||
WHERE t.oid = %s::regtype
|
||||
""", (name, ))
|
||||
except ProgrammingError:
|
||||
pass
|
||||
else:
|
||||
rec = curs.fetchone()
|
||||
if rec:
|
||||
tname, schema = rec[3:]
|
||||
finally:
|
||||
if savepoint:
|
||||
curs.execute("ROLLBACK TO SAVEPOINT register_type")
|
||||
|
||||
# revert the status of the connection as before the command
|
||||
if conn_status != STATUS_IN_TRANSACTION and not conn.autocommit:
|
||||
conn.rollback()
|
||||
|
||||
if not rec:
|
||||
raise ProgrammingError(
|
||||
"PostgreSQL type '%s' not found" % name)
|
||||
f"PostgreSQL range '{name}' not found")
|
||||
|
||||
type, subtype, array = rec
|
||||
type, subtype, array = rec[:3]
|
||||
|
||||
return RangeCaster(name, pyrange,
|
||||
oid=type, subtype_oid=subtype, array_oid=array)
|
||||
|
@ -423,7 +440,7 @@ where typname = %s and ns.nspname = %s;
|
|||
|
||||
m = self._re_range.match(s)
|
||||
if m is None:
|
||||
raise InterfaceError("failed to parse range: '%s'" % s)
|
||||
raise InterfaceError(f"failed to parse range: '{s}'")
|
||||
|
||||
lower = m.group(3)
|
||||
if lower is None:
|
||||
|
@ -503,8 +520,7 @@ class NumberRangeAdapter(RangeAdapter):
|
|||
else:
|
||||
upper = ''
|
||||
|
||||
return ("'%s%s,%s%s'" % (
|
||||
r._bounds[0], lower, upper, r._bounds[1])).encode('ascii')
|
||||
return (f"'{r._bounds[0]}{lower},{upper}{r._bounds[1]}'").encode('ascii')
|
||||
|
||||
|
||||
# TODO: probably won't work with infs, nans and other tricky cases.
|
||||
|
|
|
@ -1,15 +0,0 @@
|
|||
import sys
|
||||
|
||||
__all__ = ['string_types', 'text_type', 'lru_cache']
|
||||
|
||||
if sys.version_info[0] == 2:
|
||||
# Python 2
|
||||
string_types = basestring,
|
||||
text_type = unicode
|
||||
from ._lru_cache import lru_cache
|
||||
|
||||
else:
|
||||
# Python 3
|
||||
string_types = str,
|
||||
text_type = str
|
||||
from functools import lru_cache
|
|
@ -1,10 +1,11 @@
|
|||
"""Error codes for PostgresSQL
|
||||
"""Error codes for PostgreSQL
|
||||
|
||||
This module contains symbolic names for all PostgreSQL error codes.
|
||||
"""
|
||||
# psycopg2/errorcodes.py - PostgreSQL error codes
|
||||
#
|
||||
# Copyright (C) 2006-2010 Johan Dahlin <jdahlin@async.com.br>
|
||||
# Copyright (C) 2006-2019 Johan Dahlin <jdahlin@async.com.br>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -42,7 +43,8 @@ def lookup(code, _cache={}):
|
|||
tmp = {}
|
||||
for k, v in globals().items():
|
||||
if isinstance(v, str) and len(v) in (2, 5):
|
||||
tmp[v] = k
|
||||
# Strip trailing underscore used to disambiguate duplicate values
|
||||
tmp[v] = k.rstrip("_")
|
||||
|
||||
assert tmp
|
||||
|
||||
|
@ -105,7 +107,7 @@ SUCCESSFUL_COMPLETION = '00000'
|
|||
# Class 01 - Warning
|
||||
WARNING = '01000'
|
||||
NULL_VALUE_ELIMINATED_IN_SET_FUNCTION = '01003'
|
||||
STRING_DATA_RIGHT_TRUNCATION = '01004'
|
||||
STRING_DATA_RIGHT_TRUNCATION_ = '01004'
|
||||
PRIVILEGE_NOT_REVOKED = '01006'
|
||||
PRIVILEGE_NOT_GRANTED = '01007'
|
||||
IMPLICIT_ZERO_BIT_PADDING = '01008'
|
||||
|
@ -163,7 +165,7 @@ DATA_EXCEPTION = '22000'
|
|||
STRING_DATA_RIGHT_TRUNCATION = '22001'
|
||||
NULL_VALUE_NO_INDICATOR_PARAMETER = '22002'
|
||||
NUMERIC_VALUE_OUT_OF_RANGE = '22003'
|
||||
NULL_VALUE_NOT_ALLOWED = '22004'
|
||||
NULL_VALUE_NOT_ALLOWED_ = '22004'
|
||||
ERROR_IN_ASSIGNMENT = '22005'
|
||||
INVALID_DATETIME_FORMAT = '22007'
|
||||
DATETIME_FIELD_OVERFLOW = '22008'
|
||||
|
@ -205,6 +207,23 @@ TRIM_ERROR = '22027'
|
|||
ARRAY_SUBSCRIPT_ERROR = '2202E'
|
||||
INVALID_TABLESAMPLE_REPEAT = '2202G'
|
||||
INVALID_TABLESAMPLE_ARGUMENT = '2202H'
|
||||
DUPLICATE_JSON_OBJECT_KEY_VALUE = '22030'
|
||||
INVALID_ARGUMENT_FOR_SQL_JSON_DATETIME_FUNCTION = '22031'
|
||||
INVALID_JSON_TEXT = '22032'
|
||||
INVALID_SQL_JSON_SUBSCRIPT = '22033'
|
||||
MORE_THAN_ONE_SQL_JSON_ITEM = '22034'
|
||||
NO_SQL_JSON_ITEM = '22035'
|
||||
NON_NUMERIC_SQL_JSON_ITEM = '22036'
|
||||
NON_UNIQUE_KEYS_IN_A_JSON_OBJECT = '22037'
|
||||
SINGLETON_SQL_JSON_ITEM_REQUIRED = '22038'
|
||||
SQL_JSON_ARRAY_NOT_FOUND = '22039'
|
||||
SQL_JSON_MEMBER_NOT_FOUND = '2203A'
|
||||
SQL_JSON_NUMBER_NOT_FOUND = '2203B'
|
||||
SQL_JSON_OBJECT_NOT_FOUND = '2203C'
|
||||
TOO_MANY_JSON_ARRAY_ELEMENTS = '2203D'
|
||||
TOO_MANY_JSON_OBJECT_MEMBERS = '2203E'
|
||||
SQL_JSON_SCALAR_REQUIRED = '2203F'
|
||||
SQL_JSON_ITEM_CANNOT_BE_CAST_TO_TARGET_TYPE = '2203G'
|
||||
FLOATING_POINT_EXCEPTION = '22P01'
|
||||
INVALID_TEXT_REPRESENTATION = '22P02'
|
||||
INVALID_BINARY_REPRESENTATION = '22P03'
|
||||
|
@ -237,6 +256,7 @@ HELD_CURSOR_REQUIRES_SAME_ISOLATION_LEVEL = '25008'
|
|||
NO_ACTIVE_SQL_TRANSACTION = '25P01'
|
||||
IN_FAILED_SQL_TRANSACTION = '25P02'
|
||||
IDLE_IN_TRANSACTION_SESSION_TIMEOUT = '25P03'
|
||||
TRANSACTION_TIMEOUT = '25P04'
|
||||
|
||||
# Class 26 - Invalid SQL Statement Name
|
||||
INVALID_SQL_STATEMENT_NAME = '26000'
|
||||
|
@ -257,9 +277,9 @@ INVALID_TRANSACTION_TERMINATION = '2D000'
|
|||
|
||||
# Class 2F - SQL Routine Exception
|
||||
SQL_ROUTINE_EXCEPTION = '2F000'
|
||||
MODIFYING_SQL_DATA_NOT_PERMITTED = '2F002'
|
||||
PROHIBITED_SQL_STATEMENT_ATTEMPTED = '2F003'
|
||||
READING_SQL_DATA_NOT_PERMITTED = '2F004'
|
||||
MODIFYING_SQL_DATA_NOT_PERMITTED_ = '2F002'
|
||||
PROHIBITED_SQL_STATEMENT_ATTEMPTED_ = '2F003'
|
||||
READING_SQL_DATA_NOT_PERMITTED_ = '2F004'
|
||||
FUNCTION_EXECUTED_NO_RETURN_STATEMENT = '2F005'
|
||||
|
||||
# Class 34 - Invalid Cursor Name
|
||||
|
@ -373,6 +393,7 @@ ADMIN_SHUTDOWN = '57P01'
|
|||
CRASH_SHUTDOWN = '57P02'
|
||||
CANNOT_CONNECT_NOW = '57P03'
|
||||
DATABASE_DROPPED = '57P04'
|
||||
IDLE_SESSION_TIMEOUT = '57P05'
|
||||
|
||||
# Class 58 - System Error (errors external to PostgreSQL itself)
|
||||
SYSTEM_ERROR = '58000'
|
||||
|
|
|
@ -4,6 +4,7 @@
|
|||
# psycopg/errors.py - SQLSTATE and DB-API exceptions
|
||||
#
|
||||
# Copyright (C) 2018-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
|
|
@ -12,7 +12,8 @@ This module holds all the extensions to the DBAPI-2.0 provided by psycopg.
|
|||
"""
|
||||
# psycopg/extensions.py - DBAPI-2.0 extensions specific to psycopg
|
||||
#
|
||||
# Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
# Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -41,21 +42,10 @@ from psycopg2._psycopg import ( # noqa
|
|||
ROWIDARRAY, STRINGARRAY, TIME, TIMEARRAY, UNICODE, UNICODEARRAY,
|
||||
AsIs, Binary, Boolean, Float, Int, QuotedString, )
|
||||
|
||||
try:
|
||||
from psycopg2._psycopg import ( # noqa
|
||||
MXDATE, MXDATETIME, MXDATETIMETZ, MXINTERVAL, MXTIME, MXDATEARRAY,
|
||||
MXDATETIMEARRAY, MXDATETIMETZARRAY, MXINTERVALARRAY, MXTIMEARRAY,
|
||||
DateFromMx, TimeFromMx, TimestampFromMx, IntervalFromMx, )
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try:
|
||||
from psycopg2._psycopg import ( # noqa
|
||||
PYDATE, PYDATETIME, PYDATETIMETZ, PYINTERVAL, PYTIME, PYDATEARRAY,
|
||||
PYDATETIMEARRAY, PYDATETIMETZARRAY, PYINTERVALARRAY, PYTIMEARRAY,
|
||||
DateFromPy, TimeFromPy, TimestampFromPy, IntervalFromPy, )
|
||||
except ImportError:
|
||||
pass
|
||||
from psycopg2._psycopg import ( # noqa
|
||||
PYDATE, PYDATETIME, PYDATETIMETZ, PYINTERVAL, PYTIME, PYDATEARRAY,
|
||||
PYDATETIMEARRAY, PYDATETIMETZARRAY, PYINTERVALARRAY, PYTIMEARRAY,
|
||||
DateFromPy, TimeFromPy, TimestampFromPy, IntervalFromPy, )
|
||||
|
||||
from psycopg2._psycopg import ( # noqa
|
||||
adapt, adapters, encodings, connection, cursor,
|
||||
|
@ -108,7 +98,7 @@ def register_adapter(typ, callable):
|
|||
|
||||
|
||||
# The SQL_IN class is the official adapter for tuples starting from 2.0.6.
|
||||
class SQL_IN(object):
|
||||
class SQL_IN:
|
||||
"""Adapt any iterable to an SQL quotable object."""
|
||||
def __init__(self, seq):
|
||||
self._seq = seq
|
||||
|
@ -132,7 +122,7 @@ class SQL_IN(object):
|
|||
return str(self.getquoted())
|
||||
|
||||
|
||||
class NoneAdapter(object):
|
||||
class NoneAdapter:
|
||||
"""Adapt None to NULL.
|
||||
|
||||
This adapter is not used normally as a fast path in mogrify uses NULL,
|
||||
|
@ -170,7 +160,7 @@ def make_dsn(dsn=None, **kwargs):
|
|||
tmp.update(kwargs)
|
||||
kwargs = tmp
|
||||
|
||||
dsn = " ".join(["%s=%s" % (k, _param_escape(str(v)))
|
||||
dsn = " ".join(["{}={}".format(k, _param_escape(str(v)))
|
||||
for (k, v) in kwargs.items()])
|
||||
|
||||
# verify that the returned dsn is valid
|
||||
|
|
352  lib/extras.py
|
@ -5,7 +5,8 @@ and classes until a better place in the distribution is found.
|
|||
"""
|
||||
# psycopg/extras.py - miscellaneous extra goodies for psycopg
|
||||
#
|
||||
# Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
# Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -26,7 +27,6 @@ and classes until a better place in the distribution is found.
|
|||
# License for more details.
|
||||
|
||||
import os as _os
|
||||
import sys as _sys
|
||||
import time as _time
|
||||
import re as _re
|
||||
from collections import namedtuple, OrderedDict
|
||||
|
@ -38,7 +38,7 @@ from psycopg2 import extensions as _ext
|
|||
from .extensions import cursor as _cursor
|
||||
from .extensions import connection as _connection
|
||||
from .extensions import adapt as _A, quote_ident
|
||||
from .compat import lru_cache
|
||||
from functools import lru_cache
|
||||
|
||||
from psycopg2._psycopg import ( # noqa
|
||||
REPLICATION_PHYSICAL, REPLICATION_LOGICAL,
|
||||
|
@ -72,51 +72,51 @@ class DictCursorBase(_cursor):
|
|||
else:
|
||||
raise NotImplementedError(
|
||||
"DictCursorBase can't be instantiated without a row factory.")
|
||||
super(DictCursorBase, self).__init__(*args, **kwargs)
|
||||
self._query_executed = 0
|
||||
self._prefetch = 0
|
||||
super().__init__(*args, **kwargs)
|
||||
self._query_executed = False
|
||||
self._prefetch = False
|
||||
self.row_factory = row_factory
|
||||
|
||||
def fetchone(self):
|
||||
if self._prefetch:
|
||||
res = super(DictCursorBase, self).fetchone()
|
||||
res = super().fetchone()
|
||||
if self._query_executed:
|
||||
self._build_index()
|
||||
if not self._prefetch:
|
||||
res = super(DictCursorBase, self).fetchone()
|
||||
res = super().fetchone()
|
||||
return res
|
||||
|
||||
def fetchmany(self, size=None):
|
||||
if self._prefetch:
|
||||
res = super(DictCursorBase, self).fetchmany(size)
|
||||
res = super().fetchmany(size)
|
||||
if self._query_executed:
|
||||
self._build_index()
|
||||
if not self._prefetch:
|
||||
res = super(DictCursorBase, self).fetchmany(size)
|
||||
res = super().fetchmany(size)
|
||||
return res
|
||||
|
||||
def fetchall(self):
|
||||
if self._prefetch:
|
||||
res = super(DictCursorBase, self).fetchall()
|
||||
res = super().fetchall()
|
||||
if self._query_executed:
|
||||
self._build_index()
|
||||
if not self._prefetch:
|
||||
res = super(DictCursorBase, self).fetchall()
|
||||
res = super().fetchall()
|
||||
return res
|
||||
|
||||
def __iter__(self):
|
||||
try:
|
||||
if self._prefetch:
|
||||
res = super(DictCursorBase, self).__iter__()
|
||||
res = super().__iter__()
|
||||
first = next(res)
|
||||
if self._query_executed:
|
||||
self._build_index()
|
||||
if not self._prefetch:
|
||||
res = super(DictCursorBase, self).__iter__()
|
||||
res = super().__iter__()
|
||||
first = next(res)
|
||||
|
||||
yield first
|
||||
while 1:
|
||||
while True:
|
||||
yield next(res)
|
||||
except StopIteration:
|
||||
return
|
||||
|
@ -125,33 +125,36 @@ class DictCursorBase(_cursor):
|
|||
class DictConnection(_connection):
|
||||
"""A connection that uses `DictCursor` automatically."""
|
||||
def cursor(self, *args, **kwargs):
|
||||
kwargs.setdefault('cursor_factory', DictCursor)
|
||||
return super(DictConnection, self).cursor(*args, **kwargs)
|
||||
kwargs.setdefault('cursor_factory', self.cursor_factory or DictCursor)
|
||||
return super().cursor(*args, **kwargs)
|
||||
|
||||
|
||||
class DictCursor(DictCursorBase):
|
||||
"""A cursor that keeps a list of column name -> index mappings."""
|
||||
"""A cursor that keeps a list of column name -> index mappings__.
|
||||
|
||||
.. __: https://docs.python.org/glossary.html#term-mapping
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['row_factory'] = DictRow
|
||||
super(DictCursor, self).__init__(*args, **kwargs)
|
||||
self._prefetch = 1
|
||||
super().__init__(*args, **kwargs)
|
||||
self._prefetch = True
|
||||
|
||||
def execute(self, query, vars=None):
|
||||
self.index = OrderedDict()
|
||||
self._query_executed = 1
|
||||
return super(DictCursor, self).execute(query, vars)
|
||||
self._query_executed = True
|
||||
return super().execute(query, vars)
|
||||
|
||||
def callproc(self, procname, vars=None):
|
||||
self.index = OrderedDict()
|
||||
self._query_executed = 1
|
||||
return super(DictCursor, self).callproc(procname, vars)
|
||||
self._query_executed = True
|
||||
return super().callproc(procname, vars)
|
||||
|
||||
def _build_index(self):
|
||||
if self._query_executed == 1 and self.description:
|
||||
if self._query_executed and self.description:
|
||||
for i in range(len(self.description)):
|
||||
self.index[self.description[i][0]] = i
|
||||
self._query_executed = 0
|
||||
self._query_executed = False
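As an aside, a small usage sketch of the dict-like cursors touched above; note the change also makes DictConnection.cursor() honour a cursor_factory set on the connection instead of always forcing DictCursor. The DSN is hypothetical; any reachable database would do:

    import psycopg2
    import psycopg2.extras

    conn = psycopg2.connect("dbname=test",          # hypothetical DSN
                            cursor_factory=psycopg2.extras.DictCursor)
    cur = conn.cursor()                             # a DictCursor
    cur.execute("SELECT 1 AS id, 'foo' AS label")
    row = cur.fetchone()
    print(row['label'], row[0])                     # foo 1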
|
||||
|
||||
|
||||
class DictRow(list):
|
||||
|
@ -166,22 +169,22 @@ class DictRow(list):
|
|||
def __getitem__(self, x):
|
||||
if not isinstance(x, (int, slice)):
|
||||
x = self._index[x]
|
||||
return super(DictRow, self).__getitem__(x)
|
||||
return super().__getitem__(x)
|
||||
|
||||
def __setitem__(self, x, v):
|
||||
if not isinstance(x, (int, slice)):
|
||||
x = self._index[x]
|
||||
super(DictRow, self).__setitem__(x, v)
|
||||
super().__setitem__(x, v)
|
||||
|
||||
def items(self):
|
||||
g = super(DictRow, self).__getitem__
|
||||
g = super().__getitem__
|
||||
return ((n, g(self._index[n])) for n in self._index)
|
||||
|
||||
def keys(self):
|
||||
return iter(self._index)
|
||||
|
||||
def values(self):
|
||||
g = super(DictRow, self).__getitem__
|
||||
g = super().__getitem__
|
||||
return (g(self._index[n]) for n in self._index)
|
||||
|
||||
def get(self, x, default=None):
|
||||
|
@ -196,6 +199,10 @@ class DictRow(list):
|
|||
def __contains__(self, x):
|
||||
return x in self._index
|
||||
|
||||
def __reduce__(self):
|
||||
# this is apparently useless, but it fixes #1073
|
||||
return super().__reduce__()
|
||||
|
||||
def __getstate__(self):
|
||||
return self[:], self._index.copy()
|
||||
|
||||
|
@ -203,27 +210,12 @@ class DictRow(list):
|
|||
self[:] = data[0]
|
||||
self._index = data[1]
|
||||
|
||||
if _sys.version_info[0] < 3:
|
||||
iterkeys = keys
|
||||
itervalues = values
|
||||
iteritems = items
|
||||
has_key = __contains__
|
||||
|
||||
def keys(self):
|
||||
return list(self.iterkeys())
|
||||
|
||||
def values(self):
|
||||
return tuple(self.itervalues())
|
||||
|
||||
def items(self):
|
||||
return list(self.iteritems())
|
||||
|
||||
|
||||
class RealDictConnection(_connection):
|
||||
"""A connection that uses `RealDictCursor` automatically."""
|
||||
def cursor(self, *args, **kwargs):
|
||||
kwargs.setdefault('cursor_factory', RealDictCursor)
|
||||
return super(RealDictConnection, self).cursor(*args, **kwargs)
|
||||
kwargs.setdefault('cursor_factory', self.cursor_factory or RealDictCursor)
|
||||
return super().cursor(*args, **kwargs)
|
||||
|
||||
|
||||
class RealDictCursor(DictCursorBase):
|
||||
|
@ -236,82 +228,64 @@ class RealDictCursor(DictCursorBase):
|
|||
"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['row_factory'] = RealDictRow
|
||||
super(RealDictCursor, self).__init__(*args, **kwargs)
|
||||
self._prefetch = 0
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def execute(self, query, vars=None):
|
||||
self.column_mapping = []
|
||||
self._query_executed = 1
|
||||
return super(RealDictCursor, self).execute(query, vars)
|
||||
self._query_executed = True
|
||||
return super().execute(query, vars)
|
||||
|
||||
def callproc(self, procname, vars=None):
|
||||
self.column_mapping = []
|
||||
self._query_executed = 1
|
||||
return super(RealDictCursor, self).callproc(procname, vars)
|
||||
self._query_executed = True
|
||||
return super().callproc(procname, vars)
|
||||
|
||||
def _build_index(self):
|
||||
if self._query_executed == 1 and self.description:
|
||||
if self._query_executed and self.description:
|
||||
self.column_mapping = [d[0] for d in self.description]
|
||||
self._query_executed = 0
|
||||
self._query_executed = False
|
||||
|
||||
|
||||
class RealDictRow(dict):
|
||||
class RealDictRow(OrderedDict):
|
||||
"""A `!dict` subclass representing a data record."""
|
||||
|
||||
__slots__ = ('_column_mapping',)
|
||||
def __init__(self, *args, **kwargs):
|
||||
if args and isinstance(args[0], _cursor):
|
||||
cursor = args[0]
|
||||
args = args[1:]
|
||||
else:
|
||||
cursor = None
|
||||
|
||||
def __init__(self, cursor):
|
||||
super(RealDictRow, self).__init__()
|
||||
# Required for named cursors
|
||||
if cursor.description and not cursor.column_mapping:
|
||||
cursor._build_index()
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
self._column_mapping = cursor.column_mapping
|
||||
if cursor is not None:
|
||||
# Required for named cursors
|
||||
if cursor.description and not cursor.column_mapping:
|
||||
cursor._build_index()
|
||||
|
||||
def __setitem__(self, name, value):
|
||||
if type(name) == int:
|
||||
name = self._column_mapping[name]
|
||||
super(RealDictRow, self).__setitem__(name, value)
|
||||
# Store the cols mapping in the dict itself until the row is fully
|
||||
# populated, so we don't need to add attributes to the class
|
||||
# (hence keeping its maintenance, special pickle support, etc.)
|
||||
self[RealDictRow] = cursor.column_mapping
|
||||
|
||||
def __getstate__(self):
|
||||
return self.copy(), self._column_mapping[:]
|
||||
def __setitem__(self, key, value):
|
||||
if RealDictRow in self:
|
||||
# We are in the row building phase
|
||||
mapping = self[RealDictRow]
|
||||
super().__setitem__(mapping[key], value)
|
||||
if key == len(mapping) - 1:
|
||||
# Row building finished
|
||||
del self[RealDictRow]
|
||||
return
|
||||
|
||||
def __setstate__(self, data):
|
||||
self.update(data[0])
|
||||
self._column_mapping = data[1]
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._column_mapping)
|
||||
|
||||
def keys(self):
|
||||
return iter(self._column_mapping)
|
||||
|
||||
def values(self):
|
||||
return (self[k] for k in self._column_mapping)
|
||||
|
||||
def items(self):
|
||||
return ((k, self[k]) for k in self._column_mapping)
|
||||
|
||||
if _sys.version_info[0] < 3:
|
||||
iterkeys = keys
|
||||
itervalues = values
|
||||
iteritems = items
|
||||
|
||||
def keys(self):
|
||||
return list(self.iterkeys())
|
||||
|
||||
def values(self):
|
||||
return list(self.itervalues())
|
||||
|
||||
def items(self):
|
||||
return list(self.iteritems())
|
||||
super().__setitem__(key, value)
|
||||
|
||||
|
||||
class NamedTupleConnection(_connection):
|
||||
"""A connection that uses `NamedTupleCursor` automatically."""
|
||||
def cursor(self, *args, **kwargs):
|
||||
kwargs.setdefault('cursor_factory', NamedTupleCursor)
|
||||
return super(NamedTupleConnection, self).cursor(*args, **kwargs)
|
||||
kwargs.setdefault('cursor_factory', self.cursor_factory or NamedTupleCursor)
|
||||
return super().cursor(*args, **kwargs)
|
||||
|
||||
|
||||
class NamedTupleCursor(_cursor):
|
||||
|
@ -335,18 +309,18 @@ class NamedTupleCursor(_cursor):
|
|||
|
||||
def execute(self, query, vars=None):
|
||||
self.Record = None
|
||||
return super(NamedTupleCursor, self).execute(query, vars)
|
||||
return super().execute(query, vars)
|
||||
|
||||
def executemany(self, query, vars):
|
||||
self.Record = None
|
||||
return super(NamedTupleCursor, self).executemany(query, vars)
|
||||
return super().executemany(query, vars)
|
||||
|
||||
def callproc(self, procname, vars=None):
|
||||
self.Record = None
|
||||
return super(NamedTupleCursor, self).callproc(procname, vars)
|
||||
return super().callproc(procname, vars)
|
||||
|
||||
def fetchone(self):
|
||||
t = super(NamedTupleCursor, self).fetchone()
|
||||
t = super().fetchone()
|
||||
if t is not None:
|
||||
nt = self.Record
|
||||
if nt is None:
|
||||
|
@ -354,14 +328,14 @@ class NamedTupleCursor(_cursor):
|
|||
return nt._make(t)
|
||||
|
||||
def fetchmany(self, size=None):
|
||||
ts = super(NamedTupleCursor, self).fetchmany(size)
|
||||
ts = super().fetchmany(size)
|
||||
nt = self.Record
|
||||
if nt is None:
|
||||
nt = self.Record = self._make_nt()
|
||||
return list(map(nt._make, ts))
|
||||
|
||||
def fetchall(self):
|
||||
ts = super(NamedTupleCursor, self).fetchall()
|
||||
ts = super().fetchall()
|
||||
nt = self.Record
|
||||
if nt is None:
|
||||
nt = self.Record = self._make_nt()
|
||||
|
@ -369,7 +343,7 @@ class NamedTupleCursor(_cursor):
|
|||
|
||||
def __iter__(self):
|
||||
try:
|
||||
it = super(NamedTupleCursor, self).__iter__()
|
||||
it = super().__iter__()
|
||||
t = next(it)
|
||||
|
||||
nt = self.Record
|
||||
|
@ -378,23 +352,20 @@ class NamedTupleCursor(_cursor):
|
|||
|
||||
yield nt._make(t)
|
||||
|
||||
while 1:
|
||||
while True:
|
||||
yield nt._make(next(it))
|
||||
except StopIteration:
|
||||
return
|
||||
|
||||
# ascii except alnum and underscore
|
||||
_re_clean = _re.compile(
|
||||
'[' + _re.escape(' !"#$%&\'()*+,-./:;<=>?@[\\]^`{|}~') + ']')
|
||||
|
||||
def _make_nt(self):
|
||||
key = tuple(d[0] for d in self.description) if self.description else ()
|
||||
return self._cached_make_nt(key)
|
||||
|
||||
def _do_make_nt(self, key):
|
||||
@classmethod
|
||||
def _do_make_nt(cls, key):
|
||||
fields = []
|
||||
for s in key:
|
||||
s = self._re_clean.sub('_', s)
|
||||
s = _re_clean.sub('_', s)
|
||||
# Python identifier cannot start with numbers, namedtuple fields
|
||||
# cannot start with underscore. So...
|
||||
if s[0] == '_' or '0' <= s[0] <= '9':
|
||||
|
@ -404,9 +375,15 @@ class NamedTupleCursor(_cursor):
|
|||
nt = namedtuple("Record", fields)
|
||||
return nt
|
||||
|
||||
# Exposed for testability, and if someone wants to monkeypatch to tweak
|
||||
# the cache size.
|
||||
_cached_make_nt = lru_cache(512)(_do_make_nt)
|
||||
|
||||
@lru_cache(512)
|
||||
def _cached_make_nt(cls, key):
|
||||
return cls._do_make_nt(key)
|
||||
|
||||
|
||||
# Exposed for testability, and if someone wants to monkeypatch to tweak
|
||||
# the cache size.
|
||||
NamedTupleCursor._cached_make_nt = classmethod(_cached_make_nt)
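A quick sketch of the NamedTupleCursor behaviour that the cached _make_nt above serves: the Record class is now built once per distinct column list and reused. The DSN is hypothetical:

    import psycopg2
    import psycopg2.extras

    conn = psycopg2.connect("dbname=test")          # hypothetical DSN
    cur = conn.cursor(cursor_factory=psycopg2.extras.NamedTupleCursor)
    cur.execute("SELECT 1 AS id, 'foo' AS label")
    rec = cur.fetchone()
    print(rec.id, rec.label)                        # 1 foo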
|
||||
|
||||
|
||||
class LoggingConnection(_connection):
|
||||
|
@ -418,11 +395,12 @@ class LoggingConnection(_connection):
|
|||
def initialize(self, logobj):
|
||||
"""Initialize the connection to log to `!logobj`.
|
||||
|
||||
The `!logobj` parameter can be an open file object or a Logger
|
||||
The `!logobj` parameter can be an open file object or a Logger/LoggerAdapter
|
||||
instance from the standard logging module.
|
||||
"""
|
||||
self._logobj = logobj
|
||||
if _logging and isinstance(logobj, _logging.Logger):
|
||||
if _logging and isinstance(
|
||||
logobj, (_logging.Logger, _logging.LoggerAdapter)):
|
||||
self.log = self._logtologger
|
||||
else:
|
||||
self.log = self._logtofile
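To illustrate the LoggerAdapter support added above, a minimal sketch; the logger name, extra dict and DSN are made up:

    import logging
    import psycopg2
    from psycopg2.extras import LoggingConnection

    logging.basicConfig(level=logging.DEBUG)
    logger = logging.LoggerAdapter(logging.getLogger("sql"), {"app": "demo"})

    conn = psycopg2.connect("dbname=test",          # hypothetical DSN
                            connection_factory=LoggingConnection)
    conn.initialize(logger)     # a LoggerAdapter is now accepted, not only a Logger
    cur = conn.cursor()
    cur.execute("SELECT 1")     # the query text is logged at DEBUG level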
|
||||
|
@ -439,7 +417,7 @@ class LoggingConnection(_connection):
|
|||
def _logtofile(self, msg, curs):
|
||||
msg = self.filter(msg, curs)
|
||||
if msg:
|
||||
if _sys.version_info[0] >= 3 and isinstance(msg, bytes):
|
||||
if isinstance(msg, bytes):
|
||||
msg = msg.decode(_ext.encodings[self.encoding], 'replace')
|
||||
self._logobj.write(msg + _os.linesep)
|
||||
|
||||
|
@ -455,8 +433,8 @@ class LoggingConnection(_connection):
|
|||
|
||||
def cursor(self, *args, **kwargs):
|
||||
self._check()
|
||||
kwargs.setdefault('cursor_factory', LoggingCursor)
|
||||
return super(LoggingConnection, self).cursor(*args, **kwargs)
|
||||
kwargs.setdefault('cursor_factory', self.cursor_factory or LoggingCursor)
|
||||
return super().cursor(*args, **kwargs)
|
||||
|
||||
|
||||
class LoggingCursor(_cursor):
|
||||
|
@ -464,13 +442,13 @@ class LoggingCursor(_cursor):
|
|||
|
||||
def execute(self, query, vars=None):
|
||||
try:
|
||||
return super(LoggingCursor, self).execute(query, vars)
|
||||
return super().execute(query, vars)
|
||||
finally:
|
||||
self.connection.log(self.query, self)
|
||||
|
||||
def callproc(self, procname, vars=None):
|
||||
try:
|
||||
return super(LoggingCursor, self).callproc(procname, vars)
|
||||
return super().callproc(procname, vars)
|
||||
finally:
|
||||
self.connection.log(self.query, self)
|
||||
|
||||
|
@ -493,12 +471,13 @@ class MinTimeLoggingConnection(LoggingConnection):
|
|||
def filter(self, msg, curs):
|
||||
t = (_time.time() - curs.timestamp) * 1000
|
||||
if t > self._mintime:
|
||||
if _sys.version_info[0] >= 3 and isinstance(msg, bytes):
|
||||
if isinstance(msg, bytes):
|
||||
msg = msg.decode(_ext.encodings[self.encoding], 'replace')
|
||||
return msg + _os.linesep + " (execution time: %d ms)" % t
|
||||
return f"{msg}{_os.linesep} (execution time: {t} ms)"
|
||||
|
||||
def cursor(self, *args, **kwargs):
|
||||
kwargs.setdefault('cursor_factory', MinTimeLoggingCursor)
|
||||
kwargs.setdefault('cursor_factory',
|
||||
self.cursor_factory or MinTimeLoggingCursor)
|
||||
return LoggingConnection.cursor(self, *args, **kwargs)
|
||||
|
||||
|
||||
|
@ -518,14 +497,14 @@ class LogicalReplicationConnection(_replicationConnection):
|
|||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['replication_type'] = REPLICATION_LOGICAL
|
||||
super(LogicalReplicationConnection, self).__init__(*args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
|
||||
class PhysicalReplicationConnection(_replicationConnection):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['replication_type'] = REPLICATION_PHYSICAL
|
||||
super(PhysicalReplicationConnection, self).__init__(*args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
|
||||
class StopReplication(Exception):
|
||||
|
@ -546,7 +525,7 @@ class ReplicationCursor(_replicationCursor):
|
|||
def create_replication_slot(self, slot_name, slot_type=None, output_plugin=None):
|
||||
"""Create streaming replication slot."""
|
||||
|
||||
command = "CREATE_REPLICATION_SLOT %s " % quote_ident(slot_name, self)
|
||||
command = f"CREATE_REPLICATION_SLOT {quote_ident(slot_name, self)} "
|
||||
|
||||
if slot_type is None:
|
||||
slot_type = self.connection.replication_type
|
||||
|
@ -557,7 +536,7 @@ class ReplicationCursor(_replicationCursor):
|
|||
"output plugin name is required to create "
|
||||
"logical replication slot")
|
||||
|
||||
command += "LOGICAL %s" % quote_ident(output_plugin, self)
|
||||
command += f"LOGICAL {quote_ident(output_plugin, self)}"
|
||||
|
||||
elif slot_type == REPLICATION_PHYSICAL:
|
||||
if output_plugin is not None:
|
||||
|
@ -569,18 +548,19 @@ class ReplicationCursor(_replicationCursor):
|
|||
|
||||
else:
|
||||
raise psycopg2.ProgrammingError(
|
||||
"unrecognized replication type: %s" % repr(slot_type))
|
||||
f"unrecognized replication type: {repr(slot_type)}")
|
||||
|
||||
self.execute(command)
|
||||
|
||||
def drop_replication_slot(self, slot_name):
|
||||
"""Drop streaming replication slot."""
|
||||
|
||||
command = "DROP_REPLICATION_SLOT %s" % quote_ident(slot_name, self)
|
||||
command = f"DROP_REPLICATION_SLOT {quote_ident(slot_name, self)}"
|
||||
self.execute(command)
|
||||
|
||||
def start_replication(self, slot_name=None, slot_type=None, start_lsn=0,
|
||||
timeline=0, options=None, decode=False):
|
||||
def start_replication(
|
||||
self, slot_name=None, slot_type=None, start_lsn=0,
|
||||
timeline=0, options=None, decode=False, status_interval=10):
|
||||
"""Start replication stream."""
|
||||
|
||||
command = "START_REPLICATION "
|
||||
|
@ -590,7 +570,7 @@ class ReplicationCursor(_replicationCursor):
|
|||
|
||||
if slot_type == REPLICATION_LOGICAL:
|
||||
if slot_name:
|
||||
command += "SLOT %s " % quote_ident(slot_name, self)
|
||||
command += f"SLOT {quote_ident(slot_name, self)} "
|
||||
else:
|
||||
raise psycopg2.ProgrammingError(
|
||||
"slot name is required for logical replication")
|
||||
|
@ -599,19 +579,18 @@ class ReplicationCursor(_replicationCursor):
|
|||
|
||||
elif slot_type == REPLICATION_PHYSICAL:
|
||||
if slot_name:
|
||||
command += "SLOT %s " % quote_ident(slot_name, self)
|
||||
command += f"SLOT {quote_ident(slot_name, self)} "
|
||||
# don't add "PHYSICAL", before 9.4 it was just START_REPLICATION XXX/XXX
|
||||
|
||||
else:
|
||||
raise psycopg2.ProgrammingError(
|
||||
"unrecognized replication type: %s" % repr(slot_type))
|
||||
f"unrecognized replication type: {repr(slot_type)}")
|
||||
|
||||
if type(start_lsn) is str:
|
||||
lsn = start_lsn.split('/')
|
||||
lsn = "%X/%08X" % (int(lsn[0], 16), int(lsn[1], 16))
|
||||
lsn = f"{int(lsn[0], 16):X}/{int(lsn[1], 16):08X}"
|
||||
else:
|
||||
lsn = "%X/%08X" % ((start_lsn >> 32) & 0xFFFFFFFF,
|
||||
start_lsn & 0xFFFFFFFF)
|
||||
lsn = f"{start_lsn >> 32 & 4294967295:X}/{start_lsn & 4294967295:08X}"
|
||||
|
||||
command += lsn
|
||||
|
||||
|
@ -620,7 +599,7 @@ class ReplicationCursor(_replicationCursor):
|
|||
raise psycopg2.ProgrammingError(
|
||||
"cannot specify timeline for logical replication")
|
||||
|
||||
command += " TIMELINE %d" % timeline
|
||||
command += f" TIMELINE {timeline}"
|
||||
|
||||
if options:
|
||||
if slot_type == REPLICATION_PHYSICAL:
|
||||
|
@ -631,10 +610,11 @@ class ReplicationCursor(_replicationCursor):
|
|||
for k, v in options.items():
|
||||
if not command.endswith('('):
|
||||
command += ", "
|
||||
command += "%s %s" % (quote_ident(k, self), _A(str(v)))
|
||||
command += f"{quote_ident(k, self)} {_A(str(v))}"
|
||||
command += ")"
|
||||
|
||||
self.start_replication_expert(command, decode=decode)
|
||||
self.start_replication_expert(
|
||||
command, decode=decode, status_interval=status_interval)
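A hedged sketch of the new status_interval keyword in use; it assumes a server configured for logical replication, a hypothetical DSN, and illustrative slot/plugin names:

    import psycopg2
    from psycopg2.extras import LogicalReplicationConnection

    conn = psycopg2.connect("dbname=test",          # hypothetical DSN
                            connection_factory=LogicalReplicationConnection)
    cur = conn.cursor()
    cur.create_replication_slot('myslot', output_plugin='test_decoding')
    cur.start_replication(slot_name='myslot', decode=True,
                          status_interval=5)        # send feedback every 5 seconds
    # messages are then processed with cur.consume_stream(callback)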
|
||||
|
||||
# allows replication cursors to be used in select.select() directly
|
||||
def fileno(self):
|
||||
|
@ -643,7 +623,7 @@ class ReplicationCursor(_replicationCursor):
|
|||
|
||||
# a dbtype and adapter for Python UUID type
|
||||
|
||||
class UUID_adapter(object):
|
||||
class UUID_adapter:
|
||||
"""Adapt Python's uuid.UUID__ type to PostgreSQL's uuid__.
|
||||
|
||||
.. __: https://docs.python.org/library/uuid.html
|
||||
|
@ -658,10 +638,10 @@ class UUID_adapter(object):
|
|||
return self
|
||||
|
||||
def getquoted(self):
|
||||
return ("'%s'::uuid" % self._uuid).encode('utf8')
|
||||
return (f"'{self._uuid}'::uuid").encode('utf8')
|
||||
|
||||
def __str__(self):
|
||||
return "'%s'::uuid" % self._uuid
|
||||
return f"'{self._uuid}'::uuid"
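For context, the usual way the UUID adapter above is enabled (a sketch; the DSN is hypothetical):

    import uuid
    import psycopg2
    import psycopg2.extras

    psycopg2.extras.register_uuid()                 # adapt uuid.UUID <-> uuid columns
    conn = psycopg2.connect("dbname=test")          # hypothetical DSN
    cur = conn.cursor()
    cur.execute("SELECT %s::uuid", (uuid.uuid4(),))
    print(type(cur.fetchone()[0]))                  # <class 'uuid.UUID'>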
|
||||
|
||||
|
||||
def register_uuid(oids=None, conn_or_curs=None):
|
||||
|
@ -698,7 +678,7 @@ def register_uuid(oids=None, conn_or_curs=None):
|
|||
|
||||
# a type, dbtype and adapter for PostgreSQL inet type
|
||||
|
||||
class Inet(object):
|
||||
class Inet:
|
||||
"""Wrap a string to allow for correct SQL-quoting of inet values.
|
||||
|
||||
Note that this adapter does NOT check the passed value to make
|
||||
|
@ -710,7 +690,7 @@ class Inet(object):
|
|||
self.addr = addr
|
||||
|
||||
def __repr__(self):
|
||||
return "%s(%r)" % (self.__class__.__name__, self.addr)
|
||||
return f"{self.__class__.__name__}({self.addr!r})"
|
||||
|
||||
def prepare(self, conn):
|
||||
self._conn = conn
|
||||
|
@ -773,7 +753,7 @@ def wait_select(conn):
|
|||
import select
|
||||
from psycopg2.extensions import POLL_OK, POLL_READ, POLL_WRITE
|
||||
|
||||
while 1:
|
||||
while True:
|
||||
try:
|
||||
state = conn.poll()
|
||||
if state == POLL_OK:
|
||||
|
@ -783,7 +763,7 @@ def wait_select(conn):
|
|||
elif state == POLL_WRITE:
|
||||
select.select([], [conn.fileno()], [])
|
||||
else:
|
||||
raise conn.OperationalError("bad state from poll: %s" % state)
|
||||
raise conn.OperationalError(f"bad state from poll: {state}")
|
||||
except KeyboardInterrupt:
|
||||
conn.cancel()
|
||||
# the loop will be broken by a server error
|
||||
|
@ -805,7 +785,7 @@ def _solve_conn_curs(conn_or_curs):
|
|||
return conn, curs
|
||||
|
||||
|
||||
class HstoreAdapter(object):
|
||||
class HstoreAdapter:
|
||||
"""Adapt a Python dict to the hstore syntax."""
|
||||
def __init__(self, wrapped):
|
||||
self.wrapped = wrapped
|
||||
|
@ -885,7 +865,7 @@ class HstoreAdapter(object):
|
|||
for m in self._re_hstore.finditer(s):
|
||||
if m is None or m.start() != start:
|
||||
raise psycopg2.InterfaceError(
|
||||
"error parsing hstore pair at char %d" % start)
|
||||
f"error parsing hstore pair at char {start}")
|
||||
k = _bsdec.sub(r'\1', m.group(1))
|
||||
v = m.group(2)
|
||||
if v is not None:
|
||||
|
@ -896,7 +876,7 @@ class HstoreAdapter(object):
|
|||
|
||||
if start < len(s):
|
||||
raise psycopg2.InterfaceError(
|
||||
"error parsing hstore: unparsed data after char %d" % start)
|
||||
f"error parsing hstore: unparsed data after char {start}")
|
||||
|
||||
return rv
|
||||
|
||||
|
@ -924,12 +904,11 @@ class HstoreAdapter(object):
|
|||
rv0, rv1 = [], []
|
||||
|
||||
# get the oid for the hstore
|
||||
curs.execute("""\
|
||||
SELECT t.oid, %s
|
||||
curs.execute(f"""SELECT t.oid, {typarray}
|
||||
FROM pg_type t JOIN pg_namespace ns
|
||||
ON typnamespace = ns.oid
|
||||
WHERE typname = 'hstore';
|
||||
""" % typarray)
|
||||
""")
|
||||
for oids in curs:
|
||||
rv0.append(oids[0])
|
||||
rv1.append(oids[1])
|
||||
|
@ -993,12 +972,7 @@ def register_hstore(conn_or_curs, globally=False, unicode=False,
|
|||
array_oid = tuple([x for x in array_oid if x])
|
||||
|
||||
# create and register the typecaster
|
||||
if _sys.version_info[0] < 3 and unicode:
|
||||
cast = HstoreAdapter.parse_unicode
|
||||
else:
|
||||
cast = HstoreAdapter.parse
|
||||
|
||||
HSTORE = _ext.new_type(oid, "HSTORE", cast)
|
||||
HSTORE = _ext.new_type(oid, "HSTORE", HstoreAdapter.parse)
|
||||
_ext.register_type(HSTORE, not globally and conn_or_curs or None)
|
||||
_ext.register_adapter(dict, HstoreAdapter)
|
||||
|
||||
|
@ -1007,7 +981,7 @@ def register_hstore(conn_or_curs, globally=False, unicode=False,
|
|||
_ext.register_type(HSTOREARRAY, not globally and conn_or_curs or None)
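A minimal sketch of register_hstore() after the removal of the Python 2 unicode switch; it assumes the hstore extension is installed in the target database and a hypothetical DSN:

    import psycopg2
    import psycopg2.extras

    conn = psycopg2.connect("dbname=test")          # hypothetical DSN
    psycopg2.extras.register_hstore(conn)           # keys and values are parsed as str
    cur = conn.cursor()
    cur.execute("SELECT 'a=>1, b=>NULL'::hstore")
    print(cur.fetchone()[0])                        # {'a': '1', 'b': None}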
|
||||
|
||||
|
||||
class CompositeCaster(object):
|
||||
class CompositeCaster:
|
||||
"""Helps conversion of a PostgreSQL composite type into a Python object.
|
||||
|
||||
The class is usually created by the `register_composite()` function.
|
||||
|
@ -1028,7 +1002,7 @@ class CompositeCaster(object):
|
|||
self.typecaster = _ext.new_type((oid,), name, self.parse)
|
||||
if array_oid:
|
||||
self.array_typecaster = _ext.new_array_type(
|
||||
(array_oid,), "%sARRAY" % name, self.typecaster)
|
||||
(array_oid,), f"{name}ARRAY", self.typecaster)
|
||||
else:
|
||||
self.array_typecaster = None
|
||||
|
||||
|
@ -1072,7 +1046,7 @@ class CompositeCaster(object):
|
|||
rv = []
|
||||
for m in self._re_tokenize.finditer(s):
|
||||
if m is None:
|
||||
raise psycopg2.InterfaceError("can't parse type: %r" % s)
|
||||
raise psycopg2.InterfaceError(f"can't parse type: {s!r}")
|
||||
if m.group(1) is not None:
|
||||
rv.append(None)
|
||||
elif m.group(2) is not None:
|
||||
|
@ -1083,6 +1057,7 @@ class CompositeCaster(object):
|
|||
return rv
|
||||
|
||||
def _create_type(self, name, attnames):
|
||||
name = _re_clean.sub('_', name)
|
||||
self.type = namedtuple(name, attnames)
|
||||
self._ctor = self.type._make
|
||||
|
||||
|
@ -1120,14 +1095,46 @@ ORDER BY attnum;
|
|||
|
||||
recs = curs.fetchall()
|
||||
|
||||
if not recs:
|
||||
# The above algorithm doesn't work for customized seach_path
|
||||
# (#1487) The implementation below works better, but, to guarantee
|
||||
# backwards compatibility, use it only if the original one failed.
|
||||
try:
|
||||
savepoint = False
|
||||
# Because we executed statements earlier, we are either INTRANS
|
||||
# or we are IDLE only if the transaction is autocommit, in
|
||||
# which case we don't need the savepoint anyway.
|
||||
if conn.status == _ext.STATUS_IN_TRANSACTION:
|
||||
curs.execute("SAVEPOINT register_type")
|
||||
savepoint = True
|
||||
|
||||
curs.execute("""\
|
||||
SELECT t.oid, %s, attname, atttypid, typname, nspname
|
||||
FROM pg_type t
|
||||
JOIN pg_namespace ns ON typnamespace = ns.oid
|
||||
JOIN pg_attribute a ON attrelid = typrelid
|
||||
WHERE t.oid = %%s::regtype
|
||||
AND attnum > 0 AND NOT attisdropped
|
||||
ORDER BY attnum;
|
||||
""" % typarray, (name, ))
|
||||
except psycopg2.ProgrammingError:
|
||||
pass
|
||||
else:
|
||||
recs = curs.fetchall()
|
||||
if recs:
|
||||
tname = recs[0][4]
|
||||
schema = recs[0][5]
|
||||
finally:
|
||||
if savepoint:
|
||||
curs.execute("ROLLBACK TO SAVEPOINT register_type")
|
||||
|
||||
# revert the status of the connection as before the command
|
||||
if (conn_status != _ext.STATUS_IN_TRANSACTION
|
||||
and not conn.autocommit):
|
||||
if conn_status != _ext.STATUS_IN_TRANSACTION and not conn.autocommit:
|
||||
conn.rollback()
|
||||
|
||||
if not recs:
|
||||
raise psycopg2.ProgrammingError(
|
||||
"PostgreSQL type '%s' not found" % name)
|
||||
f"PostgreSQL type '{name}' not found")
|
||||
|
||||
type_oid = recs[0][0]
|
||||
array_oid = recs[0][1]
|
||||
|
@ -1172,7 +1179,7 @@ def _paginate(seq, page_size):
|
|||
"""
|
||||
page = []
|
||||
it = iter(seq)
|
||||
while 1:
|
||||
while True:
|
||||
try:
|
||||
for i in range(page_size):
|
||||
page.append(next(it))
|
||||
|
@ -1326,3 +1333,8 @@ def _split_sql(sql):
|
|||
raise ValueError("the query doesn't contain any '%s' placeholder")
|
||||
|
||||
return pre, post
|
||||
|
||||
|
||||
# ascii except alnum and underscore
|
||||
_re_clean = _re.compile(
|
||||
'[' + _re.escape(' !"#$%&\'()*+,-./:;<=>?@[\\]^`{|}~') + ']')
|
||||
|
|
11  lib/pool.py
|
@ -4,7 +4,8 @@ This module implements thread-safe (and not) connection pools.
|
|||
"""
|
||||
# psycopg/pool.py - pooling code for psycopg
|
||||
#
|
||||
# Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
# Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -32,7 +33,7 @@ class PoolError(psycopg2.Error):
|
|||
pass
|
||||
|
||||
|
||||
class AbstractConnectionPool(object):
|
||||
class AbstractConnectionPool:
|
||||
"""Generic key-based pooling code."""
|
||||
|
||||
def __init__(self, minconn, maxconn, *args, **kwargs):
|
||||
|
@ -95,11 +96,11 @@ class AbstractConnectionPool(object):
|
|||
"""Put away a connection."""
|
||||
if self.closed:
|
||||
raise PoolError("connection pool is closed")
|
||||
|
||||
if key is None:
|
||||
key = self._rused.get(id(conn))
|
||||
|
||||
if not key:
|
||||
raise PoolError("trying to put unkeyed connection")
|
||||
if key is None:
|
||||
raise PoolError("trying to put unkeyed connection")
|
||||
|
||||
if len(self._pool) < self.minconn and not close:
|
||||
# Return the connection into a consistent state before putting
|
||||
|
|
47  lib/sql.py
|
@ -3,7 +3,8 @@
|
|||
|
||||
# psycopg/sql.py - SQL composition utility module
|
||||
#
|
||||
# Copyright (C) 2016 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
# Copyright (C) 2016-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -23,17 +24,15 @@
|
|||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
|
||||
# License for more details.
|
||||
|
||||
import sys
|
||||
import string
|
||||
|
||||
from psycopg2 import extensions as ext
|
||||
from psycopg2.compat import string_types
|
||||
|
||||
|
||||
_formatter = string.Formatter()
|
||||
|
||||
|
||||
class Composable(object):
|
||||
class Composable:
|
||||
"""
|
||||
Abstract base class for objects that can be used to compose an SQL string.
|
||||
|
||||
|
@ -51,7 +50,7 @@ class Composable(object):
|
|||
self._wrapped = wrapped
|
||||
|
||||
def __repr__(self):
|
||||
return "%s(%r)" % (self.__class__.__name__, self._wrapped)
|
||||
return f"{self.__class__.__name__}({self._wrapped!r})"
|
||||
|
||||
def as_string(self, context):
|
||||
"""
|
||||
|
@ -107,10 +106,10 @@ class Composed(Composable):
|
|||
for i in seq:
|
||||
if not isinstance(i, Composable):
|
||||
raise TypeError(
|
||||
"Composed elements must be Composable, got %r instead" % i)
|
||||
f"Composed elements must be Composable, got {i!r} instead")
|
||||
wrapped.append(i)
|
||||
|
||||
super(Composed, self).__init__(wrapped)
|
||||
super().__init__(wrapped)
|
||||
|
||||
@property
|
||||
def seq(self):
|
||||
|
@ -148,7 +147,7 @@ class Composed(Composable):
|
|||
"foo", "bar"
|
||||
|
||||
"""
|
||||
if isinstance(joiner, string_types):
|
||||
if isinstance(joiner, str):
|
||||
joiner = SQL(joiner)
|
||||
elif not isinstance(joiner, SQL):
|
||||
raise TypeError(
|
||||
|
@ -180,9 +179,9 @@ class SQL(Composable):
|
|||
select "foo", "bar" from "table"
|
||||
"""
|
||||
def __init__(self, string):
|
||||
if not isinstance(string, string_types):
|
||||
if not isinstance(string, str):
|
||||
raise TypeError("SQL values must be strings")
|
||||
super(SQL, self).__init__(string)
|
||||
super().__init__(string)
|
||||
|
||||
@property
|
||||
def string(self):
|
||||
|
@ -290,7 +289,7 @@ class SQL(Composable):
|
|||
|
||||
class Identifier(Composable):
|
||||
"""
|
||||
A `Composable` representing an SQL identifer or a dot-separated sequence.
|
||||
A `Composable` representing an SQL identifier or a dot-separated sequence.
|
||||
|
||||
Identifiers usually represent names of database objects, such as tables or
|
||||
fields. PostgreSQL identifiers follow `different rules`__ than SQL string
|
||||
|
@ -324,10 +323,10 @@ class Identifier(Composable):
|
|||
raise TypeError("Identifier cannot be empty")
|
||||
|
||||
for s in strings:
|
||||
if not isinstance(s, string_types):
|
||||
if not isinstance(s, str):
|
||||
raise TypeError("SQL identifier parts must be strings")
|
||||
|
||||
super(Identifier, self).__init__(strings)
|
||||
super().__init__(strings)
|
||||
|
||||
@property
|
||||
def strings(self):
|
||||
|
@ -345,9 +344,7 @@ class Identifier(Composable):
|
|||
"the Identifier wraps more than one than one string")
|
||||
|
||||
def __repr__(self):
|
||||
return "%s(%s)" % (
|
||||
self.__class__.__name__,
|
||||
', '.join(map(repr, self._wrapped)))
|
||||
return f"{self.__class__.__name__}({', '.join(map(repr, self._wrapped))})"
|
||||
|
||||
def as_string(self, context):
|
||||
return '.'.join(ext.quote_ident(s, context) for s in self._wrapped)
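A short sketch of dot-separated Identifier composition as implemented above; the table and column names are made up, and conn is assumed to be an open connection used only as quoting context:

    from psycopg2 import sql

    stmt = sql.SQL("SELECT {fields} FROM {table}").format(
        fields=sql.SQL(', ').join(map(sql.Identifier, ['id', 'name'])),
        table=sql.Identifier('public', 'users'))    # rendered as "public"."users"
    print(stmt.as_string(conn))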
|
||||
|
@ -392,7 +389,7 @@ class Literal(Composable):
|
|||
a.prepare(conn)
|
||||
|
||||
rv = a.getquoted()
|
||||
if sys.version_info[0] >= 3 and isinstance(rv, bytes):
|
||||
if isinstance(rv, bytes):
|
||||
rv = rv.decode(ext.encodings[conn.encoding])
|
||||
|
||||
return rv
|
||||
|
@ -426,14 +423,14 @@ class Placeholder(Composable):
|
|||
"""
|
||||
|
||||
def __init__(self, name=None):
|
||||
if isinstance(name, string_types):
|
||||
if isinstance(name, str):
|
||||
if ')' in name:
|
||||
raise ValueError("invalid name: %r" % name)
|
||||
raise ValueError(f"invalid name: {name!r}")
|
||||
|
||||
elif name is not None:
|
||||
raise TypeError("expected string or None as name, got %r" % name)
|
||||
raise TypeError(f"expected string or None as name, got {name!r}")
|
||||
|
||||
super(Placeholder, self).__init__(name)
|
||||
super().__init__(name)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
|
@ -441,12 +438,14 @@ class Placeholder(Composable):
|
|||
return self._wrapped
|
||||
|
||||
def __repr__(self):
|
||||
return "Placeholder(%r)" % (
|
||||
self._wrapped if self._wrapped is not None else '',)
|
||||
if self._wrapped is None:
|
||||
return f"{self.__class__.__name__}()"
|
||||
else:
|
||||
return f"{self.__class__.__name__}({self._wrapped!r})"
|
||||
|
||||
def as_string(self, context):
|
||||
if self._wrapped is not None:
|
||||
return "%%(%s)s" % self._wrapped
|
||||
return f"%({self._wrapped})s"
|
||||
else:
|
||||
return "%s"
|
||||
|
||||
|
|
50  lib/tz.py
|
@ -6,7 +6,8 @@ functions or used to set the .tzinfo_factory attribute in cursors.
|
|||
"""
|
||||
# psycopg/tz.py - tzinfo implementation
|
||||
#
|
||||
# Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
# Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
# Copyright (C) 2020-2021 The Psycopg Team
|
||||
#
|
||||
# psycopg2 is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
|
@ -44,6 +45,11 @@ class FixedOffsetTimezone(datetime.tzinfo):
|
|||
offset and name that instance will be returned. This saves memory and
|
||||
improves comparability.
|
||||
|
||||
.. versionchanged:: 2.9
|
||||
|
||||
The constructor can take either a timedelta or a number of minutes of
|
||||
offset. Previously only minutes were supported.
|
||||
|
||||
.. __: https://docs.python.org/library/datetime.html
|
||||
"""
|
||||
_name = None
|
||||
|
@ -53,7 +59,9 @@ class FixedOffsetTimezone(datetime.tzinfo):
|
|||
|
||||
def __init__(self, offset=None, name=None):
|
||||
if offset is not None:
|
||||
self._offset = datetime.timedelta(minutes=offset)
|
||||
if not isinstance(offset, datetime.timedelta):
|
||||
offset = datetime.timedelta(minutes=offset)
|
||||
self._offset = offset
|
||||
if name is not None:
|
||||
self._name = name
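A small check of the constructor change described above (no database needed):

    import datetime
    from psycopg2.tz import FixedOffsetTimezone

    # Since 2.9 the offset may be given as a timedelta as well as in minutes
    tz1 = FixedOffsetTimezone(offset=datetime.timedelta(hours=5, minutes=30), name="IST")
    tz2 = FixedOffsetTimezone(offset=330, name="IST")
    assert tz1 == tz2
    print(tz1.utcoffset(None))                      # 5:30:00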
|
||||
|
||||
|
@ -64,18 +72,28 @@ class FixedOffsetTimezone(datetime.tzinfo):
|
|||
try:
|
||||
return cls._cache[key]
|
||||
except KeyError:
|
||||
tz = super(FixedOffsetTimezone, cls).__new__(cls, offset, name)
|
||||
tz = super().__new__(cls, offset, name)
|
||||
cls._cache[key] = tz
|
||||
return tz
|
||||
|
||||
def __repr__(self):
|
||||
offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60
|
||||
return "psycopg2.tz.FixedOffsetTimezone(offset=%r, name=%r)" \
|
||||
% (offset_mins, self._name)
|
||||
% (self._offset, self._name)
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, FixedOffsetTimezone):
|
||||
return self._offset == other._offset
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
def __ne__(self, other):
|
||||
if isinstance(other, FixedOffsetTimezone):
|
||||
return self._offset != other._offset
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
def __getinitargs__(self):
|
||||
offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60
|
||||
return offset_mins, self._name
|
||||
return self._offset, self._name
|
||||
|
||||
def utcoffset(self, dt):
|
||||
return self._offset
|
||||
|
@ -83,14 +101,16 @@ class FixedOffsetTimezone(datetime.tzinfo):
|
|||
def tzname(self, dt):
|
||||
if self._name is not None:
|
||||
return self._name
|
||||
else:
|
||||
seconds = self._offset.seconds + self._offset.days * 86400
|
||||
hours, seconds = divmod(seconds, 3600)
|
||||
minutes = seconds / 60
|
||||
if minutes:
|
||||
return "%+03d:%d" % (hours, minutes)
|
||||
else:
|
||||
return "%+03d" % hours
|
||||
|
||||
minutes, seconds = divmod(self._offset.total_seconds(), 60)
|
||||
hours, minutes = divmod(minutes, 60)
|
||||
rv = "%+03d" % hours
|
||||
if minutes or seconds:
|
||||
rv += ":%02d" % minutes
|
||||
if seconds:
|
||||
rv += ":%02d" % seconds
|
||||
|
||||
return rv
|
||||
|
||||
def dst(self, dt):
|
||||
return ZERO
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* adapter_asis.c - adapt types as they are
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -44,14 +45,12 @@ asis_getquoted(asisObject *self, PyObject *args)
|
|||
}
|
||||
else {
|
||||
rv = PyObject_Str(self->wrapped);
|
||||
#if PY_MAJOR_VERSION > 2
|
||||
/* unicode to bytes in Py3 */
|
||||
/* unicode to bytes */
|
||||
if (rv) {
|
||||
PyObject *tmp = PyUnicode_AsUTF8String(rv);
|
||||
Py_DECREF(rv);
|
||||
rv = tmp;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
return rv;
|
||||
|
@ -60,7 +59,7 @@ asis_getquoted(asisObject *self, PyObject *args)
|
|||
static PyObject *
|
||||
asis_str(asisObject *self)
|
||||
{
|
||||
return psycopg_ensure_text(asis_getquoted(self, NULL));
|
||||
return psyco_ensure_text(asis_getquoted(self, NULL));
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* adapter_asis.h - definition for the psycopg AsIs type wrapper
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* adapter_binary.c - Binary objects
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -45,9 +46,6 @@ binary_escape(unsigned char *from, size_t from_length,
|
|||
return PQescapeBytea(from, from_length, to_length);
|
||||
}
|
||||
|
||||
#define HAS_BUFFER (PY_MAJOR_VERSION < 3)
|
||||
#define HAS_MEMORYVIEW (PY_MAJOR_VERSION > 2 || PY_MINOR_VERSION >= 6)
|
||||
|
||||
/* binary_quote - do the quote process on plain and unicode strings */
|
||||
|
||||
static PyObject *
|
||||
|
@ -58,10 +56,8 @@ binary_quote(binaryObject *self)
|
|||
Py_ssize_t buffer_len;
|
||||
size_t len = 0;
|
||||
PyObject *rv = NULL;
|
||||
#if HAS_MEMORYVIEW
|
||||
Py_buffer view;
|
||||
int got_view = 0;
|
||||
#endif
|
||||
|
||||
/* Allow Binary(None) to work */
|
||||
if (self->wrapped == Py_None) {
|
||||
|
@ -71,8 +67,6 @@ binary_quote(binaryObject *self)
|
|||
}
|
||||
|
||||
/* if we got a plain string or a buffer we escape it and save the buffer */
|
||||
|
||||
#if HAS_MEMORYVIEW
|
||||
if (PyObject_CheckBuffer(self->wrapped)) {
|
||||
if (0 > PyObject_GetBuffer(self->wrapped, &view, PyBUF_CONTIG_RO)) {
|
||||
goto exit;
|
||||
|
@ -81,16 +75,6 @@ binary_quote(binaryObject *self)
|
|||
buffer = (const char *)(view.buf);
|
||||
buffer_len = view.len;
|
||||
}
|
||||
#endif
|
||||
|
||||
#if HAS_BUFFER
|
||||
if (!buffer && (Bytes_Check(self->wrapped) || PyBuffer_Check(self->wrapped))) {
|
||||
if (PyObject_AsReadBuffer(self->wrapped, (const void **)&buffer,
|
||||
&buffer_len) < 0) {
|
||||
goto exit;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
if (!buffer) {
|
||||
goto exit;
|
||||
|
@ -114,9 +98,7 @@ binary_quote(binaryObject *self)
|
|||
|
||||
exit:
|
||||
if (to) { PQfreemem(to); }
|
||||
#if HAS_MEMORYVIEW
|
||||
if (got_view) { PyBuffer_Release(&view); }
|
||||
#endif
|
||||
|
||||
/* if the wrapped object is not bytes or a buffer, this is an error */
|
||||
if (!rv && !PyErr_Occurred()) {
|
||||
|
@ -142,7 +124,7 @@ binary_getquoted(binaryObject *self, PyObject *args)
|
|||
static PyObject *
|
||||
binary_str(binaryObject *self)
|
||||
{
|
||||
return psycopg_ensure_text(binary_getquoted(self, NULL));
|
||||
return psyco_ensure_text(binary_getquoted(self, NULL));
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* adapter_binary.h - definition for the Binary type
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* adapter_datetime.c - python date/time objects
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -36,10 +37,8 @@
|
|||
|
||||
|
||||
RAISES_NEG int
|
||||
psyco_adapter_datetime_init(void)
|
||||
adapter_datetime_init(void)
|
||||
{
|
||||
Dprintf("psyco_adapter_datetime_init: datetime init");
|
||||
|
||||
PyDateTime_IMPORT;
|
||||
|
||||
if (!PyDateTimeAPI) {
|
||||
|
@ -78,7 +77,7 @@ _pydatetime_string_date_time(pydatetimeObject *self)
|
|||
break;
|
||||
}
|
||||
|
||||
if (!(iso = psycopg_ensure_bytes(
|
||||
if (!(iso = psyco_ensure_bytes(
|
||||
PyObject_CallMethod(self->wrapped, "isoformat", NULL)))) {
|
||||
goto error;
|
||||
}
|
||||
|
@ -128,7 +127,7 @@ pydatetime_getquoted(pydatetimeObject *self, PyObject *args)
|
|||
static PyObject *
|
||||
pydatetime_str(pydatetimeObject *self)
|
||||
{
|
||||
return psycopg_ensure_text(pydatetime_getquoted(self, NULL));
|
||||
return psyco_ensure_text(pydatetime_getquoted(self, NULL));
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
|
@ -265,8 +264,6 @@ PyTypeObject pydatetimeType = {
|
|||
|
||||
/** module-level functions **/
|
||||
|
||||
#ifdef PSYCOPG_DEFAULT_PYDATETIME
|
||||
|
||||
PyObject *
|
||||
psyco_Date(PyObject *self, PyObject *args)
|
||||
{
|
||||
|
@ -426,8 +423,8 @@ psyco_TimeFromTicks(PyObject *self, PyObject *args)
|
|||
PyObject *
|
||||
psyco_TimestampFromTicks(PyObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *m = NULL;
|
||||
PyObject *tz = NULL;
|
||||
pydatetimeObject *wrapper = NULL;
|
||||
PyObject *dt_aware = NULL;
|
||||
PyObject *res = NULL;
|
||||
struct tm tm;
|
||||
time_t t;
|
||||
|
@ -436,10 +433,6 @@ psyco_TimestampFromTicks(PyObject *self, PyObject *args)
|
|||
if (!PyArg_ParseTuple(args, "d", &ticks))
|
||||
return NULL;
|
||||
|
||||
/* get psycopg2.tz.LOCAL from pythonland */
|
||||
if (!(m = PyImport_ImportModule("psycopg2.tz"))) { goto exit; }
|
||||
if (!(tz = PyObject_GetAttrString(m, "LOCAL"))) { goto exit; }
|
||||
|
||||
t = (time_t)floor(ticks);
|
||||
ticks -= (double)t;
|
||||
if (!localtime_r(&t, &tm)) {
|
||||
|
@ -447,19 +440,32 @@ psyco_TimestampFromTicks(PyObject *self, PyObject *args)
|
|||
goto exit;
|
||||
}
|
||||
|
||||
res = _psyco_Timestamp(
|
||||
tm.tm_year + 1900, tm.tm_mon + 1, tm.tm_mday,
|
||||
tm.tm_hour, tm.tm_min, (double)tm.tm_sec + ticks,
|
||||
tz);
|
||||
/* Convert the tm to a wrapper containing a naive datetime.datetime */
|
||||
if (!(wrapper = (pydatetimeObject *)_psyco_Timestamp(
|
||||
tm.tm_year + 1900, tm.tm_mon + 1, tm.tm_mday,
|
||||
tm.tm_hour, tm.tm_min, (double)tm.tm_sec + ticks, NULL))) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
/* Localize the datetime and assign it back to the wrapper */
|
||||
if (!(dt_aware = PyObject_CallMethod(
|
||||
wrapper->wrapped, "astimezone", NULL))) {
|
||||
goto exit;
|
||||
}
|
||||
Py_CLEAR(wrapper->wrapped);
|
||||
wrapper->wrapped = dt_aware;
|
||||
dt_aware = NULL;
|
||||
|
||||
/* the wrapper is ready to be returned */
|
||||
res = (PyObject *)wrapper;
|
||||
wrapper = NULL;
|
||||
|
||||
exit:
|
||||
Py_XDECREF(tz);
|
||||
Py_XDECREF(m);
|
||||
Py_XDECREF(dt_aware);
|
||||
Py_XDECREF(wrapper);
|
||||
return res;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
PyObject *
|
||||
psyco_DateFromPy(PyObject *self, PyObject *args)
|
||||
{
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* adapter_datetime.h - definition for the python date/time types
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -45,10 +46,7 @@ typedef struct {
|
|||
} pydatetimeObject;
|
||||
|
||||
|
||||
RAISES_NEG HIDDEN int psyco_adapter_datetime_init(void);
|
||||
|
||||
/* functions exported to psycopgmodule.c */
|
||||
#ifdef PSYCOPG_DEFAULT_PYDATETIME
|
||||
RAISES_NEG HIDDEN int adapter_datetime_init(void);
|
||||
|
||||
HIDDEN PyObject *psyco_Date(PyObject *module, PyObject *args);
|
||||
#define psyco_Date_doc \
|
||||
|
@ -86,8 +84,6 @@ HIDDEN PyObject *psyco_TimestampFromTicks(PyObject *module, PyObject *args);
|
|||
"Ticks are the number of seconds since the epoch; see the documentation " \
|
||||
"of the standard Python time module for details)."
|
||||
|
||||
#endif /* PSYCOPG_DEFAULT_PYDATETIME */
|
||||
|
||||
HIDDEN PyObject *psyco_DateFromPy(PyObject *module, PyObject *args);
|
||||
#define psyco_DateFromPy_doc \
|
||||
"DateFromPy(datetime.date) -> new wrapper"
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* adapter_list.c - python list objects
|
||||
*
|
||||
* Copyright (C) 2004-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2004-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -165,7 +166,7 @@ exit:
|
|||
static PyObject *
|
||||
list_str(listObject *self)
|
||||
{
|
||||
return psycopg_ensure_text(list_quote(self));
|
||||
return psyco_ensure_text(list_quote(self));
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* adapter_list.h - definition for the python list types
|
||||
*
|
||||
* Copyright (C) 2004-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2004-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,428 +0,0 @@
|
|||
/* adapter_mxdatetime.c - mx date/time objects
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
* psycopg2 is free software: you can redistribute it and/or modify it
|
||||
* under the terms of the GNU Lesser General Public License as published
|
||||
* by the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* In addition, as a special exception, the copyright holders give
|
||||
* permission to link this program with the OpenSSL library (or with
|
||||
* modified versions of OpenSSL that use the same license as OpenSSL),
|
||||
* and distribute linked combinations including the two.
|
||||
*
|
||||
* You must obey the GNU Lesser General Public License in all respects for
|
||||
* all of the code used other than OpenSSL.
|
||||
*
|
||||
* psycopg2 is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
|
||||
* License for more details.
|
||||
*/
|
||||
|
||||
#define PSYCOPG_MODULE
|
||||
#include "psycopg/psycopg.h"
|
||||
|
||||
#include "psycopg/adapter_mxdatetime.h"
|
||||
#include "psycopg/microprotocols_proto.h"
|
||||
|
||||
#include <mxDateTime.h>
|
||||
#include <string.h>
|
||||
|
||||
|
||||
/* Return 0 on success, -1 on failure, but don't set an exception */
|
||||
|
||||
int
|
||||
psyco_adapter_mxdatetime_init(void)
|
||||
{
|
||||
Dprintf("psyco_adapter_mxdatetime_init: mx.DateTime init");
|
||||
|
||||
if (mxDateTime_ImportModuleAndAPI()) {
|
||||
Dprintf("psyco_adapter_mxdatetime_init: mx.DateTime initialization failed");
|
||||
PyErr_Clear();
|
||||
return -1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
/* mxdatetime_str, mxdatetime_getquoted - return result of quoting */

static PyObject *
mxdatetime_str(mxdatetimeObject *self)
{
    mxDateTimeObject *dt;
    mxDateTimeDeltaObject *dtd;
    char buf[128] = { 0, };

    switch (self->type) {

    case PSYCO_MXDATETIME_DATE:
        dt = (mxDateTimeObject *)self->wrapped;
        if (dt->year >= 1)
            PyOS_snprintf(buf, sizeof(buf) - 1, "'%04ld-%02d-%02d'::date",
                dt->year, (int)dt->month, (int)dt->day);
        else
            PyOS_snprintf(buf, sizeof(buf) - 1, "'%04ld-%02d-%02d BC'::date",
                1 - dt->year, (int)dt->month, (int)dt->day);
        break;

    case PSYCO_MXDATETIME_TIMESTAMP:
        dt = (mxDateTimeObject *)self->wrapped;
        if (dt->year >= 1)
            PyOS_snprintf(buf, sizeof(buf) - 1,
                "'%04ld-%02d-%02dT%02d:%02d:%09.6f'::timestamp",
                dt->year, (int)dt->month, (int)dt->day,
                (int)dt->hour, (int)dt->minute, dt->second);
        else
            PyOS_snprintf(buf, sizeof(buf) - 1,
                "'%04ld-%02d-%02dT%02d:%02d:%09.6f BC'::timestamp",
                1 - dt->year, (int)dt->month, (int)dt->day,
                (int)dt->hour, (int)dt->minute, dt->second);
        break;

    case PSYCO_MXDATETIME_TIME:
    case PSYCO_MXDATETIME_INTERVAL:
        /* given the limitation of the mx.DateTime module that uses the same
           type for both time and delta values we need to do some black magic
           and make sure we're not using an adapt()ed interval as a simple
           time */
        dtd = (mxDateTimeDeltaObject *)self->wrapped;
        if (0 <= dtd->seconds && dtd->seconds < 24*3600) {
            PyOS_snprintf(buf, sizeof(buf) - 1, "'%02d:%02d:%09.6f'::time",
                (int)dtd->hour, (int)dtd->minute, dtd->second);
        } else {
            double ss = dtd->hour*3600.0 + dtd->minute*60.0 + dtd->second;

            if (dtd->seconds >= 0)
                PyOS_snprintf(buf, sizeof(buf) - 1, "'%ld days %.6f seconds'::interval",
                    dtd->day, ss);
            else
                PyOS_snprintf(buf, sizeof(buf) - 1, "'-%ld days -%.6f seconds'::interval",
                    dtd->day, ss);
        }
        break;
    }

    return PyString_FromString(buf);
}

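The `1 - dt->year` arithmetic above is easy to misread: PostgreSQL has no year zero, so the proleptic year 0 becomes "0001 BC", year -1 becomes "0002 BC", and so on. A small standalone sketch of the same mapping (plain C, not psycopg code; format_year is a made-up helper):

#include <stdio.h>

/* years >= 1 print as-is; years <= 0 are shifted by one and tagged "BC",
 * exactly like the date/timestamp branches of mxdatetime_str() above */
static void format_year(long year, char *buf, size_t len)
{
    if (year >= 1)
        snprintf(buf, len, "%04ld", year);
    else
        snprintf(buf, len, "%04ld BC", 1 - year);
}

int main(void)
{
    char buf[16];
    format_year(2024, buf, sizeof(buf));  /* -> "2024"    */
    puts(buf);
    format_year(0, buf, sizeof(buf));     /* -> "0001 BC" */
    puts(buf);
    format_year(-1, buf, sizeof(buf));    /* -> "0002 BC" */
    puts(buf);
    return 0;
}
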
static PyObject *
mxdatetime_getquoted(mxdatetimeObject *self, PyObject *args)
{
    return mxdatetime_str(self);
}

static PyObject *
mxdatetime_conform(mxdatetimeObject *self, PyObject *args)
{
    PyObject *res, *proto;

    if (!PyArg_ParseTuple(args, "O", &proto)) return NULL;

    if (proto == (PyObject*)&isqlquoteType)
        res = (PyObject*)self;
    else
        res = Py_None;

    Py_INCREF(res);
    return res;
}

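getquoted() and __conform__() together implement psycopg's ISQLQuote adaptation protocol: asked for that protocol the wrapper answers with itself, for anything else it answers None. A rough sketch of how such a wrapper is typically driven through the C API (generic code, not how psycopg itself dispatches; quote_example is a made-up name):

static PyObject *
quote_example(PyObject *wrapper, PyObject *proto)
{
    PyObject *conformed, *quoted;

    /* wrapper.__conform__(proto) -> the wrapper itself, or None */
    if (!(conformed = PyObject_CallMethod(wrapper, "__conform__", "O", proto)))
        return NULL;
    if (conformed == Py_None) {
        Py_DECREF(conformed);
        PyErr_SetString(PyExc_TypeError, "object does not support ISQLQuote");
        return NULL;
    }

    /* conformed.getquoted() -> SQL literal such as '2024-01-01'::date */
    quoted = PyObject_CallMethod(conformed, "getquoted", NULL);
    Py_DECREF(conformed);
    return quoted;
}
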
/** the MxDateTime object **/

/* object member list */

static struct PyMemberDef mxdatetimeObject_members[] = {
    {"adapted", T_OBJECT, offsetof(mxdatetimeObject, wrapped), READONLY},
    {"type", T_INT, offsetof(mxdatetimeObject, type), READONLY},
    {NULL}
};

/* object method table */

static PyMethodDef mxdatetimeObject_methods[] = {
    {"getquoted", (PyCFunction)mxdatetime_getquoted, METH_NOARGS,
     "getquoted() -> wrapped object value as SQL date/time"},
    {"__conform__", (PyCFunction)mxdatetime_conform, METH_VARARGS, NULL},
    {NULL}  /* Sentinel */
};

/* initialization and finalization methods */

static int
mxdatetime_setup(mxdatetimeObject *self, PyObject *obj, int type)
{
    Dprintf("mxdatetime_setup: init mxdatetime object at %p, refcnt = "
        FORMAT_CODE_PY_SSIZE_T,
        self, Py_REFCNT(self));

    self->type = type;
    Py_INCREF(obj);
    self->wrapped = obj;

    Dprintf("mxdatetime_setup: good mxdatetime object at %p, refcnt = "
        FORMAT_CODE_PY_SSIZE_T,
        self, Py_REFCNT(self));
    return 0;
}

static void
mxdatetime_dealloc(PyObject* obj)
{
    mxdatetimeObject *self = (mxdatetimeObject *)obj;

    Py_CLEAR(self->wrapped);

    Dprintf("mxdatetime_dealloc: deleted mxdatetime object at %p, refcnt = "
        FORMAT_CODE_PY_SSIZE_T,
        obj, Py_REFCNT(obj));

    Py_TYPE(obj)->tp_free(obj);
}

static int
mxdatetime_init(PyObject *obj, PyObject *args, PyObject *kwds)
{
    PyObject *mx;
    int type = -1;  /* raise an error if type was not passed! */

    if (!PyArg_ParseTuple(args, "O|i", &mx, &type))
        return -1;

    return mxdatetime_setup((mxdatetimeObject *)obj, mx, type);
}

static PyObject *
mxdatetime_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
    return type->tp_alloc(type, 0);
}

/* object type */

#define mxdatetimeType_doc \
"MxDateTime(mx, type) -> new mx.DateTime wrapper object"

PyTypeObject mxdatetimeType = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "psycopg2._psycopg.MxDateTime",
    sizeof(mxdatetimeObject), 0,
    mxdatetime_dealloc, /*tp_dealloc*/
    0,          /*tp_print*/
    0,          /*tp_getattr*/
    0,          /*tp_setattr*/
    0,          /*tp_compare*/
    0,          /*tp_repr*/
    0,          /*tp_as_number*/
    0,          /*tp_as_sequence*/
    0,          /*tp_as_mapping*/
    0,          /*tp_hash */
    0,          /*tp_call*/
    (reprfunc)mxdatetime_str, /*tp_str*/
    0,          /*tp_getattro*/
    0,          /*tp_setattro*/
    0,          /*tp_as_buffer*/
    Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE, /*tp_flags*/
    mxdatetimeType_doc, /*tp_doc*/
    0,          /*tp_traverse*/
    0,          /*tp_clear*/
    0,          /*tp_richcompare*/
    0,          /*tp_weaklistoffset*/
    0,          /*tp_iter*/
    0,          /*tp_iternext*/
    mxdatetimeObject_methods, /*tp_methods*/
    mxdatetimeObject_members, /*tp_members*/
    0,          /*tp_getset*/
    0,          /*tp_base*/
    0,          /*tp_dict*/
    0,          /*tp_descr_get*/
    0,          /*tp_descr_set*/
    0,          /*tp_dictoffset*/
    mxdatetime_init, /*tp_init*/
    0,          /*tp_alloc*/
    mxdatetime_new, /*tp_new*/
};

|
||||
/** module-level functions **/
|
||||
|
||||
#ifdef PSYCOPG_DEFAULT_MXDATETIME
|
||||
|
||||
PyObject *
|
||||
psyco_Date(PyObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *res, *mx;
|
||||
int year, month, day;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "iii", &year, &month, &day))
|
||||
return NULL;
|
||||
|
||||
mx = mxDateTime.DateTime_FromDateAndTime(year, month, day, 0, 0, 0.0);
|
||||
if (mx == NULL) return NULL;
|
||||
|
||||
res = PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
|
||||
PSYCO_MXDATETIME_DATE);
|
||||
Py_DECREF(mx);
|
||||
return res;
|
||||
}
|
||||
|
||||
PyObject *
|
||||
psyco_Time(PyObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *res, *mx;
|
||||
int hours, minutes=0;
|
||||
double seconds=0.0;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "iid", &hours, &minutes, &seconds))
|
||||
return NULL;
|
||||
|
||||
mx = mxDateTime.DateTimeDelta_FromTime(hours, minutes, seconds);
|
||||
if (mx == NULL) return NULL;
|
||||
|
||||
res = PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
|
||||
PSYCO_MXDATETIME_TIME);
|
||||
Py_DECREF(mx);
|
||||
return res;
|
||||
}
|
||||
|
||||
PyObject *
|
||||
psyco_Timestamp(PyObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *res, *mx;
|
||||
int year, month, day;
|
||||
int hour=0, minute=0; /* default to midnight */
|
||||
double second=0.0;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "lii|iid", &year, &month, &day,
|
||||
&hour, &minute, &second))
|
||||
return NULL;
|
||||
|
||||
mx = mxDateTime.DateTime_FromDateAndTime(year, month, day,
|
||||
hour, minute, second);
|
||||
if (mx == NULL) return NULL;
|
||||
|
||||
res = PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
|
||||
PSYCO_MXDATETIME_TIMESTAMP);
|
||||
Py_DECREF(mx);
|
||||
return res;
|
||||
}
|
||||
|
||||
PyObject *
|
||||
psyco_DateFromTicks(PyObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *res, *mx;
|
||||
double ticks;
|
||||
|
||||
if (!PyArg_ParseTuple(args,"d", &ticks))
|
||||
return NULL;
|
||||
|
||||
if (!(mx = mxDateTime.DateTime_FromTicks(ticks)))
|
||||
return NULL;
|
||||
|
||||
res = PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
|
||||
PSYCO_MXDATETIME_DATE);
|
||||
Py_DECREF(mx);
|
||||
return res;
|
||||
}
|
||||
|
||||
PyObject *
|
||||
psyco_TimeFromTicks(PyObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *res, *mx, *dt;
|
||||
double ticks;
|
||||
|
||||
if (!PyArg_ParseTuple(args,"d", &ticks))
|
||||
return NULL;
|
||||
|
||||
if (!(dt = mxDateTime.DateTime_FromTicks(ticks)))
|
||||
return NULL;
|
||||
|
||||
if (!(mx = mxDateTime.DateTimeDelta_FromDaysAndSeconds(
|
||||
0, ((mxDateTimeObject*)dt)->abstime)))
|
||||
{
|
||||
Py_DECREF(dt);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
Py_DECREF(dt);
|
||||
res = PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
|
||||
PSYCO_MXDATETIME_TIME);
|
||||
Py_DECREF(mx);
|
||||
return res;
|
||||
}
|
||||
|
||||
PyObject *
|
||||
psyco_TimestampFromTicks(PyObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *mx, *res;
|
||||
double ticks;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "d", &ticks))
|
||||
return NULL;
|
||||
|
||||
if (!(mx = mxDateTime.DateTime_FromTicks(ticks)))
|
||||
return NULL;
|
||||
|
||||
res = PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
|
||||
PSYCO_MXDATETIME_TIMESTAMP);
|
||||
Py_DECREF(mx);
|
||||
return res;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
PyObject *
|
||||
psyco_DateFromMx(PyObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *mx;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "O!", mxDateTime.DateTime_Type, &mx))
|
||||
return NULL;
|
||||
|
||||
return PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
|
||||
PSYCO_MXDATETIME_DATE);
|
||||
}
|
||||
|
||||
PyObject *
|
||||
psyco_TimeFromMx(PyObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *mx;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "O!", mxDateTime.DateTimeDelta_Type, &mx))
|
||||
return NULL;
|
||||
|
||||
return PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
|
||||
PSYCO_MXDATETIME_TIME);
|
||||
}
|
||||
|
||||
PyObject *
|
||||
psyco_TimestampFromMx(PyObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *mx;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "O!", mxDateTime.DateTime_Type, &mx))
|
||||
return NULL;
|
||||
|
||||
return PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
|
||||
PSYCO_MXDATETIME_TIMESTAMP);
|
||||
}
|
||||
|
||||
PyObject *
|
||||
psyco_IntervalFromMx(PyObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *mx;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "O!", mxDateTime.DateTime_Type, &mx))
|
||||
return NULL;
|
||||
|
||||
return PyObject_CallFunction((PyObject *)&mxdatetimeType, "Oi", mx,
|
||||
PSYCO_MXDATETIME_INTERVAL);
|
||||
}
|
|
@ -1,98 +0,0 @@
|
|||
/* adapter_mxdatetime.h - definition for the mx date/time types
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
* psycopg2 is free software: you can redistribute it and/or modify it
|
||||
* under the terms of the GNU Lesser General Public License as published
|
||||
* by the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* In addition, as a special exception, the copyright holders give
|
||||
* permission to link this program with the OpenSSL library (or with
|
||||
* modified versions of OpenSSL that use the same license as OpenSSL),
|
||||
* and distribute linked combinations including the two.
|
||||
*
|
||||
* You must obey the GNU Lesser General Public License in all respects for
|
||||
* all of the code used other than OpenSSL.
|
||||
*
|
||||
* psycopg2 is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
|
||||
* License for more details.
|
||||
*/
|
||||
|
||||
#ifndef PSYCOPG_MXDATETIME_H
|
||||
#define PSYCOPG_MXDATETIME_H 1
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
extern HIDDEN PyTypeObject mxdatetimeType;
|
||||
|
||||
typedef struct {
|
||||
PyObject_HEAD
|
||||
|
||||
PyObject *wrapped;
|
||||
int type;
|
||||
#define PSYCO_MXDATETIME_TIME 0
|
||||
#define PSYCO_MXDATETIME_DATE 1
|
||||
#define PSYCO_MXDATETIME_TIMESTAMP 2
|
||||
#define PSYCO_MXDATETIME_INTERVAL 3
|
||||
|
||||
} mxdatetimeObject;
|
||||
|
||||
/* functions exported to psycopgmodule.c */
|
||||
#ifdef PSYCOPG_DEFAULT_MXDATETIME
|
||||
|
||||
HIDDEN PyObject *psyco_Date(PyObject *module, PyObject *args);
|
||||
#define psyco_Date_doc \
|
||||
"Date(year, month, day) -> new date"
|
||||
|
||||
HIDDEN PyObject *psyco_Time(PyObject *module, PyObject *args);
|
||||
#define psyco_Time_doc \
|
||||
"Time(hour, minutes, seconds) -> new time"
|
||||
|
||||
HIDDEN PyObject *psyco_Timestamp(PyObject *module, PyObject *args);
|
||||
#define psyco_Timestamp_doc \
|
||||
"Time(year, month, day, hour, minutes, seconds) -> new timestamp"
|
||||
|
||||
HIDDEN PyObject *psyco_DateFromTicks(PyObject *module, PyObject *args);
|
||||
#define psyco_DateFromTicks_doc \
|
||||
"DateFromTicks(ticks) -> new date"
|
||||
|
||||
HIDDEN PyObject *psyco_TimeFromTicks(PyObject *module, PyObject *args);
|
||||
#define psyco_TimeFromTicks_doc \
|
||||
"TimeFromTicks(ticks) -> new time"
|
||||
|
||||
HIDDEN PyObject *psyco_TimestampFromTicks(PyObject *module, PyObject *args);
|
||||
#define psyco_TimestampFromTicks_doc \
|
||||
"TimestampFromTicks(ticks) -> new timestamp"
|
||||
|
||||
#endif /* PSYCOPG_DEFAULT_MXDATETIME */
|
||||
|
||||
HIDDEN int psyco_adapter_mxdatetime_init(void);
|
||||
|
||||
HIDDEN PyObject *psyco_DateFromMx(PyObject *module, PyObject *args);
|
||||
#define psyco_DateFromMx_doc \
|
||||
"DateFromMx(mx) -> new date"
|
||||
|
||||
HIDDEN PyObject *psyco_TimeFromMx(PyObject *module, PyObject *args);
|
||||
#define psyco_TimeFromMx_doc \
|
||||
"TimeFromMx(mx) -> new time"
|
||||
|
||||
HIDDEN PyObject *psyco_TimestampFromMx(PyObject *module, PyObject *args);
|
||||
#define psyco_TimestampFromMx_doc \
|
||||
"TimestampFromMx(mx) -> new timestamp"
|
||||
|
||||
HIDDEN PyObject *psyco_IntervalFromMx(PyObject *module, PyObject *args);
|
||||
#define psyco_IntervalFromMx_doc \
|
||||
"IntervalFromMx(mx) -> new interval"
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif /* !defined(PSYCOPG_MXDATETIME_H) */
|
|
@ -1,6 +1,7 @@
|
|||
/* adapter_pboolean.c - psycopg boolean type wrapper implementation
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -48,7 +49,7 @@ pboolean_getquoted(pbooleanObject *self, PyObject *args)
|
|||
static PyObject *
|
||||
pboolean_str(pbooleanObject *self)
|
||||
{
|
||||
return psycopg_ensure_text(pboolean_getquoted(self, NULL));
|
||||
return psyco_ensure_text(pboolean_getquoted(self, NULL));
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* adapter_pboolean.h - definition for the psycopg boolean type wrapper
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* adapter_pdecimal.c - psycopg Decimal type wrapper implementation
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -80,8 +81,7 @@ pdecimal_getquoted(pdecimalObject *self, PyObject *args)
|
|||
/* res may be unicode and may suffer for issue #57 */
|
||||
output:
|
||||
|
||||
#if PY_MAJOR_VERSION > 2
|
||||
/* unicode to bytes in Py3 */
|
||||
/* unicode to bytes */
|
||||
{
|
||||
PyObject *tmp = PyUnicode_AsUTF8String(res);
|
||||
Py_DECREF(res);
|
||||
|
@ -89,7 +89,6 @@ output:
|
|||
goto end;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
if ('-' == Bytes_AS_STRING(res)[0]) {
|
||||
/* Prepend a space in front of negative numbers (ticket #57) */
|
||||
|
@ -113,7 +112,7 @@ end:
|
|||
static PyObject *
|
||||
pdecimal_str(pdecimalObject *self)
|
||||
{
|
||||
return psycopg_ensure_text(pdecimal_getquoted(self, NULL));
|
||||
return psyco_ensure_text(pdecimal_getquoted(self, NULL));
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* adapter_pdecimal.h - definition for the psycopg Decimal type wrapper
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* adapter_float.c - psycopg pfloat type wrapper implementation
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -53,8 +54,7 @@ pfloat_getquoted(pfloatObject *self, PyObject *args)
|
|||
goto exit;
|
||||
}
|
||||
|
||||
#if PY_MAJOR_VERSION > 2
|
||||
/* unicode to bytes in Py3 */
|
||||
/* unicode to bytes */
|
||||
{
|
||||
PyObject *tmp = PyUnicode_AsUTF8String(rv);
|
||||
Py_DECREF(rv);
|
||||
|
@ -62,7 +62,6 @@ pfloat_getquoted(pfloatObject *self, PyObject *args)
|
|||
goto exit;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
if ('-' == Bytes_AS_STRING(rv)[0]) {
|
||||
/* Prepend a space in front of negative numbers (ticket #57) */
|
||||
|
@ -86,7 +85,7 @@ exit:
|
|||
static PyObject *
|
||||
pfloat_str(pfloatObject *self)
|
||||
{
|
||||
return psycopg_ensure_text(pfloat_getquoted(self, NULL));
|
||||
return psyco_ensure_text(pfloat_getquoted(self, NULL));
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* adapter_pfloat.h - definition for the psycopg float type wrapper
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* adapter_int.c - psycopg pint type wrapper implementation
|
||||
*
|
||||
* Copyright (C) 2011 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2011-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -39,11 +40,7 @@ pint_getquoted(pintObject *self, PyObject *args)
|
|||
|
||||
/* Convert subclass to int to handle IntEnum and other subclasses
|
||||
* whose str() is not the number. */
|
||||
if (PyLong_CheckExact(self->wrapped)
|
||||
#if PY_MAJOR_VERSION < 2
|
||||
|| PyInt_CheckExact(self->wrapped)
|
||||
#endif
|
||||
) {
|
||||
if (PyLong_CheckExact(self->wrapped)) {
|
||||
res = PyObject_Str(self->wrapped);
|
||||
} else {
|
||||
PyObject *tmp;
|
||||
|
@ -59,8 +56,7 @@ pint_getquoted(pintObject *self, PyObject *args)
|
|||
goto exit;
|
||||
}
|
||||
|
||||
#if PY_MAJOR_VERSION > 2
|
||||
/* unicode to bytes in Py3 */
|
||||
/* unicode to bytes */
|
||||
{
|
||||
PyObject *tmp = PyUnicode_AsUTF8String(res);
|
||||
Py_DECREF(res);
|
||||
|
@ -68,7 +64,6 @@ pint_getquoted(pintObject *self, PyObject *args)
|
|||
goto exit;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
if ('-' == Bytes_AS_STRING(res)[0]) {
|
||||
/* Prepend a space in front of negative numbers (ticket #57) */
|
||||
|
@ -91,7 +86,7 @@ exit:
|
|||
static PyObject *
|
||||
pint_str(pintObject *self)
|
||||
{
|
||||
return psycopg_ensure_text(pint_getquoted(self, NULL));
|
||||
return psyco_ensure_text(pint_getquoted(self, NULL));
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* adapter_pint.h - definition for the psycopg int type wrapper
|
||||
*
|
||||
* Copyright (C) 2011 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2011-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* adapter_qstring.c - QuotedString objects
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -73,7 +74,7 @@ qstring_quote(qstringObject *self)
|
|||
|
||||
/* encode the string into buffer */
|
||||
Bytes_AsStringAndSize(str, &s, &len);
|
||||
if (!(buffer = psycopg_escape_string(self->conn, s, len, NULL, &qlen))) {
|
||||
if (!(buffer = psyco_escape_string(self->conn, s, len, NULL, &qlen))) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
|
@ -107,7 +108,7 @@ qstring_getquoted(qstringObject *self, PyObject *args)
|
|||
static PyObject *
|
||||
qstring_str(qstringObject *self)
|
||||
{
|
||||
return psycopg_ensure_text(qstring_getquoted(self, NULL));
|
||||
return psyco_ensure_text(qstring_getquoted(self, NULL));
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
|
@ -161,9 +162,9 @@ qstring_set_encoding(qstringObject *self, PyObject *pyenc)
|
|||
|
||||
/* get a C copy of the encoding (which may come from unicode) */
|
||||
Py_INCREF(pyenc);
|
||||
if (!(pyenc = psycopg_ensure_bytes(pyenc))) { goto exit; }
|
||||
if (!(pyenc = psyco_ensure_bytes(pyenc))) { goto exit; }
|
||||
if (!(tmp = Bytes_AsString(pyenc))) { goto exit; }
|
||||
if (0 > psycopg_strdup(&cenc, tmp, -1)) { goto exit; }
|
||||
if (0 > psyco_strdup(&cenc, tmp, -1)) { goto exit; }
|
||||
|
||||
Dprintf("qstring_set_encoding: encoding set to %s", cenc);
|
||||
PyMem_Free((void *)self->encoding);
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* adapter_qstring.h - definition for the QuotedString type
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
psycopg/aix_support.c  (new file, 58 lines)

@@ -0,0 +1,58 @@
/* aix_support.c - emulate functions missing on AIX
 *
 * Copyright (C) 2017 My Karlsson <mk@acc.umu.se>
 * Copyright (c) 2018, Joyent, Inc.
 * Copyright (C) 2020-2021 The Psycopg Team
 *
 * This file is part of psycopg.
 *
 * psycopg2 is free software: you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * In addition, as a special exception, the copyright holders give
 * permission to link this program with the OpenSSL library (or with
 * modified versions of OpenSSL that use the same license as OpenSSL),
 * and distribute linked combinations including the two.
 *
 * You must obey the GNU Lesser General Public License in all respects for
 * all of the code used other than OpenSSL.
 *
 * psycopg2 is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
 * License for more details.
 */

#define PSYCOPG_MODULE
#include "psycopg/psycopg.h"
#include "psycopg/aix_support.h"

#if defined(_AIX)
/* timeradd is missing on AIX */
#ifndef timeradd
void
timeradd(struct timeval *a, struct timeval *b, struct timeval *c)
{
    c->tv_sec = a->tv_sec + b->tv_sec;
    c->tv_usec = a->tv_usec + b->tv_usec;
    if (c->tv_usec >= 1000000) {
        c->tv_usec -= 1000000;
        c->tv_sec += 1;
    }
}

/* timersub is missing on AIX */
void
timersub(struct timeval *a, struct timeval *b, struct timeval *c)
{
    c->tv_sec = a->tv_sec - b->tv_sec;
    c->tv_usec = a->tv_usec - b->tv_usec;
    if (c->tv_usec < 0) {
        c->tv_usec += 1000000;
        c->tv_sec -= 1;
    }
}
#endif /* timeradd */
#endif /* defined(_AIX) */
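These definitions mirror the BSD <sys/time.h> helpers that glibc provides but AIX may lack. A standalone sketch of the usual calling pattern (ordinary POSIX C, unrelated to psycopg internals):

#include <stdio.h>
#include <sys/time.h>

/* Not psycopg code: measure an elapsed interval with timersub() and
 * compare it against a limit with timercmp(). */
int main(void)
{
    struct timeval start, now, elapsed, limit = {5, 0};  /* 5 seconds */

    gettimeofday(&start, NULL);
    /* ... do some work ... */
    gettimeofday(&now, NULL);

    timersub(&now, &start, &elapsed);        /* elapsed = now - start */
    if (timercmp(&elapsed, &limit, >))
        printf("took more than %ld s\n", (long)limit.tv_sec);
    else
        printf("took %ld.%06ld s\n",
               (long)elapsed.tv_sec, (long)elapsed.tv_usec);
    return 0;
}
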
psycopg/aix_support.h  (new file, 48 lines)

@@ -0,0 +1,48 @@
/* aix_support.h - definitions for aix_support.c
 *
 * Copyright (C) 2017 My Karlsson <mk@acc.umu.se>
 * Copyright (c) 2018-2019, Joyent, Inc.
 * Copyright (C) 2020-2021 The Psycopg Team
 *
 * This file is part of psycopg.
 *
 * psycopg2 is free software: you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * In addition, as a special exception, the copyright holders give
 * permission to link this program with the OpenSSL library (or with
 * modified versions of OpenSSL that use the same license as OpenSSL),
 * and distribute linked combinations including the two.
 *
 * You must obey the GNU Lesser General Public License in all respects for
 * all of the code used other than OpenSSL.
 *
 * psycopg2 is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
 * License for more details.
 */
#ifndef PSYCOPG_AIX_SUPPORT_H
#define PSYCOPG_AIX_SUPPORT_H

#include "psycopg/config.h"

#ifdef _AIX
#include <sys/time.h>

#ifndef timeradd
extern HIDDEN void timeradd(struct timeval *a, struct timeval *b, struct timeval *c);
extern HIDDEN void timersub(struct timeval *a, struct timeval *b, struct timeval *c);
#endif

#ifndef timercmp
#define timercmp(a, b, cmp) \
    (((a)->tv_sec == (b)->tv_sec) ? \
        ((a)->tv_usec cmp (b)->tv_usec) : \
        ((a)->tv_sec cmp (b)->tv_sec))
#endif
#endif

#endif /* !defined(PSYCOPG_AIX_SUPPORT_H) */
|
@ -1,6 +1,7 @@
|
|||
/* bytes_format.c - bytes-oriented version of PyString_Format
|
||||
*
|
||||
* Copyright (C) 2010 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2010-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -41,7 +42,7 @@
|
|||
* analyze, test, perform and/or display publicly, prepare derivative works,
|
||||
* distribute, and otherwise use Python alone or in any derivative version,
|
||||
* provided, however, that PSF's License Agreement and PSF's notice of copyright,
|
||||
* i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
|
||||
* i.e., "Copyright (c) 2001-2019, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
|
||||
* Python Software Foundation; All Rights Reserved" are retained in Python alone or
|
||||
* in any derivative version prepared by Licensee.
|
||||
*
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* column.h - definition for a column in cursor.description type
|
||||
*
|
||||
* Copyright (C) 2018 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2018-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* column_type.c - python interface to cursor.description objects
|
||||
*
|
||||
* Copyright (C) 2018 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2018-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@@ -96,17 +97,36 @@ column_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
static int
column_init(columnObject *self, PyObject *args, PyObject *kwargs)
{
    PyObject *name = NULL;
    PyObject *type_code = NULL;
    PyObject *display_size = NULL;
    PyObject *internal_size = NULL;
    PyObject *precision = NULL;
    PyObject *scale = NULL;
    PyObject *null_ok = NULL;
    PyObject *table_oid = NULL;
    PyObject *table_column = NULL;

    static char *kwlist[] = {
        "name", "type_code", "display_size", "internal_size",
        "precision", "scale", "null_ok", "table_oid", "table_column", NULL};

    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|OOOOOOOOO", kwlist,
            &self->name, &self->type_code, &self->display_size,
            &self->internal_size, &self->precision, &self->scale,
            &self->null_ok, &self->table_oid, &self->table_column)) {
            &name, &type_code, &display_size, &internal_size, &precision,
            &scale, &null_ok, &table_oid, &table_column)) {
        return -1;
    }

    Py_XINCREF(name); self->name = name;
    Py_XINCREF(type_code); self->type_code = type_code;
    Py_XINCREF(display_size); self->display_size = display_size;
    Py_XINCREF(internal_size); self->internal_size = internal_size;
    Py_XINCREF(precision); self->precision = precision;
    Py_XINCREF(scale); self->scale = scale;
    Py_XINCREF(null_ok); self->null_ok = null_ok;
    Py_XINCREF(table_oid); self->table_oid = table_oid;
    Py_XINCREF(table_column); self->table_column = table_column;

    return 0;
}
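The column_init() rewrite above is a classic CPython reference-counting fix: the "O" format in PyArg_ParseTupleAndKeywords() yields borrowed references, so parsing straight into the object's slots left the struct holding pointers it did not own. The new code parses into locals and takes ownership with Py_XINCREF() before storing. A generic sketch of the same pattern, using a made-up holderObject type (not psycopg code):

#include <Python.h>

typedef struct {
    PyObject_HEAD
    PyObject *payload;
} holderObject;

static int
holder_init(holderObject *self, PyObject *args, PyObject *kwargs)
{
    PyObject *payload = NULL;
    static char *kwlist[] = {"payload", NULL};

    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|O", kwlist, &payload))
        return -1;

    Py_XINCREF(payload);        /* own the value before storing it */
    Py_CLEAR(self->payload);    /* drop whatever was there before  */
    self->payload = payload;
    return 0;
}
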
|
||||
|
@@ -232,6 +252,32 @@ column_getitem(columnObject *self, Py_ssize_t item)
}


static PyObject*
column_subscript(columnObject* self, PyObject* item)
{
    PyObject *t = NULL;
    PyObject *rv = NULL;

    /* t = tuple(self) */
    if (!(t = PyObject_CallFunctionObjArgs(
            (PyObject *)&PyTuple_Type, (PyObject *)self, NULL))) {
        goto exit;
    }

    /* rv = t[item] */
    rv = PyObject_GetItem(t, item);

exit:
    Py_XDECREF(t);
    return rv;
}

static PyMappingMethods column_mapping = {
    (lenfunc)column_len, /* mp_length */
    (binaryfunc)column_subscript, /* mp_subscript */
    0 /* mp_ass_subscript */
};
|
||||
static PySequenceMethods column_sequence = {
|
||||
(lenfunc)column_len, /* sq_length */
|
||||
0, /* sq_concat */
|
||||
|
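Installing the mapping protocol (mp_subscript) next to the existing sequence protocol is what makes slice subscripts work on a Column: column_subscript() converts the object to a tuple and lets the tuple do the indexing. A rough C-side sketch of the effect (first_two_fields is a hypothetical helper, not part of this diff):

static PyObject *
first_two_fields(PyObject *column)   /* roughly column[0:2] */
{
    PyObject *stop, *slice, *rv;

    if (!(stop = PyLong_FromLong(2))) return NULL;
    slice = PySlice_New(NULL, stop, NULL);   /* slice(None, 2, None) */
    Py_DECREF(stop);
    if (!slice) return NULL;

    rv = PyObject_GetItem(column, slice);    /* dispatches to mp_subscript */
    Py_DECREF(slice);
    return rv;
}
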
@ -345,7 +391,7 @@ PyTypeObject columnType = {
|
|||
(reprfunc)column_repr, /*tp_repr*/
|
||||
0, /*tp_as_number*/
|
||||
&column_sequence, /*tp_as_sequence*/
|
||||
0, /*tp_as_mapping*/
|
||||
&column_mapping, /*tp_as_mapping*/
|
||||
0, /*tp_hash */
|
||||
0, /*tp_call*/
|
||||
0, /*tp_str*/
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* config.h - general config and Dprintf macro
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -33,6 +34,18 @@
|
|||
# define HIDDEN
|
||||
#endif
|
||||
|
||||
/* support for getpid() */
|
||||
#if defined( __GNUC__)
|
||||
#define CONN_CHECK_PID
|
||||
#include <sys/types.h>
|
||||
#include <unistd.h>
|
||||
#endif
|
||||
#ifdef _WIN32
|
||||
/* Windows doesn't seem affected by bug #829: just make it compile. */
|
||||
#define pid_t int
|
||||
#endif
|
||||
|
||||
|
||||
/* debug printf-like function */
|
||||
#ifdef PSYCOPG_DEBUG
|
||||
extern HIDDEN int psycopg_debug_enabled;
|
||||
|
@ -40,8 +53,6 @@ extern HIDDEN int psycopg_debug_enabled;
|
|||
|
||||
#if defined( __GNUC__) && !defined(__APPLE__)
|
||||
#ifdef PSYCOPG_DEBUG
|
||||
#include <sys/types.h>
|
||||
#include <unistd.h>
|
||||
#define Dprintf(fmt, args...) \
|
||||
if (!psycopg_debug_enabled) ; else \
|
||||
fprintf(stderr, "[%d] " fmt "\n", (int) getpid() , ## args)
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* connection.h - definition for the psycopg connection type
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -87,7 +88,7 @@ struct connectionObject {
|
|||
pthread_mutex_t lock; /* the global connection lock */
|
||||
|
||||
char *dsn; /* data source name */
|
||||
char *critical; /* critical error on this connection */
|
||||
char *error; /* temporarily stored error before raising */
|
||||
char *encoding; /* current backend encoding */
|
||||
|
||||
long int closed; /* 1 means connection has been closed;
|
||||
|
@ -108,6 +109,7 @@ struct connectionObject {
|
|||
* for a green connection. If NULL, the connection is idle. */
|
||||
PyObject *async_cursor;
|
||||
int async_status; /* asynchronous execution status */
|
||||
PGresult *pgres; /* temporary result across async calls */
|
||||
|
||||
/* notice processing */
|
||||
PyObject *notice_list;
|
||||
|
@ -140,6 +142,12 @@ struct connectionObject {
|
|||
int isolevel;
|
||||
int readonly;
|
||||
int deferrable;
|
||||
|
||||
/* the pid this connection was created into */
|
||||
pid_t procpid;
|
||||
|
||||
/* inside a with block */
|
||||
int entered;
|
||||
};
|
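The new procpid field records the process the connection was created in, matching the getpid() support added to config.h above; presumably it lets the C layer notice that a connection object has crossed a fork() boundary. A hedged sketch of such a check (check_same_process and the error text are made up, not psycopg's actual wording):

static int
check_same_process(connectionObject *self)
{
#ifdef CONN_CHECK_PID
    if (self->procpid != getpid()) {
        PyErr_SetString(InterfaceError,
            "connection created in another process (fork detected)");
        return -1;
    }
#endif
    return 0;
}
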
||||
|
||||
/* map isolation level values into a numeric const */
|
||||
|
@ -159,8 +167,9 @@ HIDDEN int conn_get_server_version(PGconn *pgconn);
|
|||
HIDDEN void conn_notice_process(connectionObject *self);
|
||||
HIDDEN void conn_notice_clean(connectionObject *self);
|
||||
HIDDEN void conn_notifies_process(connectionObject *self);
|
||||
RAISES_NEG HIDDEN int conn_setup(connectionObject *self, PGconn *pgconn);
|
||||
HIDDEN int conn_connect(connectionObject *self, long int async);
|
||||
RAISES_NEG HIDDEN int conn_setup(connectionObject *self);
|
||||
HIDDEN int conn_connect(connectionObject *self, const char *dsn, long int async);
|
||||
HIDDEN char *conn_obscure_password(const char *dsn);
|
||||
HIDDEN void conn_close(connectionObject *self);
|
||||
HIDDEN void conn_close_locked(connectionObject *self);
|
||||
RAISES_NEG HIDDEN int conn_commit(connectionObject *self);
|
||||
|
@ -173,6 +182,8 @@ RAISES_NEG HIDDEN int conn_tpc_begin(connectionObject *self, xidObject *xid);
|
|||
RAISES_NEG HIDDEN int conn_tpc_command(connectionObject *self,
|
||||
const char *cmd, xidObject *xid);
|
||||
HIDDEN PyObject *conn_tpc_recover(connectionObject *self);
|
||||
HIDDEN void conn_set_result(connectionObject *self, PGresult *pgres);
|
||||
HIDDEN void conn_set_error(connectionObject *self, const char *msg);
|
||||
|
||||
/* exception-raising macros */
|
||||
#define EXC_IF_CONN_CLOSED(self) if ((self)->closed > 0) { \
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* connection_int.c - code used by the connection object
|
||||
*
|
||||
* Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
|
||||
* Copyright (C) 2020-2021 The Psycopg Team
|
||||
*
|
||||
* This file is part of psycopg.
|
||||
*
|
||||
|
@ -32,6 +33,7 @@
|
|||
#include "psycopg/green.h"
|
||||
#include "psycopg/notify.h"
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
/* String indexes match the ISOLATION_LEVEL_* consts */
|
||||
|
@ -79,7 +81,7 @@ const int SRV_STATE_UNCHANGED = -1;
|
|||
PyObject *
|
||||
conn_text_from_chars(connectionObject *self, const char *str)
|
||||
{
|
||||
return psycopg_text_from_chars_safe(str, -1, self ? self->pydecoder : NULL);
|
||||
return psyco_text_from_chars_safe(str, -1, self ? self->pydecoder : NULL);
|
||||
}
|
||||
|
||||
|
||||
|
@ -478,7 +480,7 @@ conn_get_python_codec(const char *encoding,
|
|||
|
||||
/* get the Python name of the encoding as a C string */
|
||||
if (!(encname = conn_pgenc_to_pyenc(encoding, &pgenc))) { goto exit; }
|
||||
if (!(encname = psycopg_ensure_bytes(encname))) { goto exit; }
|
||||
if (!(encname = psyco_ensure_bytes(encname))) { goto exit; }
|
||||
|
||||
/* Look up the codec functions */
|
||||
if (!(enc_tmp = PyCodec_Encoder(Bytes_AS_STRING(encname)))) { goto exit; }
|
||||
|
@ -649,25 +651,23 @@ conn_is_datestyle_ok(PGconn *pgconn)
|
|||
/* conn_setup - setup and read basic information about the connection */
|
||||
|
||||
RAISES_NEG int
|
||||
conn_setup(connectionObject *self, PGconn *pgconn)
|
||||
conn_setup(connectionObject *self)
|
||||
{
|
||||
PGresult *pgres = NULL;
|
||||
char *error = NULL;
|
||||
int rv = -1;
|
||||
|
||||
self->equote = conn_get_standard_conforming_strings(pgconn);
|
||||
self->server_version = conn_get_server_version(pgconn);
|
||||
self->equote = conn_get_standard_conforming_strings(self->pgconn);
|
||||
self->server_version = conn_get_server_version(self->pgconn);
|
||||
self->protocol = conn_get_protocol_version(self->pgconn);
|
||||
if (3 != self->protocol) {
|
||||
PyErr_SetString(InterfaceError, "only protocol 3 supported");
|
||||
goto exit;
|
||||
}
|
||||
|
||||
if (0 > conn_read_encoding(self, pgconn)) {
|
||||
if (0 > conn_read_encoding(self, self->pgconn)) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
if (0 > conn_setup_cancel(self, pgconn)) {
|
||||
if (0 > conn_setup_cancel(self, self->pgconn)) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
|
@ -678,11 +678,10 @@ conn_setup(connectionObject *self, PGconn *pgconn)
|
|||
if (!dsn_has_replication(self->dsn) && !conn_is_datestyle_ok(self->pgconn)) {
|
||||
int res;
|
||||
Py_UNBLOCK_THREADS;
|
||||
res = pq_set_guc_locked(self, "datestyle", "ISO",
|
||||
&pgres, &error, &_save);
|
||||
res = pq_set_guc_locked(self, "datestyle", "ISO", &_save);
|
||||
Py_BLOCK_THREADS;
|
||||
if (res < 0) {
|
||||
pq_complete_error(self, &pgres, &error);
|
||||
pq_complete_error(self);
|
||||
goto unlock;
|
||||
}
|
||||
}
|
||||
|
@ -708,9 +707,8 @@ exit:
|
|||
/* conn_connect - execute a connection to the database */
|
||||
|
||||
static int
|
||||
_conn_sync_connect(connectionObject *self)
|
||||
_conn_sync_connect(connectionObject *self, const char *dsn)
|
||||
{
|
||||
PGconn *pgconn;
|
||||
int green;
|
||||
|
||||
/* store this value to prevent inconsistencies due to a change
|
||||
|
@ -718,31 +716,31 @@ _conn_sync_connect(connectionObject *self)
|
|||
green = psyco_green();
|
||||
if (!green) {
|
||||
Py_BEGIN_ALLOW_THREADS;
|
||||
self->pgconn = pgconn = PQconnectdb(self->dsn);
|
||||
self->pgconn = PQconnectdb(dsn);
|
||||
Py_END_ALLOW_THREADS;
|
||||
Dprintf("conn_connect: new postgresql connection at %p", pgconn);
|
||||
Dprintf("conn_connect: new PG connection at %p", self->pgconn);
|
||||
}
|
||||
else {
|
||||
Py_BEGIN_ALLOW_THREADS;
|
||||
self->pgconn = pgconn = PQconnectStart(self->dsn);
|
||||
self->pgconn = PQconnectStart(dsn);
|
||||
Py_END_ALLOW_THREADS;
|
||||
Dprintf("conn_connect: new green postgresql connection at %p", pgconn);
|
||||
Dprintf("conn_connect: new green PG connection at %p", self->pgconn);
|
||||
}
|
||||
|
||||
if (pgconn == NULL)
|
||||
if (!self->pgconn)
|
||||
{
|
||||
Dprintf("conn_connect: PQconnectdb(%s) FAILED", self->dsn);
|
||||
Dprintf("conn_connect: PQconnectdb(%s) FAILED", dsn);
|
||||
PyErr_SetString(OperationalError, "PQconnectdb() failed");
|
||||
return -1;
|
||||
}
|
||||
else if (PQstatus(pgconn) == CONNECTION_BAD)
|
||||
else if (PQstatus(self->pgconn) == CONNECTION_BAD)
|
||||
{
|
||||
Dprintf("conn_connect: PQconnectdb(%s) returned BAD", self->dsn);
|
||||
PyErr_SetString(OperationalError, PQerrorMessage(pgconn));
|
||||
Dprintf("conn_connect: PQconnectdb(%s) returned BAD", dsn);
|
||||
PyErr_SetString(OperationalError, PQerrorMessage(self->pgconn));
|
||||
return -1;
|
||||
}
|
||||
|
||||
PQsetNoticeProcessor(pgconn, conn_notice_callback, (void*)self);
|
||||
PQsetNoticeProcessor(self->pgconn, conn_notice_callback, (void*)self);
|
||||
|
||||
/* if the connection is green, wait to finish connection */
|
||||
if (green) {
|
||||
|
@ -759,7 +757,7 @@ _conn_sync_connect(connectionObject *self)
|
|||
*/
|
||||
self->status = CONN_STATUS_READY;
|
||||
|
||||
if (conn_setup(self, self->pgconn) == -1) {
|
||||
if (conn_setup(self) == -1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
@ -767,23 +765,23 @@ _conn_sync_connect(connectionObject *self)
|
|||
}
|
||||
|
||||
static int
|
||||
_conn_async_connect(connectionObject *self)
|
||||
_conn_async_connect(connectionObject *self, const char *dsn)
|
||||
{
|
||||
PGconn *pgconn;
|
||||
|
||||
self->pgconn = pgconn = PQconnectStart(self->dsn);
|
||||
self->pgconn = pgconn = PQconnectStart(dsn);
|
||||
|
||||
Dprintf("conn_connect: new postgresql connection at %p", pgconn);
|
||||
|
||||
if (pgconn == NULL)
|
||||
{
|
||||
Dprintf("conn_connect: PQconnectStart(%s) FAILED", self->dsn);
|
||||
Dprintf("conn_connect: PQconnectStart(%s) FAILED", dsn);
|
||||
PyErr_SetString(OperationalError, "PQconnectStart() failed");
|
||||
return -1;
|
||||
}
|
||||
else if (PQstatus(pgconn) == CONNECTION_BAD)
|
||||
{
|
||||
Dprintf("conn_connect: PQconnectdb(%s) returned BAD", self->dsn);
|
||||
Dprintf("conn_connect: PQconnectdb(%s) returned BAD", dsn);
|
||||
PyErr_SetString(OperationalError, PQerrorMessage(pgconn));
|
||||
return -1;
|
||||
}
|
||||
|
@ -804,17 +802,17 @@ _conn_async_connect(connectionObject *self)
|
|||
}
|
||||
|
||||
int
|
||||
conn_connect(connectionObject *self, long int async)
|
||||
conn_connect(connectionObject *self, const char *dsn, long int async)
|
||||
{
|
||||
int rv;
|
||||
|
||||
if (async == 1) {
|
||||
Dprintf("con_connect: connecting in ASYNC mode");
|
||||
rv = _conn_async_connect(self);
|
||||
rv = _conn_async_connect(self, dsn);
|
||||
}
|
||||
else {
|
||||
Dprintf("con_connect: connecting in SYNC mode");
|
||||
rv = _conn_sync_connect(self);
|
||||
rv = _conn_sync_connect(self, dsn);
|
||||
}
|
||||
|
||||
if (rv != 0) {
|
||||
|
@ -863,11 +861,16 @@ _conn_poll_connecting(connectionObject *self)
|
|||
/* Advance to the next state after an attempt of flushing output */
|
||||
|
||||
static int
|
||||
_conn_poll_advance_write(connectionObject *self, int flush)
|
||||
_conn_poll_advance_write(connectionObject *self)
|
||||
{
|
||||
int res;
|
||||
int flush;
|
||||
|
||||
Dprintf("conn_poll: poll writing");
|
||||
|
||||
flush = PQflush(self->pgconn);
|
||||
Dprintf("conn_poll: PQflush() = %i", flush);
|
||||
|
||||
switch (flush) {
|
||||
case 0: /* success */
|
||||
/* we've finished pushing the query to the server. Let's start
|
||||
|
@ -891,18 +894,24 @@ _conn_poll_advance_write(connectionObject *self, int flush)
|
|||
return res;
|
||||
}
|
||||
|
||||
/* Advance to the next state after a call to a pq_is_busy* function */
|
||||
|
||||
/* Advance to the next state after reading results */
|
||||
|
||||
static int
|
||||
_conn_poll_advance_read(connectionObject *self, int busy)
|
||||
_conn_poll_advance_read(connectionObject *self)
|
||||
{
|
||||
int res;
|
||||
int busy;
|
||||
|
||||
Dprintf("conn_poll: poll reading");
|
||||
|
||||
busy = pq_get_result_async(self);
|
||||
|
||||
switch (busy) {
|
||||
case 0: /* result is ready */
|
||||
res = PSYCO_POLL_OK;
|
||||
Dprintf("conn_poll: async_status -> ASYNC_DONE");
|
||||
self->async_status = ASYNC_DONE;
|
||||
res = PSYCO_POLL_OK;
|
||||
break;
|
||||
case 1: /* result not ready: fd would block */
|
||||
res = PSYCO_POLL_READ;
|
||||
|
@ -911,13 +920,15 @@ _conn_poll_advance_read(connectionObject *self, int busy)
|
|||
res = PSYCO_POLL_ERROR;
|
||||
break;
|
||||
default:
|
||||
Dprintf("conn_poll: unexpected result from pq_is_busy: %d", busy);
|
||||
Dprintf("conn_poll: unexpected result from pq_get_result_async: %d",
|
||||
busy);
|
||||
res = PSYCO_POLL_ERROR;
|
||||
break;
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
|
||||
/* Poll the connection for the send query/retrieve result phase
|
||||
|
||||
Advance the async_status (usually going WRITE -> READ -> DONE) but don't
|
||||
|
@ -931,27 +942,18 @@ _conn_poll_query(connectionObject *self)
|
|||
switch (self->async_status) {
|
||||
case ASYNC_WRITE:
|
||||
Dprintf("conn_poll: async_status = ASYNC_WRITE");
|
||||
res = _conn_poll_advance_write(self, PQflush(self->pgconn));
|
||||
res = _conn_poll_advance_write(self);
|
||||
break;
|
||||
|
||||
case ASYNC_READ:
|
||||
Dprintf("conn_poll: async_status = ASYNC_READ");
|
||||
if (self->async) {
|
||||
res = _conn_poll_advance_read(self, pq_is_busy(self));
|
||||
}
|
||||
else {
|
||||
/* we are a green connection being polled as result of a query.
|
||||
this means that our caller has the lock and we are being called
|
||||
from the callback. If we tried to acquire the lock now it would
|
||||
be a deadlock. */
|
||||
res = _conn_poll_advance_read(self, pq_is_busy_locked(self));
|
||||
}
|
||||
res = _conn_poll_advance_read(self);
|
||||
break;
|
||||
|
||||
case ASYNC_DONE:
|
||||
Dprintf("conn_poll: async_status = ASYNC_DONE");
|
||||
/* We haven't asked anything: just check for notifications. */
|
||||
res = _conn_poll_advance_read(self, pq_is_busy(self));
|
||||
res = _conn_poll_advance_read(self);
|
||||
break;
|
||||
|
||||
default:
|
||||
|
@ -974,7 +976,6 @@ static int
|
|||
_conn_poll_setup_async(connectionObject *self)
|
||||
{
|
||||
int res = PSYCO_POLL_ERROR;
|
||||
PGresult *pgres;
|
||||
|
||||
switch (self->status) {
|
||||
case CONN_STATUS_CONNECTING:
|
||||
|
@ -1025,12 +1026,12 @@ _conn_poll_setup_async(connectionObject *self)
|
|||
res = _conn_poll_query(self);
|
||||
if (res == PSYCO_POLL_OK) {
|
||||
res = PSYCO_POLL_ERROR;
|
||||
pgres = pq_get_last_result(self);
|
||||
if (pgres == NULL || PQresultStatus(pgres) != PGRES_COMMAND_OK ) {
|
||||
if (self->pgres == NULL
|
||||
|| PQresultStatus(self->pgres) != PGRES_COMMAND_OK ) {
|
||||
PyErr_SetString(OperationalError, "can't set datestyle to ISO");
|
||||
break;
|
||||
}
|
||||
CLEARPGRES(pgres);
|
||||
CLEARPGRES(self->pgres);
|
||||
|
||||
Dprintf("conn_poll: status -> CONN_STATUS_READY");
|
||||
self->status = CONN_STATUS_READY;
|
||||
|
@ -1042,6 +1043,29 @@ _conn_poll_setup_async(connectionObject *self)
|
|||
}
|
||||
|
||||
|
||||
static cursorObject *
|
||||
_conn_get_async_cursor(connectionObject *self) {
|
||||
PyObject *py_curs;
|
||||
|
||||
if (!(py_curs = PyWeakref_GetObject(self->async_cursor))) {
|
||||
PyErr_SetString(PyExc_SystemError,
|
||||
"got null dereferencing cursor weakref");
|
||||
goto error;
|
||||
}
|
||||
if (Py_None == py_curs) {
|
||||
PyErr_SetString(InterfaceError,
|
||||
"the asynchronous cursor has disappeared");
|
||||
goto error;
|
||||
}
|
||||
|
||||
Py_INCREF(py_curs);
|
||||
return (cursorObject *)py_curs;
|
||||
|
||||
error:
|
||||
pq_clear_async(self);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
/* conn_poll - Main polling switch
|
||||
*
|
||||
* The function is called in all the states and connection types and invokes
|
||||
|
@ -1056,12 +1080,13 @@ conn_poll(connectionObject *self)
|
|||
|
||||
switch (self->status) {
|
||||
case CONN_STATUS_SETUP:
|
||||
Dprintf("conn_poll: status -> CONN_STATUS_CONNECTING");
|
||||
Dprintf("conn_poll: status -> CONN_STATUS_SETUP");
|
||||
self->status = CONN_STATUS_CONNECTING;
|
||||
res = PSYCO_POLL_WRITE;
|
||||
break;
|
||||
|
||||
case CONN_STATUS_CONNECTING:
|
||||
Dprintf("conn_poll: status -> CONN_STATUS_CONNECTING");
|
||||
res = _conn_poll_connecting(self);
|
||||
if (res == PSYCO_POLL_OK && self->async) {
|
||||
res = _conn_poll_setup_async(self);
|
||||
|
@ -1069,39 +1094,28 @@ conn_poll(connectionObject *self)
|
|||
break;
|
||||
|
||||
case CONN_STATUS_DATESTYLE:
|
||||
Dprintf("conn_poll: status -> CONN_STATUS_DATESTYLE");
|
||||
res = _conn_poll_setup_async(self);
|
||||
break;
|
||||
|
||||
case CONN_STATUS_READY:
|
||||
case CONN_STATUS_BEGIN:
|
||||
case CONN_STATUS_PREPARED:
|
||||
Dprintf("conn_poll: status -> CONN_STATUS_*");
|
||||
res = _conn_poll_query(self);
|
||||
|
||||
if (res == PSYCO_POLL_OK && self->async && self->async_cursor) {
|
||||
cursorObject *curs;
|
||||
|
||||
/* An async query has just finished: parse the tuple in the
|
||||
* target cursor. */
|
||||
cursorObject *curs;
|
||||
PyObject *py_curs;
|
||||
if (!(py_curs = PyWeakref_GetObject(self->async_cursor))) {
|
||||
/* It shouldn't happen but consider it to avoid dereferencing
|
||||
* a null pointer below. */
|
||||
pq_clear_async(self);
|
||||
PyErr_SetString(PyExc_SystemError,
|
||||
"got null dereferencing cursor weakref");
|
||||
res = PSYCO_POLL_ERROR;
|
||||
break;
|
||||
}
|
||||
if (Py_None == py_curs) {
|
||||
pq_clear_async(self);
|
||||
PyErr_SetString(InterfaceError,
|
||||
"the asynchronous cursor has disappeared");
|
||||
if (!(curs = _conn_get_async_cursor(self))) {
|
||||
res = PSYCO_POLL_ERROR;
|
||||
break;
|
||||
}
|
||||
|
||||
curs = (cursorObject *)py_curs;
|
||||
CLEARPGRES(curs->pgres);
|
||||
curs->pgres = pq_get_last_result(self);
|
||||
curs_set_result(curs, self->pgres);
|
||||
self->pgres = NULL;
|
||||
|
||||
/* fetch the tuples (if there are any) and build the result. We
|
||||
* don't care if pq_fetch return 0 or 1, but if there was an error,
|
||||
|
@ -1111,6 +1125,7 @@ conn_poll(connectionObject *self)
|
|||
}
|
||||
|
||||
/* We have finished with our async_cursor */
|
||||
Py_DECREF(curs);
|
||||
Py_CLEAR(self->async_cursor);
|
||||
}
|
||||
break;
|
||||
|
@ -1120,6 +1135,7 @@ conn_poll(connectionObject *self)
|
|||
res = PSYCO_POLL_ERROR;
|
||||
}
|
||||
|
||||
Dprintf("conn_poll: returning %d", res);
|
||||
return res;
|
||||
}
|
||||
|
||||
|
@ -1143,6 +1159,60 @@ conn_close(connectionObject *self)
|
|||
Py_END_ALLOW_THREADS;
|
||||
}
|
||||
|
||||
|
||||
/* Return a copy of the 'dsn' string with the password scrubbed.
|
||||
*
|
||||
* The string returned is allocated on the Python heap.
|
||||
*
|
||||
* In case of error return NULL and raise an exception.
|
||||
*/
|
||||
char *
|
||||
conn_obscure_password(const char *dsn)
|
||||
{
|
||||
PQconninfoOption *options = NULL;
|
||||
PyObject *d = NULL, *v = NULL, *pydsn = NULL;
|
||||
char *rv = NULL;
|
||||
|
||||
if (!dsn) {
|
||||
PyErr_SetString(InternalError, "unexpected null string");
|
||||
goto exit;
|
||||
}
|
||||
|
||||
if (!(options = PQconninfoParse(dsn, NULL))) {
|
||||
/* unlikely: the dsn was already tested valid */
|
||||
PyErr_SetString(InternalError, "the connection string is not valid");
|
||||
goto exit;
|
||||
}
|
||||
|
||||
if (!(d = psyco_dict_from_conninfo_options(
|
||||
options, /* include_password = */ 1))) {
|
||||
goto exit;
|
||||
}
|
||||
if (NULL == PyDict_GetItemString(d, "password")) {
|
||||
/* the dsn doesn't have a password */
|
||||
psyco_strdup(&rv, dsn, -1);
|
||||
goto exit;
|
||||
}
|
||||
|
||||
/* scrub the password and put back the connection string together */
|
||||
if (!(v = Text_FromUTF8("xxx"))) { goto exit; }
|
||||
if (0 > PyDict_SetItemString(d, "password", v)) { goto exit; }
|
||||
if (!(pydsn = psyco_make_dsn(Py_None, d))) { goto exit; }
|
||||
if (!(pydsn = psyco_ensure_bytes(pydsn))) { goto exit; }
|
||||
|
||||
/* Return the connection string with the password replaced */
|
||||
psyco_strdup(&rv, Bytes_AS_STRING(pydsn), -1);
|
||||
|
||||
exit:
|
||||
PQconninfoFree(options);
|
||||
Py_XDECREF(v);
|
||||
Py_XDECREF(d);
|
||||
Py_XDECREF(pydsn);
|
||||
|
||||
return rv;
|
||||
}
|
||||
|
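A sketch of how conn_obscure_password() is meant to be used, for example to log a DSN without exposing credentials. The PyMem_Free() call assumes the result really is allocated with the PyMem allocator, as the comment above implies; log_dsn_example is a made-up helper:

static PyObject *
log_dsn_example(const char *dsn)
{
    char *scrubbed;

    if (!(scrubbed = conn_obscure_password(dsn))) {
        return NULL;                 /* exception already set */
    }
    Dprintf("connecting to: %s", scrubbed);
    PyMem_Free(scrubbed);
    Py_RETURN_NONE;
}
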
||||
|
||||
/* conn_close_locked - shut down the connection with the lock already taken */
|
||||
|
||||
void conn_close_locked(connectionObject *self)
|
||||
|
@ -1200,8 +1270,6 @@ conn_set_session(connectionObject *self, int autocommit,
|
|||
int isolevel, int readonly, int deferrable)
|
||||
{
|
||||
int rv = -1;
|
||||
PGresult *pgres = NULL;
|
||||
char *error = NULL;
|
||||
int want_autocommit = autocommit == SRV_STATE_UNCHANGED ?
|
||||
self->autocommit : autocommit;
|
||||
|
||||
|
@ -1231,21 +1299,21 @@ conn_set_session(connectionObject *self, int autocommit,
|
|||
if (isolevel != SRV_STATE_UNCHANGED) {
|
||||
if (0 > pq_set_guc_locked(self,
|
||||
"default_transaction_isolation", srv_isolevels[isolevel],
|
||||
&pgres, &error, &_save)) {
|
||||
&_save)) {
|
||||
goto endlock;
|
||||
}
|
||||
}
|
||||
if (readonly != SRV_STATE_UNCHANGED) {
|
||||
if (0 > pq_set_guc_locked(self,
|
||||
"default_transaction_read_only", srv_state_guc[readonly],
|
||||
&pgres, &error, &_save)) {
|
||||
&_save)) {
|
||||
goto endlock;
|
||||
}
|
||||
}
|
||||
if (deferrable != SRV_STATE_UNCHANGED) {
|
||||
if (0 > pq_set_guc_locked(self,
|
||||
"default_transaction_deferrable", srv_state_guc[deferrable],
|
||||
&pgres, &error, &_save)) {
|
||||
&_save)) {
|
||||
goto endlock;
|
||||
}
|
||||
}
|
||||
|
@ -1256,26 +1324,31 @@ conn_set_session(connectionObject *self, int autocommit,
|
|||
if (self->isolevel != ISOLATION_LEVEL_DEFAULT) {
|
||||
if (0 > pq_set_guc_locked(self,
|
||||
"default_transaction_isolation", "default",
|
||||
&pgres, &error, &_save)) {
|
||||
&_save)) {
|
||||
goto endlock;
|
||||
}
|
||||
}
|
||||
if (self->readonly != STATE_DEFAULT) {
|
||||
if (0 > pq_set_guc_locked(self,
|
||||
"default_transaction_read_only", "default",
|
||||
&pgres, &error, &_save)) {
|
||||
&_save)) {
|
||||
goto endlock;
|
||||
}
|
||||
}
|
||||
if (self->server_version >= 90100 && self->deferrable != STATE_DEFAULT) {
|
||||
if (0 > pq_set_guc_locked(self,
|
||||
"default_transaction_deferrable", "default",
|
||||
&pgres, &error, &_save)) {
|
||||
&_save)) {
|
||||
goto endlock;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Py_BLOCK_THREADS;
|
||||
conn_notifies_process(self);
|
||||
conn_notice_process(self);
|
||||
Py_UNBLOCK_THREADS;
|
||||
|
||||
if (autocommit != SRV_STATE_UNCHANGED) {
|
||||
self->autocommit = autocommit;
|
||||
}
|
||||
|
@ -1295,7 +1368,7 @@ endlock:
|
|||
Py_END_ALLOW_THREADS;
|
||||
|
||||
if (rv < 0) {
|
||||
pq_complete_error(self, &pgres, &error);
|
||||
pq_complete_error(self);
|
||||
goto exit;
|
||||
}
|
||||
|
||||
|
@ -1314,8 +1387,6 @@ exit:
|
|||
RAISES_NEG int
|
||||
conn_set_client_encoding(connectionObject *self, const char *pgenc)
|
||||
{
|
||||
PGresult *pgres = NULL;
|
||||
char *error = NULL;
|
||||
int res = -1;
|
||||
char *clean_enc = NULL;
|
||||
|
||||
|
@ -1324,28 +1395,35 @@ conn_set_client_encoding(connectionObject *self, const char *pgenc)
|
|||
|
||||
/* If the current encoding is equal to the requested one we don't
|
||||
issue any query to the backend */
|
||||
if (strcmp(self->encoding, clean_enc) == 0) return 0;
|
||||
if (strcmp(self->encoding, clean_enc) == 0) {
|
||||
res = 0;
|
||||
goto exit;
|
||||
}
|
||||
|
||||
Py_BEGIN_ALLOW_THREADS;
|
||||
pthread_mutex_lock(&self->lock);
|
||||
|
||||
/* abort the current transaction, to set the encoding ouside of
|
||||
transactions */
|
||||
if ((res = pq_abort_locked(self, &pgres, &error, &_save))) {
|
||||
if ((res = pq_abort_locked(self, &_save))) {
|
||||
goto endlock;
|
||||
}
|
||||
|
||||
if ((res = pq_set_guc_locked(self, "client_encoding", clean_enc,
|
||||
&pgres, &error, &_save))) {
|
||||
if ((res = pq_set_guc_locked(self, "client_encoding", clean_enc, &_save))) {
|
||||
goto endlock;
|
||||
}
|
||||
|
||||
Py_BLOCK_THREADS;
|
||||
conn_notifies_process(self);
|
||||
conn_notice_process(self);
|
||||
Py_UNBLOCK_THREADS;
|
||||
|
||||
endlock:
|
||||
pthread_mutex_unlock(&self->lock);
|
||||
Py_END_ALLOW_THREADS;
|
||||
|
||||
if (res < 0) {
|
||||
pq_complete_error(self, &pgres, &error);
|
||||
pq_complete_error(self);
|
||||
goto exit;
|
||||
}
|
||||
|
||||
|
@ -1371,18 +1449,15 @@ exit:
|
|||
RAISES_NEG int
|
||||
conn_tpc_begin(connectionObject *self, xidObject *xid)
|
||||
{
|
||||
PGresult *pgres = NULL;
|
||||
char *error = NULL;
|
||||
|
||||
Dprintf("conn_tpc_begin: starting transaction");
|
||||
|
||||
Py_BEGIN_ALLOW_THREADS;
|
||||
pthread_mutex_lock(&self->lock);
|
||||
|
||||
if (pq_begin_locked(self, &pgres, &error, &_save) < 0) {
|
||||
if (pq_begin_locked(self, &_save) < 0) {
|
||||
pthread_mutex_unlock(&(self->lock));
|
||||
Py_BLOCK_THREADS;
|
||||
pq_complete_error(self, &pgres, &error);
|
||||
pq_complete_error(self);
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
@ -1405,8 +1480,6 @@ conn_tpc_begin(connectionObject *self, xidObject *xid)
|
|||
RAISES_NEG int
|
||||
conn_tpc_command(connectionObject *self, const char *cmd, xidObject *xid)
|
||||
{
|
||||
PGresult *pgres = NULL;
|
||||
char *error = NULL;
|
||||
PyObject *tid = NULL;
|
||||
const char *ctid;
|
||||
int rv = -1;
|
||||
|
@ -1414,17 +1487,16 @@ conn_tpc_command(connectionObject *self, const char *cmd, xidObject *xid)
|
|||
Dprintf("conn_tpc_command: %s", cmd);
|
||||
|
||||
/* convert the xid into PostgreSQL transaction id while keeping the GIL */
|
||||
if (!(tid = psycopg_ensure_bytes(xid_get_tid(xid)))) { goto exit; }
|
||||
if (!(tid = psyco_ensure_bytes(xid_get_tid(xid)))) { goto exit; }
|
||||
if (!(ctid = Bytes_AsString(tid))) { goto exit; }
|
||||
|
||||
Py_BEGIN_ALLOW_THREADS;
|
||||
pthread_mutex_lock(&self->lock);
|
||||
|
||||
if (0 > (rv = pq_tpc_command_locked(self, cmd, ctid,
|
||||
&pgres, &error, &_save))) {
|
||||
if (0 > (rv = pq_tpc_command_locked(self, cmd, ctid, &_save))) {
|
||||
pthread_mutex_unlock(&self->lock);
|
||||
Py_BLOCK_THREADS;
|
||||
pq_complete_error(self, &pgres, &error);
|
||||
pq_complete_error(self);
|
||||
goto exit;
|
||||
}
|
||||
|
||||
|
@ -1469,3 +1541,24 @@ exit:
|
|||
return rv;
|
||||
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
conn_set_result(connectionObject *self, PGresult *pgres)
|
||||
{
|
||||
PQclear(self->pgres);
|
||||
self->pgres = pgres;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
conn_set_error(connectionObject *self, const char *msg)
|
||||
{
|
||||
if (self->error) {
|
||||
free(self->error);
|
||||
self->error = NULL;
|
||||
}
|
||||
if (msg && *msg) {
|
||||
self->error = strdup(msg);
|
||||
}
|
||||
}

@@ -1,6 +1,7 @@
/* connection_type.c - python interface to connection objects
 *
 * Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
 * Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
 * Copyright (C) 2020-2021 The Psycopg Team
 *
 * This file is part of psycopg.
 *

@@ -34,6 +35,7 @@
#include "psycopg/green.h"
#include "psycopg/xid.h"

#include <stdlib.h>
#include <string.h>
#include <ctype.h>

@ -112,10 +114,10 @@ psyco_conn_cursor(connectionObject *self, PyObject *args, PyObject *kwargs)
|
|||
goto exit;
|
||||
}
|
||||
|
||||
if (0 > psyco_curs_withhold_set((cursorObject *)obj, withhold)) {
|
||||
if (0 > curs_withhold_set((cursorObject *)obj, withhold)) {
|
||||
goto exit;
|
||||
}
|
||||
if (0 > psyco_curs_scrollable_set((cursorObject *)obj, scrollable)) {
|
||||
if (0 > curs_scrollable_set((cursorObject *)obj, scrollable)) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
|
@ -405,10 +407,22 @@ psyco_conn_tpc_recover(connectionObject *self, PyObject *dummy)
|
|||
static PyObject *
|
||||
psyco_conn_enter(connectionObject *self, PyObject *dummy)
|
||||
{
|
||||
PyObject *rv = NULL;
|
||||
|
||||
EXC_IF_CONN_CLOSED(self);
|
||||
|
||||
if (self->entered) {
|
||||
PyErr_SetString(ProgrammingError,
|
||||
"the connection cannot be re-entered recursively");
|
||||
goto exit;
|
||||
}
|
||||
|
||||
self->entered = 1;
|
||||
Py_INCREF(self);
|
||||
return (PyObject *)self;
|
||||
rv = (PyObject *)self;
|
||||
|
||||
exit:
|
||||
return rv;
|
||||
}
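
A minimal usage sketch of the behaviour the guard above enforces; the DSN is a placeholder and a reachable test database is assumed:

    import psycopg2

    conn = psycopg2.connect("dbname=test")   # hypothetical DSN
    with conn:                                # commits on success, rolls back on error
        with conn.cursor() as cur:
            cur.execute("SELECT 1")
        # A nested "with conn:" here would raise psycopg2.ProgrammingError:
        # "the connection cannot be re-entered recursively"
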
|
||||
|
||||
|
||||
|
@ -426,6 +440,9 @@ psyco_conn_exit(connectionObject *self, PyObject *args)
|
|||
goto exit;
|
||||
}
|
||||
|
||||
/* even if there will be an error, consider ourselves out */
|
||||
self->entered = 0;
|
||||
|
||||
if (type == Py_None) {
|
||||
if (!(tmp = PyObject_CallMethod((PyObject *)self, "commit", NULL))) {
|
||||
goto exit;
|
||||
|
@ -478,7 +495,7 @@ _psyco_conn_parse_isolevel(PyObject *pyval)
|
|||
|
||||
/* parse from the string -- this includes "default" */
|
||||
else {
|
||||
if (!(pyval = psycopg_ensure_bytes(pyval))) {
|
||||
if (!(pyval = psyco_ensure_bytes(pyval))) {
|
||||
goto exit;
|
||||
}
|
||||
for (level = 1; level <= 4; level++) {
|
||||
|
@ -516,7 +533,7 @@ _psyco_conn_parse_onoff(PyObject *pyval)
|
|||
rv = STATE_DEFAULT;
|
||||
}
|
||||
else if (PyUnicode_CheckExact(pyval) || Bytes_CheckExact(pyval)) {
|
||||
if (!(pyval = psycopg_ensure_bytes(pyval))) {
|
||||
if (!(pyval = psyco_ensure_bytes(pyval))) {
|
||||
goto exit;
|
||||
}
|
||||
if (0 == strcasecmp("default", Bytes_AS_STRING(pyval))) {
|
||||
|
@ -918,7 +935,7 @@ psyco_conn_get_dsn_parameters(connectionObject *self, PyObject *dummy)
|
|||
goto exit;
|
||||
}
|
||||
|
||||
res = psycopg_dict_from_conninfo_options(options, /* include_password = */ 0);
|
||||
res = psyco_dict_from_conninfo_options(options, /* include_password = */ 0);
|
||||
|
||||
exit:
|
||||
PQconninfoFree(options);
|
||||
|
@ -968,7 +985,7 @@ psyco_conn_lobject(connectionObject *self, PyObject *args, PyObject *keywds)
|
|||
Dprintf("psyco_conn_lobject: new lobject for connection at %p", self);
|
||||
Dprintf("psyco_conn_lobject: parameters: oid = %u, mode = %s",
|
||||
oid, smode);
|
||||
Dprintf("psyco_conn_lobject: parameters: new_oid = %d, new_file = %s",
|
||||
Dprintf("psyco_conn_lobject: parameters: new_oid = %u, new_file = %s",
|
||||
new_oid, new_file);
|
||||
|
||||
if (new_file)
|
||||
|
@ -1008,7 +1025,7 @@ psyco_conn_get_backend_pid(connectionObject *self, PyObject *dummy)
|
|||
|
||||
/* get info about the connection */
|
||||
|
||||
#define psyco_conn_info_get_doc \
|
||||
#define psyco_conn_info_doc \
|
||||
"info -- Get connection info."
|
||||
|
||||
static PyObject *
|
||||
|
@ -1019,6 +1036,23 @@ psyco_conn_info_get(connectionObject *self)
|
|||
}
|
||||
|
||||
|
||||
/* return the pointer to the PGconn structure */
|
||||
|
||||
#define psyco_conn_pgconn_ptr_doc \
|
||||
"pgconn_ptr -- Get the PGconn structure pointer."
|
||||
|
||||
static PyObject *
|
||||
psyco_conn_pgconn_ptr_get(connectionObject *self)
|
||||
{
|
||||
if (self->pgconn) {
|
||||
return PyLong_FromVoidPtr((void *)self->pgconn);
|
||||
}
|
||||
else {
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
}
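
An illustrative sketch of how the new pgconn_ptr attribute might be consumed from Python; loading libpq through ctypes and the DSN are assumptions, while PQbackendPID() is a standard libpq call:

    import ctypes, ctypes.util
    import psycopg2

    libpq = ctypes.cdll.LoadLibrary(ctypes.util.find_library("pq"))  # assumes libpq is installed
    libpq.PQbackendPID.argtypes = [ctypes.c_void_p]
    libpq.PQbackendPID.restype = ctypes.c_int

    conn = psycopg2.connect("dbname=test")    # hypothetical DSN
    # pgconn_ptr exposes the raw PGconn* as an integer (None when the connection is closed)
    print(libpq.PQbackendPID(conn.pgconn_ptr) == conn.get_backend_pid())  # expected: True
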
|
||||
|
||||
|
||||
/* reset the currect connection */
|
||||
|
||||
#define psyco_conn_reset_doc \
|
||||
|
@ -1035,7 +1069,7 @@ psyco_conn_reset(connectionObject *self, PyObject *dummy)
|
|||
if (pq_reset(self) < 0)
|
||||
return NULL;
|
||||
|
||||
res = conn_setup(self, self->pgconn);
|
||||
res = conn_setup(self);
|
||||
if (res < 0)
|
||||
return NULL;
|
||||
|
||||
|
@ -1270,73 +1304,27 @@ static struct PyGetSetDef connectionObject_getsets[] = {
|
|||
psyco_conn_deferrable_doc },
|
||||
{ "info",
|
||||
(getter)psyco_conn_info_get, NULL,
|
||||
psyco_conn_info_get_doc },
|
||||
psyco_conn_info_doc },
|
||||
{ "pgconn_ptr",
|
||||
(getter)psyco_conn_pgconn_ptr_get, NULL,
|
||||
psyco_conn_pgconn_ptr_doc },
|
||||
{NULL}
|
||||
};
|
||||
#undef EXCEPTION_GETTER
|
||||
|
||||
/* initialization and finalization methods */
|
||||
|
||||
RAISES_NEG static int
|
||||
obscure_password(connectionObject *conn)
|
||||
{
|
||||
PQconninfoOption *options;
|
||||
PyObject *d = NULL, *v = NULL, *dsn = NULL;
|
||||
char *tmp;
|
||||
int rv = -1;
|
||||
|
||||
if (!conn || !conn->dsn) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (!(options = PQconninfoParse(conn->dsn, NULL))) {
|
||||
/* unlikely: the dsn was already tested valid */
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (!(d = psycopg_dict_from_conninfo_options(
|
||||
options, /* include_password = */ 1))) {
|
||||
goto exit;
|
||||
}
|
||||
if (NULL == PyDict_GetItemString(d, "password")) {
|
||||
/* the dsn doesn't have a password */
|
||||
rv = 0;
|
||||
goto exit;
|
||||
}
|
||||
|
||||
/* scrub the password and put back the connection string together */
|
||||
if (!(v = Text_FromUTF8("xxx"))) { goto exit; }
|
||||
if (0 > PyDict_SetItemString(d, "password", v)) { goto exit; }
|
||||
if (!(dsn = psycopg_make_dsn(Py_None, d))) { goto exit; }
|
||||
if (!(dsn = psycopg_ensure_bytes(dsn))) { goto exit; }
|
||||
|
||||
/* Replace the connection string on the connection object */
|
||||
tmp = conn->dsn;
|
||||
psycopg_strdup(&conn->dsn, Bytes_AS_STRING(dsn), -1);
|
||||
PyMem_Free(tmp);
|
||||
|
||||
rv = 0;
|
||||
|
||||
exit:
|
||||
PQconninfoFree(options);
|
||||
Py_XDECREF(v);
|
||||
Py_XDECREF(d);
|
||||
Py_XDECREF(dsn);
|
||||
|
||||
return rv;
|
||||
}
|
||||
|
||||
static int
|
||||
connection_setup(connectionObject *self, const char *dsn, long int async)
|
||||
{
|
||||
int res = -1;
|
||||
int rv = -1;
|
||||
|
||||
Dprintf("connection_setup: init connection object at %p, "
|
||||
"async %ld, refcnt = " FORMAT_CODE_PY_SSIZE_T,
|
||||
self, async, Py_REFCNT(self)
|
||||
);
|
||||
|
||||
if (0 > psycopg_strdup(&self->dsn, dsn, -1)) { goto exit; }
|
||||
if (!(self->dsn = conn_obscure_password(dsn))) { goto exit; }
|
||||
if (!(self->notice_list = PyList_New(0))) { goto exit; }
|
||||
if (!(self->notifies = PyList_New(0))) { goto exit; }
|
||||
self->async = async;
|
||||
|
@ -1347,31 +1335,30 @@ connection_setup(connectionObject *self, const char *dsn, long int async)
|
|||
self->isolevel = ISOLATION_LEVEL_DEFAULT;
|
||||
self->readonly = STATE_DEFAULT;
|
||||
self->deferrable = STATE_DEFAULT;
|
||||
#ifdef CONN_CHECK_PID
|
||||
self->procpid = getpid();
|
||||
#endif
|
||||
|
||||
/* other fields have been zeroed by tp_alloc */
|
||||
|
||||
pthread_mutex_init(&(self->lock), NULL);
|
||||
if (0 != pthread_mutex_init(&(self->lock), NULL)) {
|
||||
PyErr_SetString(InternalError, "lock initialization failed");
|
||||
goto exit;
|
||||
}
|
||||
|
||||
if (conn_connect(self, async) != 0) {
|
||||
if (conn_connect(self, dsn, async) != 0) {
|
||||
Dprintf("connection_init: FAILED");
|
||||
goto exit;
|
||||
}
|
||||
else {
|
||||
Dprintf("connection_setup: good connection object at %p, refcnt = "
|
||||
FORMAT_CODE_PY_SSIZE_T,
|
||||
self, Py_REFCNT(self)
|
||||
);
|
||||
res = 0;
|
||||
}
|
||||
|
||||
rv = 0;
|
||||
|
||||
Dprintf("connection_setup: good connection object at %p, refcnt = "
|
||||
FORMAT_CODE_PY_SSIZE_T,
|
||||
self, Py_REFCNT(self));
|
||||
|
||||
exit:
|
||||
/* here we obfuscate the password even if there was a connection error */
|
||||
{
|
||||
PyObject *ptype = NULL, *pvalue = NULL, *ptb = NULL;
|
||||
PyErr_Fetch(&ptype, &pvalue, &ptb);
|
||||
obscure_password(self);
|
||||
PyErr_Restore(ptype, pvalue, ptb);
|
||||
}
|
||||
return res;
|
||||
return rv;
|
||||
}
|
||||
|
||||
|
||||
|
@ -1400,7 +1387,15 @@ connection_dealloc(PyObject* obj)
|
|||
* resulting in a double-free segfault (ticket #166). */
|
||||
PyObject_GC_UnTrack(self);
|
||||
|
||||
conn_close(self);
|
||||
/* close the connection only if this is the same process it was created
|
||||
* into, otherwise using multiprocessing we may close the connection
|
||||
* belonging to another process. */
|
||||
#ifdef CONN_CHECK_PID
|
||||
if (self->procpid == getpid())
|
||||
#endif
|
||||
{
|
||||
conn_close(self);
|
||||
}
|
||||
|
||||
if (self->weakreflist) {
|
||||
PyObject_ClearWeakRefs(obj);
|
||||
|
@ -1410,8 +1405,9 @@ connection_dealloc(PyObject* obj)
|
|||
|
||||
PyMem_Free(self->dsn);
|
||||
PyMem_Free(self->encoding);
|
||||
if (self->critical) free(self->critical);
|
||||
if (self->error) free(self->error);
|
||||
if (self->cancel) PQfreeCancel(self->cancel);
|
||||
PQclear(self->pgres);
|
||||
|
||||
connection_clear(self);

@@ -1,6 +1,7 @@
/* connection.h - definition for the psycopg ConnectionInfo type
 *
 * Copyright (C) 2018 Daniele Varrazzo <daniele.varrazzo@gmail.com>
 * Copyright (C) 2018-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
 * Copyright (C) 2020-2021 The Psycopg Team
 *
 * This file is part of psycopg.
 *

@@ -1,6 +1,7 @@
/* conninfo_type.c - present information about the libpq connection
 *
 * Copyright (C) 2018 Daniele Varrazzo <daniele.varrazzo@gmail.com>
 * Copyright (C) 2018-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
 * Copyright (C) 2020-2021 The Psycopg Team
 *
 * This file is part of psycopg.
 *
@ -147,7 +148,7 @@ port_get(connInfoObject *self)
|
|||
|
||||
|
||||
static const char options_doc[] =
|
||||
"The command-line options passed in the the connection request.\n"
|
||||
"The command-line options passed in the connection request.\n"
|
||||
"\n"
|
||||
".. seealso:: libpq docs for `PQoptions()`__ for details.\n"
|
||||
".. __: https://www.postgresql.org/docs/current/static/libpq-status.html"
|
||||
|
@ -166,6 +167,46 @@ options_get(connInfoObject *self)
|
|||
}
|
||||
|
||||
|
||||
static const char dsn_parameters_doc[] =
|
||||
"The effective connection parameters.\n"
|
||||
"\n"
|
||||
":type: `!dict`\n"
|
||||
"\n"
|
||||
"The results include values which weren't explicitly set by the connection\n"
|
||||
"string, such as defaults, environment variables, etc.\n"
|
||||
"The *password* parameter is removed from the results.\n"
|
||||
"\n"
|
||||
".. seealso:: libpq docs for `PQconninfo()`__ for details.\n"
|
||||
".. __: https://www.postgresql.org/docs/current/libpq-connect.html"
|
||||
"#LIBPQ-PQCONNINFO";
|
||||
|
||||
static PyObject *
|
||||
dsn_parameters_get(connInfoObject *self)
|
||||
{
|
||||
#if PG_VERSION_NUM >= 90300
|
||||
PyObject *res = NULL;
|
||||
PQconninfoOption *options = NULL;
|
||||
|
||||
EXC_IF_CONN_CLOSED(self->conn);
|
||||
|
||||
if (!(options = PQconninfo(self->conn->pgconn))) {
|
||||
PyErr_NoMemory();
|
||||
goto exit;
|
||||
}
|
||||
|
||||
res = psyco_dict_from_conninfo_options(options, /* include_password = */ 0);
|
||||
|
||||
exit:
|
||||
PQconninfoFree(options);
|
||||
|
||||
return res;
|
||||
#else
|
||||
PyErr_SetString(NotSupportedError, "PQconninfo not available in libpq < 9.3");
|
||||
return NULL;
|
||||
#endif
|
||||
}
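
A usage sketch of the dsn_parameters attribute added above; the DSN is a placeholder, and libpq >= 9.3 is required, as the #ifdef guard shows:

    import psycopg2

    conn = psycopg2.connect("dbname=test user=postgres")   # hypothetical DSN
    params = conn.info.dsn_parameters   # dict of effective parameters, password removed
    print(params["dbname"], params.get("host"))
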
|
||||
|
||||
|
||||
static const char status_doc[] =
|
||||
"The status of the connection.\n"
|
||||
"\n"
|
||||
|
@ -322,7 +363,7 @@ socket_get(connInfoObject *self)
|
|||
|
||||
|
||||
static const char backend_pid_doc[] =
|
||||
"The process ID (PID) of the backend process handling this connection.\n"
|
||||
"The process ID (PID) of the backend process you connected to.\n"
|
||||
"\n"
|
||||
":type: `!int`\n"
|
||||
"\n"
|
||||
|
@ -497,6 +538,8 @@ static struct PyGetSetDef connInfoObject_getsets[] = {
|
|||
{ "host", (getter)host_get, NULL, (char *)host_doc },
|
||||
{ "port", (getter)port_get, NULL, (char *)port_doc },
|
||||
{ "options", (getter)options_get, NULL, (char *)options_doc },
|
||||
{ "dsn_parameters", (getter)dsn_parameters_get, NULL,
|
||||
(char *)dsn_parameters_doc },
|
||||
{ "status", (getter)status_get, NULL, (char *)status_doc },
|
||||
{ "transaction_status", (getter)transaction_status_get, NULL,
|
||||
(char *)transaction_status_doc },
|
||||
|
|
|

@@ -1,6 +1,7 @@
/* cursor.h - definition for the psycopg cursor type
 *
 * Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
 * Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
 * Copyright (C) 2020-2021 The Psycopg Team
 *
 * This file is part of psycopg.
 *
@ -93,9 +94,10 @@ struct cursorObject {
|
|||
/* C-callable functions in cursor_int.c and cursor_type.c */
|
||||
BORROWED HIDDEN PyObject *curs_get_cast(cursorObject *self, PyObject *oid);
|
||||
HIDDEN void curs_reset(cursorObject *self);
|
||||
RAISES_NEG HIDDEN int psyco_curs_withhold_set(cursorObject *self, PyObject *pyvalue);
|
||||
RAISES_NEG HIDDEN int psyco_curs_scrollable_set(cursorObject *self, PyObject *pyvalue);
|
||||
HIDDEN PyObject *psyco_curs_validate_sql_basic(cursorObject *self, PyObject *sql);
|
||||
RAISES_NEG HIDDEN int curs_withhold_set(cursorObject *self, PyObject *pyvalue);
|
||||
RAISES_NEG HIDDEN int curs_scrollable_set(cursorObject *self, PyObject *pyvalue);
|
||||
HIDDEN PyObject *curs_validate_sql_basic(cursorObject *self, PyObject *sql);
|
||||
HIDDEN void curs_set_result(cursorObject *self, PGresult *pgres);
|
||||
|
||||
/* exception-raising macros */
|
||||
#define EXC_IF_CURS_CLOSED(self) \
|
||||
|
|
|

@@ -1,6 +1,7 @@
/* cursor_int.c - code used by the cursor object
 *
 * Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
 * Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
 * Copyright (C) 2020-2021 The Psycopg Team
 *
 * This file is part of psycopg.
 *
@ -108,7 +109,7 @@ exit:
|
|||
* after having set an exception.
|
||||
*/
|
||||
PyObject *
|
||||
psyco_curs_validate_sql_basic(cursorObject *self, PyObject *sql)
|
||||
curs_validate_sql_basic(cursorObject *self, PyObject *sql)
|
||||
{
|
||||
PyObject *rv = NULL;
|
||||
PyObject *comp = NULL;
|
||||
|
@ -160,3 +161,11 @@ exit:
|
|||
Py_XDECREF(comp);
|
||||
return rv;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
curs_set_result(cursorObject *self, PGresult *pgres)
|
||||
{
|
||||
PQclear(self->pgres);
|
||||
self->pgres = pgres;
|
||||
}
|
||||
|
|
|

@@ -1,6 +1,7 @@
/* cursor_type.c - python interface to cursor objects
 *
 * Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
 * Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
 * Copyright (C) 2020-2021 The Psycopg Team
 *
 * This file is part of psycopg.
 *
@ -43,17 +44,15 @@
|
|||
|
||||
/* close method - close the cursor */
|
||||
|
||||
#define psyco_curs_close_doc \
|
||||
#define curs_close_doc \
|
||||
"close() -- Close the cursor."
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_close(cursorObject *self, PyObject *dummy)
|
||||
curs_close(cursorObject *self, PyObject *dummy)
|
||||
{
|
||||
PyObject *rv = NULL;
|
||||
char *lname = NULL;
|
||||
|
||||
EXC_IF_ASYNC_IN_PROGRESS(self, close);
|
||||
|
||||
if (self->closed) {
|
||||
rv = Py_None;
|
||||
Py_INCREF(rv);
|
||||
|
@ -64,6 +63,8 @@ psyco_curs_close(cursorObject *self, PyObject *dummy)
|
|||
char buffer[256];
|
||||
PGTransactionStatusType status;
|
||||
|
||||
EXC_IF_ASYNC_IN_PROGRESS(self, close_named);
|
||||
|
||||
if (self->conn) {
|
||||
status = PQtransactionStatus(self->conn->pgconn);
|
||||
}
|
||||
|
@ -86,7 +87,7 @@ psyco_curs_close(cursorObject *self, PyObject *dummy)
|
|||
* closing it (the view exists since PG 8.2 according to docs).
|
||||
*/
|
||||
if (!self->query && self->conn->server_version >= 80200) {
|
||||
if (!(lname = psycopg_escape_string(
|
||||
if (!(lname = psyco_escape_string(
|
||||
self->conn, self->name, -1, NULL, NULL))) {
|
||||
goto exit;
|
||||
}
|
||||
|
@ -107,8 +108,10 @@ psyco_curs_close(cursorObject *self, PyObject *dummy)
|
|||
}
|
||||
|
||||
close:
|
||||
CLEARPGRES(self->pgres);
|
||||
|
||||
self->closed = 1;
|
||||
Dprintf("psyco_curs_close: cursor at %p closed", self);
|
||||
Dprintf("curs_close: cursor at %p closed", self);
|
||||
|
||||
rv = Py_None;
|
||||
Py_INCREF(rv);
|
||||
|
@ -315,7 +318,7 @@ _psyco_curs_merge_query_args(cursorObject *self,
|
|||
{
|
||||
PyObject *fquery;
|
||||
|
||||
/* if PyString_Format() return NULL an error occured: if the error is
|
||||
/* if PyString_Format() return NULL an error occurred: if the error is
|
||||
a TypeError we need to check the exception.args[0] string for the
|
||||
values:
|
||||
|
||||
|
@ -333,7 +336,7 @@ _psyco_curs_merge_query_args(cursorObject *self,
|
|||
PyErr_Fetch(&err, &arg, &trace);
|
||||
|
||||
if (err && PyErr_GivenExceptionMatches(err, PyExc_TypeError)) {
|
||||
Dprintf("psyco_curs_execute: TypeError exception caught");
|
||||
Dprintf("curs_execute: TypeError exception caught");
|
||||
PyErr_NormalizeException(&err, &arg, &trace);
|
||||
|
||||
if (PyObject_HasAttrString(arg, "args")) {
|
||||
|
@ -341,11 +344,11 @@ _psyco_curs_merge_query_args(cursorObject *self,
|
|||
PyObject *str = PySequence_GetItem(args, 0);
|
||||
const char *s = Bytes_AS_STRING(str);
|
||||
|
||||
Dprintf("psyco_curs_execute: -> %s", s);
|
||||
Dprintf("curs_execute: -> %s", s);
|
||||
|
||||
if (!strcmp(s, "not enough arguments for format string")
|
||||
|| !strcmp(s, "not all arguments converted")) {
|
||||
Dprintf("psyco_curs_execute: -> got a match");
|
||||
Dprintf("curs_execute: -> got a match");
|
||||
psyco_set_error(ProgrammingError, self, s);
|
||||
pe = 1;
|
||||
}
|
||||
|
@ -367,7 +370,7 @@ _psyco_curs_merge_query_args(cursorObject *self,
|
|||
return fquery;
|
||||
}
|
||||
|
||||
#define psyco_curs_execute_doc \
|
||||
#define curs_execute_doc \
|
||||
"execute(query, vars=None) -- Execute query with bound vars."
|
||||
|
||||
RAISES_NEG static int
|
||||
|
@ -380,13 +383,13 @@ _psyco_curs_execute(cursorObject *self,
|
|||
PyObject *fquery = NULL, *cvt = NULL;
|
||||
|
||||
/* query becomes NULL or refcount +1, so good to XDECREF at the end */
|
||||
if (!(query = psyco_curs_validate_sql_basic(self, query))) {
|
||||
if (!(query = curs_validate_sql_basic(self, query))) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
CLEARPGRES(self->pgres);
|
||||
Py_CLEAR(self->query);
|
||||
Dprintf("psyco_curs_execute: starting execution of new query");
|
||||
Dprintf("curs_execute: starting execution of new query");
|
||||
|
||||
/* here we are, and we have a sequence or a dictionary filled with
|
||||
objects to be substituted (bound variables). we try to be smart and do
|
||||
|
@ -442,7 +445,7 @@ _psyco_curs_execute(cursorObject *self,
|
|||
|
||||
/* At this point, the SQL statement must be str, not unicode */
|
||||
tmp = pq_execute(self, Bytes_AS_STRING(self->query), async, no_result, 0);
|
||||
Dprintf("psyco_curs_execute: res = %d, pgres = %p", tmp, self->pgres);
|
||||
Dprintf("curs_execute: res = %d, pgres = %p", tmp, self->pgres);
|
||||
if (tmp < 0) { goto exit; }
|
||||
|
||||
res = 0; /* Success */
|
||||
|
@ -456,7 +459,7 @@ exit:
|
|||
}
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_execute(cursorObject *self, PyObject *args, PyObject *kwargs)
|
||||
curs_execute(cursorObject *self, PyObject *args, PyObject *kwargs)
|
||||
{
|
||||
PyObject *vars = NULL, *operation = NULL;
|
||||
|
||||
|
@ -493,11 +496,11 @@ psyco_curs_execute(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
#define psyco_curs_executemany_doc \
|
||||
#define curs_executemany_doc \
|
||||
"executemany(query, vars_list) -- Execute many queries with bound vars."
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_executemany(cursorObject *self, PyObject *args, PyObject *kwargs)
|
||||
curs_executemany(cursorObject *self, PyObject *args, PyObject *kwargs)
|
||||
{
|
||||
PyObject *operation = NULL, *vars = NULL;
|
||||
PyObject *v, *iter = NULL;
|
||||
|
@ -554,7 +557,7 @@ psyco_curs_executemany(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
}
|
||||
|
||||
|
||||
#define psyco_curs_mogrify_doc \
|
||||
#define curs_mogrify_doc \
|
||||
"mogrify(query, vars=None) -> str -- Return query after vars binding."
|
||||
|
||||
static PyObject *
|
||||
|
@ -563,10 +566,10 @@ _psyco_curs_mogrify(cursorObject *self,
|
|||
{
|
||||
PyObject *fquery = NULL, *cvt = NULL;
|
||||
|
||||
operation = psyco_curs_validate_sql_basic(self, operation);
|
||||
operation = curs_validate_sql_basic(self, operation);
|
||||
if (operation == NULL) { goto cleanup; }
|
||||
|
||||
Dprintf("psyco_curs_mogrify: starting mogrify");
|
||||
Dprintf("curs_mogrify: starting mogrify");
|
||||
|
||||
/* here we are, and we have a sequence or a dictionary filled with
|
||||
objects to be substituted (bound variables). we try to be smart and do
|
||||
|
@ -584,7 +587,7 @@ _psyco_curs_mogrify(cursorObject *self,
|
|||
goto cleanup;
|
||||
}
|
||||
|
||||
Dprintf("psyco_curs_mogrify: cvt->refcnt = " FORMAT_CODE_PY_SSIZE_T
|
||||
Dprintf("curs_mogrify: cvt->refcnt = " FORMAT_CODE_PY_SSIZE_T
|
||||
", fquery->refcnt = " FORMAT_CODE_PY_SSIZE_T,
|
||||
Py_REFCNT(cvt), Py_REFCNT(fquery));
|
||||
}
|
||||
|
@ -601,7 +604,7 @@ cleanup:
|
|||
}
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_mogrify(cursorObject *self, PyObject *args, PyObject *kwargs)
|
||||
curs_mogrify(cursorObject *self, PyObject *args, PyObject *kwargs)
|
||||
{
|
||||
PyObject *vars = NULL, *operation = NULL;
|
||||
|
||||
|
@ -617,7 +620,7 @@ psyco_curs_mogrify(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
|
||||
|
||||
/* cast method - convert an oid/string into a Python object */
|
||||
#define psyco_curs_cast_doc \
|
||||
#define curs_cast_doc \
|
||||
"cast(oid, s) -> value\n\n" \
|
||||
"Convert the string s to a Python object according to its oid.\n\n" \
|
||||
"Look for a typecaster first in the cursor, then in its connection," \
|
||||
|
@ -625,7 +628,7 @@ psyco_curs_mogrify(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
"leave the value as a string."
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_cast(cursorObject *self, PyObject *args)
|
||||
curs_cast(cursorObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *oid;
|
||||
PyObject *s;
|
||||
|
@ -641,7 +644,7 @@ psyco_curs_cast(cursorObject *self, PyObject *args)
|
|||
|
||||
/* fetchone method - fetch one row of results */
|
||||
|
||||
#define psyco_curs_fetchone_doc \
|
||||
#define curs_fetchone_doc \
|
||||
"fetchone() -> tuple or None\n\n" \
|
||||
"Return the next row of a query result set in the form of a tuple (by\n" \
|
||||
"default) or using the sequence factory previously set in the\n" \
|
||||
|
@ -742,7 +745,7 @@ exit:
|
|||
}
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_fetchone(cursorObject *self, PyObject *dummy)
|
||||
curs_fetchone(cursorObject *self, PyObject *dummy)
|
||||
{
|
||||
PyObject *res;
|
||||
|
||||
|
@ -761,11 +764,11 @@ psyco_curs_fetchone(cursorObject *self, PyObject *dummy)
|
|||
if (_psyco_curs_prefetch(self) < 0) return NULL;
|
||||
}
|
||||
|
||||
Dprintf("psyco_curs_fetchone: fetching row %ld", self->row);
|
||||
Dprintf("psyco_curs_fetchone: rowcount = %ld", self->rowcount);
|
||||
Dprintf("curs_fetchone: fetching row %ld", self->row);
|
||||
Dprintf("curs_fetchone: rowcount = %ld", self->rowcount);
|
||||
|
||||
if (self->row >= self->rowcount) {
|
||||
/* we exausted available data: return None */
|
||||
/* we exhausted available data: return None */
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
|
@ -787,11 +790,11 @@ psyco_curs_fetchone(cursorObject *self, PyObject *dummy)
|
|||
* Fetch several records at time. Return NULL when the cursor is exhausted.
|
||||
*/
|
||||
static PyObject *
|
||||
psyco_curs_next_named(cursorObject *self)
|
||||
curs_next_named(cursorObject *self)
|
||||
{
|
||||
PyObject *res;
|
||||
|
||||
Dprintf("psyco_curs_next_named");
|
||||
Dprintf("curs_next_named");
|
||||
EXC_IF_CURS_CLOSED(self);
|
||||
EXC_IF_ASYNC_IN_PROGRESS(self, next);
|
||||
if (_psyco_curs_prefetch(self) < 0) return NULL;
|
||||
|
@ -800,8 +803,8 @@ psyco_curs_next_named(cursorObject *self)
|
|||
EXC_IF_NO_MARK(self);
|
||||
EXC_IF_TPC_PREPARED(self->conn, next);
|
||||
|
||||
Dprintf("psyco_curs_next_named: row %ld", self->row);
|
||||
Dprintf("psyco_curs_next_named: rowcount = %ld", self->rowcount);
|
||||
Dprintf("curs_next_named: row %ld", self->row);
|
||||
Dprintf("curs_next_named: rowcount = %ld", self->rowcount);
|
||||
if (self->row >= self->rowcount) {
|
||||
char buffer[128];
|
||||
|
||||
|
@ -832,7 +835,7 @@ psyco_curs_next_named(cursorObject *self)
|
|||
|
||||
/* fetch many - fetch some results */
|
||||
|
||||
#define psyco_curs_fetchmany_doc \
|
||||
#define curs_fetchmany_doc \
|
||||
"fetchmany(size=self.arraysize) -> list of tuple\n\n" \
|
||||
"Return the next `size` rows of a query result set in the form of a list\n" \
|
||||
"of tuples (by default) or using the sequence factory previously set in\n" \
|
||||
|
@ -840,7 +843,7 @@ psyco_curs_next_named(cursorObject *self)
|
|||
"Return an empty list when no more data is available.\n"
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_fetchmany(cursorObject *self, PyObject *args, PyObject *kwords)
|
||||
curs_fetchmany(cursorObject *self, PyObject *args, PyObject *kwords)
|
||||
{
|
||||
int i;
|
||||
PyObject *list = NULL;
|
||||
|
@ -885,7 +888,7 @@ psyco_curs_fetchmany(cursorObject *self, PyObject *args, PyObject *kwords)
|
|||
size = self->rowcount - self->row;
|
||||
}
|
||||
|
||||
Dprintf("psyco_curs_fetchmany: size = %ld", size);
|
||||
Dprintf("curs_fetchmany: size = %ld", size);
|
||||
|
||||
if (size <= 0) {
|
||||
rv = PyList_New(0);
|
||||
|
@ -925,7 +928,7 @@ exit:
|
|||
|
||||
/* fetch all - fetch all results */
|
||||
|
||||
#define psyco_curs_fetchall_doc \
|
||||
#define curs_fetchall_doc \
|
||||
"fetchall() -> list of tuple\n\n" \
|
||||
"Return all the remaining rows of a query result set.\n\n" \
|
||||
"Rows are returned in the form of a list of tuples (by default) or using\n" \
|
||||
|
@ -933,7 +936,7 @@ exit:
|
|||
"Return `!None` when no more data is available.\n"
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_fetchall(cursorObject *self, PyObject *dummy)
|
||||
curs_fetchall(cursorObject *self, PyObject *dummy)
|
||||
{
|
||||
int i, size;
|
||||
PyObject *list = NULL;
|
||||
|
@ -994,11 +997,11 @@ exit:
|
|||
|
||||
/* callproc method - execute a stored procedure */
|
||||
|
||||
#define psyco_curs_callproc_doc \
|
||||
#define curs_callproc_doc \
|
||||
"callproc(procname, parameters=None) -- Execute stored procedure."
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_callproc(cursorObject *self, PyObject *args)
|
||||
curs_callproc(cursorObject *self, PyObject *args)
|
||||
{
|
||||
const char *procname = NULL;
|
||||
char *sql = NULL;
|
||||
|
@ -1057,10 +1060,10 @@ psyco_curs_callproc(cursorObject *self, PyObject *args)
|
|||
Py_INCREF(pname); /* was borrowed */
|
||||
|
||||
/* this also makes a check for keys being strings */
|
||||
if (!(pname = psycopg_ensure_bytes(pname))) { goto exit; }
|
||||
if (!(pname = psyco_ensure_bytes(pname))) { goto exit; }
|
||||
if (!(cpname = Bytes_AsString(pname))) { goto exit; }
|
||||
|
||||
if (!(scpnames[i] = psycopg_escape_identifier(
|
||||
if (!(scpnames[i] = psyco_escape_identifier(
|
||||
self->conn, cpname, -1))) {
|
||||
Py_CLEAR(pname);
|
||||
goto exit;
|
||||
|
@ -1130,7 +1133,7 @@ exit:
|
|||
}
|
||||
}
|
||||
}
|
||||
PyMem_Del(scpnames);
|
||||
PyMem_Free(scpnames);
|
||||
Py_XDECREF(pname);
|
||||
Py_XDECREF(pnames);
|
||||
Py_XDECREF(operation);
|
||||
|
@ -1142,13 +1145,13 @@ exit:
|
|||
|
||||
/* nextset method - return the next set of data (not supported) */
|
||||
|
||||
#define psyco_curs_nextset_doc \
|
||||
#define curs_nextset_doc \
|
||||
"nextset() -- Skip to next set of data.\n\n" \
|
||||
"This method is not supported (PostgreSQL does not have multiple data \n" \
|
||||
"sets) and will raise a NotSupportedError exception."
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_nextset(cursorObject *self, PyObject *dummy)
|
||||
curs_nextset(cursorObject *self, PyObject *dummy)
|
||||
{
|
||||
EXC_IF_CURS_CLOSED(self);
|
||||
|
||||
|
@ -1159,12 +1162,12 @@ psyco_curs_nextset(cursorObject *self, PyObject *dummy)
|
|||
|
||||
/* setinputsizes - predefine memory areas for execute (does nothing) */
|
||||
|
||||
#define psyco_curs_setinputsizes_doc \
|
||||
#define curs_setinputsizes_doc \
|
||||
"setinputsizes(sizes) -- Set memory areas before execute.\n\n" \
|
||||
"This method currently does nothing but it is safe to call it."
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_setinputsizes(cursorObject *self, PyObject *args)
|
||||
curs_setinputsizes(cursorObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *sizes;
|
||||
|
||||
|
@ -1179,12 +1182,12 @@ psyco_curs_setinputsizes(cursorObject *self, PyObject *args)
|
|||
|
||||
/* setoutputsize - predefine memory areas for execute (does nothing) */
|
||||
|
||||
#define psyco_curs_setoutputsize_doc \
|
||||
#define curs_setoutputsize_doc \
|
||||
"setoutputsize(size, column=None) -- Set column buffer size.\n\n" \
|
||||
"This method currently does nothing but it is safe to call it."
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_setoutputsize(cursorObject *self, PyObject *args)
|
||||
curs_setoutputsize(cursorObject *self, PyObject *args)
|
||||
{
|
||||
long int size, column;
|
||||
|
||||
|
@ -1199,11 +1202,11 @@ psyco_curs_setoutputsize(cursorObject *self, PyObject *args)
|
|||
|
||||
/* scroll - scroll position in result list */
|
||||
|
||||
#define psyco_curs_scroll_doc \
|
||||
#define curs_scroll_doc \
|
||||
"scroll(value, mode='relative') -- Scroll to new position according to mode."
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_scroll(cursorObject *self, PyObject *args, PyObject *kwargs)
|
||||
curs_scroll(cursorObject *self, PyObject *args, PyObject *kwargs)
|
||||
{
|
||||
int value, newpos;
|
||||
const char *mode = "relative";
|
||||
|
@ -1261,21 +1264,21 @@ psyco_curs_scroll(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
}
|
||||
|
||||
|
||||
#define psyco_curs_enter_doc \
|
||||
#define curs_enter_doc \
|
||||
"__enter__ -> self"
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_enter(cursorObject *self, PyObject *dummy)
|
||||
curs_enter(cursorObject *self, PyObject *dummy)
|
||||
{
|
||||
Py_INCREF(self);
|
||||
return (PyObject *)self;
|
||||
}
|
||||
|
||||
#define psyco_curs_exit_doc \
|
||||
#define curs_exit_doc \
|
||||
"__exit__ -- close the cursor"
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_exit(cursorObject *self, PyObject *args)
|
||||
curs_exit(cursorObject *self, PyObject *args)
|
||||
{
|
||||
PyObject *tmp = NULL;
|
||||
PyObject *rv = NULL;
|
||||
|
@ -1300,11 +1303,9 @@ exit:
|
|||
/* Return a newly allocated buffer containing the list of columns to be
|
||||
* copied. On error return NULL and set an exception.
|
||||
*/
|
||||
static char *_psyco_curs_copy_columns(PyObject *columns)
|
||||
static char *_psyco_curs_copy_columns(cursorObject *self, PyObject *columns)
|
||||
{
|
||||
PyObject *col, *coliter;
|
||||
Py_ssize_t collen;
|
||||
char *colname;
|
||||
char *columnlist = NULL;
|
||||
Py_ssize_t bufsize = 512;
|
||||
Py_ssize_t offset = 1;
|
||||
|
@ -1330,15 +1331,28 @@ static char *_psyco_curs_copy_columns(PyObject *columns)
|
|||
columnlist[0] = '(';
|
||||
|
||||
while ((col = PyIter_Next(coliter)) != NULL) {
|
||||
if (!(col = psycopg_ensure_bytes(col))) {
|
||||
Py_ssize_t collen;
|
||||
char *colname;
|
||||
char *quoted_colname;
|
||||
|
||||
if (!(col = psyco_ensure_bytes(col))) {
|
||||
Py_DECREF(coliter);
|
||||
goto error;
|
||||
}
|
||||
Bytes_AsStringAndSize(col, &colname, &collen);
|
||||
if (!(quoted_colname = psyco_escape_identifier(
|
||||
self->conn, colname, collen))) {
|
||||
Py_DECREF(col);
|
||||
Py_DECREF(coliter);
|
||||
goto error;
|
||||
}
|
||||
collen = strlen(quoted_colname);
|
||||
|
||||
while (offset + collen > bufsize - 2) {
|
||||
char *tmp;
|
||||
bufsize *= 2;
|
||||
if (NULL == (tmp = PyMem_Realloc(columnlist, bufsize))) {
|
||||
PQfreemem(quoted_colname);
|
||||
Py_DECREF(col);
|
||||
Py_DECREF(coliter);
|
||||
PyErr_NoMemory();
|
||||
|
@ -1346,10 +1360,11 @@ static char *_psyco_curs_copy_columns(PyObject *columns)
|
|||
}
|
||||
columnlist = tmp;
|
||||
}
|
||||
strncpy(&columnlist[offset], colname, collen);
|
||||
strncpy(&columnlist[offset], quoted_colname, collen);
|
||||
offset += collen;
|
||||
columnlist[offset++] = ',';
|
||||
Py_DECREF(col);
|
||||
PQfreemem(quoted_colname);
|
||||
}
|
||||
Py_DECREF(coliter);
|
||||
|
||||
|
@ -1377,11 +1392,11 @@ exit:
|
|||
|
||||
/* extension: copy_from - implements COPY FROM */
|
||||
|
||||
#define psyco_curs_copy_from_doc \
|
||||
#define curs_copy_from_doc \
|
||||
"copy_from(file, table, sep='\\t', null='\\\\N', size=8192, columns=None) -- Copy table from file."
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_copy_from(cursorObject *self, PyObject *args, PyObject *kwargs)
|
||||
curs_copy_from(cursorObject *self, PyObject *args, PyObject *kwargs)
|
||||
{
|
||||
static char *kwlist[] = {
|
||||
"file", "table", "sep", "null", "size", "columns", NULL};
|
||||
|
@ -1396,8 +1411,9 @@ psyco_curs_copy_from(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
char *columnlist = NULL;
|
||||
char *quoted_delimiter = NULL;
|
||||
char *quoted_null = NULL;
|
||||
|
||||
char *quoted_table_name = NULL;
|
||||
const char *table_name;
|
||||
|
||||
Py_ssize_t bufsize = DEFAULT_COPYBUFF;
|
||||
PyObject *file, *columns = NULL, *res = NULL;
|
||||
|
||||
|
@ -1418,20 +1434,26 @@ psyco_curs_copy_from(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
EXC_IF_GREEN(copy_from);
|
||||
EXC_IF_TPC_PREPARED(self->conn, copy_from);
|
||||
|
||||
if (NULL == (columnlist = _psyco_curs_copy_columns(columns)))
|
||||
if (!(columnlist = _psyco_curs_copy_columns(self, columns))) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
if (!(quoted_delimiter = psycopg_escape_string(
|
||||
if (!(quoted_delimiter = psyco_escape_string(
|
||||
self->conn, sep, -1, NULL, NULL))) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
if (!(quoted_null = psycopg_escape_string(
|
||||
if (!(quoted_null = psyco_escape_string(
|
||||
self->conn, null, -1, NULL, NULL))) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
query_size = strlen(command) + strlen(table_name) + strlen(columnlist)
|
||||
if (!(quoted_table_name = psyco_escape_identifier(
|
||||
self->conn, table_name, -1))) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
query_size = strlen(command) + strlen(quoted_table_name) + strlen(columnlist)
|
||||
+ strlen(quoted_delimiter) + strlen(quoted_null) + 1;
|
||||
if (!(query = PyMem_New(char, query_size))) {
|
||||
PyErr_NoMemory();
|
||||
|
@ -1439,12 +1461,17 @@ psyco_curs_copy_from(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
}
|
||||
|
||||
PyOS_snprintf(query, query_size, command,
|
||||
table_name, columnlist, quoted_delimiter, quoted_null);
|
||||
quoted_table_name, columnlist, quoted_delimiter, quoted_null);
|
||||
|
||||
Dprintf("psyco_curs_copy_from: query = %s", query);
|
||||
Dprintf("curs_copy_from: query = %s", query);
|
||||
|
||||
Py_CLEAR(self->query);
|
||||
if (!(self->query = Bytes_FromString(query))) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
/* This routine stores a borrowed reference. Although it is only held
|
||||
* for the duration of psyco_curs_copy_from, nested invocations of
|
||||
* for the duration of curs_copy_from, nested invocations of
|
||||
* Py_BEGIN_ALLOW_THREADS could surrender control to another thread,
|
||||
* which could invoke the garbage collector. We thus need an
|
||||
* INCREF/DECREF pair if we store this pointer in a GC object, such as
|
||||
|
@ -1461,6 +1488,9 @@ psyco_curs_copy_from(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
Py_CLEAR(self->copyfile);
|
||||
|
||||
exit:
|
||||
if (quoted_table_name) {
|
||||
PQfreemem(quoted_table_name);
|
||||
}
|
||||
PyMem_Free(columnlist);
|
||||
PyMem_Free(quoted_delimiter);
|
||||
PyMem_Free(quoted_null);
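
A hedged usage sketch of copy_from() after the change above, which now passes the table and column names through psyco_escape_identifier() before interpolating them into the COPY statement; the DSN and table are placeholders:

    import io
    import psycopg2

    conn = psycopg2.connect("dbname=test")                    # hypothetical DSN
    cur = conn.cursor()
    data = io.StringIO("1\tfoo\n2\tbar\n")                    # tab-separated rows
    cur.copy_from(data, "my_table", columns=("id", "label"))  # names are quoted as identifiers
    conn.commit()
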
|
||||
|
@ -1471,11 +1501,11 @@ exit:
|
|||
|
||||
/* extension: copy_to - implements COPY TO */
|
||||
|
||||
#define psyco_curs_copy_to_doc \
|
||||
#define curs_copy_to_doc \
|
||||
"copy_to(file, table, sep='\\t', null='\\\\N', columns=None) -- Copy table to file."
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_copy_to(cursorObject *self, PyObject *args, PyObject *kwargs)
|
||||
curs_copy_to(cursorObject *self, PyObject *args, PyObject *kwargs)
|
||||
{
|
||||
static char *kwlist[] = {"file", "table", "sep", "null", "columns", NULL};
|
||||
|
||||
|
@ -1491,6 +1521,7 @@ psyco_curs_copy_to(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
char *quoted_null = NULL;
|
||||
|
||||
const char *table_name;
|
||||
char *quoted_table_name = NULL;
|
||||
PyObject *file = NULL, *columns = NULL, *res = NULL;
|
||||
|
||||
if (!PyArg_ParseTupleAndKeywords(
|
||||
|
@ -1510,20 +1541,26 @@ psyco_curs_copy_to(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
EXC_IF_GREEN(copy_to);
|
||||
EXC_IF_TPC_PREPARED(self->conn, copy_to);
|
||||
|
||||
if (NULL == (columnlist = _psyco_curs_copy_columns(columns)))
|
||||
if (!(quoted_table_name = psyco_escape_identifier(
|
||||
self->conn, table_name, -1))) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
if (!(quoted_delimiter = psycopg_escape_string(
|
||||
if (!(columnlist = _psyco_curs_copy_columns(self, columns))) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
if (!(quoted_delimiter = psyco_escape_string(
|
||||
self->conn, sep, -1, NULL, NULL))) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
if (!(quoted_null = psycopg_escape_string(
|
||||
if (!(quoted_null = psyco_escape_string(
|
||||
self->conn, null, -1, NULL, NULL))) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
query_size = strlen(command) + strlen(table_name) + strlen(columnlist)
|
||||
query_size = strlen(command) + strlen(quoted_table_name) + strlen(columnlist)
|
||||
+ strlen(quoted_delimiter) + strlen(quoted_null) + 1;
|
||||
if (!(query = PyMem_New(char, query_size))) {
|
||||
PyErr_NoMemory();
|
||||
|
@ -1531,9 +1568,14 @@ psyco_curs_copy_to(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
}
|
||||
|
||||
PyOS_snprintf(query, query_size, command,
|
||||
table_name, columnlist, quoted_delimiter, quoted_null);
|
||||
quoted_table_name, columnlist, quoted_delimiter, quoted_null);
|
||||
|
||||
Dprintf("psyco_curs_copy_to: query = %s", query);
|
||||
Dprintf("curs_copy_to: query = %s", query);
|
||||
|
||||
Py_CLEAR(self->query);
|
||||
if (!(self->query = Bytes_FromString(query))) {
|
||||
goto exit;
|
||||
}
|
||||
|
||||
self->copysize = 0;
|
||||
Py_INCREF(file);
|
||||
|
@ -1547,6 +1589,9 @@ psyco_curs_copy_to(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
Py_CLEAR(self->copyfile);
|
||||
|
||||
exit:
|
||||
if (quoted_table_name) {
|
||||
PQfreemem(quoted_table_name);
|
||||
}
|
||||
PyMem_Free(columnlist);
|
||||
PyMem_Free(quoted_delimiter);
|
||||
PyMem_Free(quoted_null);
|
||||
|
@ -1561,7 +1606,7 @@ exit:
|
|||
SQL statement, rather than composing the statement from parameters.
|
||||
*/
|
||||
|
||||
#define psyco_curs_copy_expert_doc \
|
||||
#define curs_copy_expert_doc \
|
||||
"copy_expert(sql, file, size=8192) -- Submit a user-composed COPY statement.\n" \
|
||||
"`file` must be an open, readable file for COPY FROM or an open, writable\n" \
|
||||
"file for COPY TO. The optional `size` argument, when specified for a COPY\n" \
|
||||
|
@ -1569,7 +1614,7 @@ exit:
|
|||
"buffer size."
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_copy_expert(cursorObject *self, PyObject *args, PyObject *kwargs)
|
||||
curs_copy_expert(cursorObject *self, PyObject *args, PyObject *kwargs)
|
||||
{
|
||||
Py_ssize_t bufsize = DEFAULT_COPYBUFF;
|
||||
PyObject *sql, *file, *res = NULL;
|
||||
|
@ -1585,7 +1630,7 @@ psyco_curs_copy_expert(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
EXC_IF_GREEN(copy_expert);
|
||||
EXC_IF_TPC_PREPARED(self->conn, copy_expert);
|
||||
|
||||
sql = psyco_curs_validate_sql_basic(self, sql);
|
||||
sql = curs_validate_sql_basic(self, sql);
|
||||
|
||||
/* Any failure from here forward should 'goto exit' rather than
|
||||
'return NULL' directly. */
|
||||
|
@ -1612,6 +1657,10 @@ psyco_curs_copy_expert(cursorObject *self, PyObject *args, PyObject *kwargs)
|
|||
Py_INCREF(file);
|
||||
self->copyfile = file;
|
||||
|
||||
Py_CLEAR(self->query);
|
||||
Py_INCREF(sql);
|
||||
self->query = sql;
|
||||
|
||||
/* At this point, the SQL statement must be str, not unicode */
|
||||
if (pq_execute(self, Bytes_AS_STRING(sql), 0, 0, 0) >= 0) {
|
||||
res = Py_None;
|
||||
|
@ -1628,28 +1677,28 @@ exit:
|
|||
|
||||
/* extension: closed - return true if cursor is closed */
|
||||
|
||||
#define psyco_curs_closed_doc \
|
||||
#define curs_closed_doc \
|
||||
"True if cursor is closed, False if cursor is open"
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_get_closed(cursorObject *self, void *closure)
|
||||
curs_closed_get(cursorObject *self, void *closure)
|
||||
{
|
||||
return PyBool_FromLong(self->closed || (self->conn && self->conn->closed));
|
||||
}
|
||||
|
||||
/* extension: withhold - get or set "WITH HOLD" for named cursors */
|
||||
|
||||
#define psyco_curs_withhold_doc \
|
||||
#define curs_withhold_doc \
|
||||
"Set or return cursor use of WITH HOLD"
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_withhold_get(cursorObject *self)
|
||||
curs_withhold_get(cursorObject *self)
|
||||
{
|
||||
return PyBool_FromLong(self->withhold);
|
||||
}
|
||||
|
||||
RAISES_NEG int
|
||||
psyco_curs_withhold_set(cursorObject *self, PyObject *pyvalue)
|
||||
curs_withhold_set(cursorObject *self, PyObject *pyvalue)
|
||||
{
|
||||
int value;
|
||||
|
||||
|
@ -1667,11 +1716,11 @@ psyco_curs_withhold_set(cursorObject *self, PyObject *pyvalue)
|
|||
return 0;
|
||||
}
|
||||
|
||||
#define psyco_curs_scrollable_doc \
|
||||
#define curs_scrollable_doc \
|
||||
"Set or return cursor use of SCROLL"
|
||||
|
||||
static PyObject *
|
||||
psyco_curs_scrollable_get(cursorObject *self)
|
||||
curs_scrollable_get(cursorObject *self)
|
||||
{
|
||||
PyObject *ret = NULL;
|
||||
|
||||
|
@ -1694,7 +1743,7 @@ psyco_curs_scrollable_get(cursorObject *self)
|
|||
}
|
||||
|
||||
RAISES_NEG int
|
||||
psyco_curs_scrollable_set(cursorObject *self, PyObject *pyvalue)
|
||||
curs_scrollable_set(cursorObject *self, PyObject *pyvalue)
|
||||
{
|
||||
int value;
|
||||
|
||||
|
@ -1716,6 +1765,21 @@ psyco_curs_scrollable_set(cursorObject *self, PyObject *pyvalue)
|
|||
}
|
||||
|
||||
|
||||
#define curs_pgresult_ptr_doc \
|
||||
"pgresult_ptr -- Get the PGresult structure pointer."
|
||||
|
||||
static PyObject *
|
||||
curs_pgresult_ptr_get(cursorObject *self)
|
||||
{
|
||||
if (self->pgres) {
|
||||
return PyLong_FromVoidPtr((void *)self->pgres);
|
||||
}
|
||||
else {
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
}
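
Similarly to pgconn_ptr on the connection, a sketch of how the new pgresult_ptr attribute might be used; the ctypes loading and DSN are assumptions, while PQntuples() is a standard libpq call:

    import ctypes, ctypes.util
    import psycopg2

    libpq = ctypes.cdll.LoadLibrary(ctypes.util.find_library("pq"))
    libpq.PQntuples.argtypes = [ctypes.c_void_p]
    libpq.PQntuples.restype = ctypes.c_int

    conn = psycopg2.connect("dbname=test")    # hypothetical DSN
    cur = conn.cursor()
    cur.execute("SELECT generate_series(1, 3)")
    # pgresult_ptr exposes the raw PGresult* (None if no result is available)
    print(libpq.PQntuples(cur.pgresult_ptr))  # expected: 3
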
|
||||
|
||||
|
||||
/** the cursor object **/
|
||||
|
||||
/* iterator protocol */
|
||||
|
@ -1734,8 +1798,8 @@ cursor_next(PyObject *self)
|
|||
PyObject *res;
|
||||
|
||||
if (NULL == ((cursorObject*)self)->name) {
|
||||
/* we don't parse arguments: psyco_curs_fetchone will do that for us */
|
||||
res = psyco_curs_fetchone((cursorObject*)self, NULL);
|
||||
/* we don't parse arguments: curs_fetchone will do that for us */
|
||||
res = curs_fetchone((cursorObject*)self, NULL);
|
||||
|
||||
/* convert a None to NULL to signal the end of iteration */
|
||||
if (res && res == Py_None) {
|
||||
|
@ -1744,7 +1808,7 @@ cursor_next(PyObject *self)
|
|||
}
|
||||
}
|
||||
else {
|
||||
res = psyco_curs_next_named((cursorObject*)self);
|
||||
res = curs_next_named((cursorObject*)self);
|
||||
}
|
||||
|
||||
return res;
|
||||
|
@ -1754,44 +1818,44 @@ cursor_next(PyObject *self)
|
|||
|
||||
static struct PyMethodDef cursorObject_methods[] = {
|
||||
/* DBAPI-2.0 core */
|
||||
{"close", (PyCFunction)psyco_curs_close,
|
||||
METH_NOARGS, psyco_curs_close_doc},
|
||||
{"execute", (PyCFunction)psyco_curs_execute,
|
||||
METH_VARARGS|METH_KEYWORDS, psyco_curs_execute_doc},
|
||||
{"executemany", (PyCFunction)psyco_curs_executemany,
|
||||
METH_VARARGS|METH_KEYWORDS, psyco_curs_executemany_doc},
|
||||
{"fetchone", (PyCFunction)psyco_curs_fetchone,
|
||||
METH_NOARGS, psyco_curs_fetchone_doc},
|
||||
{"fetchmany", (PyCFunction)psyco_curs_fetchmany,
|
||||
METH_VARARGS|METH_KEYWORDS, psyco_curs_fetchmany_doc},
|
||||
{"fetchall", (PyCFunction)psyco_curs_fetchall,
|
||||
METH_NOARGS, psyco_curs_fetchall_doc},
|
||||
{"callproc", (PyCFunction)psyco_curs_callproc,
|
||||
METH_VARARGS, psyco_curs_callproc_doc},
|
||||
{"nextset", (PyCFunction)psyco_curs_nextset,
|
||||
METH_NOARGS, psyco_curs_nextset_doc},
|
||||
{"setinputsizes", (PyCFunction)psyco_curs_setinputsizes,
|
||||
METH_VARARGS, psyco_curs_setinputsizes_doc},
|
||||
{"setoutputsize", (PyCFunction)psyco_curs_setoutputsize,
|
||||
METH_VARARGS, psyco_curs_setoutputsize_doc},
|
||||
{"close", (PyCFunction)curs_close,
|
||||
METH_NOARGS, curs_close_doc},
|
||||
{"execute", (PyCFunction)curs_execute,
|
||||
METH_VARARGS|METH_KEYWORDS, curs_execute_doc},
|
||||
{"executemany", (PyCFunction)curs_executemany,
|
||||
METH_VARARGS|METH_KEYWORDS, curs_executemany_doc},
|
||||
{"fetchone", (PyCFunction)curs_fetchone,
|
||||
METH_NOARGS, curs_fetchone_doc},
|
||||
{"fetchmany", (PyCFunction)curs_fetchmany,
|
||||
METH_VARARGS|METH_KEYWORDS, curs_fetchmany_doc},
|
||||
{"fetchall", (PyCFunction)curs_fetchall,
|
||||
METH_NOARGS, curs_fetchall_doc},
|
||||
{"callproc", (PyCFunction)curs_callproc,
|
||||
METH_VARARGS, curs_callproc_doc},
|
||||
{"nextset", (PyCFunction)curs_nextset,
|
||||
METH_NOARGS, curs_nextset_doc},
|
||||
{"setinputsizes", (PyCFunction)curs_setinputsizes,
|
||||
METH_VARARGS, curs_setinputsizes_doc},
|
||||
{"setoutputsize", (PyCFunction)curs_setoutputsize,
|
||||
METH_VARARGS, curs_setoutputsize_doc},
|
||||
/* DBAPI-2.0 extensions */
|
||||
{"scroll", (PyCFunction)psyco_curs_scroll,
|
||||
METH_VARARGS|METH_KEYWORDS, psyco_curs_scroll_doc},
|
||||
{"__enter__", (PyCFunction)psyco_curs_enter,
|
||||
METH_NOARGS, psyco_curs_enter_doc},
|
||||
{"__exit__", (PyCFunction)psyco_curs_exit,
|
||||
METH_VARARGS, psyco_curs_exit_doc},
|
||||
{"scroll", (PyCFunction)curs_scroll,
|
||||
METH_VARARGS|METH_KEYWORDS, curs_scroll_doc},
|
||||
{"__enter__", (PyCFunction)curs_enter,
|
||||
METH_NOARGS, curs_enter_doc},
|
||||
{"__exit__", (PyCFunction)curs_exit,
|
||||
METH_VARARGS, curs_exit_doc},
|
||||
/* psycopg extensions */
|
||||
{"cast", (PyCFunction)psyco_curs_cast,
|
||||
METH_VARARGS, psyco_curs_cast_doc},
|
||||
{"mogrify", (PyCFunction)psyco_curs_mogrify,
|
||||
METH_VARARGS|METH_KEYWORDS, psyco_curs_mogrify_doc},
|
||||
{"copy_from", (PyCFunction)psyco_curs_copy_from,
|
||||
METH_VARARGS|METH_KEYWORDS, psyco_curs_copy_from_doc},
|
||||
{"copy_to", (PyCFunction)psyco_curs_copy_to,
|
||||
METH_VARARGS|METH_KEYWORDS, psyco_curs_copy_to_doc},
|
||||
{"copy_expert", (PyCFunction)psyco_curs_copy_expert,
|
||||
METH_VARARGS|METH_KEYWORDS, psyco_curs_copy_expert_doc},
|
||||
{"cast", (PyCFunction)curs_cast,
|
||||
METH_VARARGS, curs_cast_doc},
|
||||
{"mogrify", (PyCFunction)curs_mogrify,
|
||||
METH_VARARGS|METH_KEYWORDS, curs_mogrify_doc},
|
||||
{"copy_from", (PyCFunction)curs_copy_from,
|
||||
METH_VARARGS|METH_KEYWORDS, curs_copy_from_doc},
|
||||
{"copy_to", (PyCFunction)curs_copy_to,
|
||||
METH_VARARGS|METH_KEYWORDS, curs_copy_to_doc},
|
||||
{"copy_expert", (PyCFunction)curs_copy_expert,
|
||||
METH_VARARGS|METH_KEYWORDS, curs_copy_expert_doc},
|
||||
{NULL}
|
||||
};
|
||||
|
||||
|
@ -1810,7 +1874,7 @@ static struct PyMemberDef cursorObject_members[] = {
|
|||
"Number of records ``iter(cur)`` must fetch per network roundtrip."},
|
||||
{"description", T_OBJECT, OFFSETOF(description), READONLY,
|
||||
"Cursor description as defined in DBAPI-2.0."},
|
||||
{"lastrowid", T_LONG, OFFSETOF(lastoid), READONLY,
|
||||
{"lastrowid", T_OID, OFFSETOF(lastoid), READONLY,
|
||||
"The ``oid`` of the last row inserted by the cursor."},
|
||||
/* DBAPI-2.0 extensions */
|
||||
{"rownumber", T_LONG, OFFSETOF(row), READONLY,
|
||||
|
@ -1832,16 +1896,19 @@ static struct PyMemberDef cursorObject_members[] = {
|
|||
|
||||
/* object calculated member list */
|
||||
static struct PyGetSetDef cursorObject_getsets[] = {
|
||||
{ "closed", (getter)psyco_curs_get_closed, NULL,
|
||||
psyco_curs_closed_doc, NULL },
|
||||
{ "closed", (getter)curs_closed_get, NULL,
|
||||
curs_closed_doc, NULL },
|
||||
{ "withhold",
|
||||
(getter)psyco_curs_withhold_get,
|
||||
(setter)psyco_curs_withhold_set,
|
||||
psyco_curs_withhold_doc, NULL },
|
||||
(getter)curs_withhold_get,
|
||||
(setter)curs_withhold_set,
|
||||
curs_withhold_doc, NULL },
|
||||
{ "scrollable",
|
||||
(getter)psyco_curs_scrollable_get,
|
||||
(setter)psyco_curs_scrollable_set,
|
||||
psyco_curs_scrollable_doc, NULL },
|
||||
(getter)curs_scrollable_get,
|
||||
(setter)curs_scrollable_set,
|
||||
curs_scrollable_doc, NULL },
|
||||
{ "pgresult_ptr",
|
||||
(getter)curs_pgresult_ptr_get, NULL,
|
||||
curs_pgresult_ptr_doc, NULL },
|
||||
{NULL}
|
||||
};
|
||||
|
||||
|
@ -1854,10 +1921,10 @@ cursor_setup(cursorObject *self, connectionObject *conn, const char *name)
|
|||
Dprintf("cursor_setup: parameters: name = %s, conn = %p", name, conn);
|
||||
|
||||
if (name) {
|
||||
if (0 > psycopg_strdup(&self->name, name, -1)) {
|
||||
if (0 > psyco_strdup(&self->name, name, -1)) {
|
||||
return -1;
|
||||
}
|
||||
if (!(self->qname = psycopg_escape_identifier(conn, name, -1))) {
|
||||
if (!(self->qname = psyco_escape_identifier(conn, name, -1))) {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
@ -1884,10 +1951,11 @@ cursor_setup(cursorObject *self, connectionObject *conn, const char *name)
|
|||
|
||||
/* default tzinfo factory */
|
||||
{
|
||||
/* The datetime api doesn't seem to have a constructor to make a
|
||||
* datetime.timezone, so use the Python interface. */
|
||||
PyObject *m = NULL;
|
||||
if ((m = PyImport_ImportModule("psycopg2.tz"))) {
|
||||
self->tzinfo_factory = PyObject_GetAttrString(
|
||||
m, "FixedOffsetTimezone");
|
||||
if ((m = PyImport_ImportModule("datetime"))) {
|
||||
self->tzinfo_factory = PyObject_GetAttrString(m, "timezone");
|
||||
Py_DECREF(m);
|
||||
}
|
||||
if (!self->tzinfo_factory) {
|
||||
|
@ -1962,7 +2030,7 @@ cursor_init(PyObject *obj, PyObject *args, PyObject *kwargs)
|
|||
|
||||
if (name != Py_None) {
|
||||
Py_INCREF(name); /* for ensure_bytes */
|
||||
if (!(bname = psycopg_ensure_bytes(name))) {
|
||||
if (!(bname = psyco_ensure_bytes(name))) {
|
||||
/* name has had a ref stolen */
|
||||
goto exit;
|
||||
}
|
||||
|
|
|

@@ -1,6 +1,7 @@
/* diagnostics.c - definition for the psycopg Diagnostics type
 *
 * Copyright (C) 2013 Matthew Woodcraft <matthew@woodcraft.me.uk>
 * Copyright (C) 2013-2019 Matthew Woodcraft <matthew@woodcraft.me.uk>
 * Copyright (C) 2020-2021 The Psycopg Team
 *
 * This file is part of psycopg.
 *

@@ -1,6 +1,7 @@
/* diagnostics.c - present information from libpq error responses
 *
 * Copyright (C) 2013 Matthew Woodcraft <matthew@woodcraft.me.uk>
 * Copyright (C) 2013-2019 Matthew Woodcraft <matthew@woodcraft.me.uk>
 * Copyright (C) 2020-2021 The Psycopg Team
 *
 * This file is part of psycopg.
 *
@ -61,7 +62,7 @@
|
|||
* If the cursor or its result isn't available, return None.
|
||||
*/
|
||||
static PyObject *
|
||||
psyco_diagnostics_get_field(diagnosticsObject *self, void *closure)
|
||||
diagnostics_get_field(diagnosticsObject *self, void *closure)
|
||||
{
|
||||
const char *errortext;
|
||||
|
||||
|
@ -76,41 +77,41 @@ psyco_diagnostics_get_field(diagnosticsObject *self, void *closure)
|
|||
|
||||
/* object calculated member list */
|
||||
static struct PyGetSetDef diagnosticsObject_getsets[] = {
|
||||
{ "severity", (getter)psyco_diagnostics_get_field, NULL,
|
||||
{ "severity", (getter)diagnostics_get_field, NULL,
|
||||
NULL, (void*) PG_DIAG_SEVERITY },
|
||||
{ "severity_nonlocalized", (getter)psyco_diagnostics_get_field, NULL,
|
||||
{ "severity_nonlocalized", (getter)diagnostics_get_field, NULL,
|
||||
NULL, (void*) PG_DIAG_SEVERITY_NONLOCALIZED },
|
||||
{ "sqlstate", (getter)psyco_diagnostics_get_field, NULL,
|
||||
{ "sqlstate", (getter)diagnostics_get_field, NULL,
|
||||
NULL, (void*) PG_DIAG_SQLSTATE },
|
||||
{ "message_primary", (getter)psyco_diagnostics_get_field, NULL,
|
||||
{ "message_primary", (getter)diagnostics_get_field, NULL,
|
||||
NULL, (void*) PG_DIAG_MESSAGE_PRIMARY },
|
||||
{ "message_detail", (getter)psyco_diagnostics_get_field, NULL,
|
||||
{ "message_detail", (getter)diagnostics_get_field, NULL,
|
||||
NULL, (void*) PG_DIAG_MESSAGE_DETAIL },
|
||||
{ "message_hint", (getter)psyco_diagnostics_get_field, NULL,
|
||||
{ "message_hint", (getter)diagnostics_get_field, NULL,
|
||||
NULL, (void*) PG_DIAG_MESSAGE_HINT },
|
||||
{ "statement_position", (getter)psyco_diagnostics_get_field, NULL,
|
||||
{ "statement_position", (getter)diagnostics_get_field, NULL,
|
||||
NULL, (void*) PG_DIAG_STATEMENT_POSITION },
|
||||
{ "internal_position", (getter)psyco_diagnostics_get_field, NULL,
|
||||
{ "internal_position", (getter)diagnostics_get_field, NULL,
|
||||
NULL, (void*) PG_DIAG_INTERNAL_POSITION },
|
||||
{ "internal_query", (getter)psyco_diagnostics_get_field, NULL,
|
||||
{ "internal_query", (getter)diagnostics_get_field, NULL,
|
||||
NULL, (void*) PG_DIAG_INTERNAL_QUERY },
|
||||
{ "context", (getter)psyco_diagnostics_get_field, NULL,
|
||||
{ "context", (getter)diagnostics_get_field, NULL,
|
||||
NULL, (void*) PG_DIAG_CONTEXT },
|
||||
{ "schema_name", (getter)psyco_diagnostics_get_field, NULL,
|
||||
{ "schema_name", (getter)diagnostics_get_field, NULL,
|
||||
NULL, (void*) PG_DIAG_SCHEMA_NAME },
|
||||
{ "table_name", (getter)psyco_diagnostics_get_field, NULL,
|
||||
{ "table_name", (getter)diagnostics_get_field, NULL,
|
||||
NULL, (void*) PG_DIAG_TABLE_NAME },
|
||||
{ "column_name", (getter)psyco_diagnostics_get_field, NULL,
|
||||
{ "column_name", (getter)diagnostics_get_field, NULL,
|
||||
NULL, (void*) PG_DIAG_COLUMN_NAME },
|
||||
{ "datatype_name", (getter)psyco_diagnostics_get_field, NULL,
|
||||
{ "datatype_name", (getter)diagnostics_get_field, NULL,
|
||||
NULL, (void*) PG_DIAG_DATATYPE_NAME },
|
||||
{ "constraint_name", (getter)psyco_diagnostics_get_field, NULL,
|
||||
{ "constraint_name", (getter)diagnostics_get_field, NULL,
|
||||
NULL, (void*) PG_DIAG_CONSTRAINT_NAME },
|
||||
{ "source_file", (getter)psyco_diagnostics_get_field, NULL,
|
||||
{ "source_file", (getter)diagnostics_get_field, NULL,
|
||||
NULL, (void*) PG_DIAG_SOURCE_FILE },
|
||||
{ "source_line", (getter)psyco_diagnostics_get_field, NULL,
|
||||
{ "source_line", (getter)diagnostics_get_field, NULL,
|
||||
NULL, (void*) PG_DIAG_SOURCE_LINE },
|
||||
{ "source_function", (getter)psyco_diagnostics_get_field, NULL,
|
||||
{ "source_function", (getter)diagnostics_get_field, NULL,
|
||||
NULL, (void*) PG_DIAG_SOURCE_FUNCTION },
|
||||
{NULL}
|
||||
};
|
||||
|
|
|

@@ -1,6 +1,7 @@
/* error.h - definition for the psycopg base Error type
 *
 * Copyright (C) 2013 Daniele Varrazzo <daniele.varrazzo@gmail.com>
 * Copyright (C) 2013-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
 * Copyright (C) 2020-2021 The Psycopg Team
 *
 * This file is part of psycopg.
 *

@@ -1,6 +1,7 @@
/* error_type.c - python interface to the Error objects
 *
 * Copyright (C) 2013 Daniele Varrazzo <daniele.varrazzo@gmail.com>
 * Copyright (C) 2013-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
 * Copyright (C) 2020-2021 The Psycopg Team
 *
 * This file is part of psycopg.
 *
@@ -34,7 +35,7 @@
 PyObject *
 error_text_from_chars(errorObject *self, const char *str)
 {
-    return psycopg_text_from_chars_safe(str, -1, self->pydecoder);
+    return psyco_text_from_chars_safe(str, -1, self->pydecoder);
 }


@@ -64,6 +65,8 @@ base_exception_from_sqlstate(const char *sqlstate)
     switch (sqlstate[0]) {
     case '0':
         switch (sqlstate[1]) {
+        case '8': /* Class 08 - Connection Exception */
+            return OperationalError;
         case 'A': /* Class 0A - Feature Not Supported */
             return NotSupportedError;
         }
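This switch maps the leading characters of a SQLSTATE to a DB-API exception class. A rough Python rendering of just the two branches visible in this hunk (not psycopg2's actual lookup code; the fallback is an assumption) would be:

import psycopg2

def base_exception_from_sqlstate(sqlstate):
    # Mirrors only the two classes shown in the hunk above.
    if sqlstate.startswith("08"):    # Class 08 - Connection Exception
        return psycopg2.OperationalError
    if sqlstate.startswith("0A"):    # Class 0A - Feature Not Supported
        return psycopg2.NotSupportedError
    return psycopg2.DatabaseError    # assumed fallback for other classes

print(base_exception_from_sqlstate("08006").__name__)   # OperationalError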
@@ -229,7 +232,7 @@ static struct PyGetSetDef error_getsets[] = {
  * would require implementing __getstate__, and as of 2012 it's a little
  * bit too late to care. */
 static PyObject *
-psyco_error_reduce(errorObject *self, PyObject *dummy)
+error_reduce(errorObject *self, PyObject *dummy)
 {
     PyObject *meth = NULL;
     PyObject *tuple = NULL;
@@ -288,7 +291,7 @@ error:
 }

 PyObject *
-psyco_error_setstate(errorObject *self, PyObject *state)
+error_setstate(errorObject *self, PyObject *state)
 {
     PyObject *rv = NULL;

@@ -326,8 +329,8 @@ error:

 static PyMethodDef error_methods[] = {
     /* Make Error and all its subclasses picklable. */
-    {"__reduce__", (PyCFunction)psyco_error_reduce, METH_NOARGS },
-    {"__setstate__", (PyCFunction)psyco_error_setstate, METH_O },
+    {"__reduce__", (PyCFunction)error_reduce, METH_NOARGS },
+    {"__setstate__", (PyCFunction)error_setstate, METH_O },
     {NULL}
 };

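The two entries above are what make psycopg2 errors picklable from Python; a short round-trip check (DSN and query are illustrative only):

import pickle
import psycopg2

conn = psycopg2.connect("dbname=test")   # hypothetical DSN
cur = conn.cursor()
try:
    cur.execute("SELECT * FROM no_such_table")
except psycopg2.Error as exc:
    copy = pickle.loads(pickle.dumps(exc))   # goes through __reduce__ / __setstate__
    print(copy.pgcode, copy.pgerror)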
@@ -1,6 +1,7 @@
 /* green.c - cooperation with coroutine libraries.
  *
- * Copyright (C) 2010 Daniele Varrazzo <daniele.varrazzo@gmail.com>
+ * Copyright (C) 2010-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
+ * Copyright (C) 2020-2021 The Psycopg Team
  *
  * This file is part of psycopg.
  *
@@ -177,10 +178,12 @@ psyco_exec_green(connectionObject *conn, const char *command)
         goto end;
     }

-    /* Now we can read the data without fear of blocking. */
-    result = pq_get_last_result(conn);
+    /* the result is now in the connection: take its ownership */
+    result = conn->pgres;
+    conn->pgres = NULL;

 end:
+    CLEARPGRES(conn->pgres);
     conn->async_status = ASYNC_DONE;
     Py_CLEAR(conn->async_cursor);
     return result;
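psyco_exec_green only runs when a wait callback has been registered; a minimal way to exercise this code path from Python, using the stock wait_select callback shipped in psycopg2.extras (the DSN is illustrative):

import psycopg2
import psycopg2.extensions
import psycopg2.extras

# With a wait callback installed, blocking execution is routed through the
# green machinery (psyco_exec_green) instead of the plain blocking path.
psycopg2.extensions.set_wait_callback(psycopg2.extras.wait_select)

conn = psycopg2.connect("dbname=test")   # hypothetical DSN
cur = conn.cursor()
cur.execute("SELECT pg_sleep(0.1), 42")
print(cur.fetchone())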
@@ -1,6 +1,7 @@
 /* green.c - cooperation with coroutine libraries.
  *
- * Copyright (C) 2010 Daniele Varrazzo <daniele.varrazzo@gmail.com>
+ * Copyright (C) 2010-2019 Daniele Varrazzo <daniele.varrazzo@gmail.com>
+ * Copyright (C) 2020-2021 The Psycopg Team
  *
  * This file is part of psycopg.
  *
@@ -2,7 +2,8 @@
  * required for advanced communication with the server, such as
  * streaming replication
  *
- * Copyright (C) 2003-2015 Federico Di Gregorio <fog@debian.org>
+ * Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
+ * Copyright (C) 2020-2021 The Psycopg Team
  *
  * This file is part of psycopg.
  *
@@ -37,6 +38,7 @@
 #include "psycopg/win32_support.h"
 #else
 #include <arpa/inet.h>
+#include <sys/time.h>
 #endif

 /* support routines taken from pg_basebackup/streamutil.c */
@@ -1,6 +1,7 @@
 /* libpq_support.h - definitions for libpq_support.c
  *
- * Copyright (C) 2003-2015 Federico Di Gregorio <fog@debian.org>
+ * Copyright (C) 2003-2019 Federico Di Gregorio <fog@debian.org>
+ * Copyright (C) 2020-2021 The Psycopg Team
  *
  * This file is part of psycopg.
  *
@@ -1,6 +1,7 @@
 /* lobject.h - definition for the psycopg lobject type
  *
- * Copyright (C) 2006-2010 Federico Di Gregorio <fog@debian.org>
+ * Copyright (C) 2006-2019 Federico Di Gregorio <fog@debian.org>
+ * Copyright (C) 2020-2021 The Psycopg Team
  *
  * This file is part of psycopg.
  *
Some files were not shown because too many files have changed in this diff.