Sandbox removed

The sandbox directory was there to test issues before we had full test
coverage and to try quick and dirty Python snippets before moving them to
the examples directory, sending them to the mailing list, or copying them
to the docs. Almost everything that was in the sandbox is now in the
examples directory, has been refactored into a full test, or is so old
that it is of no use anymore. So, adieu sandbox.
Federico Di Gregorio 2018-10-09 00:00:12 +02:00
parent e3c791cf60
commit 7806fc736a
27 changed files with 0 additions and 61927 deletions
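For a sense of what "refactored into a full test" means in practice, a quick snippet like the array one in the first file below would end up as something along these lines. This is a rough sketch only; the test-case name and connection string are illustrative and do not reflect the actual layout of the psycopg2 test suite.

import unittest

import psycopg2


class ArrayRoundtripTestCase(unittest.TestCase):
    """Hypothetical test corresponding to the ad-hoc array snippet below."""

    def setUp(self):
        self.conn = psycopg2.connect("dbname=test")

    def tearDown(self):
        self.conn.close()

    def test_list_roundtrip(self):
        # A Python list is adapted to a PostgreSQL ARRAY and converted back.
        curs = self.conn.cursor()
        curs.execute("SELECT %s::int[]", ([1, 2, None],))
        self.assertEqual(curs.fetchone()[0], [1, 2, None])


if __name__ == '__main__':
    unittest.main()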

View File

@ -1,30 +0,0 @@
import psycopg2
conn = psycopg2.connect("port=5433 dbname=test")
curs = conn.cursor()
#curs.execute("SELECT ARRAY[1,2,3] AS foo")
#print curs.fetchone()[0]
#curs.execute("SELECT ARRAY['1','2','3'] AS foo")
#print curs.fetchone()[0]
#curs.execute("""SELECT ARRAY[',','"','\\\\'] AS foo""")
#d = curs.fetchone()[0]
#print d, '->', d[0], d[1], d[2]
#curs.execute("SELECT ARRAY[ARRAY[1,2],ARRAY[3,4]] AS foo")
#print curs.fetchone()[0]
#curs.execute("SELECT ARRAY[ARRAY[now(), now()], ARRAY[now(), now()]] AS foo")
#print curs.description
#print curs.fetchone()[0]
#curs.execute("SELECT 1 AS foo, ARRAY[1,2] AS bar")
#print curs.fetchone()
#curs.execute("SELECT * FROM test()")
#print curs.fetchone()
curs.execute("SELECT %s", ([1,2,None],))
print(curs.fetchone())

View File

@ -1,35 +0,0 @@
import datetime
import time
import psycopg2
#d = datetime.timedelta(12, 100, 9876)
#print d.days, d.seconds, d.microseconds
#print psycopg.adapt(d).getquoted()
conn = psycopg2.connect("dbname=test_unicode")
conn.set_client_encoding("xxx")
curs = conn.cursor()
#curs.execute("SELECT 1.0 AS foo")
#print curs.fetchmany(2)
#print curs.fetchall()
def sleep(curs):
    while not curs.isready():
        print(".")
        time.sleep(.1)
#curs.execute("""
# DECLARE zz INSENSITIVE SCROLL CURSOR WITH HOLD FOR
# SELECT now();
# FOR READ ONLY;""", async = 1)
curs.execute("SELECT now() AS foo", async=1)
sleep(curs)
print(curs.fetchall())
#curs.execute("""
# FETCH FORWARD 1 FROM zz;""", async = 1)
curs.execute("SELECT now() AS bar", async=1)
print(curs.fetchall())
curs.execute("SELECT now() AS bar")
sleep(curs)
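For reference, the curs.execute(..., async=1) / curs.isready() calls used above belong to an API removed from psycopg2 long ago; asynchronous support now lives on the connection. A minimal sketch of the equivalent flow with a current psycopg2 (assuming version 2.7 or later, where the keyword is spelled async_, and a reachable "dbname=test" database):

import select

import psycopg2
from psycopg2 import extensions


def wait(conn):
    # Loop on poll() until the connection (or the running query) is ready.
    while True:
        state = conn.poll()
        if state == extensions.POLL_OK:
            return
        elif state == extensions.POLL_READ:
            select.select([conn.fileno()], [], [])
        elif state == extensions.POLL_WRITE:
            select.select([], [conn.fileno()], [])
        else:
            raise psycopg2.OperationalError("bad poll state: %r" % state)


aconn = psycopg2.connect("dbname=test", async_=1)
wait(aconn)                      # wait for the connection to be established
curs = aconn.cursor()
curs.execute("SELECT now() AS foo")
wait(aconn)                      # wait for the query to complete
print(curs.fetchall())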

View File

@ -1,25 +0,0 @@
#!/usr/bin/env python
#import psycopg as db
import psycopg2 as db
import threading
import time
import sys
def query_worker(dsn):
    conn = db.connect(dsn)
    cursor = conn.cursor()
    while True:
        cursor.execute("select * from pg_class")
        while True:
            row = cursor.fetchone()
            if row is None:
                break

if len(sys.argv) != 2:
    print('usage: %s DSN' % sys.argv[0])
    sys.exit(1)
th = threading.Thread(target=query_worker, args=(sys.argv[1],))
th.setDaemon(True)
th.start()
time.sleep(1)

View File

@ -1,15 +0,0 @@
import psycopg2
import psycopg2.extensions
DEC2FLOAT = psycopg2.extensions.new_type(
    psycopg2._psycopg.DECIMAL.values,
    'DEC2FLOAT',
    psycopg2.extensions.FLOAT)
psycopg2.extensions.register_type(DEC2FLOAT)
o = psycopg2.connect("dbname=test")
c = o.cursor()
c.execute("SELECT NULL::decimal(10,2)")
n = c.fetchone()[0]
print(n, type(n))

View File

@ -1,18 +0,0 @@
import psycopg2
con = psycopg2.connect("dbname=test")
cur = con.cursor()
cur.execute("SELECT %s::regtype::oid", ('bytea', ))
print(cur.fetchone()[0])
# 17
cur.execute("CREATE DOMAIN thing AS bytea")
cur.execute("SELECT %s::regtype::oid", ('thing', ))
print(cur.fetchone()[0])
#62148
cur.execute("CREATE TABLE thingrel (thingcol thing)")
cur.execute("SELECT * FROM thingrel")
print(cur.description)
#(('thingcol', 17, None, -1, None, None, None),)

View File

@ -1,18 +0,0 @@
import psycopg2
o = psycopg2.connect("dbname=test")
c = o.cursor()
def sql():
    c.execute("SELECT 1.23 AS foo")
    print(1, c.fetchone())
    #print c.description
    c.execute("SELECT 1.23::float AS foo")
    print(2, c.fetchone())
    #print c.description
print("BEFORE")
sql()
import gtk
print("AFTER")
sql()

View File

@ -1,13 +0,0 @@
import psycopg2
import psycopg2.extras
conn = psycopg2.connect("dbname=test")
curs = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
curs.execute("SELECT '2005-2-12'::date AS foo, 'boo!' as bar")
for x in curs.fetchall():
    print(type(x), x[0], x[1], x['foo'], x['bar'])
curs.execute("SELECT '2005-2-12'::date AS foo, 'boo!' as bar")
for x in curs:
    print(type(x), x[0], x[1], x['foo'], x['bar'])

View File

@ -1,82 +0,0 @@
"""
script: test_leak.py
This script attempts to repeatedly insert the same list of rows into
the database table, causing a duplicate key error to occur. It will
then roll back the transaction and try again.
Database table schema:
-- CREATE TABLE t (foo TEXT PRIMARY KEY);
There are two ways to run the script, which will launch one of the
two functions:
# leak() will cause increasingly more RAM to be used by the script.
$ python <script_name> leak
# noleak() does not have the RAM usage problem. The only difference
# between it and leak() is that 'rows' is created once, before the loop.
$ python <script_name> noleak
Use Control-C to quit the script.
"""
import sys
import psycopg2
DB_NAME = 'test'
connection = psycopg2.connect(database=DB_NAME)
cursor = connection.cursor()
# Uncomment the following if table 't' does not exist
create_table = """CREATE TABLE t (foo TEXT PRIMARY KEY)"""
cursor.execute(create_table)
insert = """INSERT INTO t VALUES (%(foo)s)"""
def leak():
    """rows created in each loop run"""
    count = 0
    while 1:
        try:
            rows = []
            for i in range(1, 100):
                row = {'foo': i}
                rows.append(row)
            count += 1
            print("loop count:", count)
            cursor.executemany(insert, rows)
            connection.commit()
        except psycopg2.IntegrityError:
            connection.rollback()

def noleak():
    """rows created once, before the loop"""
    rows = []
    for i in range(1, 100):
        row = {'foo': i}
        rows.append(row)
    count = 0
    while 1:
        try:
            count += 1
            print("loop count:", count)
            cursor.executemany(insert, rows)
            connection.commit()
        except psycopg2.IntegrityError:
            connection.rollback()
usage = "%s requires one argument: 'leak' or 'noleak'" % sys.argv[0]
try:
    if 'leak' == sys.argv[1]:
        run_function = leak
    elif 'noleak' == sys.argv[1]:
        run_function = noleak
    else:
        print(usage)
        sys.exit()
except IndexError:
    print(usage)
    sys.exit()
# Run leak() or noleak(), whichever was indicated on the command line
run_function()

View File

@ -1,43 +0,0 @@
#!/usr/bin/env python
"""
Test if the arguments object can be used with both positional and keyword
arguments.
"""
class O(object):
    def __init__(self, *args, **kwds):
        self.args = args
        self.kwds = kwds

    def __getitem__(self, k):
        if isinstance(k, int):
            return self.args[k]
        else:
            return self.kwds[k]
o = O('R%', second='S%')
print(o[0])
print(o['second'])
#-------------------------------------------------------------------------------
import psycopg2 as dbapi
conn = dbapi.connect(database='test')
cursor = conn.cursor()
cursor.execute("""
SELECT * FROM location_pretty
WHERE keyname LIKE %s OR keyname LIKE %(second)s
""", (o,))
for row in cursor:
print(row)

View File

@ -1,31 +0,0 @@
import psycopg2
import psycopg2.extensions
class Portal(psycopg2.extensions.cursor):
    def __init__(self, name, curs):
        psycopg2.extensions.cursor.__init__(
            self, curs.connection, '"'+name+'"')
CURSOR = psycopg2.extensions.new_type((1790,), "CURSOR", Portal)
psycopg2.extensions.register_type(CURSOR)
conn = psycopg2.connect("dbname=test")
curs = conn.cursor()
curs.execute("SELECT reffunc2()")
portal = curs.fetchone()[0]
print(portal.fetchone())
print(portal.fetchmany(2))
portal.scroll(0, 'absolute')
print(portal.fetchall())
#print curs.rowcount
#print curs.statusmessage
#print curs.fetchone()
#print curs.rowcount
#print curs.statusmessage
#print curs.fetchone()
#print curs.rowcount
#print curs.statusmessage

View File

@ -1,12 +0,0 @@
class B(object):
    def __init__(self, x):
        if x: self._o = True
        else: self._o = False

    def __getattribute__(self, attr):
        print("ga called", attr)
        return object.__getattribute__(self, attr)

    def _sqlquote(self):
        if self._o:
            return 'It is True'
        else:
            return 'It is False'

View File

@ -1,11 +0,0 @@
import psycopg2
import threading, os, time, gc
for i in range(20000):
    conn = psycopg2.connect('dbname=test')
    del conn
    if i%200 == 0:
        datafile = os.popen('ps -p %s -o rss' % os.getpid())
        line = datafile.readlines(2)[1].strip()
        datafile.close()
        print(str(i) + '\t' + line)

View File

@ -1,42 +0,0 @@
import psycopg2
import threading, os, time, gc
super_lock = threading.Lock()
def f():
    try:
        conn = psycopg2.connect('dbname=testx')
        #c = db.cursor()
        #c.close()
        #conn.close()
        del conn
    except:
        pass
        #print "ERROR"

def g():
    n = 30
    k = 0
    i = 1
    while i > 0:
        while n > 0:
            threading.Thread(target=f).start()
            time.sleep(0.001)
            threading.Thread(target=f).start()
            time.sleep(0.001)
            threading.Thread(target=f).start()
            n -= 1
        while threading.activeCount() > 1:
            time.sleep(0.01)
        datafile = os.popen('ps -p %s -o rss' % os.getpid())
        line = datafile.readlines(2)[1].strip()
        datafile.close()
        n = 30
        print(str(k*n) + '\t' + line)
        k += 1

while threading.activeCount()>1:
    pass
g()

View File

@ -1,49 +0,0 @@
import datetime
import time
import psycopg2
#d = datetime.timedelta(12, 100, 9876)
#print d.days, d.seconds, d.microseconds
#print psycopg.adapt(d).getquoted()
conn = psycopg2.connect("dbname=test")
#conn.set_client_encoding("xxx")
curs = conn.cursor()
curs.execute("SELECT '2005-2-12'::date AS foo")
print(curs.fetchall())
curs.execute("SELECT '10:23:60'::time AS foo")
print(curs.fetchall())
curs.execute("SELECT '10:23:59.895342'::time AS foo")
print(curs.fetchall())
curs.execute("SELECT '0:0:12.31423'::time with time zone AS foo")
print(curs.fetchall())
curs.execute("SELECT '0:0:12+01:30'::time with time zone AS foo")
print(curs.fetchall())
curs.execute("SELECT '2005-2-12 10:23:59.895342'::timestamp AS foo")
print(curs.fetchall())
curs.execute("SELECT '2005-2-12 10:23:59.895342'::timestamp with time zone AS foo")
print(curs.fetchall())
#print curs.fetchmany(2)
#print curs.fetchall()
def sleep(curs):
    while not curs.isready():
        print(".")
        time.sleep(.1)
#curs.execute("""
# DECLARE zz INSENSITIVE SCROLL CURSOR WITH HOLD FOR
# SELECT now();
# FOR READ ONLY;""", async = 1)
#curs.execute("SELECT now() AS foo", async=1);
#sleep(curs)
#print curs.fetchall()
#curs.execute("""
# FETCH FORWARD 1 FROM zz;""", async = 1)
#curs.execute("SELECT now() AS bar", async=1);
#print curs.fetchall()
#curs.execute("SELECT now() AS bar");
#sleep(curs)

View File

@ -1,8 +0,0 @@
import psycopg2
import psycopg2.extras
conn = psycopg2.connect("dbname=test")
curs = conn.cursor()
curs.execute("SELECT true AS foo WHERE 'a' in %s", (("aa", "bb"),))
print(curs.fetchall())
print(curs.query)

File diff suppressed because it is too large.

View File

@ -1,42 +0,0 @@
import psycopg2
dbconn = psycopg2.connect(database="test",host="localhost",port="5432")
query = """
CREATE TEMP TABLE data (
field01 char,
field02 varchar,
field03 varchar,
field04 varchar,
field05 varchar,
field06 varchar,
field07 varchar,
field08 varchar,
field09 numeric,
field10 integer,
field11 numeric,
field12 numeric,
field13 numeric,
field14 numeric,
field15 numeric,
field16 numeric,
field17 char,
field18 char,
field19 char,
field20 varchar,
field21 varchar,
field22 integer,
field23 char,
field24 char
);
"""
cursor = dbconn.cursor()
cursor.execute(query)
f = open('test_copy2.csv')
cursor.copy_from(f, 'data', sep='|')
f.close()
dbconn.commit()
cursor.close()
dbconn.close()

View File

@ -1,81 +0,0 @@
#!/usr/bin/env python
"""Test for issue #113: test with error during green processing
"""
DSN = 'dbname=test'
import eventlet.patcher
eventlet.patcher.monkey_patch()
import os
import signal
from time import sleep
import psycopg2
from psycopg2 import extensions
from eventlet.hubs import trampoline
# register a test wait callback that fails if SIGHUP is received
panic = []
def wait_cb(conn):
    """A wait callback useful to allow eventlet to work with Psycopg."""
    while 1:
        if panic:
            raise Exception('whatever')

        state = conn.poll()
        if state == extensions.POLL_OK:
            break
        elif state == extensions.POLL_READ:
            trampoline(conn.fileno(), read=True)
        elif state == extensions.POLL_WRITE:
            trampoline(conn.fileno(), write=True)
        else:
            raise psycopg2.OperationalError(
                "Bad result from poll: %r" % state)
extensions.set_wait_callback(wait_cb)
# SIGHUP handler to inject a fail in the callback
def handler(signum, frame):
    panic.append(True)
signal.signal(signal.SIGHUP, handler)
# Simulate another green thread working
def worker():
    while 1:
        print("I'm working")
        sleep(1)
eventlet.spawn(worker)
# You can unplug the network cable etc. here.
# Kill -HUP will raise an exception in the callback.
print("PID", os.getpid())
conn = psycopg2.connect(DSN)
curs = conn.cursor()
try:
    for i in range(1000):
        curs.execute("select %s, pg_sleep(1)", (i,))
        r = curs.fetchone()
        print("selected", r)
except BaseException, e:
    print("got exception:", e.__class__.__name__, e)
    if conn.closed:
        print("the connection is closed")
    else:
        conn.rollback()
        curs.execute("select 1")
        print(curs.fetchone())

View File

@ -1,31 +0,0 @@
import gc
import sys
import os
import signal
import warnings
import psycopg2
print("Testing psycopg2 version %s" % psycopg2.__version__)
dbname = os.environ.get('PSYCOPG2_TESTDB', 'psycopg2_test')
conn = psycopg2.connect("dbname=%s" % dbname)
curs = conn.cursor()
curs.isready()
print("Now restart the test postgresql server to drop all connections, press enter when done.")
raw_input()
try:
    curs.isready()  # No need to test return value
    curs.isready()
except:
    print("Test passed")
    sys.exit(0)

if curs.isready():
    print("Warning: looks like the connection didn't get killed. This test is probably in-effective")
    print("Test inconclusive")
    sys.exit(1)
gc.collect() # used to error here
print("Test Passed")

View File

@ -1,12 +0,0 @@
def test():
    import sys, os, thread, psycopg2

    def test2():
        while True:
            for filename in map(lambda m: getattr(m, "__file__", None), sys.modules.values()):
                os.stat("/dev/null")

    connection = psycopg2.connect(database="test")
    cursor = connection.cursor()
    thread.start_new_thread(test2, ())
    while True:
        cursor.execute("COMMIT")
test()

View File

@ -1,8 +0,0 @@
import gtk
import psycopg2
o = psycopg2.connect("dbname=test")
c = o.cursor()
c.execute("SELECT 1.23::float AS foo")
x = c.fetchone()[0]
print(x, type(x))

View File

@ -1,73 +0,0 @@
"""
A script to reproduce the race condition described in ticket #58
from https://bugzilla.redhat.com/show_bug.cgi?id=711095
Results in the error:
python: Modules/gcmodule.c:277: visit_decref: Assertion `gc->gc.gc_refs != 0'
failed.
on unpatched library.
"""
import threading
import gc
import time
import psycopg2
from StringIO import StringIO
done = 0
class GCThread(threading.Thread):
    # A thread that sits in an infinite loop, forcing the garbage collector
    # to run
    def run(self):
        global done
        while not done:
            gc.collect()
            time.sleep(0.1) # give the other thread a chance to run
gc_thread = GCThread()
# This assumes a pre-existing db named "test", with:
# "CREATE TABLE test (id serial PRIMARY KEY, num integer, data varchar);"
conn = psycopg2.connect("dbname=test user=postgres")
cur = conn.cursor()
# Start the other thread, running the GC regularly
gc_thread.start()
# Now do lots of "cursor.copy_from" calls:
print("copy_from")
for i in range(1000):
    f = StringIO("42\tfoo\n74\tbar\n")
    cur.copy_from(f, 'test', columns=('num', 'data'))
# Assuming the other thread gets a chance to run during this call, expect a
# build of python (with assertions enabled) to bail out here with:
# python: Modules/gcmodule.c:277: visit_decref: Assertion `gc->gc.gc_refs != 0' failed.
# Also exercise the copy_to code path
print("copy_to")
cur.execute("truncate test")
f = StringIO("42\tfoo\n74\tbar\n")
cur.copy_from(f, 'test', columns=('num', 'data'))
for i in range(1000):
    f = StringIO()
    cur.copy_to(f, 'test', columns=('num', 'data'))
# And copy_expert too
print("copy_expert")
cur.execute("truncate test")
for i in range(1000):
    f = StringIO("42\tfoo\n74\tbar\n")
    cur.copy_expert("copy test to stdout", f)
# Terminate the GC thread's loop:
done = 1
cur.close()
conn.close()

View File

@ -1,44 +0,0 @@
import psycopg2
import traceback
# Change the table here to something the user can create tables in ...
db = psycopg2.connect('dbname=test')
cursor = db.cursor()
print('Creating tables and sample data')
cursor.execute('''
    CREATE TEMPORARY TABLE foo (
        id int PRIMARY KEY
    )''')
cursor.execute('''
    CREATE TEMPORARY TABLE bar (
        id int PRIMARY KEY,
        foo_id int,
        CONSTRAINT bar_foo_fk FOREIGN KEY (foo_id) REFERENCES foo(id) DEFERRABLE
    )''')
cursor.execute('INSERT INTO foo VALUES (1)')
cursor.execute('INSERT INTO bar VALUES (1, 1)')
db.commit()
print('Deferring constraint and breaking referential integrity')
cursor.execute('SET CONSTRAINTS bar_foo_fk DEFERRED')
cursor.execute('UPDATE bar SET foo_id = 42 WHERE id = 1')
print('Committing (this should fail)')
try:
    db.commit()
except:
    traceback.print_exc()
print('Rolling back connection')
db.rollback()
print('Running a trivial query')
try:
    cursor.execute('SELECT TRUE')
except:
    traceback.print_exc()
print('db.closed:', db.closed)

View File

@ -1,63 +0,0 @@
from __future__ import print_function
import psycopg2, psycopg2.extensions
import threading
import gc
import time
import sys
# inherit psycopg2 connection class just so that
# garbage collector enters the tp_clear code path
# in delete_garbage()
class my_connection(psycopg2.extensions.connection):
    pass

class db_user(threading.Thread):
    def run(self):
        conn2 = psycopg2.connect(sys.argv[1], connection_factory=my_connection)
        cursor = conn2.cursor()
        cursor.execute("UPDATE test_psycopg2_dealloc SET a = 3", async=1)

        # the conn2 destructor will block indefinitely
        # on the completion of the query
        # (and it will not be holding the GIL during that time)
        print("begin conn2 del", file=sys.stderr)
        del cursor, conn2
        print("end conn2 del", file=sys.stderr)
def main():
    # lock out a db row
    conn1 = psycopg2.connect(sys.argv[1], connection_factory=my_connection)
    cursor = conn1.cursor()
    cursor.execute("DROP TABLE IF EXISTS test_psycopg2_dealloc")
    cursor.execute("CREATE TABLE test_psycopg2_dealloc (a int)")
    cursor.execute("INSERT INTO test_psycopg2_dealloc VALUES (1)")
    conn1.commit()

    cursor.execute("UPDATE test_psycopg2_dealloc SET a = 2", async=1)

    # concurrent thread trying to access the locked row
    db_user().start()

    # eventually, a gc.collect run will happen
    # while the conn2 is inside conn_close()
    # but this second dealloc won't get blocked
    # as it will avoid conn_close()
    for i in range(10):
        if gc.collect():
            print("garbage collection done", file=sys.stderr)
            break
        time.sleep(1)

    # we now unlock the row by invoking
    # the destructor of conn1. This will permit the
    # concurrent thread destructor of conn2 to
    # continue and it will end up trying to free
    # self->dsn a second time.
    print("begin conn1 del", file=sys.stderr)
    del cursor, conn1
    print("end conn1 del", file=sys.stderr)

if __name__ == '__main__':
    main()

View File

@ -1,7 +0,0 @@
import psycopg2.extensions
print(dir(psycopg2._psycopg))
print(psycopg2.extensions.new_type(
    (600,), "POINT", lambda oids, name, fun: None))
print("ciccia ciccia")
print(psycopg2._psycopg)

View File

@ -1,9 +0,0 @@
import datetime
import time
import psycopg2
conn = psycopg2.connect("dbname=test")
curs = conn.cursor()
curs.execute("set timezone = 'Asia/Calcutta'")
curs.execute("SELECT now()")
print(curs.fetchone()[0])

View File

@ -1,486 +0,0 @@
#
# This is a valgrind suppression file that should be used when using valgrind.
#
# Here's an example of running valgrind:
#
# cd python/dist/src
# valgrind --tool=memcheck --suppressions=Misc/valgrind-python.supp \
# ./python -E -tt ./Lib/test/regrtest.py -u bsddb,network
#
# You must edit Objects/obmalloc.c and uncomment Py_USING_MEMORY_DEBUGGER
# to use the preferred suppressions with Py_ADDRESS_IN_RANGE.
#
# If you do not want to recompile Python, you can uncomment
# suppressions for PyObject_Free and PyObject_Realloc.
#
# See Misc/README.valgrind for more information.
# all tool names: Addrcheck,Memcheck,cachegrind,helgrind,massif
{
ADDRESS_IN_RANGE/Invalid read of size 4
Memcheck:Addr4
fun:Py_ADDRESS_IN_RANGE
}
{
ADDRESS_IN_RANGE/Invalid read of size 4
Memcheck:Value4
fun:Py_ADDRESS_IN_RANGE
}
{
ADDRESS_IN_RANGE/Invalid read of size 8 (x86_64)
Memcheck:Value8
fun:Py_ADDRESS_IN_RANGE
}
{
ADDRESS_IN_RANGE/Conditional jump or move depends on uninitialised value
Memcheck:Cond
fun:Py_ADDRESS_IN_RANGE
}
{
ADDRESS_IN_RANGE/Invalid read of size 4
Memcheck:Addr4
fun:PyObject_Free
}
{
ADDRESS_IN_RANGE/Invalid read of size 4
Memcheck:Value4
fun:PyObject_Free
}
{
ADDRESS_IN_RANGE/Conditional jump or move depends on uninitialised value
Memcheck:Cond
fun:PyObject_Free
}
{
ADDRESS_IN_RANGE/Invalid read of size 4
Memcheck:Addr4
fun:PyObject_Realloc
}
{
ADDRESS_IN_RANGE/Invalid read of size 4
Memcheck:Value4
fun:PyObject_Realloc
}
{
ADDRESS_IN_RANGE/Conditional jump or move depends on uninitialised value
Memcheck:Cond
fun:PyObject_Realloc
}
###
### All the suppressions below are for errors that occur within libraries
### that Python uses. The problems do not appear to be related to Python's
### use of the libraries.
###
{
GDBM problems, see test_gdbm
Memcheck:Param
write(buf)
fun:write
fun:gdbm_open
}
{
Avoid problem in libc on gentoo
Memcheck:Cond
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
}
{
Avoid problem in glibc on gentoo
Memcheck:Addr8
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/libc-2.3.5.so
obj:/lib/ld-2.3.5.so
fun:_dl_open
obj:/lib/libdl-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/libdl-2.3.5.so
fun:dlopen
}
{
Avoid problem in glibc on gentoo
Memcheck:Addr8
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/libc-2.3.5.so
obj:/lib/ld-2.3.5.so
fun:_dl_open
obj:/lib/libdl-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/libdl-2.3.5.so
fun:dlopen
}
{
Avoid problem in glibc on gentoo
Memcheck:Cond
obj:/lib/ld-2.3.5.so
obj:/lib/libc-2.3.5.so
obj:/lib/ld-2.3.5.so
fun:_dl_open
obj:/lib/libdl-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/libdl-2.3.5.so
fun:dlopen
}
{
Avoid problem in glibc on gentoo
Memcheck:Cond
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/libc-2.3.5.so
obj:/lib/ld-2.3.5.so
fun:_dl_open
obj:/lib/libdl-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/libdl-2.3.5.so
fun:dlopen
}
{
Avoid problems w/readline doing a putenv and leaking on exit
Memcheck:Leak
fun:malloc
fun:xmalloc
fun:sh_set_lines_and_columns
fun:_rl_get_screen_size
fun:_rl_init_terminal_io
obj:/lib/libreadline.so.4.3
fun:rl_initialize
fun:setup_readline
fun:initreadline
fun:_PyImport_LoadDynamicModule
fun:load_module
fun:import_submodule
fun:load_next
fun:import_module_ex
fun:PyImport_ImportModuleEx
}
{
Mysterious leak that seems to deal w/pthreads
Memcheck:Leak
fun:calloc
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
fun:_dl_allocate_tls
fun:__pthread_initialize_minimal
}
{
Mysterious leak that seems to deal w/pthreads
Memcheck:Leak
fun:memalign
obj:/lib/ld-2.3.5.so
fun:_dl_allocate_tls
fun:__pthread_initialize_minimal
}
###
### These occur from somewhere within the SSL, when running
### test_socket_ssl. They are too general to leave on by default.
###
###{
### somewhere in SSL stuff
### Memcheck:Cond
### fun:memset
###}
###{
### somewhere in SSL stuff
### Memcheck:Value4
### fun:memset
###}
###
###{
### somewhere in SSL stuff
### Memcheck:Cond
### fun:MD5_Update
###}
###
###{
### somewhere in SSL stuff
### Memcheck:Value4
### fun:MD5_Update
###}
#
# All of these problems come from using test_socket_ssl
#
{
from test_socket_ssl
Memcheck:Cond
fun:BN_bin2bn
}
{
from test_socket_ssl
Memcheck:Cond
fun:BN_num_bits_word
}
{
from test_socket_ssl
Memcheck:Value4
fun:BN_num_bits_word
}
{
from test_socket_ssl
Memcheck:Cond
fun:BN_mod_exp_mont_word
}
{
from test_socket_ssl
Memcheck:Cond
fun:BN_mod_exp_mont
}
{
from test_socket_ssl
Memcheck:Param
write(buf)
fun:write
obj:/usr/lib/libcrypto.so.0.9.7
}
{
from test_socket_ssl
Memcheck:Cond
fun:RSA_verify
}
{
from test_socket_ssl
Memcheck:Value4
fun:RSA_verify
}
{
from test_socket_ssl
Memcheck:Value4
fun:DES_set_key_unchecked
}
{
from test_socket_ssl
Memcheck:Value4
fun:DES_encrypt2
}
{
from test_socket_ssl
Memcheck:Cond
obj:/usr/lib/libssl.so.0.9.7
}
{
from test_socket_ssl
Memcheck:Value4
obj:/usr/lib/libssl.so.0.9.7
}
{
from test_socket_ssl
Memcheck:Cond
fun:BUF_MEM_grow_clean
}
{
from test_socket_ssl
Memcheck:Cond
fun:memcpy
fun:ssl3_read_bytes
}
{
from test_socket_ssl
Memcheck:Cond
fun:SHA1_Update
}
{
from test_socket_ssl
Memcheck:Value4
fun:SHA1_Update
}
{
Debian unstable with libc-i686 suppressions
Memcheck:Cond
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/tls/i686/cmov/libc-2.3.5.so
obj:/lib/ld-2.3.5.so
fun:_dl_open
obj:/lib/tls/i686/cmov/libdl-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/tls/i686/cmov/libdl-2.3.5.so
fun:dlopen
fun:_PyImport_GetDynLoadFunc
fun:_PyImport_LoadDynamicModule
}
{
Debian unstable with libc-i686 suppressions
Memcheck:Cond
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/tls/i686/cmov/libc-2.3.5.so
obj:/lib/ld-2.3.5.so
fun:_dl_open
obj:/lib/tls/i686/cmov/libdl-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/tls/i686/cmov/libdl-2.3.5.so
fun:dlopen
fun:_PyImport_GetDynLoadFunc
fun:_PyImport_LoadDynamicModule
}
{
Debian unstable with libc-i686 suppressions
Memcheck:Addr4
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/tls/i686/cmov/libc-2.3.5.so
obj:/lib/ld-2.3.5.so
fun:_dl_open
obj:/lib/tls/i686/cmov/libdl-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/tls/i686/cmov/libdl-2.3.5.so
fun:dlopen
fun:_PyImport_GetDynLoadFunc
fun:_PyImport_LoadDynamicModule
}
{
Debian unstable with libc-i686 suppressions
Memcheck:Addr4
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/tls/i686/cmov/libc-2.3.5.so
obj:/lib/ld-2.3.5.so
fun:_dl_open
obj:/lib/tls/i686/cmov/libdl-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/tls/i686/cmov/libdl-2.3.5.so
}
{
Debian unstable with libc-i686 suppressions
Memcheck:Cond
obj:/lib/ld-2.3.5.so
obj:/lib/tls/i686/cmov/libc-2.3.5.so
obj:/lib/ld-2.3.5.so
fun:_dl_open
obj:/lib/tls/i686/cmov/libdl-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/tls/i686/cmov/libdl-2.3.5.so
fun:dlopen
fun:_PyImport_GetDynLoadFunc
fun:_PyImport_LoadDynamicModule
obj:/usr/bin/python2.3
obj:/usr/bin/python2.3
}
{
Debian unstable with libc-i686 suppressions
Memcheck:Cond
obj:/lib/ld-2.3.5.so
obj:/lib/tls/i686/cmov/libc-2.3.5.so
obj:/lib/ld-2.3.5.so
fun:_dl_open
obj:/lib/tls/i686/cmov/libc-2.3.5.so
obj:/lib/ld-2.3.5.so
fun:__libc_dlopen_mode
fun:__nss_lookup_function
obj:/lib/tls/i686/cmov/libc-2.3.5.so
fun:__nss_passwd_lookup
fun:getpwuid_r
fun:pqGetpwuid
}
{
Debian unstable with libc-i686 suppressions
Memcheck:Cond
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/tls/i686/cmov/libc-2.3.5.so
obj:/lib/ld-2.3.5.so
fun:_dl_open
obj:/lib/tls/i686/cmov/libc-2.3.5.so
obj:/lib/ld-2.3.5.so
fun:__libc_dlopen_mode
fun:__nss_lookup_function
obj:/lib/tls/i686/cmov/libc-2.3.5.so
fun:__nss_passwd_lookup
}
{
Debian unstable with libc-i686 suppressions
Memcheck:Addr4
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/tls/i686/cmov/libc-2.3.5.so
obj:/lib/ld-2.3.5.so
fun:_dl_open
obj:/lib/tls/i686/cmov/libc-2.3.5.so
obj:/lib/ld-2.3.5.so
fun:__libc_dlopen_mode
fun:__nss_lookup_function
obj:/lib/tls/i686/cmov/libnss_compat-2.3.5.so
fun:_nss_compat_getpwuid_r
}
{
Debian unstable with libc-i686 suppressions
Memcheck:Cond
obj:/lib/ld-2.3.5.so
obj:/lib/tls/i686/cmov/libc-2.3.5.so
obj:/lib/ld-2.3.5.so
fun:_dl_open
obj:/lib/tls/i686/cmov/libdl-2.3.5.so
obj:/lib/ld-2.3.5.so
obj:/lib/tls/i686/cmov/libdl-2.3.5.so
fun:dlopen
fun:_PyImport_GetDynLoadFunc
fun:_PyImport_LoadDynamicModule
obj:/usr/bin/python2.4
obj:/usr/bin/python2.4
}