diff --git a/Tests/large_memory_test.py b/Tests/large_memory_test.py
new file mode 100644
index 000000000..148841ec2
--- /dev/null
+++ b/Tests/large_memory_test.py
@@ -0,0 +1,27 @@
+from tester import *
+
+# This test is not run automatically.
+#
+# It requires > 2 GB of memory for the > 2 gigapixel image generated
+# in the second test. Running it automatically would amount to a
+# denial of service on our testing infrastructure. I expect this test
+# to fail on any 32-bit machine, as well as on small devices (like
+# Raspberry Pis). It does succeed on a 3 GB Ubuntu 12.04 x64 VM on
+# Python 2.7 and 3.2.
+
+from PIL import Image
+ydim = 32769
+xdim = 48000
+f = tempfile('temp.png')
+
+def _write_png(xdim, ydim):
+    im = Image.new('L', (xdim, ydim), 0)
+    im.save(f)
+    success()
+
+def test_large():
+    """Succeeded before the patch."""
+    _write_png(xdim, ydim)
+def test_2gpx():
+    """Failed before the patch."""
+    _write_png(xdim, xdim)
diff --git a/_imaging.c b/_imaging.c
index e792ebfa5..3510e1568 100644
--- a/_imaging.c
+++ b/_imaging.c
@@ -1250,6 +1250,7 @@ _putdata(ImagingObject* self, PyObject* args)
 
     image = self->image;
 
+    // UNDONE Py_ssize_t: the (int) cast below still truncates for > 2 Gpix images
     n = PyObject_Length(data);
     if (n > (int) (image->xsize * image->ysize)) {
         PyErr_SetString(PyExc_TypeError, "too many data entries");
@@ -3073,7 +3074,7 @@ image_length(ImagingObject *self)
 {
     Imaging im = self->image;
 
-    return im->xsize * im->ysize;
+    return (Py_ssize_t) im->xsize * im->ysize;
 }
 
 static PyObject *
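
The underlying bug is a 32-bit integer overflow: im->xsize and im->ysize are plain ints, so their product is computed in int arithmetic and wraps for images larger than INT_MAX (2147483647) pixels, before it is ever widened to Py_ssize_t. The standalone sketch below is not part of the patch; it uses int64_t as a stand-in for Py_ssize_t to show why the cast must be applied to an operand, not to the finished product:

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        int xsize = 48000, ysize = 48000;   /* ~2.3 gigapixels, as in test_2gpx */

        /* Buggy form: both operands are int, so the multiplication runs in
           32-bit arithmetic and overflows (undefined behavior; in practice a
           negative wraparound) before the result is widened on assignment. */
        int64_t bad = xsize * ysize;

        /* Patched form: casting one operand first promotes the whole
           multiplication to the wider type, as image_length now does
           with (Py_ssize_t). */
        int64_t good = (int64_t) xsize * ysize;

        printf("bad  = %lld\n", (long long) bad);   /* typically -1990967296 */
        printf("good = %lld\n", (long long) good);  /* 2304000000 */
        return 0;
    }

Note that casting the result instead, as in (Py_ssize_t)(im->xsize * im->ysize), would not fix anything: the overflow has already happened inside the int multiplication. The same truncation still lurks in the (int) cast in _putdata, which is why that spot is only flagged UNDONE here.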