Merge branch 'main' into apng

commit 58e0dd7df9
.github/workflows/test-mingw.yml

@@ -80,7 +80,7 @@ jobs:
           pushd depends && ./install_extra_test_images.sh && popd
 
       - name: Build Pillow
-        run: CFLAGS="-coverage" python3 -m pip install --global-option="build_ext" .
+        run: SETUPTOOLS_USE_DISTUTILS="stdlib" CFLAGS="-coverage" python3 -m pip install --global-option="build_ext" .
 
       - name: Test Pillow
         run: |
.pre-commit-config.yaml

@@ -57,7 +57,7 @@ repos:
       - id: sphinx-lint
 
   - repo: https://github.com/tox-dev/tox-ini-fmt
-    rev: 1.0.0
+    rev: 1.3.0
     hooks:
      - id: tox-ini-fmt
 
CHANGES.rst

@@ -5,6 +5,12 @@ Changelog (Pillow)
 10.0.0 (unreleased)
 -------------------
 
+- Support float font sizes #7107
+  [radarhere]
+
+- Use later value for duplicate xref entries in PdfParser #7102
+  [radarhere]
+
 - Load before getting size in __getstate__ #7105
   [bigcat88, radarhere]
 
Tests/images/duplicate_xref_entry.pdf (new binary file, not shown)
Tests/test_imagefont.py

@@ -191,6 +191,16 @@ def test_getlength(
     assert length == length_raqm
 
 
+def test_float_size(layout_engine):
+    lengths = []
+    for size in (48, 48.5, 49):
+        f = ImageFont.truetype(
+            "Tests/fonts/NotoSans-Regular.ttf", size, layout_engine=layout_engine
+        )
+        lengths.append(f.getlength("text"))
+    assert lengths[0] != lengths[1] != lengths[2]
+
+
 def test_render_multiline(font):
     im = Image.new(mode="RGB", size=(300, 100))
     draw = ImageDraw.Draw(im)
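The new test exercises the headline change of #7107: ImageFont.truetype now accepts fractional point sizes, so 48, 48.5 and 49 are expected to measure differently. A minimal usage sketch of the public API (any scalable font file should behave the same; the path is simply the one used by the test):

from PIL import Image, ImageDraw, ImageFont

font_path = "Tests/fonts/NotoSans-Regular.ttf"

# Fractional sizes now produce intermediate metrics instead of being truncated.
for size in (48, 48.5, 49):
    font = ImageFont.truetype(font_path, size)
    print(size, font.getlength("text"))

# The same float size can be used when rendering text.
im = Image.new("RGB", (300, 100))
ImageDraw.Draw(im).text((10, 40), "text", font=ImageFont.truetype(font_path, 48.5))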
Tests/test_pdfparser.py

@@ -117,3 +117,9 @@ def test_pdf_repr():
     assert pdf_repr(b"a)/b\\(c") == rb"(a\)/b\\\(c)"
     assert pdf_repr([123, True, {"a": PdfName(b"b")}]) == b"[ 123 true <<\n/a /b\n>> ]"
     assert pdf_repr(PdfBinary(b"\x90\x1F\xA0")) == b"<901FA0>"
+
+
+def test_duplicate_xref_entry():
+    pdf = PdfParser("Tests/images/duplicate_xref_entry.pdf")
+    assert pdf.xref_table.existing_entries[6][0] == 1197
+    pdf.close()
src/PIL/PdfParser.py

@@ -957,14 +957,11 @@ class PdfParser:
                 check_format_condition(m, "xref entry not found")
                 offset = m.end()
                 is_free = m.group(3) == b"f"
-                generation = int(m.group(2))
                 if not is_free:
+                    generation = int(m.group(2))
                     new_entry = (int(m.group(1)), generation)
-                    check_format_condition(
-                        i not in self.xref_table or self.xref_table[i] == new_entry,
-                        "xref entry duplicated (and not identical)",
-                    )
-                    self.xref_table[i] = new_entry
+                    if i not in self.xref_table:
+                        self.xref_table[i] = new_entry
         return offset
 
     def read_indirect(self, ref, max_nesting=-1):
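The behavioural change: instead of rejecting a duplicate xref entry with "xref entry duplicated (and not identical)", the parser now keeps whichever entry it recorded first for a given object number. Assuming xref sections are read newest-first (the trailer section before the older sections it chains to), first-seen-wins means the later, updated value is retained, which is what the changelog entry describes. A standalone sketch of that merge rule, with made-up data and independent of PdfParser internals:

# Hypothetical xref data: object number -> (byte offset, generation),
# one dict per xref section, ordered newest-first as they would be read.
sections_newest_first = [
    {6: (1197, 0), 7: (2040, 0)},  # most recent section
    {6: (901, 0)},                 # older section repeats object 6
]

xref_table = {}
for section in sections_newest_first:
    for obj_num, entry in section.items():
        if obj_num not in xref_table:  # first (newest) entry wins; duplicates are ignored
            xref_table[obj_num] = entry

assert xref_table[6] == (1197, 0)  # the later value survives instead of raising an error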
src/_imagingft.c

@@ -116,7 +116,9 @@ getfont(PyObject *self_, PyObject *args, PyObject *kw) {
     int error = 0;
 
     char *filename = NULL;
-    Py_ssize_t size;
+    float size;
+    FT_Size_RequestRec req;
+    FT_Long width;
     Py_ssize_t index = 0;
     Py_ssize_t layout_engine = 0;
     unsigned char *encoding;
@@ -133,7 +135,7 @@ getfont(PyObject *self_, PyObject *args, PyObject *kw) {
     if (!PyArg_ParseTupleAndKeywords(
             args,
             kw,
-            "etn|nsy#n",
+            "etf|nsy#n",
             kwlist,
             Py_FileSystemDefaultEncoding,
             &filename,
@@ -179,7 +181,13 @@ getfont(PyObject *self_, PyObject *args, PyObject *kw) {
     }
 
     if (!error) {
-        error = FT_Set_Pixel_Sizes(self->face, 0, size);
+        width = size * 64;
+        req.type = FT_SIZE_REQUEST_TYPE_NOMINAL;
+        req.width = width;
+        req.height = width;
+        req.horiResolution = 0;
+        req.vertResolution = 0;
+        error = FT_Request_Size(self->face, &req);
     }
 
     if (!error && encoding && strlen((char *)encoding) == 4) {
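On the C side, FT_Set_Pixel_Sizes only accepts whole pixel sizes, so the fractional part of the size was previously lost. The replacement builds an FT_Size_RequestRec of type FT_SIZE_REQUEST_TYPE_NOMINAL, whose width and height are 26.6 fixed-point values (units of 1/64), which is why the float size is multiplied by 64. A quick check of that conversion in plain Python (an illustration, not the C code itself):

def to_26_6(size):
    # FreeType nominal size requests use 26.6 fixed point: 1 unit = 1/64.
    return int(size * 64)

for size in (48, 48.5, 49):
    print(size, to_26_6(size))
# 48 -> 3072, 48.5 -> 3104, 49 -> 3136: the half-point size keeps a distinct value,
# whereas rounding to whole pixels would have collapsed 48.5 into 48.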