Mirror of https://github.com/explosion/spaCy.git (synced 2025-06-29 09:23:12 +03:00)
In order to support Python 3.13, we had to migrate to Cython 3.0. This caused some tricky interactions with our Pydantic usage, because Cython 3 uses the from __future__ import annotations semantics, which causes type annotations to be saved as strings. The end result is that we can't have Language.factory-decorated functions in Cython modules anymore, as the Language.factory decorator expects to inspect the signature of the function and build a Pydantic model from it. If the function is implemented in Cython, an error is raised because the type annotations cannot be resolved. To address this, I've moved the factory functions into a new module, spacy.pipeline.factories. I've added __getattr__ importlib hooks to the previous locations, in case anyone was importing these functions directly, so the change should have no backwards-compatibility implications.

Along the way I've also refactored the registration of functions for the config. Previously these ran as import-time side effects, using the registry decorator. Instead, I've created a new module, spacy.registrations. When the registry is accessed, it calls a function ensure_populated(), which causes the registrations to occur. I've made a similar change to the Language.factory registrations in the new spacy.pipeline.factories module. I want to remove these import-time side effects so that we can speed up the loading time of the library, which can be especially painful on the CLI. I also often find myself tracking down the implementations of functions referenced by strings in the config; having the registrations all happen in one place will make this easier.

With these changes I've fortunately avoided the need to migrate to Pydantic v2 properly; we're still using the v1 compatibility shim. We might not be able to hold out forever, though: the Pydantic team (reasonably) isn't actively supporting the v1 shims. I put a lot of work into the v2 migration when investigating Python 3.13 support, and it's definitely challenging. In any case, it's a relief that we don't have to do the v2 migration at the same time as the Cython 3.0/Python 3.13 support.
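For context, here is a minimal, standalone sketch of the underlying annotation problem. It is not spaCy code: it uses from __future__ import annotations in plain Python to reproduce the PEP 563 semantics that Cython 3 applies, and a made-up factory function whose signature gets inspected.

# Minimal sketch of the annotation problem, independent of spaCy.
# With PEP 563 semantics, annotations are stored as strings rather than
# evaluated types, so signature-based model building sees string literals.
from __future__ import annotations

import inspect
import typing


def make_component(name: str, threshold: float = 0.5):
    """Stand-in for a factory function whose signature gets inspected."""
    return {"name": name, "threshold": threshold}


sig = inspect.signature(make_component)
print(sig.parameters["threshold"].annotation)  # 'float' -- a string, not the type

# Turning the strings back into real types requires evaluating them in the
# right namespace; this resolution step is what fails for functions defined
# in compiled Cython modules.
print(typing.get_type_hints(make_component))  # {'name': <class 'str'>, 'threshold': <class 'float'>}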
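The backwards-compatibility hooks can be pictured as a module-level __getattr__ (PEP 562) in each old location that forwards lookups to the new home of the factory functions. This is a simplified sketch with illustrative names, not the actual spaCy implementation:

# old_module.py -- simplified sketch of a PEP 562 forwarding hook.
# The module path and attribute names below are made up; the real hooks
# live in the spaCy modules that previously defined the factory functions.
import importlib

_MOVED = {"make_my_component"}  # attributes that now live in the new module


def __getattr__(name):
    if name in _MOVED:
        new_module = importlib.import_module("new_package.factories")
        return getattr(new_module, name)
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")

With a hook like this, from old_module import make_my_component keeps working even though the function body has moved.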
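The lazy-registration idea can be sketched as a registry that defers populating itself until first access, rather than relying on import-time decorator side effects. Apart from ensure_populated(), which is named above, the names here are illustrative rather than spaCy's actual API:

# Sketch of a registry that populates itself on first access instead of
# at import time. Illustrative only; not spaCy's registry implementation.
class LazyRegistry:
    def __init__(self):
        self._functions = {}
        self._populated = False

    def ensure_populated(self):
        # All registrations happen here, in one place, the first time the
        # registry is actually needed -- not when the package is imported.
        if not self._populated:
            self._populated = True
            self._functions["lowercase.v1"] = str.lower
            self._functions["strip.v1"] = str.strip

    def get(self, name):
        self.ensure_populated()
        return self._functions[name]


registry = LazyRegistry()
print(registry.get("lowercase.v1")("HELLO"))  # "hello"

Because nothing runs until get() is called, importing the package stays cheap, and every string-to-function mapping can be found in a single module.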
151 lines · 4.2 KiB · Python
import warnings
from unittest import TestCase

import pytest
import srsly
from numpy import zeros

from spacy.kb.kb_in_memory import InMemoryLookupKB, Writer
from spacy.language import Language
from spacy.pipeline import TrainablePipe
from spacy.vectors import Vectors
from spacy.vocab import Vocab

from ..util import make_tempdir


def nlp():
    return Language()


def vectors():
    data = zeros((3, 1), dtype="f")
    keys = ["cat", "dog", "rat"]
    return Vectors(data=data, keys=keys)


def custom_pipe():
    # create dummy pipe partially implementing interface -- only want to test to_disk
    class SerializableDummy:
        def __init__(self, **cfg):
            if cfg:
                self.cfg = cfg
            else:
                self.cfg = None
            super(SerializableDummy, self).__init__()

        def to_bytes(self, exclude=tuple(), disable=None, **kwargs):
            return srsly.msgpack_dumps({"dummy": srsly.json_dumps(None)})

        def from_bytes(self, bytes_data, exclude):
            return self

        def to_disk(self, path, exclude=tuple(), **kwargs):
            pass

        def from_disk(self, path, exclude=tuple(), **kwargs):
            return self

    class MyPipe(TrainablePipe):
        def __init__(self, vocab, model=True, **cfg):
            if cfg:
                self.cfg = cfg
            else:
                self.cfg = None
            self.model = SerializableDummy()
            self.vocab = vocab

    return MyPipe(Vocab())


def tagger():
    nlp = Language()
    tagger = nlp.add_pipe("tagger")
    # need to add model for two reasons:
    # 1. no model leads to error in serialization,
    # 2. the affected line is the one for model serialization
    tagger.add_label("A")
    nlp.initialize()
    return tagger


def entity_linker():
    nlp = Language()

    def create_kb(vocab):
        kb = InMemoryLookupKB(vocab, entity_vector_length=1)
        kb.add_entity("test", 0.0, zeros((1,), dtype="f"))
        return kb

    entity_linker = nlp.add_pipe("entity_linker")
    entity_linker.set_kb(create_kb)
    # need to add model for two reasons:
    # 1. no model leads to error in serialization,
    # 2. the affected line is the one for model serialization
    nlp.initialize()
    return entity_linker


objects_to_test = (
    [nlp, vectors, custom_pipe, tagger, entity_linker],
    ["nlp", "vectors", "custom_pipe", "tagger", "entity_linker"],
)


def write_obj_and_catch_warnings(obj):
    with make_tempdir() as d:
        with warnings.catch_warnings(record=True) as warnings_list:
            warnings.filterwarnings("always", category=ResourceWarning)
            obj.to_disk(d)
            # in python3.5 it seems that deprecation warnings are not filtered by filterwarnings
            return list(filter(lambda x: isinstance(x, ResourceWarning), warnings_list))


@pytest.mark.parametrize("obj_factory", objects_to_test[0], ids=objects_to_test[1])
def test_to_disk_resource_warning(obj_factory):
    obj = obj_factory()
    warnings_list = write_obj_and_catch_warnings(obj)
    assert len(warnings_list) == 0


def test_writer_with_path_py35():
    writer = None
    with make_tempdir() as d:
        path = d / "test"
        try:
            writer = Writer(path)
        except Exception as e:
            pytest.fail(str(e))
        finally:
            if writer:
                writer.close()


def test_save_and_load_knowledge_base():
    nlp = Language()
    kb = InMemoryLookupKB(nlp.vocab, entity_vector_length=1)
    with make_tempdir() as d:
        path = d / "kb"
        try:
            kb.to_disk(path)
        except Exception as e:
            pytest.fail(str(e))

        try:
            kb_loaded = InMemoryLookupKB(nlp.vocab, entity_vector_length=1)
            kb_loaded.from_disk(path)
        except Exception as e:
            pytest.fail(str(e))


class TestToDiskResourceWarningUnittest(TestCase):
    def test_resource_warning(self):
        items = [x() for x in objects_to_test[0]]
        names = objects_to_test[1]
        scenarios = zip(items, names)

        for item, name in scenarios:
            with self.subTest(msg=name):
                warnings_list = write_obj_and_catch_warnings(item)
                self.assertEqual(len(warnings_list), 0)