spaCy/spacy/tests/pipeline/test_pipe_methods.py

import pytest
from spacy.language import Language


@pytest.fixture
def nlp():
    return Language()


@Language.component("new_pipe")
def new_pipe(doc):
    return doc


@Language.component("other_pipe")
def other_pipe(doc):
    return doc


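# Usage sketch (not collected by pytest): once a stateless function has been
# registered with @Language.component, it can be added to any pipeline by its
# string name. The pipe name "demo" and the sample text are illustrative only,
# and this assumes the blank Language() provides a default tokenizer.
def _sketch_component_registration():
    nlp = Language()
    nlp.add_pipe("new_pipe", name="demo")  # look up the registered factory by name
    assert "demo" in nlp.pipe_names
    # Each component receives the Doc and returns it (possibly modified).
    return nlp("A doc passes through each component in pipeline order.")

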
def test_add_pipe_no_name(nlp):
    nlp.add_pipe("new_pipe")
    assert "new_pipe" in nlp.pipe_names


def test_add_pipe_duplicate_name(nlp):
    nlp.add_pipe("new_pipe", name="duplicate_name")
    with pytest.raises(ValueError):
        nlp.add_pipe("new_pipe", name="duplicate_name")


@pytest.mark.parametrize("name", ["parser"])
def test_add_pipe_first(nlp, name):
    nlp.add_pipe("new_pipe", name=name, first=True)
    assert nlp.pipeline[0][0] == name


@pytest.mark.parametrize("name1,name2", [("parser", "lambda_pipe")])
def test_add_pipe_last(nlp, name1, name2):
    Language.component("new_pipe2", func=lambda doc: doc)
    nlp.add_pipe("new_pipe2", name=name2)
    nlp.add_pipe("new_pipe", name=name1, last=True)
    assert nlp.pipeline[0][0] != name1
    assert nlp.pipeline[-1][0] == name1


def test_cant_add_pipe_first_and_last(nlp):
    with pytest.raises(ValueError):
        nlp.add_pipe("new_pipe", first=True, last=True)


@pytest.mark.parametrize("name", ["my_component"])
def test_get_pipe(nlp, name):
    with pytest.raises(KeyError):
        nlp.get_pipe(name)
    nlp.add_pipe("new_pipe", name=name)
    assert nlp.get_pipe(name) == new_pipe


@pytest.mark.parametrize(
    "name,replacement,invalid_replacement",
    [("my_component", "other_pipe", lambda doc: doc)],
)
def test_replace_pipe(nlp, name, replacement, invalid_replacement):
    with pytest.raises(ValueError):
        nlp.replace_pipe(name, new_pipe)
    nlp.add_pipe("new_pipe", name=name)
    with pytest.raises(ValueError):
        nlp.replace_pipe(name, invalid_replacement)
    nlp.replace_pipe(name, replacement)
    assert nlp.get_pipe(name) == nlp.create_pipe(replacement)


@pytest.mark.parametrize("old_name,new_name", [("old_pipe", "new_pipe")])
def test_rename_pipe(nlp, old_name, new_name):
    with pytest.raises(ValueError):
        nlp.rename_pipe(old_name, new_name)
    nlp.add_pipe("new_pipe", name=old_name)
    nlp.rename_pipe(old_name, new_name)
    assert nlp.pipeline[0][0] == new_name


@pytest.mark.parametrize("name", ["my_component"])
def test_remove_pipe(nlp, name):
    with pytest.raises(ValueError):
        nlp.remove_pipe(name)
    nlp.add_pipe("new_pipe", name=name)
    assert len(nlp.pipeline) == 1
    removed_name, removed_component = nlp.remove_pipe(name)
    assert not len(nlp.pipeline)
    assert removed_name == name
    assert removed_component == new_pipe


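# Usage sketch: replace_pipe takes the factory name of the replacement as a
# string, rename_pipe only changes the name a component is stored under, and
# remove_pipe returns the removed (name, component) pair. The names "step1"
# and "first_step" are illustrative only.
def _sketch_modify_pipeline():
    nlp = Language()
    nlp.add_pipe("new_pipe", name="step1")
    nlp.replace_pipe("step1", "other_pipe")  # swap in the "other_pipe" factory
    nlp.rename_pipe("step1", "first_step")   # same component, new name
    name, component = nlp.remove_pipe("first_step")
    assert name == "first_step"
    assert component == other_pipe

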
@pytest.mark.parametrize("name", ["my_component"])
def test_disable_pipes_method(nlp, name):
nlp.add_pipe("new_pipe", name=name)
assert nlp.has_pipe(name)
disabled = nlp.select_pipes(disable=name)
assert not nlp.has_pipe(name)
disabled.restore()
@pytest.mark.parametrize("name", ["my_component"])
def test_enable_pipes_method(nlp, name):
nlp.add_pipe("new_pipe", name=name)
assert nlp.has_pipe(name)
disabled = nlp.select_pipes(enable=[])
assert not nlp.has_pipe(name)
disabled.restore()
@pytest.mark.parametrize("name", ["my_component"])
def test_disable_pipes_context(nlp, name):
nlp.add_pipe("new_pipe", name=name)
assert nlp.has_pipe(name)
with nlp.select_pipes(disable=name):
assert not nlp.has_pipe(name)
assert nlp.has_pipe(name)
def test_select_pipes_list_arg(nlp):
for name in ["c1", "c2", "c3"]:
nlp.add_pipe("new_pipe", name=name)
assert nlp.has_pipe(name)
with nlp.select_pipes(disable=["c1", "c2"]):
assert not nlp.has_pipe("c1")
assert not nlp.has_pipe("c2")
assert nlp.has_pipe("c3")
with nlp.select_pipes(enable="c3"):
assert not nlp.has_pipe("c1")
assert not nlp.has_pipe("c2")
assert nlp.has_pipe("c3")
with nlp.select_pipes(enable=["c1", "c2"], disable="c3"):
assert nlp.has_pipe("c1")
assert nlp.has_pipe("c2")
assert not nlp.has_pipe("c3")
with nlp.select_pipes(enable=[]):
assert not nlp.has_pipe("c1")
assert not nlp.has_pipe("c2")
assert not nlp.has_pipe("c3")
with nlp.select_pipes(enable=["c1", "c2", "c3"], disable=[]):
assert nlp.has_pipe("c1")
assert nlp.has_pipe("c2")
assert nlp.has_pipe("c3")
with nlp.select_pipes(disable=["c1", "c2", "c3"], enable=[]):
assert not nlp.has_pipe("c1")
assert not nlp.has_pipe("c2")
assert not nlp.has_pipe("c3")
def test_select_pipes_errors(nlp):
    for name in ["c1", "c2", "c3"]:
        nlp.add_pipe("new_pipe", name=name)
        assert nlp.has_pipe(name)
    with pytest.raises(ValueError):
        nlp.select_pipes()
    with pytest.raises(ValueError):
        nlp.select_pipes(enable=["c1", "c2"], disable=["c1"])
    with pytest.raises(ValueError):
        nlp.select_pipes(enable=["c1", "c2"], disable=[])
    with pytest.raises(ValueError):
        nlp.select_pipes(enable=[], disable=["c3"])


@pytest.mark.parametrize("n_pipes", [100])
def test_add_lots_of_pipes(nlp, n_pipes):
    Language.component("n_pipes", func=lambda doc: doc)
    for i in range(n_pipes):
        nlp.add_pipe("n_pipes", name=f"pipe_{i}")
    assert len(nlp.pipe_names) == n_pipes


@pytest.mark.parametrize("component", [lambda doc: doc, {"hello": "world"}])
def test_raise_for_invalid_components(nlp, component):
    with pytest.raises(ValueError):
        nlp.add_pipe(component)


@pytest.mark.parametrize("component", ["ner", "tagger", "parser", "textcat"])
def test_pipe_base_class_add_label(nlp, component):
label = "TEST"
pipe = nlp.create_pipe(component)
pipe.add_label(label)
if component == "tagger":
# Tagger always has the default coarse-grained label scheme
assert label in pipe.labels
else:
assert pipe.labels == (label,)
def test_pipe_labels(nlp):
input_labels = {
"ner": ["PERSON", "ORG", "GPE"],
"textcat": ["POSITIVE", "NEGATIVE"],
}
for name, labels in input_labels.items():
nlp.add_pipe(name)
pipe = nlp.get_pipe(name)
for label in labels:
pipe.add_label(label)
assert len(pipe.labels) == len(labels)
assert len(nlp.pipe_labels) == len(input_labels)
for name, labels in nlp.pipe_labels.items():
assert sorted(input_labels[name]) == sorted(labels)
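# Usage sketch: nlp.pipe_labels collects the labels of all components that
# expose a label scheme, keyed by component name. The label "PRODUCT" is
# illustrative only.
def _sketch_pipe_labels():
    nlp = Language()
    nlp.add_pipe("ner")
    nlp.get_pipe("ner").add_label("PRODUCT")
    assert "PRODUCT" in nlp.pipe_labels["ner"]

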
def test_add_pipe_before_after():
    """Test that before/after works with strings and ints."""
    nlp = Language()
    nlp.add_pipe("ner")
    with pytest.raises(ValueError):
        nlp.add_pipe("textcat", before="parser")
    nlp.add_pipe("textcat", before="ner")
    assert nlp.pipe_names == ["textcat", "ner"]
    with pytest.raises(ValueError):
        nlp.add_pipe("parser", before=3)
    with pytest.raises(ValueError):
        nlp.add_pipe("parser", after=3)
    nlp.add_pipe("parser", after=0)
    assert nlp.pipe_names == ["textcat", "parser", "ner"]
    nlp.add_pipe("tagger", before=2)
    assert nlp.pipe_names == ["textcat", "parser", "tagger", "ner"]
    with pytest.raises(ValueError):
        nlp.add_pipe("entity_ruler", after=1, first=True)
    with pytest.raises(ValueError):
        nlp.add_pipe("entity_ruler", before="ner", after=2)
    with pytest.raises(ValueError):
        nlp.add_pipe("entity_ruler", before=True)
    with pytest.raises(ValueError):
        nlp.add_pipe("entity_ruler", first=False)

