spaCy/spacy/tests/lang/uk/test_tokenizer_exc.py


import pytest
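# Abbreviation exceptions for the Ukrainian tokenizer: each abbreviated form
# should stay a single token whose norm is the expanded word.
# Note: `uk_tokenizer` is assumed to be the pytest fixture provided by the
# test suite's conftest.py; the parametrized `lemmas` values are listed for
# reference but are not asserted below.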
@pytest.mark.parametrize(
    "text,norms,lemmas",
    [("ім.", ["імені"], ["ім'я"]), ("проф.", ["професор"], ["професор"])],
)
def test_uk_tokenizer_abbrev_exceptions(uk_tokenizer, text, norms, lemmas):
    tokens = uk_tokenizer(text)
    assert len(tokens) == 1
    assert [token.norm_ for token in tokens] == norms