spaCy/spacy/tests/lang/eu/test_text.py

import pytest


def test_eu_tokenizer_handles_long_text(eu_tokenizer):
    text = """ta nere guitarra estrenatu ondoren"""
    tokens = eu_tokenizer(text)
    assert len(tokens) == 5


@pytest.mark.parametrize(
    "text,length",
    [
        ("milesker ederra joan zen hitzaldia plazer hutsa", 7),
        ("astelehen guztia sofan pasau biot", 5),
    ],
)
def test_eu_tokenizer_handles_cnts(eu_tokenizer, text, length):
    tokens = eu_tokenizer(text)
    assert len(tokens) == length
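
# The `eu_tokenizer` fixture is supplied by the shared test conftest. For
# reference, a minimal equivalent fixture (a sketch under the assumption that
# a blank Basque pipeline's tokenizer is all these token-count tests need,
# not the actual conftest code) could look like:
#
#     import spacy
#
#     @pytest.fixture(scope="session")
#     def eu_tokenizer():
#         # spacy.blank("eu") builds a bare Basque pipeline; its tokenizer
#         # is enough to split the whitespace-delimited test phrases.
#         return spacy.blank("eu").tokenizer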