Mirror of https://github.com/explosion/spaCy.git, synced 2024-09-22 11:59:14 +03:00
Commit 428887b8f2

* new language: Luxembourgish (lb)
* update
* update
* Update and rename .github/CONTRIBUTOR_AGREEMENT.md to .github/contributors/PeterGilles.md
* Update and rename .github/contributors/PeterGilles.md to .github/CONTRIBUTOR_AGREEMENT.md
* Update norm_exceptions.py
* Delete README.md
* moved test_lemma.py
* deactivated 'lemma_lookup = LOOKUP'
* update
* Update conftest.py
* update
* tests updated
* import unicode_literals
* Update spacy/tests/lang/lb/test_text.py (Co-Authored-By: Ines Montani <ines@ines.io>)
* Create PeterGilles.md
27 lines · 685 B · Python
# coding: utf-8
from __future__ import unicode_literals

import pytest

@pytest.mark.parametrize("text,length", [("z.B.", 1), ("zb.", 2), ("(z.B.", 2)])
|
|
def test_lb_tokenizer_splits_prefix_interact(lb_tokenizer, text, length):
|
|
tokens = lb_tokenizer(text)
|
|
assert len(tokens) == length
|
|
|
|
|
|
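# The "z.B." cases above only hold if the abbreviation is registered as a
# tokenizer exception for Luxembourgish. A minimal, hypothetical sketch of a
# v2-style exception entry (the real table in
# spacy/lang/lb/tokenizer_exceptions.py may differ):
#
#     from spacy.symbols import ORTH
#     TOKENIZER_EXCEPTIONS = {"z.B.": [{ORTH: "z.B."}]}
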
@pytest.mark.parametrize("text", ["z.B.)"])
|
|
def test_lb_tokenizer_splits_suffix_interact(lb_tokenizer, text):
|
|
tokens = lb_tokenizer(text)
|
|
assert len(tokens) == 2
|
|
|
|
|
|
@pytest.mark.parametrize("text", ["(z.B.)"])
|
|
def test_lb_tokenizer_splits_even_wrap_interact(lb_tokenizer, text):
|
|
tokens = lb_tokenizer(text)
|
|
assert len(tokens) == 3
|
|
|
|
|
|
|
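
The lb_tokenizer fixture these tests depend on is not defined in this file; the commit message above notes a conftest.py update. A minimal sketch of how such a fixture is commonly defined in spaCy's v2-era test suite, assuming the get_lang_class helper from spacy.util (the actual fixture may differ):

# conftest.py sketch: session-scoped fixture that builds a bare
# Luxembourgish tokenizer without loading a full pipeline.
# Assumes the spaCy v2-era API.
import pytest

from spacy.util import get_lang_class


@pytest.fixture(scope="session")
def lb_tokenizer():
    return get_lang_class("lb").Defaults.create_tokenizer()

With such a fixture in place, the tests run under pytest as usual, e.g. python -m pytest spacy/tests/lang/lb/.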