Mirror of https://github.com/explosion/spaCy.git, synced 2024-12-29 11:26:28 +03:00
360ccf628a
* Init
* fix tests
* Update spacy/errors.py (Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com>)
* Fix test_blank_languages
* Rename xx to mul in docs
* Format _util with black
* prettier formatting

Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com>
26 lines
674 B
Python
import pytest

# Each entry pairs a raw input string with the tokens the multi-language
# ("mul") tokenizer is expected to produce for it.
MUL_BASIC_TOKENIZATION_TESTS = [
    (
        "Lääʹddjânnmest lie nuʹtt 10 000 säʹmmliʹžžed. Seeʹst pâʹjjel",
        [
            "Lääʹddjânnmest",
            "lie",
            "nuʹtt",
            "10",
            "000",
            "säʹmmliʹžžed",
            ".",
            "Seeʹst",
            "pâʹjjel",
        ],
    ),
]


@pytest.mark.parametrize("text,expected_tokens", MUL_BASIC_TOKENIZATION_TESTS)
def test_mul_tokenizer_basic(mul_tokenizer, text, expected_tokens):
    tokens = mul_tokenizer(text)
    token_list = [token.text for token in tokens if not token.is_space]
    assert expected_tokens == token_list
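The mul_tokenizer argument is a pytest fixture that is not defined in this file; in spaCy's test suite, per-language tokenizer fixtures live in a shared conftest.py. Below is a minimal sketch of what such a fixture could look like, assuming it builds a blank multi-language pipeline via spacy.util.get_lang_class; the fixture scope and this exact definition are assumptions for illustration, not quoted from the repository.

# Hypothetical conftest.py sketch (assumption, not the file shown above):
# build the "mul" tokenizer once per test session and hand it to any test
# that requests the mul_tokenizer fixture.
import pytest
from spacy.util import get_lang_class


@pytest.fixture(scope="session")
def mul_tokenizer():
    # Instantiate a blank multi-language ("mul") pipeline and expose its tokenizer.
    return get_lang_class("mul")().tokenizer

With a fixture like this available, the parametrized test above can be run with pytest on this file in the usual way.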