mirror of
https://github.com/explosion/spaCy.git
synced 2024-09-22 03:49:17 +03:00
21 lines
280 B
Python
import pytest

from spacy.lang.en import English
@pytest.mark.parametrize(
    "word",
    [
        "don't",
        "don’t",
        "I'd",
        "I’d",
    ],
)
def test_issue3521(word):
    """Regression test for issue #3521: contraction suffixes must be stop words.

    Covers both the ASCII apostrophe (') and the Unicode right single
    quotation mark (’) variants of "don't" and "I'd".
    """
    nlp = English()
    # The English tokenizer splits contractions in two (e.g. "don't" ->
    # ["do", "n't"]); token index 1 is the suffix ("n't" / "'d"), which
    # must be flagged as a stop word.
    tok = nlp(word)[1]
    assert tok.is_stop