spaCy/spacy/tests/regression/test_issue360.py

from __future__ import unicode_literals
from ...en import English

import pytest


@pytest.fixture
def en_tokenizer():
    return English.Defaults.create_tokenizer()


def test_big_ellipsis(en_tokenizer):
    # Regression test for issue #360: the long run of periods should be
    # split into its own token(s), giving more than two tokens overall.
    tokens = en_tokenizer(u'$45...............Asking')
    assert len(tokens) > 2