spaCy/spacy/tests/regression/test_issue360.py
2017-01-04 00:49:31 +01:00

15 lines
287 B
Python

from __future__ import unicode_literals
from ...en import English
import pytest
@pytest.fixture
def en_tokenizer():
    """Provide an English tokenizer built from the default language data."""
    tokenizer = English.Defaults.create_tokenizer()
    return tokenizer
def test_big_ellipsis(en_tokenizer):
    """Regression test for issue #360.

    A long run of periods between two tokens must be split out, so the
    input cannot collapse into one or two tokens.
    """
    doc = en_tokenizer(u'$45...............Asking')
    assert len(doc) > 2