spaCy/spacy/tests/regression/test_issue360.py
2017-01-10 19:24:10 +01:00

17 lines
303 B
Python

# coding: utf-8
from __future__ import unicode_literals
from ...en import English
import pytest
@pytest.fixture
def en_tokenizer():
    """Provide a default English tokenizer for the tests in this module."""
    tokenizer = English.Defaults.create_tokenizer()
    return tokenizer
def test_big_ellipsis(en_tokenizer):
    """Regression test (issue #360): a long run of periods glued between a
    currency amount and a word must be split into more than two tokens."""
    doc = en_tokenizer('$45...............Asking')
    assert len(doc) > 2