mirror of https://github.com/explosion/spaCy.git
synced 2024-12-27 18:36:36 +03:00
15 lines · 287 B · Python
from __future__ import unicode_literals
|
|
from ...en import English
|
|
|
|
import pytest
|
|
|
|
|
|
@pytest.fixture
def en_tokenizer():
    """Provide a default English tokenizer for the tests in this module."""
    tokenizer = English.Defaults.create_tokenizer()
    return tokenizer
|
|
|
|
|
|
def test_big_ellipsis(en_tokenizer):
|
|
tokens = en_tokenizer(u'$45...............Asking')
|
|
assert len(tokens) > 2
|