spaCy/tests/tokenizer/test_infix.py

from __future__ import unicode_literals

import pytest


def test_hyphen(en_tokenizer):
    # a hyphen between two words is split off as a separate infix token
    tokens = en_tokenizer('best-known')
    assert len(tokens) == 3


def test_period(en_tokenizer):
    # a period between a lower-case word and a capitalised one is split off
    tokens = en_tokenizer('best.Known')
    assert len(tokens) == 3
    # a domain-like string stays a single token
    tokens = en_tokenizer('zombo.com')
    assert len(tokens) == 1


def test_ellipsis(en_tokenizer):
    # an ellipsis is split off as its own token, regardless of casing
    tokens = en_tokenizer('best...Known')
    assert len(tokens) == 3
    tokens = en_tokenizer('best...known')
    assert len(tokens) == 3


def test_email(en_tokenizer):
    # each address is expected to come out as three tokens
    tokens = en_tokenizer('hello@example.com')
    assert len(tokens) == 3
    tokens = en_tokenizer('hi+there@gmail.it')
    assert len(tokens) == 3