# spaCy/tests/test_tokens_from_list.py
# 2015-01-24 07:22:30 +11:00
# 17 lines, 304 B, Python

from __future__ import unicode_literals
import pytest
from spacy.en import English
@pytest.fixture
def EN():
    """Build a fresh English NLP pipeline for each test that requests it."""
    nlp = English()
    return nlp
def test1(EN):
    """tokens_from_list should wrap a pre-split word list one-to-one,
    preserving each surface form exactly."""
    source = ['JAPAN', 'GET', 'LUCKY']
    doc = EN.tokenizer.tokens_from_list(source)
    # One token per input word, with the original text intact.
    assert len(doc) == 3
    assert doc[0].orth_ == 'JAPAN'