Mirror of https://github.com/explosion/spaCy.git (synced 2024-12-27 18:36:36 +03:00)
10 lines · 235 B · Python
from __future__ import unicode_literals
import pytest


def test1(en_tokenizer):
    words = ['JAPAN', 'GET', 'LUCKY']
    tokens = en_tokenizer.tokens_from_list(words)
    assert len(tokens) == 3
    assert tokens[0].orth_ == 'JAPAN'
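The test above relies on an `en_tokenizer` fixture defined elsewhere in the spaCy test suite, and `tokens_from_list()` belongs to an early spaCy API. As a rough, hypothetical sketch of how the same check could be written against a current spaCy release (the fixture name and its construction below are assumptions, not taken from this file), a `Doc` can be built directly from a pre-split word list:

import pytest
from spacy.lang.en import English
from spacy.tokens import Doc


@pytest.fixture
def en_vocab():
    # Assumption: a blank English pipeline supplies the shared vocabulary.
    return English().vocab


def test_tokens_from_word_list(en_vocab):
    # Constructing a Doc from an already-tokenized word list is the
    # modern counterpart of the old tokens_from_list() helper.
    words = ['JAPAN', 'GET', 'LUCKY']
    doc = Doc(en_vocab, words=words)
    assert len(doc) == 3
    assert doc[0].orth_ == 'JAPAN'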