spaCy/tests/tokens/test_tokens_api.py


from __future__ import unicode_literals
from spacy.tokens import Doc
import pytest
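
# These tests exercise the Doc/tokens API: integer indexing with bounds
# checking, and round-tripping a Doc through to_bytes()/from_bytes().
# The `EN` argument is a pytest fixture, assumed to be provided by the
# test suite's conftest.py, that yields a loaded English pipeline.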


def test_getitem(EN):
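    # Doc supports Python-style indexing, including negative indices;
    # out-of-range access should raise IndexError.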
    tokens = EN(u'Give it back! He pleaded.')
    assert tokens[0].orth_ == 'Give'
    assert tokens[-1].orth_ == '.'
    with pytest.raises(IndexError):
        tokens[len(tokens)]


def test_serialize(EN):
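    # Round-trip the Doc through its byte representation and check that
    # the text and the token orth attributes survive unchanged.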
    tokens = EN(u'Give it back! He pleaded.')
    packed = tokens.to_bytes()
    new_tokens = Doc(EN.vocab).from_bytes(packed)
    assert tokens.string == new_tokens.string
    assert [t.orth_ for t in tokens] == [t.orth_ for t in new_tokens]
    assert [t.orth for t in tokens] == [t.orth for t in new_tokens]


def test_serialize_whitespace(EN):
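    # Leading and trailing whitespace in the input should also survive
    # the to_bytes()/from_bytes() round trip.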
    tokens = EN(u' Give it back! He pleaded. ')
    packed = tokens.to_bytes()
    new_tokens = Doc(EN.vocab).from_bytes(packed)
    assert tokens.string == new_tokens.string
    assert [t.orth_ for t in tokens] == [t.orth_ for t in new_tokens]
    assert [t.orth for t in tokens] == [t.orth for t in new_tokens]