Mirror of https://github.com/explosion/spaCy.git
Add test for vocab serialization

commit 7b1ddcc04d
parent 00b2094dc3
@@ -3,6 +3,7 @@ from __future__ import unicode_literals
 
 from ..util import get_doc, assert_docs_equal
 from ...tokens import Doc
+from ...vocab import Vocab
 
 import pytest
 
@@ -22,6 +23,15 @@ def test_serialize_empty_doc(en_vocab):
     for token1, token2 in zip(doc, doc2):
         assert token1.text == token2.text
 
+
+@pytest.mark.xfail
+@pytest.mark.parametrize('text', ['rat'])
+def test_serialize_vocab(en_vocab, text):
+    text_hash = en_vocab.strings.add(text)
+    vocab_bytes = en_vocab.to_bytes()
+    new_vocab = Vocab().from_bytes(vocab_bytes)
+    assert new_vocab.strings(text_hash) == text
+
 #
 #@pytest.mark.parametrize('text', [TEXT])
 #def test_serialize_tokens(en_vocab, text):
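For context, a minimal standalone sketch of the round-trip the new test exercises, assuming a spaCy install (v2 or later). The committed test is marked xfail and looks the hash up with new_vocab.strings(text_hash); the sketch below uses the indexing form strings[text_hash], the usual StringStore lookup in released spaCy versions.

    # Sketch of the vocab serialization round-trip (assumes spaCy is installed).
    from spacy.vocab import Vocab

    vocab = Vocab()
    text_hash = vocab.strings.add("rat")         # intern the string and get its hash
    vocab_bytes = vocab.to_bytes()               # serialize the vocab to a bytestring

    new_vocab = Vocab().from_bytes(vocab_bytes)  # restore into a fresh, empty Vocab
    assert new_vocab.strings[text_hash] == "rat" # the hash resolves back to the text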