Mirror of https://github.com/explosion/spaCy.git, synced 2025-01-25 00:34:20 +03:00
Start updating serializer test

This commit is contained in:
  parent 1166b0c491
  commit f9327343ce
@@ -1,8 +1,8 @@
# coding: utf-8
from __future__ import unicode_literals

from ...serialize.packer import Packer
from ..util import get_doc, assert_docs_equal
from ...tokens import Doc

import pytest

@@ -15,26 +15,27 @@ ENTS = [('hi', 'PERSON', 0, 1)]

def test_serialize_empty_doc(en_vocab):
    doc = get_doc(en_vocab)
    packer = Packer(en_vocab, {})
    b = packer.pack(doc)
    assert b == b''
    loaded = get_doc(en_vocab).from_bytes(b)
    assert len(loaded) == 0
    data = doc.to_bytes()
    doc2 = Doc(en_vocab)
    doc2.from_bytes(data)
    assert len(doc) == len(doc2)
    for token1, token2 in zip(doc, doc2):
        assert token1.text == token2.text


@pytest.mark.parametrize('text', [TEXT])
def test_serialize_tokens(en_vocab, text):
    doc1 = get_doc(en_vocab, [t for t in text])
    doc2 = get_doc(en_vocab).from_bytes(doc1.to_bytes())
    assert_docs_equal(doc1, doc2)


@pytest.mark.models
@pytest.mark.parametrize('text', [TEXT])
@pytest.mark.parametrize('tags', [TAGS, []])
@pytest.mark.parametrize('deps', [DEPS, []])
@pytest.mark.parametrize('ents', [ENTS, []])
def test_serialize_tokens_ner(EN, text, tags, deps, ents):
    doc1 = get_doc(EN.vocab, [t for t in text], tags=tags, deps=deps, ents=ents)
    doc2 = get_doc(EN.vocab).from_bytes(doc1.to_bytes())
    assert_docs_equal(doc1, doc2)
#
#@pytest.mark.parametrize('text', [TEXT])
#def test_serialize_tokens(en_vocab, text):
#    doc1 = get_doc(en_vocab, [t for t in text])
#    doc2 = get_doc(en_vocab).from_bytes(doc1.to_bytes())
#    assert_docs_equal(doc1, doc2)
#
#
#@pytest.mark.models
#@pytest.mark.parametrize('text', [TEXT])
#@pytest.mark.parametrize('tags', [TAGS, []])
#@pytest.mark.parametrize('deps', [DEPS, []])
#@pytest.mark.parametrize('ents', [ENTS, []])
#def test_serialize_tokens_ner(EN, text, tags, deps, ents):
#    doc1 = get_doc(EN.vocab, [t for t in text], tags=tags, deps=deps, ents=ents)
#    doc2 = get_doc(EN.vocab).from_bytes(doc1.to_bytes())
#    assert_docs_equal(doc1, doc2)
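
The second hunk mixes the old Packer-based assertions with a byte round trip through Doc.to_bytes() and Doc.from_bytes(). For reference, here is a minimal self-contained sketch of that round trip against spaCy's public API; the Vocab construction and the sample words are illustrative assumptions, not taken from the commit.

# Sketch of the Doc byte-serialization round trip exercised by the test above.
# The vocab and words here are placeholders, not part of the commit.
from spacy.vocab import Vocab
from spacy.tokens import Doc

vocab = Vocab()
doc = Doc(vocab, words=['hi', 'there'])

data = doc.to_bytes()     # serialize the Doc to a bytestring
doc2 = Doc(vocab)         # new, empty Doc sharing the same vocab
doc2.from_bytes(data)     # restore tokens from the serialized bytes

assert len(doc) == len(doc2)
assert [t.text for t in doc] == [t.text for t in doc2]

from_bytes() fills the empty Doc from the serialized data, so the length and token texts should match the original, which is what the updated test asserts.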