# coding: utf-8
from __future__ import unicode_literals

from spacy.tokens import Doc
from spacy.compat import path2str

from ..util import make_tempdir


def test_serialize_empty_doc(en_vocab):
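    # An empty Doc should round-trip through to_bytes/from_bytes, preserving
    # its (zero) length and token texts.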
    doc = Doc(en_vocab)
    data = doc.to_bytes()
    doc2 = Doc(en_vocab)
    doc2.from_bytes(data)
    assert len(doc) == len(doc2)
    for token1, token2 in zip(doc, doc2):
        assert token1.text == token2.text


def test_serialize_doc_roundtrip_bytes(en_vocab):
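    # Serializing, deserializing and re-serializing should yield identical bytes.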
    doc = Doc(en_vocab, words=['hello', 'world'])
    doc_b = doc.to_bytes()
    new_doc = Doc(en_vocab).from_bytes(doc_b)
    assert new_doc.to_bytes() == doc_b


def test_serialize_doc_roundtrip_disk(en_vocab):
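    # Writing a Doc to disk and reading it back should preserve its serialized form.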
    doc = Doc(en_vocab, words=['hello', 'world'])
    with make_tempdir() as d:
        file_path = d / 'doc'
        doc.to_disk(file_path)
        doc_d = Doc(en_vocab).from_disk(file_path)
        assert doc.to_bytes() == doc_d.to_bytes()


def test_serialize_doc_roundtrip_disk_str_path(en_vocab):
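    # Same as the disk round-trip above, but with the path passed as a string
    # (via path2str) instead of a Path object.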
    doc = Doc(en_vocab, words=['hello', 'world'])
    with make_tempdir() as d:
        file_path = d / 'doc'
        file_path = path2str(file_path)
        doc.to_disk(file_path)
        doc_d = Doc(en_vocab).from_disk(file_path)
        assert doc.to_bytes() == doc_d.to_bytes()