spaCy/spacy/tests/serialize/test_serialize_tokenizer.py

# coding: utf-8
from __future__ import unicode_literals

import pytest

from ...util import get_lang_class
from ..util import make_tempdir, assert_packed_msg_equal

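# These tests check that the English tokenizer survives a serialization
# round-trip: once via to_bytes()/from_bytes(), once via to_disk()/from_disk().
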
def load_tokenizer(b):
    # Build a blank English tokenizer and restore its state from a bytestring.
    tok = get_lang_class('en').Defaults.create_tokenizer()
    tok.from_bytes(b)
    return tok

@pytest.mark.skip(reason="Currently unreliable across platforms")
@pytest.mark.parametrize('text', ["I💜you", "theyre", "“hello”"])
def test_serialize_tokenizer_roundtrip_bytes(en_tokenizer, text):
    tokenizer = en_tokenizer
    new_tokenizer = load_tokenizer(tokenizer.to_bytes())
    # The reloaded tokenizer should serialize to an identical payload...
    assert_packed_msg_equal(new_tokenizer.to_bytes(), tokenizer.to_bytes())
    assert new_tokenizer.to_bytes() == tokenizer.to_bytes()
    # ...and should tokenize the text exactly like the original.
    doc1 = tokenizer(text)
    doc2 = new_tokenizer(text)
    assert [token.text for token in doc1] == [token.text for token in doc2]

@pytest.mark.skip(reason="Currently unreliable across platforms")
def test_serialize_tokenizer_roundtrip_disk(en_tokenizer):
    tokenizer = en_tokenizer
    with make_tempdir() as d:
        file_path = d / 'tokenizer'
        tokenizer.to_disk(file_path)
        # Deserialize into a fresh tokenizer: calling from_disk() on
        # en_tokenizer itself would mutate it in place and make the
        # assertion below compare the object against itself.
        tokenizer_d = get_lang_class('en').Defaults.create_tokenizer()
        tokenizer_d.from_disk(file_path)
        assert tokenizer.to_bytes() == tokenizer_d.to_bytes()
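
# For reference, the same bytes round-trip in ordinary user code might look
# like the sketch below (a minimal illustration, not part of the test suite;
# assumes spaCy's English language class is importable):
#
#     from spacy.lang.en import English
#     source = English().tokenizer
#     restored = English().tokenizer.from_bytes(source.to_bytes())
#     assert restored.to_bytes() == source.to_bytes()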