# mirror of https://github.com/explosion/spaCy.git
# synced 2024-11-13 13:17:06 +03:00
# coding: utf8
from __future__ import unicode_literals

from ...tokens.doc import Doc

import pytest
@pytest.mark.xfail
@pytest.mark.models
@pytest.mark.parametrize('text', ["I cant do this."])
def test_issue636(EN, text):
    """Test that to_bytes and from_bytes don't change the token lemma.

    Regression test for spaCy issue #636: lemmas were lost when a Doc was
    round-tripped through its byte serialization.
    """
    doc1 = EN(text)
    # Deserialize into a fresh Doc backed by the same vocab, so any
    # difference must come from the to_bytes/from_bytes round trip itself.
    doc2 = Doc(EN.vocab)
    doc2.from_bytes(doc1.to_bytes())
    # Lemmas must survive serialization unchanged. (The debug print of both
    # lemma lists was removed — pytest shows both sides on assertion failure.)
    assert [t.lemma_ for t in doc1] == [t.lemma_ for t in doc2]
|