2019-03-08 02:10:07 +03:00
|
|
|
import pytest
|
2019-03-08 15:28:53 +03:00
|
|
|
|
2019-03-08 02:10:07 +03:00
|
|
|
|
|
|
|
@pytest.fixture
def i_has(en_tokenizer):
    """Provide a two-token "I has" doc with morphology set on both tokens."""
    doc = en_tokenizer("I has")
    doc[0].set_morph({"PronType": "prs"})
    verb_feats = {
        "VerbForm": "fin",
        "Tense": "pres",
        "Number": "sing",
        "Person": "three",
    }
    doc[1].set_morph(verb_feats)
    return doc
|
|
|
|
|
2019-03-08 15:28:53 +03:00
|
|
|
|
2020-01-24 00:01:54 +03:00
|
|
|
def test_token_morph_eq(i_has):
    """Morph objects compare by feature content, not by identity."""
    # The same `.morph` access yields a distinct object each time
    # (demonstrated by this identity check), so `is` must not be used...
    assert i_has[0].morph is not i_has[0].morph
    # ...but value equality holds for identical feature sets.
    assert i_has[0].morph == i_has[0].morph
    assert i_has[0].morph != i_has[1].morph
|
|
|
|
|
|
|
|
|
|
|
|
def test_token_morph_key(i_has):
    """Morph keys are nonzero, stable across accesses, and content-dependent."""
    key0 = i_has[0].morph.key
    key1 = i_has[1].morph.key
    assert key0 != 0
    assert key1 != 0
    # Re-reading the same token's key gives the same value.
    assert key0 == i_has[0].morph.key
    # Different feature sets hash to different keys.
    assert key0 != key1
|
2019-03-08 02:10:07 +03:00
|
|
|
|
2019-03-08 15:28:53 +03:00
|
|
|
|
2019-03-08 02:10:07 +03:00
|
|
|
def test_morph_props(i_has):
    """`morph.get` returns a list of values, or an empty list when absent."""
    pron_tok, verb_tok = i_has[0], i_has[1]
    assert pron_tok.morph.get("PronType") == ["prs"]
    # The verb token has no PronType feature at all.
    assert verb_tok.morph.get("PronType") == []
|
2019-03-08 02:10:07 +03:00
|
|
|
|
|
|
|
|
|
|
|
def test_morph_iter(i_has):
    """Iterating a morph yields its "Feat=Value" strings.

    Compared as sets because iteration order is not asserted here.
    """
    # Idiom fix: set literals instead of set([...]) (flake8-comprehensions C405).
    assert set(i_has[0].morph) == {"PronType=prs"}
    assert set(i_has[1].morph) == {
        "Number=sing",
        "Person=three",
        "Tense=pres",
        "VerbForm=fin",
    }
|
2019-03-08 03:38:54 +03:00
|
|
|
|
|
|
|
|
|
|
|
def test_morph_get(i_has):
    """A present feature is returned as a list of its values."""
    values = i_has[0].morph.get("PronType")
    assert values == ["prs"]
|
2020-01-24 00:01:54 +03:00
|
|
|
|
|
|
|
|
|
|
|
def test_morph_set(i_has):
    """`set_morph` accepts feature strings or dicts; output is alphabetized."""
    tok = i_has[0]
    assert tok.morph.get("PronType") == ["prs"]
    # Overwrite via a feature string.
    tok.set_morph("PronType=unk")
    assert tok.morph.get("PronType") == ["unk"]
    # String input: fields come back in alphabetical order.
    tok.set_morph("PronType=123|NounType=unk")
    assert str(tok.morph) == "NounType=unk|PronType=123"
    # Dict input is normalized the same way.
    tok.set_morph({"AType": "123", "BType": "unk"})
    assert str(tok.morph) == "AType=123|BType=unk"
    # Multiple values per field (string input): values are alphabetized too.
    tok.set_morph("BType=c|AType=b,a")
    assert str(tok.morph) == "AType=a,b|BType=c"
    # Multiple values per field (dict input): same normalization.
    tok.set_morph({"AType": "b,a", "BType": "c"})
    assert str(tok.morph) == "AType=a,b|BType=c"
|
2020-01-24 00:01:54 +03:00
|
|
|
|
|
|
|
|
|
|
|
def test_morph_str(i_has):
    """str() of a morph joins alphabetized "Feat=Value" pairs with "|"."""
    expected_pron = "PronType=prs"
    expected_verb = "Number=sing|Person=three|Tense=pres|VerbForm=fin"
    assert str(i_has[0].morph) == expected_pron
    assert str(i_has[1].morph) == expected_verb
|
2020-09-13 15:06:07 +03:00
|
|
|
|
|
|
|
|
|
|
|
def test_morph_property(tokenizer):
    """Morphology set via `set_morph` round-trips through the doc array."""
    doc = tokenizer("a dog")
    tok = doc[0]

    # Set from a feature string.
    tok.set_morph("PronType=prs")
    assert str(tok.morph) == "PronType=prs"
    assert doc.to_array(["MORPH"])[0] != 0

    # Setting 0 unsets the morphology entirely.
    tok.set_morph(0)
    assert doc.to_array(["MORPH"])[0] == 0

    # An empty string is stored as the "_" placeholder.
    tok.set_morph("")
    assert str(tok.morph) == ""
    assert doc.to_array(["MORPH"])[0] == tokenizer.vocab.strings["_"]

    # "_" itself is also treated as "no morphology".
    tok.set_morph("_")
    assert str(tok.morph) == ""
    assert doc.to_array(["MORPH"])[0] == tokenizer.vocab.strings["_"]

    # An existing string-store hash can be assigned directly.
    tokenizer.vocab.strings.add("Feat=Val")
    tok.set_morph(tokenizer.vocab.strings.add("Feat=Val"))
    assert str(tok.morph) == "Feat=Val"
|