import os

import pytest

from spacy.attrs import IS_ALPHA, LEMMA, ORTH
from spacy.lang.en import English
from spacy.parts_of_speech import NOUN, VERB
from spacy.vocab import Vocab

from ..util import make_tempdir


@pytest.mark.issue(1868)
def test_issue1868():
    """Vocab.__contains__ must accept integer (hash) keys as well as strings."""
    vocab = Vocab()
    lexeme = vocab["hello"]
    # Both the integer orth ID and its string form resolve to the same entry.
    assert lexeme.orth in vocab
    assert lexeme.orth_ in vocab
    assert "some string" not in vocab
    # Adding a string to the StringStore alone must not create a vocab entry.
    key = vocab.strings.add("some string")
    assert key not in vocab


@pytest.mark.parametrize(
    "text1,text2", [("Hello", "bye"), ("Hello", "hello"), ("Hello", "Hello,")]
)
def test_vocab_api_neq(en_vocab, text1, text2):
    """Distinct surface forms (case/punctuation included) get distinct orth IDs."""
    lex_a = en_vocab[text1]
    lex_b = en_vocab[text2]
    assert lex_a.orth != lex_b.orth


@pytest.mark.parametrize("text", ["Hello"])
def test_vocab_api_eq(en_vocab, text):
    """Looking up the same string twice yields lexemes with the same orth ID.

    Fix: the parametrize argvalues must be a list. A bare string ("Hello")
    is itself iterable, so pytest would generate one test per *character*
    and the multi-character word would never be exercised.
    """
    lex = en_vocab[text]
    assert en_vocab[text].orth == lex.orth


@pytest.mark.parametrize("text", ["example"])
def test_vocab_api_shape_attr(en_vocab, text):
    """The SHAPE attribute ID differs from the ORTH ID for a plain word."""
    lexeme = en_vocab[text]
    assert lexeme.orth != lexeme.shape


@pytest.mark.parametrize(
    "string,symbol",
    [
        ("IS_ALPHA", IS_ALPHA),
        ("NOUN", NOUN),
        ("VERB", VERB),
        ("LEMMA", LEMMA),
        ("ORTH", ORTH),
    ],
)
def test_vocab_api_symbols(en_vocab, string, symbol):
    """Each attribute/POS name resolves to its predefined symbol ID."""
    resolved = en_vocab.strings[string]
    assert resolved == symbol


@pytest.mark.parametrize("text", ["Hello"])
def test_vocab_api_contains(en_vocab, text):
    """`in` on the vocab reports whether a string has a lexeme entry.

    Fix: the parametrize argvalues must be a list. A bare string ("Hello")
    would be iterated character by character by pytest, so the full word
    was never actually tested.
    """
    # Touch the lexeme so the entry exists before membership is checked.
    _ = en_vocab[text]  # noqa: F841
    assert text in en_vocab
    assert "LKsdjvlsakdvlaksdvlkasjdvljasdlkfvm" not in en_vocab


def test_vocab_writing_system(en_vocab):
    """English writing-system metadata: left-to-right script with casing."""
    writing = en_vocab.writing_system
    assert writing["direction"] == "ltr"
    assert writing["has_case"] is True


def test_to_disk():
    """Vocab.to_disk writes both the vectors and lookups files by default."""
    nlp = English()
    with make_tempdir() as d:
        nlp.vocab.to_disk(d)
        # Single directory listing checked for both expected artifacts.
        written = os.listdir(d)
        assert "vectors" in written
        assert "lookups.bin" in written


def test_to_disk_exclude():
    """Vocab.to_disk honours exclude=: the named components are not written."""
    nlp = English()
    with make_tempdir() as d:
        nlp.vocab.to_disk(d, exclude=("vectors", "lookups"))
        # One listing, then assert neither excluded artifact was produced.
        written = os.listdir(d)
        assert "vectors" not in written
        assert "lookups.bin" not in written