Mirror of https://github.com/explosion/spaCy.git (synced 2025-06-21 13:33:08 +03:00)
Update tests

commit 8a13f510d6
parent bbd8acd4bf
@@ -145,8 +145,6 @@ def test_accept_blocked_token():
     nlp1 = English()
     doc1 = nlp1("I live in New York")
     config = {
-        "learn_tokens": False,
-        "min_action_freq": 30,
     }
     ner1 = nlp1.create_pipe("ner", config=config)
     assert [token.ent_iob_ for token in doc1] == ["", "", "", "", ""]
@@ -167,8 +165,6 @@ def test_accept_blocked_token():
     nlp2 = English()
     doc2 = nlp2("I live in New York")
     config = {
-        "learn_tokens": False,
-        "min_action_freq": 30,
     }
     ner2 = nlp2.create_pipe("ner", config=config)
 
@@ -225,8 +221,6 @@ def test_overwrite_token():
     assert [token.ent_type_ for token in doc] == ["", "", "", "", ""]
     # Check that a new ner can overwrite O
     config = {
-        "learn_tokens": False,
-        "min_action_freq": 30,
     }
     ner2 = nlp.create_pipe("ner", config=config)
     ner2.moves.add_action(5, "")
@@ -272,8 +272,6 @@ def test_issue1963(en_tokenizer):
 def test_issue1967(label):
     nlp = Language()
     config = {
-        "learn_tokens": False,
-        "min_action_freq": 30,
     }
     ner = nlp.create_pipe("ner", config=config)
     example = Example.from_dict(
@@ -139,8 +139,6 @@ def test_issue4042_bug2():
             output_dir.mkdir()
         ner1.to_disk(output_dir)
         config = {
-            "learn_tokens": False,
-            "min_action_freq": 30,
         }
         ner2 = nlp1.create_pipe("ner", config=config)
         ner2.from_disk(output_dir)
@@ -304,8 +302,6 @@ def test_issue4313():
     beam_density = 0.0001
     nlp = English()
     config = {
-        "learn_tokens": False,
-        "min_action_freq": 30,
     }
     ner = nlp.create_pipe("ner", config=config)
     ner.add_label("SOME_LABEL")
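
Note: every hunk above makes the same edit, dropping the "learn_tokens" and "min_action_freq" entries from the config dict each test passes to create_pipe; the surrounding test code is unchanged. As a rough, minimal sketch of the resulting pattern (assuming the spaCy development version these tests target; the sentence and label are taken from the tests above):

    from spacy.lang.en import English

    nlp = English()
    doc = nlp("I live in New York")
    # The trimmed config: "learn_tokens" and "min_action_freq" are gone,
    # so only test-specific keys (if any) remain.
    config = {}
    ner = nlp.create_pipe("ner", config=config)
    ner.add_label("SOME_LABEL")
    # An untrained pipeline leaves ent_iob_ unset, i.e. empty strings.
    assert [token.ent_iob_ for token in doc] == ["", "", "", "", ""]
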
@@ -185,20 +185,16 @@ def test_issue4725_1():
     vocab = Vocab(vectors_name="test_vocab_add_vector")
     nlp = English(vocab=vocab)
     config = {
-        "learn_tokens": False,
-        "min_action_freq": 342,
         "update_with_oracle_cut_size": 111,
     }
     ner = nlp.create_pipe("ner", config=config)
     with make_tempdir() as tmp_path:
         with (tmp_path / "ner.pkl").open("wb") as file_:
             pickle.dump(ner, file_)
-            assert ner.cfg["min_action_freq"] == 342
             assert ner.cfg["update_with_oracle_cut_size"] == 111
 
         with (tmp_path / "ner.pkl").open("rb") as file_:
             ner2 = pickle.load(file_)
-            assert ner2.cfg["min_action_freq"] == 342
             assert ner2.cfg["update_with_oracle_cut_size"] == 111
 
 
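
The final hunk trims test_issue4725_1 the same way and also drops the two "min_action_freq" assertions, so the pickle round-trip only checks the surviving "update_with_oracle_cut_size" key. A compressed, in-memory sketch of that round-trip (the test itself writes a "ner.pkl" file inside make_tempdir; pickle.dumps/pickle.loads is assumed to behave equivalently here):

    import pickle

    from spacy.lang.en import English

    nlp = English()
    config = {"update_with_oracle_cut_size": 111}
    ner = nlp.create_pipe("ner", config=config)
    # Serialize and restore the component entirely in memory.
    ner2 = pickle.loads(pickle.dumps(ner))
    # Only the surviving key is asserted after this commit.
    assert ner.cfg["update_with_oracle_cut_size"] == 111
    assert ner2.cfg["update_with_oracle_cut_size"] == 111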