mirror of
https://github.com/explosion/spaCy.git
synced 2024-12-27 02:16:32 +03:00
8387ce4c01
* Implement Doc.from_json: rough draft. * Implement Doc.from_json: first draft with tests. * Implement Doc.from_json: added documentation on website for Doc.to_json(), Doc.from_json(). * Implement Doc.from_json: formatting changes. * Implement Doc.to_json(): reverting unrelated formatting changes. * Implement Doc.to_json(): fixing entity and span conversion. Moving fixture and doc <-> json conversion tests into single file. * Implement Doc.from_json(): replaced entity/span converters with doc.char_span() calls. * Implement Doc.from_json(): handling sentence boundaries in spans. * Implementing Doc.from_json(): added parser-free sentence boundaries transfer. * Implementing Doc.from_json(): added parser-free sentence boundaries transfer. * Implementing Doc.from_json(): incorporated various PR feedback. * Renaming fixture for document without dependencies. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implementing Doc.from_json(): using two sent_starts instead of one. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implementing Doc.from_json(): doc_without_dependency_parser() -> doc_without_deps. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implementing Doc.from_json(): incorporating various PR feedback. Rebased on latest master. * Implementing Doc.from_json(): refactored Doc.from_json() to work with annotation IDs instead of their string representations. * Implement Doc.from_json(): reverting unwanted formatting/rebasing changes. * Implement Doc.from_json(): added check for char_span() calculation for entities. * Update spacy/tokens/doc.pyx Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): minor refactoring, additional check for token attribute consistency with corresponding test. * Implement Doc.from_json(): removed redundancy in annotation type key naming. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): Simplifying setting annotation values. 
Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement doc.from_json(): renaming annot_types to token_attrs. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): adjustments for renaming of annot_types to token_attrs. * Implement Doc.from_json(): removing default categories. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): simplifying lexeme initialization. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): simplifying lexeme initialization. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): refactoring to only have keys for present annotations. * Implement Doc.from_json(): fix check for tokens' HEAD attributes. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): refactoring Doc.from_json(). * Implement Doc.from_json(): fixing span_group retrieval. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): fixing span retrieval. * Implement Doc.from_json(): added schema for Doc JSON format. Minor refactoring in Doc.from_json(). * Implement Doc.from_json(): added comment regarding Token and Span extension support. * Implement Doc.from_json(): renaming inconsistent_props to partial_attrs.. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): adjusting error message. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): extending E1038 message. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): added params to E1038 raises. * Implement Doc.from_json(): combined attribute collection with partial attributes check. * Implement Doc.from_json(): added optional schema validation. * Implement Doc.from_json(): fixed optional fields in schema, tests. * Implement Doc.from_json(): removed redundant None check for DEP. * Implement Doc.from_json(): added passing of schema validatoin message to E1037.. 
* Implement Doc.from_json(): removing redundant error E1040. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): changing message for E1037. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): adjusted website docs and docstring of Doc.from_json(). * Update spacy/tests/doc/test_json_doc_conversion.py * Implement Doc.from_json(): docstring update. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): docstring update. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): website docs update. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): docstring formatting. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): docstring formatting. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): fixing Doc reference in website docs. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): reformatted website/docs/api/doc.md. * Implement Doc.from_json(): bumped IDs of new errors to avoid merge conflicts. * Implement Doc.from_json(): fixing bug in tests. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Implement Doc.from_json(): fix setting of sentence starts for docs without DEP. * Implement Doc.from_json(): add check for valid char spans when manually setting sentence boundaries. Refactor sentence boundary setting slightly. Move error message for lack of support for partial token annotations to errors.py. * Implement Doc.from_json(): simplify token sentence start manipulation. Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com> * Combine related error messages * Update spacy/tests/doc/test_json_doc_conversion.py Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com>
192 lines
6.4 KiB
Python
192 lines
6.4 KiB
Python
import pytest
|
|
import spacy
|
|
from spacy import schemas
|
|
from spacy.tokens import Doc, Span
|
|
|
|
|
|
@pytest.fixture()
def doc(en_vocab):
    """A three-token doc carrying the full set of annotations exercised by
    the JSON round-trip tests: POS, fine-grained tags, heads/deps, a single
    ORG entity and morphological features."""
    return Doc(
        en_vocab,
        words=["c", "d", "e"],
        pos=["VERB", "NOUN", "NOUN"],
        tags=["VBP", "NN", "NN"],
        heads=[0, 0, 1],
        deps=["ROOT", "dobj", "dobj"],
        ents=["O", "B-ORG", "O"],
        morphs=["Feat1=A", "Feat1=B", "Feat1=A|Feat2=D"],
    )
@pytest.fixture()
def doc_without_deps(en_vocab):
    """Like the `doc` fixture but without a dependency parse (no heads/deps);
    sentence boundaries are set manually via `sent_starts` instead."""
    return Doc(
        en_vocab,
        words=["c", "d", "e"],
        pos=["VERB", "NOUN", "NOUN"],
        tags=["VBP", "NN", "NN"],
        ents=["O", "B-ORG", "O"],
        morphs=["Feat1=A", "Feat1=B", "Feat1=A|Feat2=D"],
        sent_starts=[True, False, True],
    )
def test_doc_to_json(doc):
    """Doc.to_json() should export text, token attributes and entities, and
    the result should validate against the Doc JSON schema."""
    json_doc = doc.to_json()
    assert json_doc["text"] == "c d e "
    tokens = json_doc["tokens"]
    assert len(tokens) == 3
    first_token = tokens[0]
    assert first_token["pos"] == "VERB"
    assert first_token["tag"] == "VBP"
    assert first_token["dep"] == "ROOT"
    ents = json_doc["ents"]
    assert len(ents) == 1
    # Entity offsets in the JSON format are character-based, not token-based.
    assert ents[0]["start"] == 2
    assert ents[0]["end"] == 3
    assert ents[0]["label"] == "ORG"
    # schemas.validate() returns a list of error messages; empty means valid.
    assert not schemas.validate(schemas.DocJSONSchema, json_doc)
def test_doc_to_json_underscore(doc):
    """Doc.to_json() should export custom extension attributes under "_".

    Extensions are registered behind a `has_extension` guard so the test is
    idempotent when the process-global Doc extension registry already holds
    them (consistent with test_json_to_doc_underscore); an unguarded
    `set_extension` raises on re-registration.
    """
    if not Doc.has_extension("json_test1"):
        Doc.set_extension("json_test1", default=False)
    if not Doc.has_extension("json_test2"):
        Doc.set_extension("json_test2", default=False)
    doc._.json_test1 = "hello world"
    doc._.json_test2 = [1, 2, 3]
    json_doc = doc.to_json(underscore=["json_test1", "json_test2"])
    assert "_" in json_doc
    assert json_doc["_"]["json_test1"] == "hello world"
    assert json_doc["_"]["json_test2"] == [1, 2, 3]
    # schemas.validate() returns a list of error messages; empty means valid.
    assert not schemas.validate(schemas.DocJSONSchema, json_doc)
def test_doc_to_json_underscore_error_attr(doc):
    """Doc.to_json() must raise a ValueError when asked to serialize a
    custom attribute that was never registered in the ._ space."""
    unregistered = ["json_test3"]
    with pytest.raises(ValueError):
        doc.to_json(underscore=unregistered)
def test_doc_to_json_underscore_error_serialize(doc):
    """Doc.to_json() must raise a ValueError when a custom attribute value
    isn't JSON-serializable.

    The extension is registered behind a `has_extension` guard so repeated
    in-process runs don't fail on re-registration (consistent with
    test_json_to_doc_underscore).
    """
    if not Doc.has_extension("json_test4"):
        # NOTE(review): presumably a method extension's value is the bound
        # method itself, which json can't serialize — hence the ValueError.
        Doc.set_extension("json_test4", method=lambda doc: doc.text)
    with pytest.raises(ValueError):
        doc.to_json(underscore=["json_test4"])
def test_doc_to_json_span(doc):
    """Doc.to_json() should include span groups under "spans"."""
    doc.spans["test"] = [Span(doc, 0, 2, "test"), Span(doc, 0, 1, "test")]
    json_doc = doc.to_json()
    assert "spans" in json_doc
    span_groups = json_doc["spans"]
    assert len(span_groups) == 1
    assert len(span_groups["test"]) == 2
    assert span_groups["test"][0]["start"] == 0
    # schemas.validate() returns a list of error messages; empty means valid.
    assert not schemas.validate(schemas.DocJSONSchema, json_doc)
def test_json_to_doc(doc):
    """Round-trip a fully annotated doc through to_json()/from_json() and
    check text, token attributes, heads and entities survive intact."""
    new_doc = Doc(doc.vocab).from_json(doc.to_json(), validate=True)
    rebuilt_tokens = list(new_doc)
    assert new_doc.text == doc.text == "c d e "
    assert len(rebuilt_tokens) == len(list(doc)) == 3
    # Compare hash-valued token attributes against the source doc.
    for attr in ("pos", "tag", "dep", "lemma"):
        assert getattr(rebuilt_tokens[0], attr) == getattr(doc[0], attr)
    assert rebuilt_tokens[0].head.idx == doc[0].head.idx
    assert len(new_doc.ents) == 1
    entity = new_doc.ents[0]
    # Entity offsets on the Doc are token-based (unlike the JSON form).
    assert (entity.start, entity.end, entity.label_) == (1, 2, "ORG")
def test_json_to_doc_underscore(doc):
    """Round-trip custom extension attributes through the JSON format."""
    # Register the extensions only once — the registry is process-global.
    for ext_name in ("json_test1", "json_test2"):
        if not Doc.has_extension(ext_name):
            Doc.set_extension(ext_name, default=False)
    doc._.json_test1 = "hello world"
    doc._.json_test2 = [1, 2, 3]
    json_doc = doc.to_json(underscore=["json_test1", "json_test2"])
    new_doc = Doc(doc.vocab).from_json(json_doc, validate=True)
    assert all(new_doc.has_extension(f"json_test{i}") for i in range(1, 3))
    assert new_doc._.json_test1 == "hello world"
    assert new_doc._.json_test2 == [1, 2, 3]
def test_json_to_doc_spans(doc):
    """Doc.from_json() should restore span groups, including boundaries,
    labels and knowledge-base IDs."""
    doc.spans["test"] = [
        Span(doc, 0, 2, label="test"),
        Span(doc, 0, 1, label="test", kb_id=7),
    ]
    new_doc = Doc(doc.vocab).from_json(doc.to_json(), validate=True)
    assert len(new_doc.spans) == 1
    assert len(new_doc.spans["test"]) == 2
    for original_span, restored_span in zip(doc.spans["test"], new_doc.spans["test"]):
        assert restored_span.start == original_span.start
        assert restored_span.end == original_span.end
        assert restored_span.label == original_span.label
        assert restored_span.kb_id == original_span.kb_id
def test_json_to_doc_sents(doc, doc_without_deps):
    """Doc.from_json() should restore sentence boundaries — both those derived
    from the dependency parse and those set manually via SENT_START."""
    for test_doc in (doc, doc_without_deps):
        json_doc = test_doc.to_json()
        # Build the new doc from the vocab of the doc being round-tripped.
        # The original used `doc.vocab` for both, which only worked because
        # the two fixtures happen to share the same `en_vocab`.
        new_doc = Doc(test_doc.vocab).from_json(json_doc, validate=True)
        assert [sent.text for sent in test_doc.sents] == [
            sent.text for sent in new_doc.sents
        ]
        assert [token.is_sent_start for token in test_doc] == [
            token.is_sent_start for token in new_doc
        ]
def test_json_to_doc_cats(doc):
    """Doc.from_json() should restore document-level categories (.cats)."""
    expected_cats = {"A": 0.3, "B": 0.7}
    doc.cats = expected_cats
    new_doc = Doc(doc.vocab).from_json(doc.to_json(), validate=True)
    assert new_doc.cats == expected_cats
def test_json_to_doc_spaces():
    """Doc.from_json() should preserve inter-token whitespace exactly."""
    original = spacy.blank("en")("This is just brilliant.")
    restored = Doc(original.vocab).from_json(original.to_json(), validate=True)
    assert original.text == restored.text
def test_json_to_doc_attribute_consistency(doc):
    """Doc.from_json() must reject input in which the tokens don't all carry
    the same set of attributes."""
    doc_json = doc.to_json()
    # Remove one attribute from a single token to make the input inconsistent.
    del doc_json["tokens"][1]["morph"]
    with pytest.raises(ValueError):
        Doc(doc.vocab).from_json(doc_json)
def test_json_to_doc_validation_error(doc):
    """Doc.from_json(validate=True) must raise on schema-invalid input
    (here: the required "tokens" key is missing)."""
    doc_json = doc.to_json()
    del doc_json["tokens"]
    with pytest.raises(ValueError):
        Doc(doc.vocab).from_json(doc_json, validate=True)