mirror of
https://github.com/explosion/spaCy.git
synced 2024-12-25 09:26:27 +03:00
Make warning tests more explicit
This commit is contained in:
parent
0d206cf47c
commit
fe39fd4d13
|
@ -7,6 +7,7 @@ import numpy
|
|||
from spacy.tokens import Doc
|
||||
from spacy.vocab import Vocab
|
||||
from spacy.attrs import LEMMA
|
||||
from spacy.errors import ModelsWarning
|
||||
|
||||
from ..util import get_doc
|
||||
|
||||
|
@ -344,11 +345,10 @@ def test_doc_api_has_vector():
|
|||
|
||||
def test_doc_api_similarity_match():
    """Self/lexeme similarity is exactly 1.0 without a model; comparing
    against a different doc must emit ModelsWarning (no vectors loaded).

    NOTE(review): reconstructed post-commit version from a diff scrape —
    the pre-commit `with pytest.warns(None):` wrappers were removed/replaced
    per the hunk; confirm against the upstream spaCy commit.
    """
    doc = Doc(Vocab(), words=["a"])
    # Comparing a doc with its own token / lexeme is an exact match and
    # must NOT warn, so these asserts are deliberately unwrapped.
    assert doc.similarity(doc[0]) == 1.0
    assert doc.similarity(doc.vocab["a"]) == 1.0
    doc2 = Doc(doc.vocab, words=["a", "b", "c"])
    # Cross-doc similarity without vectors should explicitly warn.
    with pytest.warns(ModelsWarning):
        assert doc.similarity(doc2[:1]) == 1.0
        assert doc.similarity(doc2) == 0.0
|
|
@ -5,6 +5,7 @@ import pytest
|
|||
from spacy.attrs import ORTH, LENGTH
|
||||
from spacy.tokens import Doc, Span
|
||||
from spacy.vocab import Vocab
|
||||
from spacy.errors import ModelsWarning
|
||||
|
||||
from ..util import get_doc
|
||||
|
||||
|
def test_span_similarity_match():
    """Span-to-span similarity on identical word sequences is 1.0, but a
    ModelsWarning must be raised because no word vectors are available.

    NOTE(review): reconstructed post-commit version from a diff scrape —
    `pytest.warns(None)` was replaced by `pytest.warns(ModelsWarning)` per
    the hunk; confirm against the upstream spaCy commit.
    """
    doc = Doc(Vocab(), words=["a", "b", "a", "b"])
    span1 = doc[:2]
    span2 = doc[2:]
    with pytest.warns(ModelsWarning):
        assert span1.similarity(span2) == 1.0
        assert span1.similarity(doc) == 0.0
        assert span1[:1].similarity(doc.vocab["a"]) == 1.0
|
|
@ -47,7 +47,7 @@ def test_vectors_similarity_TT(vocab, vectors):
|
|||
def test_vectors_similarity_TD(vocab, vectors):
    """Token-vs-Doc similarity is symmetric; expects a UserWarning.

    NOTE(review): reconstructed post-commit version from a diff scrape —
    `pytest.warns(None)` became `pytest.warns(UserWarning)` per the hunk;
    confirm against the upstream spaCy commit. `vocab`/`vectors` are
    pytest fixtures defined elsewhere in the test module.
    """
    [(word1, vec1), (word2, vec2)] = vectors
    doc = Doc(vocab, words=[word1, word2])
    with pytest.warns(UserWarning):
        assert doc.similarity(doc[0]) == doc[0].similarity(doc)
|
@ -60,5 +60,5 @@ def test_vectors_similarity_DS(vocab, vectors):
|
|||
def test_vectors_similarity_TS(vocab, vectors):
    """Token-vs-Span similarity is symmetric; expects a UserWarning.

    NOTE(review): reconstructed post-commit version from a diff scrape —
    `pytest.warns(None)` became `pytest.warns(UserWarning)` per the hunk;
    confirm against the upstream spaCy commit. `vocab`/`vectors` are
    pytest fixtures defined elsewhere in the test module.
    """
    [(word1, vec1), (word2, vec2)] = vectors
    doc = Doc(vocab, words=[word1, word2])
    with pytest.warns(UserWarning):
        assert doc[:2].similarity(doc[0]) == doc[0].similarity(doc[:2])
|
@ -235,7 +235,7 @@ def test_vectors_lexeme_doc_similarity(vocab, text):
|
|||
@pytest.mark.parametrize("text", [["apple", "orange", "juice"]])
def test_vectors_span_span_similarity(vocab, text):
    """Span-vs-Span similarity is symmetric and strictly inside (-1, 1);
    expects a UserWarning.

    NOTE(review): reconstructed post-commit version from a diff scrape —
    `pytest.warns(None)` became `pytest.warns(UserWarning)` per the hunk;
    confirm against the upstream spaCy commit. `vocab` is a pytest
    fixture defined elsewhere in the test module.
    """
    doc = Doc(vocab, words=text)
    with pytest.warns(UserWarning):
        assert doc[0:2].similarity(doc[1:3]) == doc[1:3].similarity(doc[0:2])
        assert -1.0 < doc[0:2].similarity(doc[1:3]) < 1.0
|
@ -243,7 +243,7 @@ def test_vectors_span_span_similarity(vocab, text):
|
|||
@pytest.mark.parametrize("text", [["apple", "orange", "juice"]])
def test_vectors_span_doc_similarity(vocab, text):
    """Span-vs-Doc similarity is symmetric and strictly inside (-1, 1);
    expects a UserWarning.

    NOTE(review): reconstructed post-commit version from a diff scrape —
    `pytest.warns(None)` became `pytest.warns(UserWarning)` per the hunk;
    confirm against the upstream spaCy commit. `vocab` is a pytest
    fixture defined elsewhere in the test module.
    """
    doc = Doc(vocab, words=text)
    with pytest.warns(UserWarning):
        assert doc[0:2].similarity(doc) == doc.similarity(doc[0:2])
        assert -1.0 < doc[0:2].similarity(doc) < 1.0
|
Loading…
Reference in New Issue
Block a user