Clean up warnings in the test suite (#11331)

Adriane Boyd 2022-08-22 12:04:30 +02:00 committed by GitHub
parent 0f07defe2c
commit f55bb7470d
7 changed files with 23 additions and 8 deletions

@@ -54,12 +54,12 @@ steps:
     condition: eq(${{ parameters.gpu }}, true)

   - script: |
-      ${{ parameters.prefix }} python -m pytest --pyargs spacy
+      ${{ parameters.prefix }} python -m pytest --pyargs spacy -W error
     displayName: "Run CPU tests"
     condition: eq(${{ parameters.gpu }}, false)

   - script: |
-      ${{ parameters.prefix }} python -m pytest --pyargs spacy -p spacy.tests.enable_gpu
+      ${{ parameters.prefix }} python -m pytest --pyargs spacy -W error -p spacy.tests.enable_gpu
     displayName: "Run GPU tests"
     condition: eq(${{ parameters.gpu }}, true)
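pytest's -W/--pythonwarnings option installs a warnings filter for the whole run, so "-W error" escalates any warning raised during the suite into a test failure. A rough Python-level sketch of the filter it installs:

    import warnings

    # Roughly what "-W error" sets up before the tests run: every
    # warning is escalated to an exception instead of being printed.
    warnings.simplefilter("error")

    warnings.warn("deprecated API")  # would now raise instead of printing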

@@ -3,6 +3,7 @@ import weakref
 import numpy
 from numpy.testing import assert_array_equal
 import pytest
+import warnings
 from thinc.api import NumpyOps, get_current_ops

 from spacy.attrs import DEP, ENT_IOB, ENT_TYPE, HEAD, IS_ALPHA, MORPH, POS
@@ -529,9 +530,9 @@ def test_doc_from_array_sent_starts(en_vocab):
     # no warning using default attrs
     attrs = doc._get_array_attrs()
     arr = doc.to_array(attrs)
-    with pytest.warns(None) as record:
-        new_doc.from_array(attrs, arr)
-    assert len(record) == 0
+    with warnings.catch_warnings():
+        warnings.simplefilter("error")
+        new_doc.from_array(attrs, arr)
     # only SENT_START uses SENT_START
     attrs = [SENT_START]
     arr = doc.to_array(attrs)
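pytest.warns(None) as a no-warning assertion is deprecated in pytest 7, and this is the replacement the commit uses throughout: escalate warnings to errors inside an isolated filter context so that any warning fails the test immediately. A minimal sketch, with a hypothetical code_under_test():

    import warnings

    def test_emits_no_warnings():
        # catch_warnings() snapshots the current filter state;
        # simplefilter("error") then turns any warning into an exception,
        # so a warning from code_under_test() fails the test on the spot.
        with warnings.catch_warnings():
            warnings.simplefilter("error")
            code_under_test()  # hypothetical function being checked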

@@ -2,6 +2,9 @@ import pytest
 from spacy.tokens import Doc

+pytestmark = pytest.mark.filterwarnings("ignore::DeprecationWarning")
+
+
 def test_ru_doc_lemmatization(ru_lemmatizer):
     words = ["мама", "мыла", "раму"]
     pos = ["NOUN", "VERB", "NOUN"]

@@ -1,6 +1,10 @@
+import pytest
 from spacy.tokens import Doc

+pytestmark = pytest.mark.filterwarnings("ignore::DeprecationWarning")
+
+
 def test_uk_lemmatizer(uk_lemmatizer):
     """Check that the default uk lemmatizer runs."""
     doc = Doc(uk_lemmatizer.vocab, words=["a", "b", "c"])
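Both lemmatizer test modules rely on the module-level pytestmark name, which pytest picks up and applies to every test in the file; here it ignores DeprecationWarning (plausibly from the pymorphy backend these lemmatizers wrap) that would otherwise fail under -W error. A minimal sketch of the mechanism:

    import pytest
    import warnings

    # pytest applies a mark assigned to the module-level name `pytestmark`
    # to every test function in the file.
    pytestmark = pytest.mark.filterwarnings("ignore::DeprecationWarning")


    def test_covered_by_module_mark():
        # Hypothetical test: the module-level mark ignores this warning,
        # so it cannot fail the run even under -W error.
        warnings.warn("old API", DeprecationWarning)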

@@ -1,4 +1,5 @@
 import pytest
+import warnings
 import srsly
 from mock import Mock
@@ -344,13 +345,13 @@ def test_phrase_matcher_validation(en_vocab):
     matcher.add("TEST1", [doc1])
     with pytest.warns(UserWarning):
         matcher.add("TEST2", [doc2])
-    with pytest.warns(None) as record:
-        matcher.add("TEST3", [doc3])
-    assert not record.list
+    with warnings.catch_warnings():
+        warnings.simplefilter("error")
+        matcher.add("TEST3", [doc3])
     matcher = PhraseMatcher(en_vocab, attr="POS", validate=True)
-    with pytest.warns(None) as record:
-        matcher.add("TEST4", [doc2])
-    assert not record.list
+    with warnings.catch_warnings():
+        warnings.simplefilter("error")
+        matcher.add("TEST4", [doc2])


 def test_attr_validation(en_vocab):
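Note that warnings.catch_warnings() restores the previous filter state on exit, so the "error" filter used for TEST3 and TEST4 cannot leak into the pytest.warns(UserWarning) assertion above it or into other tests. A small sketch of that isolation:

    import warnings

    with warnings.catch_warnings():
        warnings.simplefilter("error")
        # any warning raised here is an exception

    # the saved filters are restored here, so under the default filters
    # this prints a warning instead of raising
    warnings.warn("harmless")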

@@ -1048,6 +1048,10 @@ def test_no_gold_ents(patterns):
     for eg in train_examples:
         eg.predicted = ruler(eg.predicted)

+    # Entity ruler is no longer needed (initialization below wipes out the
+    # patterns and causes warnings)
+    nlp.remove_pipe("entity_ruler")
+
     def create_kb(vocab):
         # create artificial KB
         mykb = KnowledgeBase(vocab, entity_vector_length=vector_length)
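Here the warning is avoided at its source rather than filtered: once the ruler has annotated the training examples, the component is removed from the pipeline. A minimal sketch of the remove_pipe API on a throwaway pipeline:

    import spacy

    nlp = spacy.blank("en")
    ruler = nlp.add_pipe("entity_ruler")
    ruler.add_patterns([{"label": "ORG", "pattern": "spaCy"}])

    # remove_pipe detaches the component and returns the (name, component) pair
    name, component = nlp.remove_pipe("entity_ruler")
    assert "entity_ruler" not in nlp.pipe_names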

@@ -337,3 +337,5 @@ def ensure_shape(vectors_loc):
         # store all the results in a list in memory
         lines2 = open_file(vectors_loc)
         yield from lines2
+        lines2.close()
+        lines.close()
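With warnings escalated to errors, a handle left for the garbage collector to close can surface as a nondeterministic "unclosed file" ResourceWarning failure, which is presumably what the explicit close() calls address. A minimal sketch of the pattern with a hypothetical helper:

    def stream_lines(path):
        # Mirrors the fix above: close the handle explicitly after the
        # last line is yielded instead of leaving it to garbage
        # collection, whose ResourceWarning would fail under -W error.
        handle = open(path, encoding="utf8")
        yield from handle
        handle.close()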