Fix compat for v2.x branch

This commit is contained in:
Ines Montani 2020-05-22 14:22:36 +02:00
parent 65c7e82de2
commit c685ee734a
5 changed files with 27 additions and 10 deletions

View File

@ -1,3 +1,6 @@
# coding: utf8
from __future__ import unicode_literals
from spacy.lang.en import English from spacy.lang.en import English

View File

@ -1,16 +1,17 @@
# coding: utf8 # coding: utf8
import warnings import warnings
from unittest import TestCase from unittest import TestCase
import pytest import pytest
import srsly import srsly
from numpy import zeros from numpy import zeros
from spacy.kb import KnowledgeBase, Writer from spacy.kb import KnowledgeBase, Writer
from spacy.vectors import Vectors from spacy.vectors import Vectors
from spacy.language import Language from spacy.language import Language
from spacy.pipeline import Pipe from spacy.pipeline import Pipe
from spacy.tests.util import make_tempdir from spacy.compat import is_python2
from ..util import make_tempdir
def nlp(): def nlp():
@ -96,12 +97,14 @@ def write_obj_and_catch_warnings(obj):
return list(filter(lambda x: isinstance(x, ResourceWarning), warnings_list)) return list(filter(lambda x: isinstance(x, ResourceWarning), warnings_list))
@pytest.mark.skipif(is_python2, reason="ResourceWarning needs Python 3.x")
@pytest.mark.parametrize("obj", objects_to_test[0], ids=objects_to_test[1]) @pytest.mark.parametrize("obj", objects_to_test[0], ids=objects_to_test[1])
def test_to_disk_resource_warning(obj): def test_to_disk_resource_warning(obj):
warnings_list = write_obj_and_catch_warnings(obj) warnings_list = write_obj_and_catch_warnings(obj)
assert len(warnings_list) == 0 assert len(warnings_list) == 0
@pytest.mark.skipif(is_python2, reason="ResourceWarning needs Python 3.x")
def test_writer_with_path_py35(): def test_writer_with_path_py35():
writer = None writer = None
with make_tempdir() as d: with make_tempdir() as d:
@ -132,6 +135,8 @@ def test_save_and_load_knowledge_base():
pytest.fail(str(e)) pytest.fail(str(e))
if not is_python2:
class TestToDiskResourceWarningUnittest(TestCase): class TestToDiskResourceWarningUnittest(TestCase):
def test_resource_warning(self): def test_resource_warning(self):
scenarios = zip(*objects_to_test) scenarios = zip(*objects_to_test)

View File

@ -1,3 +1,6 @@
# coding: utf-8
from __future__ import unicode_literals
from spacy.lang.en import English from spacy.lang.en import English
from spacy.lang.en.syntax_iterators import noun_chunks from spacy.lang.en.syntax_iterators import noun_chunks
from spacy.tests.util import get_doc from spacy.tests.util import get_doc
@ -6,11 +9,13 @@ from spacy.vocab import Vocab
def test_issue5458(): def test_issue5458():
# Test that the noun chuncker does not generate overlapping spans # Test that the noun chuncker does not generate overlapping spans
# fmt: off
words = ["In", "an", "era", "where", "markets", "have", "brought", "prosperity", "and", "empowerment", "."] words = ["In", "an", "era", "where", "markets", "have", "brought", "prosperity", "and", "empowerment", "."]
vocab = Vocab(strings=words) vocab = Vocab(strings=words)
dependencies = ["ROOT", "det", "pobj", "advmod", "nsubj", "aux", "relcl", "dobj", "cc", "conj", "punct"] dependencies = ["ROOT", "det", "pobj", "advmod", "nsubj", "aux", "relcl", "dobj", "cc", "conj", "punct"]
pos_tags = ["ADP", "DET", "NOUN", "ADV", "NOUN", "AUX", "VERB", "NOUN", "CCONJ", "NOUN", "PUNCT"] pos_tags = ["ADP", "DET", "NOUN", "ADV", "NOUN", "AUX", "VERB", "NOUN", "CCONJ", "NOUN", "PUNCT"]
heads = [0, 1, -2, 6, 2, 1, -4, -1, -1, -2, -10] heads = [0, 1, -2, 6, 2, 1, -4, -1, -1, -2, -10]
# fmt: on
en_doc = get_doc(vocab, words, pos_tags, heads, dependencies) en_doc = get_doc(vocab, words, pos_tags, heads, dependencies)
en_doc.noun_chunks_iterator = noun_chunks en_doc.noun_chunks_iterator = noun_chunks

View File

@ -5,6 +5,7 @@ import pytest
import pickle import pickle
from spacy.vocab import Vocab from spacy.vocab import Vocab
from spacy.strings import StringStore from spacy.strings import StringStore
from spacy.compat import is_python2
from ..util import make_tempdir from ..util import make_tempdir
@ -134,6 +135,7 @@ def test_serialize_stringstore_roundtrip_disk(strings1, strings2):
assert list(sstore1_d) != list(sstore2_d) assert list(sstore1_d) != list(sstore2_d)
@pytest.mark.skipif(is_python2, reason="Dict order? Not sure if worth investigating")
@pytest.mark.parametrize("strings,lex_attr", test_strings_attrs) @pytest.mark.parametrize("strings,lex_attr", test_strings_attrs)
def test_pickle_vocab(strings, lex_attr): def test_pickle_vocab(strings, lex_attr):
vocab = Vocab(strings=strings) vocab = Vocab(strings=strings)

View File

@ -10,6 +10,7 @@ from spacy.vectors import Vectors
from spacy.tokenizer import Tokenizer from spacy.tokenizer import Tokenizer
from spacy.strings import hash_string from spacy.strings import hash_string
from spacy.tokens import Doc from spacy.tokens import Doc
from spacy.compat import is_python2
from ..util import add_vecs_to_vocab, make_tempdir from ..util import add_vecs_to_vocab, make_tempdir
@ -339,6 +340,7 @@ def test_vocab_prune_vectors():
assert_allclose(similarity, cosine(data[0], data[2]), atol=1e-4, rtol=1e-3) assert_allclose(similarity, cosine(data[0], data[2]), atol=1e-4, rtol=1e-3)
@pytest.mark.skipif(is_python2, reason="Dict order? Not sure if worth investigating")
def test_vectors_serialize(): def test_vectors_serialize():
data = numpy.asarray([[4, 2, 2, 2], [4, 2, 2, 2], [1, 1, 1, 1]], dtype="f") data = numpy.asarray([[4, 2, 2, 2], [4, 2, 2, 2], [1, 1, 1, 1]], dtype="f")
v = Vectors(data=data, keys=["A", "B", "C"]) v = Vectors(data=data, keys=["A", "B", "C"])