Mirror of https://github.com/explosion/spaCy.git, synced 2024-12-25 17:36:30 +03:00

unskipping tests with python >= 3.6

This commit is contained in:
parent 5ca57d8221 · commit a6a68da673
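The hunks below all remove unconditional @pytest.mark.skip markers from tests that only failed on Python 3.5, where dict iteration order is not guaranteed; with the suite now assuming Python >= 3.6, the skips are dropped outright. For comparison, a minimal sketch of the version-conditional alternative (hypothetical, not what this commit does) would look like:

    import sys

    import pytest

    # Hypothetical conditional marker: skip only on interpreters older than
    # 3.6, where dict iteration order is not guaranteed to be insertion order.
    requires_ordered_dicts = pytest.mark.skipif(
        sys.version_info < (3, 6),
        reason="dict ordering is only guaranteed on Python >= 3.6",
    )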
@@ -113,9 +113,8 @@ def test_en_tokenizer_norm_exceptions(en_tokenizer, text, norms):
     assert [token.norm_ for token in tokens] == norms


-@pytest.mark.skip
 @pytest.mark.parametrize(
-    "text,norm", [("radicalised", "radicalized"), ("cuz", "because")]
+    "text,norm", [("Jan.", "January"), ("'cuz", "because")]
 )
 def test_en_lex_attrs_norm_exceptions(en_tokenizer, text, norm):
     tokens = en_tokenizer(text)
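Alongside the unskip, the parameters change to the norm exceptions the English lexical attributes actually keep. A rough sketch of what the updated test asserts, assuming a blank English pipeline with the default tokenizer exceptions loaded (the expected values here simply mirror the new test parameters):

    import spacy

    nlp = spacy.blank("en")
    doc = nlp("Jan.")
    # "Jan." is kept as a single token by the English tokenizer exceptions,
    # and its NORM is expanded to the full month name.
    assert doc[0].norm_ == "January"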
@@ -89,7 +89,6 @@ def test_uk_tokenizer_splits_open_appostrophe(uk_tokenizer, text):
     assert tokens[0].text == "'"


-@pytest.mark.skip(reason="See Issue #3327 and PR #3329")
 @pytest.mark.parametrize("text", ["Тест''"])
 def test_uk_tokenizer_splits_double_end_quote(uk_tokenizer, text):
     tokens = uk_tokenizer(text)
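Here the skip referencing Issue #3327 and PR #3329 is removed, so the double-end-quote split is exercised again. A hedged sketch of driving the same tokenizer outside the fixture, assuming the Ukrainian language data and its dependencies are installed:

    import spacy

    nlp = spacy.blank("uk")
    tokens = nlp("Тест''")
    # The unskipped test asserts how the trailing '' splits off; the exact
    # expected token texts live in the (truncated) test body.
    print([t.text for t in tokens])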
@@ -140,9 +140,6 @@ def test_to_from_bytes(parser, blank_parser):
     assert blank_parser.moves.n_moves == parser.moves.n_moves


-@pytest.mark.skip(
-    reason="This seems to be a dict ordering bug somewhere. Only failing on some platforms."
-)
 def test_serialize_tagger_roundtrip_bytes(en_vocab, taggers):
     tagger1 = taggers[0]
     tagger1_b = tagger1.to_bytes()
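The removed reason points at nondeterministic dict ordering. On Python >= 3.6 dicts preserve insertion order, which is what makes byte-for-byte comparisons of serialized state reliable. A minimal model of the failure mode, using json with made-up keys rather than spaCy's actual msgpack-based serializer:

    import json

    def roundtrip(payload):
        # Deserialize and re-serialize; the output matches the input only if
        # dict key order survives the trip (guaranteed on Python >= 3.6).
        return json.dumps(json.loads(payload))

    data = json.dumps({"moves": ["S", "L", "R"], "cfg": {"beam_width": 1}})
    assert roundtrip(data) == data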
@@ -26,7 +26,6 @@ def test_serialize_custom_tokenizer(en_vocab, en_tokenizer):
     assert tokenizer_reloaded.rules == {}


-@pytest.mark.skip(reason="Currently unreliable across platforms")
 @pytest.mark.parametrize("text", ["I💜you", "they’re", "“hello”"])
 def test_serialize_tokenizer_roundtrip_bytes(en_tokenizer, text):
     tokenizer = en_tokenizer
@@ -38,7 +37,6 @@ def test_serialize_tokenizer_roundtrip_bytes(en_tokenizer, text):
     assert [token.text for token in doc1] == [token.text for token in doc2]


-@pytest.mark.skip(reason="Currently unreliable across platforms")
 def test_serialize_tokenizer_roundtrip_disk(en_tokenizer):
     tokenizer = en_tokenizer
     with make_tempdir() as d:
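Both tokenizer tests follow the same roundtrip pattern, once over to_bytes/from_bytes and once over to_disk/from_disk. A sketch of the bytes variant against a blank pipeline (the tests use the en_tokenizer fixture instead):

    import spacy
    from spacy.tokenizer import Tokenizer

    nlp = spacy.blank("en")
    data = nlp.tokenizer.to_bytes()
    # Restore into a fresh tokenizer over the same vocab.
    new_tokenizer = Tokenizer(nlp.vocab)
    new_tokenizer.from_bytes(data)
    text = "they’re"
    assert [t.text for t in new_tokenizer(text)] == [t.text for t in nlp.tokenizer(text)]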
@@ -71,7 +71,6 @@ def test_table_api_to_from_bytes():
     assert "def" not in new_table2


-@pytest.mark.skip(reason="This fails on Python 3.5")
 def test_lookups_to_from_bytes():
     lookups = Lookups()
     lookups.add_table("table1", {"foo": "bar", "hello": "world"})
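The unskipped test checks that a Lookups object survives a bytes roundtrip and re-serializes to identical bytes. A condensed sketch of the same flow, built from the calls visible in the diff:

    from spacy.lookups import Lookups

    lookups = Lookups()
    lookups.add_table("table1", {"foo": "bar", "hello": "world"})
    data = lookups.to_bytes()
    new_lookups = Lookups()
    new_lookups.from_bytes(data)
    # Byte-identical re-serialization is deterministic on Python >= 3.6.
    assert new_lookups.to_bytes() == data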
@@ -91,7 +90,6 @@ def test_lookups_to_from_bytes():
     assert new_lookups.to_bytes() == lookups_bytes


-@pytest.mark.skip(reason="This fails on Python 3.5")
 def test_lookups_to_from_disk():
     lookups = Lookups()
     lookups.add_table("table1", {"foo": "bar", "hello": "world"})
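The disk variant is the same idea through a directory. A sketch using the standard library's tempfile in place of the suite's make_tempdir helper (the table contents here are illustrative):

    import tempfile
    from pathlib import Path

    from spacy.lookups import Lookups

    lookups = Lookups()
    lookups.add_table("table1", {"a": 1, "b": 2})
    with tempfile.TemporaryDirectory() as d:
        lookups.to_disk(Path(d))
        new_lookups = Lookups()
        new_lookups.from_disk(Path(d))
    table2 = new_lookups.get_table("table1")
    assert table2["b"] == 2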
@@ -111,7 +109,6 @@ def test_lookups_to_from_disk():
     assert table2["b"] == 2


-@pytest.mark.skip(reason="This fails on Python 3.5")
 def test_lookups_to_from_bytes_via_vocab():
     table_name = "test"
     vocab = Vocab()
@@ -128,7 +125,6 @@ def test_lookups_to_from_bytes_via_vocab():
     assert new_vocab.to_bytes() == vocab_bytes


-@pytest.mark.skip(reason="This fails on Python 3.5")
 def test_lookups_to_from_disk_via_vocab():
     table_name = "test"
     vocab = Vocab()
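The two via-vocab tests run the same roundtrips with the lookups attached to a Vocab. A hedged sketch of the bytes path, assuming the Vocab exposes its lookups as vocab.lookups, as the test names suggest:

    from spacy.vocab import Vocab

    vocab = Vocab()
    vocab.lookups.add_table("test", {"a": 1})
    vocab_bytes = vocab.to_bytes()
    new_vocab = Vocab()
    new_vocab.from_bytes(vocab_bytes)
    # The lookups table rides along with the vocab's serialized state.
    assert new_vocab.to_bytes() == vocab_bytes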