diff --git a/spacy/tests/lang/en/test_prefix_suffix_infix.py b/spacy/tests/lang/en/test_prefix_suffix_infix.py
index 9dfb54fd6..a903496e8 100644
--- a/spacy/tests/lang/en/test_prefix_suffix_infix.py
+++ b/spacy/tests/lang/en/test_prefix_suffix_infix.py
@@ -119,6 +119,7 @@ def test_en_tokenizer_splits_period_abbr(en_tokenizer):
     assert tokens[4].text == "Mr."
 
 
+@pytest.mark.issue(225)
 @pytest.mark.xfail(reason="Issue #225 - not yet implemented")
 def test_en_tokenizer_splits_em_dash_infix(en_tokenizer):
     tokens = en_tokenizer(
diff --git a/spacy/tests/lang/fr/test_prefix_suffix_infix.py b/spacy/tests/lang/fr/test_prefix_suffix_infix.py
index 7770f807b..272531b63 100644
--- a/spacy/tests/lang/fr/test_prefix_suffix_infix.py
+++ b/spacy/tests/lang/fr/test_prefix_suffix_infix.py
@@ -4,6 +4,7 @@ from spacy.lang.punctuation import TOKENIZER_INFIXES
 from spacy.lang.char_classes import ALPHA
 
 
+@pytest.mark.issue(768)
 @pytest.mark.parametrize(
     "text,expected_tokens", [("l'avion", ["l'", "avion"]), ("j'ai", ["j'", "ai"])]
 )
diff --git a/spacy/tests/matcher/test_dependency_matcher.py b/spacy/tests/matcher/test_dependency_matcher.py
index 61ae43c52..1728c82af 100644
--- a/spacy/tests/matcher/test_dependency_matcher.py
+++ b/spacy/tests/matcher/test_dependency_matcher.py
@@ -370,6 +370,7 @@ def test_dependency_matcher_span_user_data(en_tokenizer):
             assert doc_t_i == span_t_i + offset
 
 
+@pytest.mark.issue(9263)
 def test_dependency_matcher_order_issue(en_tokenizer):
     # issue from #9263
     doc = en_tokenizer("I like text")
@@ -415,6 +416,7 @@ def test_dependency_matcher_order_issue(en_tokenizer):
     assert matches == []
 
 
+@pytest.mark.issue(9263)
 def test_dependency_matcher_remove(en_tokenizer):
     # issue from #9263
     doc = en_tokenizer("The red book")
diff --git a/spacy/tests/matcher/test_matcher_logic.py b/spacy/tests/matcher/test_matcher_logic.py
index dcbe1ff33..b96bb2032 100644
--- a/spacy/tests/matcher/test_matcher_logic.py
+++ b/spacy/tests/matcher/test_matcher_logic.py
@@ -152,6 +152,7 @@ def test_operator_combos(en_vocab):
             assert not matches, (string, pattern_str)
 
 
+@pytest.mark.issue(1450)
 def test_matcher_end_zero_plus(en_vocab):
     """Test matcher works when patterns end with * operator. (issue 1450)"""
     matcher = Matcher(en_vocab)
diff --git a/spacy/tests/serialize/test_serialize_pipeline.py b/spacy/tests/serialize/test_serialize_pipeline.py
index 05871a524..eebf72638 100644
--- a/spacy/tests/serialize/test_serialize_pipeline.py
+++ b/spacy/tests/serialize/test_serialize_pipeline.py
@@ -162,6 +162,7 @@ def test_serialize_tagger_strings(en_vocab, de_vocab, taggers):
     assert label in tagger2.vocab.strings
 
 
+@pytest.mark.issue(1105)
 def test_serialize_textcat_empty(en_vocab):
     # See issue #1105
     cfg = {"model": DEFAULT_SINGLE_TEXTCAT_MODEL}
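
Note: issue is a custom pytest marker, so it has to be declared in the test configuration or pytest will emit PytestUnknownMarkWarning (and fail under --strict-markers). The sketch below shows one typical way to register such a marker in a project-level conftest.py; the exact wording and location spaCy uses for its registration may differ (it could live in setup.cfg or pytest.ini instead).

    # conftest.py -- sketch only; spaCy's actual marker registration may differ
    def pytest_configure(config):
        # Declare the custom "issue" marker so @pytest.mark.issue(N) is recognized
        config.addinivalue_line(
            "markers", "issue(number): reference the GitHub issue covered by the test"
        )

Once registered, all issue-regression tests can be selected in a single run, e.g. pytest -m issue spacy/tests.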