from spacy.matcher import Matcher
from spacy.tokens import Doc


def test_issue4120(en_vocab):
"""Test that matches without a final {OP: ?} token are returned."""
    matcher = Matcher(en_vocab)
    matcher.add("TEST", [[{"ORTH": "a"}, {"OP": "?"}]])
    doc1 = Doc(en_vocab, words=["a"])
    assert len(matcher(doc1)) == 1  # works
    doc2 = Doc(en_vocab, words=["a", "b", "c"])
    assert len(matcher(doc2)) == 2  # fixed
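    # Pattern with an optional wildcard in the middle, followed by a required "b"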
    matcher = Matcher(en_vocab)
    matcher.add("TEST", [[{"ORTH": "a"}, {"OP": "?"}, {"ORTH": "b"}]])
    doc3 = Doc(en_vocab, words=["a", "b", "b", "c"])
    assert len(matcher(doc3)) == 2  # works
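    # Pattern where both the wildcard and the final "b" are optional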
    matcher = Matcher(en_vocab)
    matcher.add("TEST", [[{"ORTH": "a"}, {"OP": "?"}, {"ORTH": "b", "OP": "?"}]])
    doc4 = Doc(en_vocab, words=["a", "b", "b", "c"])
    assert len(matcher(doc4)) == 3  # fixed