# coding: utf-8
from __future__ import unicode_literals

from ...matcher import Matcher
from ...attrs import IS_PUNCT, ORTH

import pytest

def test_issue587(en_tokenizer):
    """Regression test for issue #587: Matcher must not segfault on this input.

    Adds three patterns to one Matcher in sequence and re-runs it against the
    same doc after each add, checking the cumulative match count each time.
    """
    doc = en_tokenizer('a b; c')
    matcher = Matcher(doc.vocab)
    # (pattern name, token pattern, expected total match count after adding it)
    cases = [
        ('TEST1', [{ORTH: 'a'}, {ORTH: 'b'}], 1),
        ('TEST2', [{ORTH: 'a'}, {ORTH: 'b'}, {IS_PUNCT: True}, {ORTH: 'c'}], 2),
        # 'd' never occurs in the doc, so the count stays at 2.
        ('TEST3', [{ORTH: 'a'}, {ORTH: 'b'}, {IS_PUNCT: True}, {ORTH: 'd'}], 2),
    ]
    for name, pattern, expected in cases:
        matcher.add(name, None, pattern)
        assert len(matcher(doc)) == expected