Mirror of https://github.com/explosion/spaCy.git (synced 2025-01-26 17:24:41 +03:00)
* Mark tests that require models
This commit is contained in:
parent 15eab7354f
commit 8e26a28616
@@ -1,6 +1,7 @@
 from __future__ import unicode_literals
 import pytest
 
+@pytest.mark.models
 def test_merge_tokens(EN):
     tokens = EN(u'Los Angeles start.')
     assert len(tokens) == 4
@@ -12,6 +13,7 @@ def test_merge_tokens(EN):
     assert tokens[0].head.orth_ == 'start'
 
 
+@pytest.mark.models
 def test_merge_heads(EN):
     tokens = EN(u'I found a pilates class near work.')
     assert len(tokens) == 8
@@ -26,6 +28,7 @@ def test_merge_heads(EN):
     assert tokens[5].head.i == 4
 
 
+@pytest.mark.models
 def test_issue_54(EN):
     text = u'Talks given by women had a slightly higher number of questions asked (3.2$\pm$0.2) than talks given by men (2.6$\pm$0.1).'
     tokens = EN(text)
@@ -44,6 +47,7 @@ def test_np_merges(EN):
         merged = tokens.merge(start, end, label, lemma, label)
         assert merged != None, (start, end, label, lemma)
 
+@pytest.mark.models
 def test_entity_merge(EN):
     tokens = EN(u'Stewart Lee is a stand up comedian who lives in England and loves Joe Pasquale.\n')
     assert(len(tokens) == 17)
@@ -53,6 +57,7 @@ def test_entity_merge(EN):
     # check looping is ok
     assert(len(tokens) == 15)
 
+@pytest.mark.models
 def test_sentence_update_after_merge(EN):
     tokens = EN(u'Stewart Lee is a stand up comedian. He lives in England and loves Joe Pasquale.')
     sent1, sent2 = list(tokens.sents)
@@ -65,6 +70,7 @@ def test_sentence_update_after_merge(EN):
     assert(len(sent1) == init_len - 1)
     assert(len(sent2) == init_len2 - 1)
 
+@pytest.mark.models
 def test_subtree_size_check(EN):
     tokens = EN(u'Stewart Lee is a stand up comedian who lives in England and loves Joe Pasquale')
     sent1 = list(tokens.sents)[0]
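The change itself is small: every test that drives the EN fixture, and therefore needs English model data installed on disk, now carries the @pytest.mark.models marker so these tests can be selected or skipped as a group. Below is a minimal conftest.py sketch of how such a marker and fixture could be wired together; the --models flag, the fixture body, and the skip logic are assumptions for illustration, not code from this commit.

# conftest.py: a minimal sketch, not taken from this commit, showing one
# way the "models" marker and the EN fixture used above could be wired up.
import pytest


@pytest.fixture(scope="session")
def EN():
    # spacy.en.English was spaCy's English entry point at the time;
    # constructing it loads the installed model data from disk, which is
    # why these tests are expensive and need the marker.
    from spacy.en import English
    return English()


def pytest_addoption(parser):
    # "--models" is a hypothetical flag name chosen for this sketch.
    parser.addoption("--models", action="store_true", default=False,
                     help="run tests that need installed model data")


def pytest_collection_modifyitems(config, items):
    # Without --models, attach a skip marker to every test decorated
    # with @pytest.mark.models.
    if config.getoption("--models"):
        return
    skip = pytest.mark.skip(reason="needs --models and installed model data")
    for item in items:
        if "models" in item.keywords:
            item.add_marker(skip)

With wiring like this, py.test skips the marked tests by default and py.test --models runs them; even without any hook code, py.test -m "not models" deselects them using pytest's built-in marker expressions.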