Merge lemmatizer tests

parent 3bc082abdf
commit 1add8ace67
@@ -1,12 +0,0 @@
-from __future__ import unicode_literals
-
-from spacy.en import English
-import pytest
-
-@pytest.mark.models
-def test_lemma_assignment(EN):
-    tokens = u'Bananas in pyjamas are geese .'.split(' ')
-    doc = EN.tokenizer.tokens_from_list(tokens)
-    assert all( t.lemma_ == u'' for t in doc )
-    EN.tagger(doc)
-    assert all( t.lemma_ != u'' for t in doc )
@@ -91,3 +91,12 @@ def test_pickle_lemmatizer(lemmatizer):
     file_.seek(0)
 
     loaded = pickle.load(file_)
+
+
+@pytest.mark.models
+def test_lemma_assignment(EN):
+    tokens = u'Bananas in pyjamas are geese .'.split(' ')
+    doc = EN.tokenizer.tokens_from_list(tokens)
+    assert all( t.lemma_ == u'' for t in doc )
+    EN.tagger(doc)
+    assert all( t.lemma_ != u'' for t in doc )
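
The merged test depends on an EN fixture (named in the diff) and on the @pytest.mark.models guard, which skips the test unless model data is available. A minimal sketch of what the surrounding conftest could provide, assuming the 2015-era spacy.en.English API; the session scope and the body of the fixture are assumptions, not part of this commit:

import pytest

from spacy.en import English


@pytest.fixture(scope="session")
def EN():
    # Load the English pipeline once per test session; this requires the
    # model data to be installed, which is why the merged test is marked
    # with @pytest.mark.models.
    return English()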