Merge pull request #21 from leofidus/test_notoken

Add tokenizer test for zero length string
honnibal 2015-02-11 00:19:38 +11:00
commit ae36067314


@@ -10,6 +10,10 @@ from spacy.en import English
 def EN():
     return English().tokenizer
 
+def test_no_word(EN):
+    tokens = EN(u'')
+    assert len(tokens) == 0
+
 def test_single_word(EN):
     tokens = EN(u'hello')
     assert tokens[0].orth_ == 'hello'
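
For reference, a self-contained sketch of the test module after this change. The hunk only shows the body of the EN fixture, so the pytest import and the @pytest.fixture decorator here are assumptions about the surrounding file, not part of the diff itself.

import pytest
from spacy.en import English

@pytest.fixture
def EN():
    # Assumed pytest fixture: build the English tokenizer for each test.
    return English().tokenizer

def test_no_word(EN):
    # The new test: a zero-length input string should yield zero tokens.
    tokens = EN(u'')
    assert len(tokens) == 0

def test_single_word(EN):
    tokens = EN(u'hello')
    assert tokens[0].orth_ == 'hello'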