From 0ae05f77ab568c155bc3ddc704f5c115ca7fc18b Mon Sep 17 00:00:00 2001
From: leofidus
Date: Sat, 7 Feb 2015 03:01:44 +0100
Subject: [PATCH] Add tokenizer test for zero length string

---
 tests/test_tokenizer.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/tests/test_tokenizer.py b/tests/test_tokenizer.py
index 58bb1afaf..259719b71 100644
--- a/tests/test_tokenizer.py
+++ b/tests/test_tokenizer.py
@@ -10,6 +10,10 @@ from spacy.en import English
 def EN():
     return English().tokenizer
 
+def test_no_word(EN):
+    tokens = EN(u'')
+    assert len(tokens) == 0
+
 def test_single_word(EN):
     tokens = EN(u'hello')
     assert tokens[0].orth_ == 'hello'