"""Test that tokens are created correctly for whitespace."""

from __future__ import unicode_literals

import pytest
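
# Note: the ``en_tokenizer`` fixture used by the tests below is not defined in
# this module; it is assumed to be provided by the test suite's shared
# ``conftest.py``. A minimal sketch of such a fixture (assuming the tokenizer
# can be taken from spaCy's English language class) might look like:
#
#     @pytest.fixture
#     def en_tokenizer():
#         from spacy.lang.en import English
#         return English().tokenizer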


@pytest.mark.parametrize('text', ["hello possums"])
def test_tokenizer_splits_single_space(en_tokenizer, text):
    tokens = en_tokenizer(text)
    assert len(tokens) == 2


@pytest.mark.parametrize('text', ["hello  possums"])
def test_tokenizer_splits_double_space(en_tokenizer, text):
    tokens = en_tokenizer(text)
    assert len(tokens) == 3
    assert tokens[1].text == " "


@pytest.mark.parametrize('text', ["hello\npossums"])
def test_tokenizer_splits_newline(en_tokenizer, text):
    tokens = en_tokenizer(text)
    assert len(tokens) == 3
    assert tokens[1].text == "\n"


@pytest.mark.parametrize('text', ["hello \npossums"])
def test_tokenizer_splits_newline_space(en_tokenizer, text):
    tokens = en_tokenizer(text)
    assert len(tokens) == 3


@pytest.mark.parametrize('text', ["hello  \npossums"])
def test_tokenizer_splits_newline_double_space(en_tokenizer, text):
    tokens = en_tokenizer(text)
    assert len(tokens) == 3


@pytest.mark.parametrize('text', ["hello \n possums"])
def test_tokenizer_splits_newline_space_wrap(en_tokenizer, text):
    tokens = en_tokenizer(text)
    assert len(tokens) == 3