2017-07-03 16:43:06 +03:00
|
|
|
# coding: utf-8
|
|
|
|
from __future__ import unicode_literals
|
|
|
|
|
|
|
|
import pytest
|
|
|
|
|
2018-07-25 00:38:44 +03:00
|
|
|
|
2018-11-27 03:09:36 +03:00
|
|
|
@pytest.mark.parametrize("text", ["ca.", "m.a.o.", "Jan.", "Dec.", "kr.", "jf."])
def test_da_tokenizer_handles_abbr(da_tokenizer, text):
    """Known Danish abbreviations stay intact as a single token."""
    doc = da_tokenizer(text)
    assert len(doc) == 1
|
|
|
|
|
2018-07-25 00:38:44 +03:00
|
|
|
|
2018-11-27 03:09:36 +03:00
|
|
|
@pytest.mark.parametrize("text", ["Jul.", "jul.", "Tor.", "Tors."])
def test_da_tokenizer_handles_ambiguous_abbr(da_tokenizer, text):
    """Ambiguous abbreviations split off the trailing period (two tokens)."""
    doc = da_tokenizer(text)
    assert len(doc) == 2
|
|
|
|
|
2018-07-25 00:38:44 +03:00
|
|
|
|
2018-11-27 03:09:36 +03:00
|
|
|
@pytest.mark.parametrize("text", ["1.", "10.", "31."])
def test_da_tokenizer_handles_dates(da_tokenizer, text):
    """Ordinal day-of-month forms keep the period attached (one token)."""
    doc = da_tokenizer(text)
    assert len(doc) == 1
|
|
|
|
|
2018-07-25 00:38:44 +03:00
|
|
|
|
2017-07-03 16:43:06 +03:00
|
|
|
def test_da_tokenizer_handles_exc_in_text(da_tokenizer):
    """An abbreviation exception ("bl.a.") survives inside running text."""
    doc = da_tokenizer("Det er bl.a. ikke meningen")
    assert len(doc) == 5
    # The abbreviation must come through as one unsplit token.
    assert doc[2].text == "bl.a."
|
2017-11-24 13:29:37 +03:00
|
|
|
|
2018-07-25 00:38:44 +03:00
|
|
|
|
2017-11-24 13:29:37 +03:00
|
|
|
def test_da_tokenizer_handles_custom_base_exc(da_tokenizer):
    """Sentence-final "i." splits into the word "i" plus a period token."""
    doc = da_tokenizer("Her er noget du kan kigge i.")
    assert len(doc) == 8
    # "i" is a real Danish word here, not an abbreviation, so the
    # trailing period must become its own token.
    assert doc[6].text == "i"
    assert doc[7].text == "."
|
2017-11-27 15:35:41 +03:00
|
|
|
|
2018-07-25 00:38:44 +03:00
|
|
|
|
2018-11-27 03:09:36 +03:00
|
|
|
@pytest.mark.parametrize(
    "text,norm", [("akvarium", "akvarie"), ("bedstemoder", "bedstemor")]
)
def test_da_tokenizer_norm_exceptions(da_tokenizer, text, norm):
    """Norm exceptions map variant spellings to their normalized form."""
    doc = da_tokenizer(text)
    assert doc[0].norm_ == norm
|