Add missing tokenizer exceptions

This commit is contained in:
ines 2017-05-12 09:25:24 +02:00
parent bb8be3d194
commit 48177c4f92

View File

@ -17,7 +17,7 @@ class Danish(Language):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'da'
- tokenizer_exceptions = update_exc(BASE_EXCEPTIONS)
+ tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
stop_words = set(STOP_WORDS)