# encoding: utf8
from __future__ import unicode_literals, print_function

from os import path

from ..language import Language
from ..attrs import LANG
from .language_data import *
|
class German(Language):
    """German language pipeline class.

    Wires the shared German language data (tokenizer exceptions, tag map,
    stop words — provided by ``from .language_data import *``) into the
    generic :class:`Language` machinery via the nested ``Defaults``.
    """
    # ISO 639-1 code identifying this language.
    lang = 'de'

    class Defaults(Language.Defaults):
        # Copy the base class's getters before mutating, so adding the LANG
        # entry below does not leak into Language.Defaults shared by every
        # other language subclass.
        lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
        lex_attr_getters[LANG] = lambda text: 'de'

        # NOTE(review): the original also had
        # `tokenizer_exceptions = dict(language_data.TOKENIZER_EXCEPTIONS)`,
        # but `language_data` is never imported as a module name (only a
        # star-import of its contents), so that line raised NameError and was
        # immediately shadowed by the assignment below. Removed as dead,
        # broken code.
        tokenizer_exceptions = TOKENIZER_EXCEPTIONS
        tag_map = TAG_MAP
        stop_words = STOP_WORDS