spaCy/spacy/lang/ru/tokenizer_exceptions.py

# encoding: utf8
from __future__ import unicode_literals
from ...symbols import ORTH, LEMMA

# Special-case tokenization rules for Russian: each key is the exact surface
# string to match, and its value is a list of token attribute dicts (using
# symbols such as ORTH and LEMMA) describing how that string should be split.
# No exceptions are defined yet.
TOKENIZER_EXCEPTIONS = {
}
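
For illustration, a minimal sketch of how an entry could be added to this dict, assuming the ORTH/LEMMA attribute format used by spaCy's other lang packages at the time; the abbreviation chosen here is hypothetical and not part of the original file:

# Illustrative sketch only (not in the original file): a hypothetical
# exception keeping the abbreviation "т.е." ("that is") as a single token,
# with its expanded form as the lemma.
_exc = {
    "т.е.": [{ORTH: "т.е.", LEMMA: "то есть"}],
}

TOKENIZER_EXCEPTIONS.update(_exc)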