Update tokenizer_exceptions.py

Ines Montani 2017-06-02 19:00:01 +02:00 committed by GitHub
parent 83467a00a7
commit e7ef51b382


@@ -6,10 +6,9 @@ from ..language_data import PRON_LEMMA, DET_LEMMA
 TOKENIZER_EXCEPTIONS = {
     "pal": [
         {ORTH: "pa", LEMMA: "para"},
-        {ORTH: "el", LEMMA: DET_LEMMA, NORM: "el"}
+        {ORTH: "l", LEMMA: DET_LEMMA, NORM: "el"}
     ],
     "pala": [