diff --git a/spacy/lang/ga/tokenizer_exceptions.py b/spacy/lang/ga/tokenizer_exceptions.py
index 185b08895..e93ada52f 100644
--- a/spacy/lang/ga/tokenizer_exceptions.py
+++ b/spacy/lang/ga/tokenizer_exceptions.py
@@ -24,8 +24,7 @@ _exc = {
 
     "led'": [
         {ORTH: "le", LEMMA: "le", NORM: "le", POS: ADP},
-        {ORTH: "d'", LEMMA: "mo", NORM: "do", POS: DET}],
-
+        {ORTH: "d'", LEMMA: "mo", NORM: "do", POS: DET}]
 }
 
 for exc_data in [
@@ -77,11 +76,11 @@ for exc_data in [
     {ORTH: "Uas.", LEMMA: "Uasal", POS: NOUN},
     {ORTH: "uimh.", LEMMA: "uimhir", POS: NOUN},
     {ORTH: "Uimh.", LEMMA: "uimhir", POS: NOUN}]:
-    _exc[exc_data[ORTH]] = [dict(exc_data)],
+    _exc[exc_data[ORTH]] = [exc_data]
 
 for orth in [
     "d'", "D'"]:
     _exc[orth] = [{ORTH: orth}]
 
-TOKENIZER_EXCEPTIONS = dict(_exc)
+TOKENIZER_EXCEPTIONS = _exc
 