Fix obsolete data in English tokenizer exceptions

Matthew Honnibal 2019-03-07 21:58:16 +01:00
parent 7afe56a360
commit 00cfadbf63


@@ -35,8 +35,6 @@ for pron in ["i"]:
                 LEMMA: "be",
                 NORM: "am",
                 TAG: "VBP",
-                "tenspect": 1,
-                "number": 1,
             },
         ]
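
For context, tokenizer exceptions like the one edited here can also be registered at runtime through spaCy's public API. A minimal sketch follows, assuming spaCy v2+; the "i'm" entry is illustrative and is not taken from this commit:

# Minimal sketch: registering a tokenizer special case via spaCy's
# public API. Only ORTH and NORM are set, since keys like the
# "tenspect"/"number" ones removed above are obsolete.
import spacy
from spacy.attrs import ORTH, NORM

nlp = spacy.blank("en")

# "i'm" should split into the pronoun "i" and the verb "'m" (norm "am").
nlp.tokenizer.add_special_case("i'm", [{ORTH: "i"}, {ORTH: "'m", NORM: "am"}])

doc = nlp("i'm here")
print([t.text for t in doc])   # ['i', "'m", 'here']
print([t.norm_ for t in doc])  # ['i', 'am', 'here']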