Add missing tokenizer exceptions (resolves #1281)

ines 2017-08-21 16:11:36 +02:00
parent c5c3f4c7d9
commit edc596d9a7

@@ -276,7 +276,10 @@ for verb_data in [
     {ORTH: "are", LEMMA: "be", TAG: "VBP", "number": 2},
     {ORTH: "is", LEMMA: "be", TAG: "VBZ"},
     {ORTH: "was", LEMMA: "be"},
-    {ORTH: "were", LEMMA: "be"}
+    {ORTH: "were", LEMMA: "be"},
+    {ORTH: "have"},
+    {ORTH: "has", LEMMA: "have"},
+    {ORTH: "dare"}
 ]:
     verb_data_tc = dict(verb_data)
     verb_data_tc[ORTH] = verb_data_tc[ORTH].title()
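
Note on what the new entries do: per the trailing context lines of the hunk, each verb_data dict is title-cased, so a single entry covers both the lowercase and capitalized form ("have" and "Have"). The following is a minimal, self-contained sketch of how such entries typically expand into tokenizer exceptions for negated contractions (e.g. "haven't" splitting into "have" + "n't"); the EXC table, the "n't" suffix handling, and the string values used for ORTH/LEMMA/TAG are illustrative assumptions, not the file's actual surrounding code.

# Hedged sketch (assumed names): expand verb entries into tokenizer
# exceptions for negated contractions, mirroring the title-casing step
# visible in the diff context above.
ORTH, LEMMA, TAG = "orth", "lemma", "tag"  # placeholder attribute keys

EXC = {}  # hypothetical exceptions table: string -> list of token dicts

for verb_data in [
    {ORTH: "were", LEMMA: "be"},
    {ORTH: "have"},
    {ORTH: "has", LEMMA: "have"},
    {ORTH: "dare"},
]:
    verb_data_tc = dict(verb_data)
    verb_data_tc[ORTH] = verb_data_tc[ORTH].title()  # "have" -> "Have"

    # Each lowercase/titlecase variant yields an exception for its "n't"
    # form, split into the verb token plus an "n't" token lemmatized to
    # "not", so "haven't" tokenizes as ["have", "n't"].
    for data in [verb_data, verb_data_tc]:
        EXC[data[ORTH] + "n't"] = [
            dict(data),
            {ORTH: "n't", LEMMA: "not", TAG: "RB"},
        ]

assert EXC["haven't"] == [
    {ORTH: "have"},
    {ORTH: "n't", LEMMA: "not", TAG: "RB"},
]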