Fix Japanese tokenizer flag

Matthew Honnibal 2018-02-26 10:32:12 +01:00
parent 5faae803c6
commit f0478635df


@@ -27,9 +27,10 @@ import cytoolz
 import conll17_ud_eval
 import spacy.lang.zh
 import spacy.lang.ja
 spacy.lang.zh.Chinese.Defaults.use_jieba = False
-spacy.lang.ja.Chinese.Defaults.use_janome = False
+spacy.lang.ja.Japanese.Defaults.use_janome = False
 random.seed(0)
 numpy.random.seed(0)
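
For context, a minimal sketch of the corrected usage, assuming the spaCy 2.0-era API used by this script, where use_jieba and use_janome are class-level switches on the language defaults. The removed line looked up a Chinese attribute on spacy.lang.ja, so the flag never reached the Japanese defaults; the added line sets it on spacy.lang.ja.Japanese as intended.

# Sketch of the corrected flag usage (spaCy 2.0-era API as used in this script).
import spacy.lang.zh
import spacy.lang.ja

# Disable the external tokenizers (Jieba for Chinese, Janome for Japanese) on
# their own language classes before any pipeline is built, so the class-level
# defaults are already in effect when the tokenizers are created.
spacy.lang.zh.Chinese.Defaults.use_jieba = False
spacy.lang.ja.Japanese.Defaults.use_janome = False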