Fixing encode issue #2

maxirmx 2015-10-21 15:36:21 +03:00
parent e4a1726f77
commit fe9d2e2c4e


@@ -19,7 +19,7 @@ class Lemmatizer(object):
             index[pos] = read_index(path.join(data_dir, 'wordnet', 'index.%s' % pos))
             exc[pos] = read_exc(path.join(data_dir, 'wordnet', '%s.exc' % pos))
         if path.exists(path.join(data_dir, 'vocab', 'lemma_rules.json')):
-            rules = json.load(open(path.join(data_dir, 'vocab', 'lemma_rules.json')), encoding="UTF-8")
+            rules = json.load(open(path.join(data_dir, 'vocab', 'lemma_rules.json'), encoding="UTF-8"))
         else:
             rules = {}
         return cls(index, exc, rules)
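
The change moves encoding="UTF-8" from json.load() to open(). In Python 3 the decoding of the file's bytes is done by open(), so giving open() an explicit encoding makes the read independent of the platform's default codec, while json.load() just parses the already-decoded text. A minimal sketch of the corrected pattern follows; data_dir is a placeholder for the lemmatizer's data directory, not part of the commit.

    import json
    from os import path

    # Hypothetical data directory, stands in for the lemmatizer's data_dir.
    data_dir = 'data'
    rules_loc = path.join(data_dir, 'vocab', 'lemma_rules.json')
    if path.exists(rules_loc):
        # The encoding belongs on open(): the file object decodes the
        # UTF-8 bytes, and json.load() parses the resulting text.
        with open(rules_loc, encoding="UTF-8") as file_:
            rules = json.load(file_)
    else:
        rules = {}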