Fix hard-coded vector width

This commit is contained in:
Matthew Honnibal 2017-09-27 11:43:58 -05:00
parent 1ef4236f8e
commit 983201a83a

View File

@@ -553,9 +553,10 @@ class NeuralLabeller(NeuralTagger):
         self.labels[label] = len(self.labels)
         print(len(self.labels))
         if self.model is True:
+            token_vector_width = util.env_opt('token_vector_width')
             self.model = chain(
                 tok2vec,
-                Softmax(len(self.labels), 128)
+                Softmax(len(self.labels), token_vector_width)
             )
         link_vectors_to_models(self.vocab)