Fix fine_tune when optimizer is None

This commit is contained in:
Matthew Honnibal 2017-08-23 10:51:33 +02:00
parent 9c580ad28a
commit 1c5c256e58

View File

@@ -382,6 +382,7 @@ def fine_tune(embedding, combine=None):
model.d_mix[1] += flat_vecs.dot(flat_grad.T).sum()
bp_vecs([d_o * model.mix[1] for d_o in d_output], sgd=sgd)
if sgd is not None:
    sgd(model._mem.weights, model._mem.gradient, key=model.id)
return [d_o * model.mix[0] for d_o in d_output]
return output, fine_tune_bwd