Update default Adam settings

Matthew Honnibal 2018-05-01 15:18:20 +02:00
parent adbb1f7533
commit 548bdff943


@@ -43,8 +43,8 @@ def cosine(vec1, vec2):
 def create_default_optimizer(ops, **cfg):
     learn_rate = util.env_opt('learn_rate', 0.001)
     beta1 = util.env_opt('optimizer_B1', 0.9)
-    beta2 = util.env_opt('optimizer_B2', 0.999)
-    eps = util.env_opt('optimizer_eps', 1e-08)
+    beta2 = util.env_opt('optimizer_B2', 0.9)
+    eps = util.env_opt('optimizer_eps', 1e-12)
     L2 = util.env_opt('L2_penalty', 1e-6)
     max_grad_norm = util.env_opt('grad_norm_clip', 1.)
     optimizer = Adam(ops, learn_rate, L2=L2, beta1=beta1,
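
These defaults are read through util.env_opt, so they can be overridden per process with environment variables of the same names. A minimal sketch of restoring the previous values before training, assuming util.env_opt falls back to os.environ and casts the string to the default's type (the variable names are taken from the diff above):

    import os

    # Set the overrides before spaCy builds its optimizer.
    # Values are strings; env_opt is assumed to cast them to float.
    os.environ['optimizer_B2'] = '0.999'   # old beta2 default
    os.environ['optimizer_eps'] = '1e-08'  # old eps default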