Fix Adam import

Matthew Honnibal 2017-11-06 14:25:37 +01:00
parent 2eb11d60f2
commit 13336a6197

@@ -15,12 +15,12 @@ from thinc.linear.linear import LinearModel
 from thinc.neural.ops import NumpyOps, CupyOps
 from thinc.neural.util import get_array_module, copy_array
 from thinc.neural._lsuv import svd_orthonormal
+from thinc.neural.optimizers import Adam
 from thinc import describe
 from thinc.describe import Dimension, Synapses, Biases, Gradient
 from thinc.neural._classes.affine import _set_dimensions_if_needed
 import thinc.extra.load_nlp
-from thinc.neural._lsuv import svd_orthonormal
 from .attrs import ID, ORTH, LOWER, NORM, PREFIX, SUFFIX, SHAPE
 from . import util
@@ -49,7 +49,7 @@ def create_default_optimizer(ops, **cfg):
     optimizer = Adam(ops, learn_rate, L2=L2, beta1=beta1,
                      beta2=beta2, eps=eps)
     optimizer.max_grad_norm = max_grad_norm
-    optimizer.device = device
+    optimizer.device = ops.device
     return optimizer

 @layerize
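
Taken together, the patch does three things: it imports Adam from thinc.neural.optimizers so the name is defined where create_default_optimizer constructs it, drops a duplicate svd_orthonormal import, and reads the device from the Ops instance instead of an undefined local variable. A minimal sketch of the patched construction follows, assuming thinc 6.x, where Ops instances expose a device attribute; the hyperparameter values are illustrative placeholders, not the repo's defaults:

from thinc.neural.ops import NumpyOps
from thinc.neural.optimizers import Adam

ops = NumpyOps()
# Build the optimizer the way create_default_optimizer does after this
# patch; the keyword arguments match the call visible in the diff.
optimizer = Adam(ops, 0.001, L2=1e-6, beta1=0.9, beta2=0.999, eps=1e-8)
optimizer.max_grad_norm = 1.0
# The second fix: take the device from the Ops instance rather than
# from an undefined local variable `device`.
optimizer.device = ops.device

Reading the device off ops keeps the optimizer consistent with whichever backend (NumpyOps or CupyOps) the caller passed in, rather than depending on a variable that no longer exists in the function's scope.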