Fix dropout and batch-size defaults

Matthew Honnibal 2018-12-01 13:42:35 +00:00
parent 9536ee787c
commit d9d339186b


@@ -22,13 +22,13 @@ from .. import about
 # Batch size starts at 1 and grows, so that we make updates quickly
 # at the beginning of training.
 dropout_rates = util.decaying(
-    util.env_opt("dropout_from", 0.2),
-    util.env_opt("dropout_to", 0.2),
+    util.env_opt("dropout_from", 0.1),
+    util.env_opt("dropout_to", 0.1),
     util.env_opt("dropout_decay", 0.0),
 )
 batch_sizes = util.compounding(
-    util.env_opt("batch_from", 1000),
-    util.env_opt("batch_to", 1000),
+    util.env_opt("batch_from", 750),
+    util.env_opt("batch_to", 750),
     util.env_opt("batch_compound", 1.001),
 )
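
For context, util.decaying and util.compounding return infinite generators of schedule values, and util.env_opt checks for an environment-variable override before falling back to the default. A minimal sketch of how the new defaults behave, assuming the spaCy v2 implementations of these helpers (the values are taken from the hunk above):

    from itertools import islice
    from spacy import util

    # With decay=0.0 the schedule never moves off its start value, so
    # dropout is effectively constant at the new default of 0.1.
    dropout_rates = util.decaying(0.1, 0.1, 0.0)

    # compounding() multiplies by `compound` each step but clips at `stop`;
    # with batch_from == batch_to == 750 the batch size is likewise constant.
    batch_sizes = util.compounding(750, 750, 1.001)

    print(list(islice(dropout_rates, 3)))  # constant 0.1
    print(list(islice(batch_sizes, 3)))    # constant 750.0

In other words, the commit replaces one pair of flat schedules (dropout 0.2, batch size 1000) with another (dropout 0.1, batch size 750); the decay and compound knobs only take effect if the from/to endpoints differ.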
@@ -169,6 +169,7 @@ def train(
     else:
         # Start with a blank model, call begin_training
         optimizer = nlp.begin_training(lambda: corpus.train_tuples, device=use_gpu)
+        nlp._optimizer = None
     # Load in pre-trained weights
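
The second hunk clears the pipeline's cached optimizer (nlp._optimizer is a private attribute) right after begin_training, presumably so that later code uses the optimizer returned here rather than a stale cached one. A minimal sketch, assuming the spaCy v2 training API, of how the schedules above are consumed in a loop like this CLI's; TRAIN_DATA is a hypothetical toy example:

    import spacy
    from spacy import util

    # Hypothetical toy data in the spaCy v2 (text, annotations) format
    TRAIN_DATA = [("Apple buys a startup", {"entities": [(0, 5, "ORG")]})]

    nlp = spacy.blank("en")
    ner = nlp.create_pipe("ner")
    ner.add_label("ORG")
    nlp.add_pipe(ner)

    optimizer = nlp.begin_training()
    dropout_rates = util.decaying(0.1, 0.1, 0.0)
    batch_sizes = util.compounding(750, 750, 1.001)

    for epoch in range(10):
        losses = {}
        # minibatch() accepts a generator for `size`, so each batch pulls
        # the next value from the compounding schedule
        for batch in util.minibatch(TRAIN_DATA, size=batch_sizes):
            texts, annotations = zip(*batch)
            # each update draws its dropout rate from the decaying schedule
            nlp.update(texts, annotations, drop=next(dropout_rates),
                       sgd=optimizer, losses=losses)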