Mirror of https://github.com/explosion/spaCy.git

Fix dropout and batch-size defaults

commit d9d339186b
parent 9536ee787c

@@ -22,13 +22,13 @@ from .. import about
 # Batch size starts at 1 and grows, so that we make updates quickly
 # at the beginning of training.
 dropout_rates = util.decaying(
-    util.env_opt("dropout_from", 0.2),
-    util.env_opt("dropout_to", 0.2),
+    util.env_opt("dropout_from", 0.1),
+    util.env_opt("dropout_to", 0.1),
     util.env_opt("dropout_decay", 0.0),
 )
 batch_sizes = util.compounding(
-    util.env_opt("batch_from", 1000),
-    util.env_opt("batch_to", 1000),
+    util.env_opt("batch_from", 750),
+    util.env_opt("batch_to", 750),
     util.env_opt("batch_compound", 1.001),
 )
 
@@ -169,6 +169,7 @@ def train(
     else:
         # Start with a blank model, call begin_training
         optimizer = nlp.begin_training(lambda: corpus.train_tuples, device=use_gpu)
 
+    nlp._optimizer = None
 
     # Load in pre-trained weights
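
For context, util.decaying and util.compounding return infinite generators that the training loop draws one value from per batch, and util.env_opt lets an environment variable override the default shown in the diff. The sketch below is not part of the commit; it is a minimal illustration assuming spaCy 2.x's public spacy.util helpers, with nlp, optimizer, and train_data as hypothetical placeholders. It shows that with the new defaults both schedules are effectively constant: dropout stays at 0.1 and the batch size stays at 750.

    # Minimal sketch (not from the commit): how the schedule generators behave
    # with the new defaults. Both are infinite generators; the training loop
    # draws one value per batch.
    from spacy import util

    dropout_rates = util.decaying(0.1, 0.1, 0.0)          # dropout_from, dropout_to, dropout_decay
    batch_sizes = util.compounding(750.0, 750.0, 1.001)   # batch_from, batch_to, batch_compound

    for _ in range(3):
        # With from == to (and decay 0.0 / clipping at the stop value),
        # both schedules stay constant: 0.1 and 750.
        print(next(dropout_rates), next(batch_sizes))

    # Typical consumption pattern in a spaCy 2.x training loop, where
    # train_data is a list of (text, annotations) tuples:
    #     for batch in util.minibatch(train_data, size=batch_sizes):
    #         texts, annotations = zip(*batch)
    #         nlp.update(texts, annotations, drop=next(dropout_rates), sgd=optimizer)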