Fix learn rate for non-transformer

Matthew Honnibal 2020-09-04 21:22:50 +02:00
parent 465785a672
commit 4b7abaafdb

@@ -186,11 +186,14 @@ accumulate_gradient = {{ transformer["size_factor"] }}
[training.optimizer]
@optimizers = "Adam.v1"

{% if use_transformer -%}
[training.optimizer.learn_rate]
@schedules = "warmup_linear.v1"
warmup_steps = 250
total_steps = 20000
initial_rate = 5e-5
{% endif %}

[training.train_corpus]
@readers = "spacy.Corpus.v1"
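
As the commit message says, the warmup schedule above is now only emitted when use_transformer is true; without a transformer, the rendered config keeps just the plain Adam.v1 block, so the optimizer falls back to its default learning rate. For reference, the schedule registered as "warmup_linear.v1" ramps the rate linearly from 0 up to initial_rate over warmup_steps and then decays it linearly back toward 0 at total_steps. The Python sketch below only illustrates that shape under those assumptions; it is not the thinc implementation itself.

def warmup_linear(initial_rate, warmup_steps, total_steps):
    # Hypothetical sketch of a linear-warmup / linear-decay schedule;
    # the registered "warmup_linear.v1" schedule may differ in detail.
    step = 0
    while True:
        if step < warmup_steps:
            # Warmup: ramp from 0 to initial_rate over warmup_steps steps.
            factor = step / max(1, warmup_steps)
        else:
            # Decay: fall linearly from initial_rate to 0 at total_steps.
            factor = max(0.0, (total_steps - step) / max(1, total_steps - warmup_steps))
        yield factor * initial_rate
        step += 1

# Example with the values from the template above: the rate peaks at 5e-5
# after 250 steps and reaches 0 at step 20000.
schedule = warmup_linear(5e-5, warmup_steps=250, total_steps=20000)
first_rates = [next(schedule) for _ in range(3)]  # 0.0, 2e-07, 4e-07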