Update comment on Language.begin_training

This commit is contained in:
Matthew Honnibal 2017-09-14 16:18:30 +02:00
parent c6395b057a
commit 70da88a3a7

View File

@ -347,15 +347,9 @@ class Language(object):
     """Allocate models, pre-process training data and acquire a trainer and
     optimizer. Used as a contextmanager.
-    gold_tuples (iterable): Gold-standard training data.
+    get_gold_tuples (function): Function returning gold data
     **cfg: Config parameters.
-    YIELDS (tuple): A trainer and an optimizer.
-
-    EXAMPLE:
-        >>> with nlp.begin_training(gold, use_gpu=True) as (trainer, optimizer):
-        >>>     for epoch in trainer.epochs(gold):
-        >>>         for docs, golds in epoch:
-        >>>             state = nlp.update(docs, golds, sgd=optimizer)
+    returns: An optimizer
     """
     if self.parser:
         self.pipeline.append(NeuralLabeller(self.vocab))