Mirror of https://github.com/explosion/spaCy.git
commit 985dd2236c
parent 8d725d9d9c

Apply suggestions from code review

Co-authored-by: Sofie Van Landeghem <svlandeg@users.noreply.github.com>
@@ -70,7 +70,7 @@ cdef class TrainablePipe(Pipe):

         teacher_pipe (Optional[TrainablePipe]): The teacher pipe to learn
             from.
-        examples (Iterable[Example]): Distillation examples. The eference
+        examples (Iterable[Example]): Distillation examples. The reference
             and predicted docs must have the same number of tokens and the
             same orthography.
         drop (float): dropout rate.
@@ -221,7 +221,7 @@ cdef class Parser(TrainablePipe):

         teacher_pipe (Optional[TrainablePipe]): The teacher pipe to learn
             from.
-        examples (Iterable[Example]): Distillation examples. The eference
+        examples (Iterable[Example]): Distillation examples. The reference
             and predicted docs must have the same number of tokens and the
             same orthography.
         drop (float): dropout rate.
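A minimal usage sketch for the distill API documented in the two hunks above. This is an assumption-laden illustration, not part of the commit: it presumes a spaCy build that exposes TrainablePipe.distill with exactly the parameters named in the docstring (teacher_pipe, examples, drop), and the pipeline names and text are placeholders.

# Hypothetical sketch; assumes TrainablePipe.distill(teacher_pipe, examples, drop=...)
# is available as documented above. Pipeline names are placeholders.
import spacy
from spacy.training import Example

teacher_nlp = spacy.load("en_core_web_md")       # assumed teacher pipeline
student_nlp = spacy.load("my_student_pipeline")  # hypothetical student pipeline

# Per the docstring, the reference and predicted docs must have the same number
# of tokens and the same orthography, so both docs are built from the same text
# (and the two pipelines are assumed to tokenize identically).
texts = ["She sells seashells by the seashore."]
examples = [
    Example(student_nlp.make_doc(text), teacher_nlp.make_doc(text))
    for text in texts
]

teacher_parser = teacher_nlp.get_pipe("parser")
student_parser = student_nlp.get_pipe("parser")

# drop is the dropout rate applied while the student learns from the teacher.
student_parser.distill(teacher_parser, examples, drop=0.1)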
@@ -315,8 +315,6 @@ cdef class Parser(TrainablePipe):

         RETURNS (Tuple[float, float]): The loss and the gradient.

-        RETURNS (Tuple[float, float]): The loss and the gradient.
-
         DOCS: https://spacy.io/api/dependencyparser#get_teacher_student_loss
         """
         loss_func = LegacySequenceCategoricalCrossentropy(normalize=False)
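An illustrative toy, not the code this hunk touches: it shows what the (loss, gradient) pair documented for get_teacher_student_loss could look like for a teacher-student cross-entropy over per-token transition scores. The real method delegates to Thinc's LegacySequenceCategoricalCrossentropy(normalize=False); the shapes and the unnormalized sum below are assumptions made for the illustration.

# Toy illustration of a teacher-student loss and its gradient, written with
# NumPy; this is not the Thinc loss used by the actual implementation.
import numpy as np

def softmax(scores: np.ndarray) -> np.ndarray:
    exps = np.exp(scores - scores.max(axis=-1, keepdims=True))
    return exps / exps.sum(axis=-1, keepdims=True)

def toy_teacher_student_loss(teacher_scores: np.ndarray, student_scores: np.ndarray):
    """Return (loss, d_student_scores) for one (n_tokens, n_actions) score matrix."""
    teacher_probs = softmax(teacher_scores)
    student_probs = softmax(student_scores)
    # Cross-entropy of the student against the teacher's soft labels, summed
    # (not averaged) over tokens, mirroring normalize=False.
    loss = float(-(teacher_probs * np.log(student_probs + 1e-12)).sum())
    # Gradient of that loss with respect to the student's raw scores.
    d_scores = student_probs - teacher_probs
    return loss, d_scores

teacher = np.random.rand(5, 4)   # 5 tokens, 4 candidate actions (made-up sizes)
student = np.random.rand(5, 4)
loss, grad = toy_teacher_student_loss(teacher, student)
print(loss, grad.shape)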