Mirror of https://github.com/explosion/spaCy.git, synced 2025-07-19 20:52:23 +03:00
Transpose before calculating loss

parent 18444fccd9
commit 67d9ebc922
@@ -315,13 +315,11 @@ class CoreferenceResolver(TrainablePipe):
-            # do softmax to cscores
-            cscores = ops.softmax(cscores, axis=1)
 
-            diff = self.loss.get_grad(cscores, top_gscores)
+            diff = self.loss.get_grad(cscores.T, top_gscores.T).T
             diff = diff[:, 1:]
             gradients.append((diff, cidx))
 
             # scalar loss
             # loss += xp.sum(log_norm - log_marg)
-            loss += float(self.loss.get_loss(cscores, top_gscores))
+            loss += float(self.loss.get_loss(cscores.T, top_gscores.T))
             offset += ll
         return loss, gradients
 
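For context, below is a minimal, self-contained sketch of the transpose-and-transpose-back pattern this diff introduces. It is not the spaCy source: it assumes that self.loss behaves like thinc's CategoricalCrossentropy, and the shapes (5 mentions, 4 candidate antecedents) and the one-hot gold matrix are invented for illustration. Only the names cscores and top_gscores and the .T calls mirror the diff.

# Illustrative sketch, not the spaCy implementation.
# Assumption: the pipe's self.loss behaves like thinc's CategoricalCrossentropy.
import numpy as np
from thinc.api import CategoricalCrossentropy, get_current_ops

ops = get_current_ops()
loss_fn = CategoricalCrossentropy()

n_mentions, n_cands = 5, 4  # hypothetical sizes
# Candidate scores, squashed into [0, 1] so the loss's range checks pass.
cscores = ops.softmax(np.random.rand(n_mentions, n_cands).astype("float32"), axis=1)
# One-hot gold matrix with the same shape as cscores.
top_gscores = np.zeros((n_mentions, n_cands), dtype="float32")
top_gscores[np.arange(n_mentions), 0] = 1.0

# Transpose both matrices before calling the loss, then transpose the
# gradient back so it lines up with cscores again.
d_scores = loss_fn.get_grad(cscores.T, top_gscores.T).T
loss = float(loss_fn.get_loss(cscores.T, top_gscores.T))

assert d_scores.shape == cscores.shape  # (n_mentions, n_cands), as before

One observable consequence of the orientation: with normalize=True (the default), thinc's CategoricalCrossentropy divides the gradient by the size of the first axis of the guesses it receives, so passing the transposed matrices changes that scaling. The commit itself does not state its motivation, so treat this sketch only as a way to see what the .T calls do to shapes and results.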