Mirror of https://github.com/explosion/spaCy.git, synced 2025-05-01 06:13:41 +03:00
[ci skip] Small updates
parent b728eaae18
commit 43bf05275f
@@ -18,9 +18,9 @@ from .trainable_pipe import TrainablePipe
 
 
 @registry.layers("spacy.Softmax.v1")
-def build_linear_logistic(nO=None, nI=None) -> Model[Floats2d, Floats2d]:
-    """An output layer for multi-label classification. It uses a linear layer
-    followed by a logistic activation.
+def build_softmax(nO=None, nI=None) -> Model[Floats2d, Floats2d]:
+    """
+    An output layer for softmax classification.
     """
     return Softmax_v2(nI=nI, nO=nO)
 
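Note (illustrative, not part of this commit): the renamed build_softmax factory wraps thinc's Softmax_v2 layer, so every output row is a probability distribution over labels rather than independent logistic scores. A minimal sketch, assuming toy dimensions of 4 samples, 5 input features and 3 labels:

import numpy
from thinc.api import Softmax_v2

# Build the output layer the same way the factory does (assumed toy sizes).
model = Softmax_v2(nO=3, nI=5)
X = numpy.random.rand(4, 5).astype("f")
model.initialize(X=X)

Y = model.predict(X)
print(Y.shape)        # (4, 3)
print(Y.sum(axis=1))  # each row sums to ~1.0, i.e. a softmax distribution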
@@ -376,11 +376,9 @@ class SpanCategorizerExclusive(TrainablePipe):
             offset += spans.lengths[i]
         target = self.model.ops.asarray(target, dtype="f")  # type: ignore
         negative_samples = numpy.nonzero(negative_spans)[0]
-        breakpoint()
         target[negative_samples, self._negative_label] = 1.0
         d_scores = scores - target
         neg_weight = self.cfg["negative_weight"]
-        if neg_weight != 1.0:
-            d_scores[negative_samples] *= neg_weight
+        d_scores[negative_samples] *= neg_weight
         loss = float((d_scores**2).sum())
         return loss, d_scores
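Note (illustrative sketch, not code from the repo): the hunk above computes a squared-error loss against a one-hot target in which spans without a gold label are assigned the negative label, and their gradient rows are scaled by the negative_weight setting. A small numpy example with assumed values (2 spans, 3 labels, negative label at index 2, negative_weight=0.5):

import numpy

scores = numpy.asarray([[0.7, 0.2, 0.1],
                        [0.1, 0.3, 0.6]], dtype="f")
target = numpy.zeros_like(scores)
target[0, 0] = 1.0                      # span 0: gold label 0
negative_samples = numpy.asarray([1])   # span 1 has no gold label
target[negative_samples, 2] = 1.0       # assign it the negative label

d_scores = scores - target              # gradient of the squared error
neg_weight = 0.5                        # assumed negative_weight value
d_scores[negative_samples] *= neg_weight
loss = float((d_scores ** 2).sum())
print(loss)                             # ~0.205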