from typing import Optional, List
from thinc.types import Floats2d
from thinc.api import Model, reduce_mean, Linear, list2ragged, Logistic
from thinc.api import chain, concatenate, clone, Dropout, ParametricAttention
from thinc.api import SparseLinear, Softmax, softmax_activation, Maxout, reduce_sum
from thinc.api import with_cpu, Relu, residual, LayerNorm
from thinc.layers.chain import init as init_chain

from ...attrs import ORTH
from ...util import registry
from ..extract_ngrams import extract_ngrams
from ..staticvectors import StaticVectors
from ...tokens import Doc
from .tok2vec import get_tok2vec_width


@registry.architectures.register("spacy.TextCatCNN.v1")
def build_simple_cnn_text_classifier(
    tok2vec: Model, exclusive_classes: bool, nO: Optional[int] = None
) -> Model[List[Doc], Floats2d]:
    """
    Build a simple CNN text classifier, given a token-to-vector model as input.
    If exclusive_classes=True, a softmax non-linearity is applied, so that the
    outputs sum to 1. If exclusive_classes=False, a logistic non-linearity
    is applied instead, so that outputs are in the range [0, 1].
    """
    with Model.define_operators({">>": chain}):
        # Pool each Doc's token vectors into a single summary vector, then
        # classify the pooled representation.
        cnn = tok2vec >> list2ragged() >> reduce_mean()
        if exclusive_classes:
            output_layer = Softmax(nO=nO, nI=tok2vec.maybe_get_dim("nO"))
            model = cnn >> output_layer
            model.set_ref("output_layer", output_layer)
        else:
            linear_layer = Linear(nO=nO, nI=tok2vec.maybe_get_dim("nO"))
            model = cnn >> linear_layer >> Logistic()
            model.set_ref("output_layer", linear_layer)
    model.set_ref("tok2vec", tok2vec)
    model.set_dim("nO", nO)
    model.attrs["multi_label"] = not exclusive_classes
    return model
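

# A minimal usage sketch, not part of spaCy's public API: resolve a registered
# tok2vec architecture and plug it into the builder above. The registry name
# and settings mirror the documented example config for "spacy.TextCatCNN.v1";
# treat the exact parameter values as illustrative assumptions.
def _example_simple_cnn_textcat() -> Model[List[Doc], Floats2d]:
    make_tok2vec = registry.architectures.get("spacy.HashEmbedCNN.v1")
    tok2vec = make_tok2vec(
        width=96,
        depth=4,
        embed_size=2000,
        window_size=1,
        maxout_pieces=3,
        subword_features=True,
        pretrained_vectors=None,
    )
    # Three mutually exclusive labels -> softmax outputs summing to 1.
    return build_simple_cnn_text_classifier(tok2vec, exclusive_classes=True, nO=3)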


@registry.architectures.register("spacy.TextCatBOW.v1")
def build_bow_text_classifier(
    exclusive_classes: bool,
    ngram_size: int,
    no_output_layer: bool,
    nO: Optional[int] = None,
) -> Model[List[Doc], Floats2d]:
    with Model.define_operators({">>": chain}):
        # Score sparse bag-of-ngrams features with a single linear layer; the
        # sparse model is wrapped with with_cpu so it always runs on CPU.
        sparse_linear = SparseLinear(nO)
        model = extract_ngrams(ngram_size, attr=ORTH) >> sparse_linear
        model = with_cpu(model, model.ops)
        if not no_output_layer:
            output_layer = softmax_activation() if exclusive_classes else Logistic()
            model = model >> with_cpu(output_layer, output_layer.ops)
    model.set_ref("output_layer", sparse_linear)
    model.attrs["multi_label"] = not exclusive_classes
    return model
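

# A minimal usage sketch (illustrative assumptions: two non-exclusive labels,
# unigram features only). With no_output_layer=False a Logistic layer is
# appended, so predictions are independent per-label probabilities.
def _example_bow_textcat() -> Model[List[Doc], Floats2d]:
    return build_bow_text_classifier(
        exclusive_classes=False, ngram_size=1, no_output_layer=False, nO=2
    )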


@registry.architectures.register("spacy.TextCatEnsemble.v2")
def build_text_classifier_v2(
    tok2vec: Model[List[Doc], List[Floats2d]],
    linear_model: Model[List[Doc], Floats2d],
    nO: Optional[int] = None,
) -> Model[List[Doc], Floats2d]:
    exclusive_classes = not linear_model.attrs["multi_label"]
    with Model.define_operators({">>": chain, "|": concatenate}):
        width = tok2vec.maybe_get_dim("nO")
        attention_layer = ParametricAttention(
            width
        )  # TODO: benchmark performance difference of this layer
        maxout_layer = Maxout(nO=width, nI=width)
        norm_layer = LayerNorm(nI=width)
        cnn_model = (
            tok2vec
            >> list2ragged()
            >> attention_layer
            >> reduce_sum()
            >> residual(maxout_layer >> norm_layer >> Dropout(0.0))
        )

        nO_double = nO * 2 if nO else None
        if exclusive_classes:
            output_layer = Softmax(nO=nO, nI=nO_double)
        else:
            output_layer = Linear(nO=nO, nI=nO_double) >> Logistic()
        # Concatenate the linear model's scores with the attention-pooled CNN
        # features, then classify the combined representation.
        model = (linear_model | cnn_model) >> output_layer
        model.set_ref("tok2vec", tok2vec)
    if model.has_dim("nO") is not False:
        model.set_dim("nO", nO)
    model.set_ref("output_layer", linear_model.get_ref("output_layer"))
    model.set_ref("attention_layer", attention_layer)
    model.set_ref("maxout_layer", maxout_layer)
    model.set_ref("norm_layer", norm_layer)
    model.attrs["multi_label"] = not exclusive_classes

    # Width-dependent dimensions are resolved at init time, see below.
    model.init = init_ensemble_textcat
    return model
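

# A minimal end-to-end sketch (illustrative only): the settings mirror the
# documented example config for "spacy.TextCatEnsemble.v2", where the linear
# submodel keeps its output layer so its label scores are concatenated with
# the CNN features. The exact tok2vec settings and nO=3 are assumptions.
def _example_ensemble_textcat() -> Model[List[Doc], Floats2d]:
    make_tok2vec = registry.architectures.get("spacy.HashEmbedCNN.v1")
    tok2vec = make_tok2vec(
        width=64,
        depth=2,
        embed_size=2000,
        window_size=1,
        maxout_pieces=3,
        subword_features=True,
        pretrained_vectors=None,
    )
    linear_model = build_bow_text_classifier(
        exclusive_classes=True, ngram_size=1, no_output_layer=False, nO=3
    )
    return build_text_classifier_v2(tok2vec, linear_model, nO=3)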


def init_ensemble_textcat(model, X, Y) -> Model:
    # Thinc calls this hook from model.initialize(X, Y). The tok2vec width is
    # only known at init time, so propagate it into the attention, maxout and
    # norm layers before running the default chain initializer.
    tok2vec_width = get_tok2vec_width(model)
    model.get_ref("attention_layer").set_dim("nO", tok2vec_width)
    model.get_ref("maxout_layer").set_dim("nO", tok2vec_width)
    model.get_ref("maxout_layer").set_dim("nI", tok2vec_width)
    model.get_ref("norm_layer").set_dim("nI", tok2vec_width)
    model.get_ref("norm_layer").set_dim("nO", tok2vec_width)
    init_chain(model, X, Y)
    return model


@registry.architectures.register("spacy.TextCatLowData.v1")
def build_text_classifier_lowdata(
    width: int, dropout: Optional[float], nO: Optional[int] = None
) -> Model[List[Doc], Floats2d]:
    # Don't document this yet, I'm not sure it's right.
    # Note: before v3, this was the default architecture when "low_data" and
    # "pretrained_dims" were set.
    with Model.define_operators({">>": chain, "**": clone}):
        model = (
            StaticVectors(width)
            >> list2ragged()
            >> ParametricAttention(width)
            >> reduce_sum()
            >> residual(Relu(width, width)) ** 2
            >> Linear(nO, width)
        )
        if dropout:
            model = model >> Dropout(dropout)
        model = model >> Logistic()
    return model
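

# A minimal usage sketch (illustrative only): this architecture embeds via
# StaticVectors, so it is only meaningful with a vocab that has pretrained
# word vectors loaded. The width, dropout and label count are assumptions.
def _example_lowdata_textcat() -> Model[List[Doc], Floats2d]:
    return build_text_classifier_lowdata(width=300, dropout=0.2, nO=2)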