Don't use a generator for no reason

Paul O'Leary McCann 2021-05-24 19:06:15 +09:00
parent d6fd5fe1c0
commit d6389b133d
2 changed files with 2 additions and 6 deletions


@@ -2,7 +2,7 @@ from dataclasses import dataclass
 from thinc.api import Model, Linear, Relu, Dropout, chain, noop
 from thinc.types import Floats2d, Floats1d, Ints2d, Ragged
-from typing import List, Callable, Tuple, Any, Generator
+from typing import List, Callable, Tuple, Any
 from ...tokens import Doc
 from ...util import registry
@@ -70,10 +70,6 @@ def tuplify(layer1: Model, layer2: Model, *layers) -> Model:
 def tuplify_forward(model, X, is_train):
     Ys = []
     backprops = []
-    # If the input is a generator we need to unroll it.
-    # The type check is necessary because arrays etc. are also OK.
-    if isinstance(X, Generator):
-        X = list(X)
     for layer in model.layers:
         Y, backprop = layer(X, is_train)
         Ys.append(Y)


@@ -207,7 +207,7 @@ class CoreferenceResolver(TrainablePipe):
             return losses
         set_dropout_rate(self.model, drop)
-        inputs = (example.predicted for example in examples)
+        inputs = [example.predicted for example in examples]
         preds, backprop = self.model.begin_update(inputs)
         score_matrix, mention_idx = preds
         loss, d_scores = self.get_loss(examples, score_matrix, mention_idx)
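
Why the change fits together: tuplify_forward passes the same X to every layer, so a generator coming in from the pipe would be exhausted after the first layer consumed it; the removed isinstance(X, Generator) check worked around that by unrolling the input. Building a list in CoreferenceResolver.update makes the workaround unnecessary. Below is a minimal standalone sketch of the failure mode, not code from the repository; run_layers and the lambda layers are illustrative only.

def run_layers(layers, X):
    # Each "layer" here is just a function that consumes the whole input.
    return [layer(X) for layer in layers]

layers = [lambda xs: sum(xs), lambda xs: len(list(xs))]

gen_inputs = (n * n for n in range(4))   # generator: exhausted after the first layer
print(run_layers(layers, gen_inputs))    # [14, 0]  -- second layer sees nothing

list_inputs = [n * n for n in range(4)]  # list: can be iterated by every layer
print(run_layers(layers, list_inputs))   # [14, 4]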