Use normal PyTorchWrapper in coref

This commit is contained in:
Paul O'Leary McCann 2022-07-06 19:22:19 +09:00
parent f67c1735c5
commit b59b924e49

View File

@@ -62,7 +62,6 @@ def coref_init(model: Model, X=None, Y=None):
     antecedent_batch_size = model.attrs["antecedent_batch_size"]
     distance_embedding_size = model.attrs["distance_embedding_size"]
-    PyTorchWrapper = registry.get("layers", "PyTorchWrapper.v2")
     model._layers = [
         PyTorchWrapper(
             CorefClusterer(