From 56b820f6afaef14e2cab9a6ff9f5edc58f806554 Mon Sep 17 00:00:00 2001
From: Matthew Honnibal
Date: Thu, 2 Jul 2020 22:32:25 +0200
Subject: [PATCH] Remove broken 'self attention' option in pretrain

---
 spacy/cli/pretrain.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/spacy/cli/pretrain.py b/spacy/cli/pretrain.py
index 6d6c65161..b40dc7f16 100644
--- a/spacy/cli/pretrain.py
+++ b/spacy/cli/pretrain.py
@@ -39,7 +39,6 @@ from .train import _load_pretrained_tok2vec
     cnn_window=("Window size for CNN layers", "option", "cW", int),
     cnn_pieces=("Maxout size for CNN layers. 1 for Mish", "option", "cP", int),
     use_chars=("Whether to use character-based embedding", "flag", "chr", bool),
-    sa_depth=("Depth of self-attention layers", "option", "sa", int),
     bilstm_depth=("Depth of BiLSTM layers (requires PyTorch)", "option", "lstm", int),
     embed_rows=("Number of embedding rows", "option", "er", int),
     loss_func=(
@@ -87,7 +86,6 @@ def pretrain(
     width=96,
     conv_depth=4,
     cnn_pieces=3,
-    sa_depth=0,
     cnn_window=1,
     bilstm_depth=0,
     use_chars=False,
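
For context, each entry in the annotations block touched above follows plac's
(help text, kind, abbreviation, type) tuple convention, which spaCy 2.x used to
declare CLI arguments; the removed sa_depth entry would have exposed a -sa
option. Below is a minimal, self-contained sketch of that mechanism, assuming
the plac package is installed; the script and its output are illustrative
stand-ins, not spaCy's actual pretrain command.

    import plac


    @plac.annotations(
        # Same 4-tuple convention as spacy/cli/pretrain.py:
        # (help text, kind, abbreviation, type). "option" declares an
        # optional CLI flag, exactly how the removed sa_depth line did.
        sa_depth=("Depth of self-attention layers", "option", "sa", int),
    )
    def pretrain(sa_depth=0):
        # Hypothetical stand-in body for demonstration only.
        print(f"sa_depth={sa_depth}")


    if __name__ == "__main__":
        plac.call(pretrain)  # e.g. `python sketch.py -sa 2` sets sa_depth to 2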