diff --git a/spacy/cli/templates/quickstart_training_recommendations.yml b/spacy/cli/templates/quickstart_training_recommendations.yml
index 54aec2e31..47b3abbf6 100644
--- a/spacy/cli/templates/quickstart_training_recommendations.yml
+++ b/spacy/cli/templates/quickstart_training_recommendations.yml
@@ -1,17 +1,20 @@
 # Recommended settings and available resources for each language, if available.
 # Not all languages have recommended word vectors or transformers and for some,
 # the recommended transformer for efficiency and accuracy may be the same.
-en:
-  word_vectors: en_vectors_web_lg
+ar:
+  word_vectors: null
   transformer:
     efficiency:
-      name: roberta-base
+      name: asafaya/bert-base-arabic
       size_factor: 3
     accuracy:
-      name: roberta-base
+      name: asafaya/bert-base-arabic
       size_factor: 3
+da:
+  word_vectors: da_core_news_lg
+  transformer: null
 de:
-  word_vectors: null
+  word_vectors: de_core_news_lg
   transformer:
     efficiency:
       name: bert-base-german-cased
@@ -19,17 +22,26 @@ de:
     accuracy:
       name: bert-base-german-cased
       size_factor: 3
-fr:
-  word_vectors: null
+el:
+  word_vectors: el_core_news_lg
   transformer:
     efficiency:
-      name: camembert-base
+      name: nlpaueb/bert-base-greek-uncased-v1
       size_factor: 3
     accuracy:
-      name: camembert-base
+      name: nlpaueb/bert-base-greek-uncased-v1
+      size_factor: 3
+en:
+  word_vectors: en_core_web_lg
+  transformer:
+    efficiency:
+      name: roberta-base
+      size_factor: 3
+    accuracy:
+      name: roberta-base
       size_factor: 3
 es:
-  word_vectors: null
+  word_vectors: es_core_news_lg
   transformer:
     efficiency:
       name: dccuchile/bert-base-spanish-wwm-cased
@@ -37,15 +49,6 @@ es:
     accuracy:
       name: dccuchile/bert-base-spanish-wwm-cased
       size_factor: 3
-sv:
-  word_vectors: null
-  transformer:
-    efficiency:
-      name: KB/bert-base-swedish-cased
-      size_factor: 3
-    accuracy:
-      name: KB/bert-base-swedish-cased
-      size_factor: 3
 fi:
   word_vectors: null
   transformer:
@@ -55,14 +58,65 @@ fi:
     accuracy:
       name: TurkuNLP/bert-base-finnish-cased-v1
       size_factor: 3
-el:
+fr:
+  word_vectors: fr_core_news_lg
+  transformer:
+    efficiency:
+      name: camembert-base
+      size_factor: 3
+    accuracy:
+      name: camembert-base
+      size_factor: 3
+it:
+  word_vectors: it_core_news_lg
+  transformer: null
+ja:
+  word_vectors: ja_core_news_lg
+  transformer: null
+lt:
+  word_vectors: lt_core_news_lg
+  transformer: null
+nb:
+  word_vectors: nb_core_news_lg
+  transformer: null
+nl:
+  word_vectors: nl_core_news_lg
+  transformer:
+    efficiency:
+      name: pdelobelle/robbert-v2-dutch-base
+      size_factor: 3
+    accuracy:
+      name: pdelobelle/robbert-v2-dutch-base
+      size_factor: 3
+pl:
+  word_vectors: pl_core_news_lg
+  transformer:
+    efficiency:
+      name: dkleczek/bert-base-polish-cased-v1
+      size_factor: 3
+    accuracy:
+      name: dkleczek/bert-base-polish-cased-v1
+      size_factor: 3
+pt:
+  word_vectors: pt_core_news_lg
+  transformer:
+    efficiency:
+      name: neuralmind/bert-base-portuguese-cased
+      size_factor: 3
+    accuracy:
+      name: neuralmind/bert-base-portuguese-cased
+      size_factor: 3
+ro:
+  word_vectors: ro_core_news_lg
+  transformer: null
+sv:
   word_vectors: null
   transformer:
     efficiency:
-      name: nlpaueb/bert-base-greek-uncased-v1
+      name: KB/bert-base-swedish-cased
       size_factor: 3
     accuracy:
-      name: nlpaueb/bert-base-greek-uncased-v1
+      name: KB/bert-base-swedish-cased
       size_factor: 3
 tr:
   word_vectors: null
@@ -74,7 +128,7 @@ tr:
       name: dbmdz/bert-base-turkish-cased
       size_factor: 3
 zh:
-  word_vectors: null
+  word_vectors: zh_core_web_lg
   transformer:
     efficiency:
       name: bert-base-chinese
@@ -83,39 +137,3 @@ zh:
       name: bert-base-chinese
       size_factor: 3
   has_letters: false
-ar:
-  word_vectors: null
-  transformer:
-    efficiency:
-      name: asafaya/bert-base-arabic
-      size_factor: 3
-    accuracy:
-      name: asafaya/bert-base-arabic
-      size_factor: 3
-pl:
-  word_vectors: null
-  transformer:
-    efficiency:
-      name: dkleczek/bert-base-polish-cased-v1
-      size_factor: 3
-    accuracy:
-      name: dkleczek/bert-base-polish-cased-v1
-      size_factor: 3
-nl:
-  word_vectors: null
-  transformer:
-    efficiency:
-      name: pdelobelle/robbert-v2-dutch-base
-      size_factor: 3
-    accuracy:
-      name: pdelobelle/robbert-v2-dutch-base
-      size_factor: 3
-pt:
-  word_vectors: null
-  transformer:
-    efficiency:
-      name: neuralmind/bert-base-portuguese-cased
-      size_factor: 3
-    accuracy:
-      name: neuralmind/bert-base-portuguese-cased
-      size_factor: 3
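For reference, a minimal sketch of how an entry in this file could be looked up once the change above is applied. It assumes a spaCy source checkout and plain PyYAML for loading; RECS_PATH and recommended_components are hypothetical names for illustration, not the code the quickstart CLI actually uses.

# Illustration only: look up the recommendations for one language code.
# RECS_PATH and recommended_components are assumptions for this sketch,
# not part of spaCy's API.
from pathlib import Path
import yaml  # PyYAML

RECS_PATH = Path("spacy/cli/templates/quickstart_training_recommendations.yml")

def recommended_components(lang, optimize="efficiency"):
    """Return the suggested word vectors and transformer for a language code."""
    recs = yaml.safe_load(RECS_PATH.read_text(encoding="utf8"))
    entry = recs.get(lang) or {}
    transformer = entry.get("transformer") or {}  # "null" in the YAML -> None here
    return {
        "word_vectors": entry.get("word_vectors"),  # e.g. "de_core_news_lg" or None
        "transformer": (transformer.get(optimize) or {}).get("name"),  # e.g. "bert-base-german-cased"
    }

print(recommended_components("de", optimize="accuracy"))
# {'word_vectors': 'de_core_news_lg', 'transformer': 'bert-base-german-cased'}

Entries that set transformer to null (da, it, ja, lt, nb, ro) or word_vectors to null (ar, fi, sv, tr) simply yield None for that slot, which is what the comment at the top of the file means by not every language having recommended resources.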