From f432bb4b48d84d541420d3888c4487b4e0d57622 Mon Sep 17 00:00:00 2001
From: ines
Date: Sun, 4 Jun 2017 22:34:31 +0200
Subject: [PATCH] Fix fixture scopes

---
 spacy/tests/conftest.py | 32 ++++++++++++++++----------------
 1 file changed, 16 insertions(+), 16 deletions(-)

diff --git a/spacy/tests/conftest.py b/spacy/tests/conftest.py
index b5a34cb2d..dc5f26536 100644
--- a/spacy/tests/conftest.py
+++ b/spacy/tests/conftest.py
@@ -22,48 +22,48 @@ _models = {'en': ['en_core_web_sm', 'en_core_web_md'],
 # only used for tests that require loading the models
 # in all other cases, use specific instances
 
-@pytest.fixture(params=_models['en'], scope="session")
+@pytest.fixture(params=_models['en'], scope='session')
 def EN(request):
     return load_test_model(request.param)
 
 
-@pytest.fixture(params=_models['de'], scope="session")
+@pytest.fixture(params=_models['de'], scope='session')
 def DE(request):
     return load_test_model(request.param)
 
 
-@pytest.fixture(params=_models['fr'], scope="session")
+@pytest.fixture(params=_models['fr'], scope='session')
 def FR(request):
     return load_test_model(request.param)
 
 
-@pytest.fixture(params=_languages)
+@pytest.fixture(params=_languages, scope='session')
 def tokenizer(request):
     lang = util.get_lang_class(request.param)
     return lang.Defaults.create_tokenizer()
 
 
-@pytest.fixture
+@pytest.fixture(scope='module')
 def en_tokenizer():
     return util.get_lang_class('en').Defaults.create_tokenizer()
 
 
-@pytest.fixture
+@pytest.fixture(scope='module')
 def en_vocab():
     return util.get_lang_class('en').Defaults.create_vocab()
 
 
-@pytest.fixture
+@pytest.fixture(scope='module')
 def en_parser():
     return util.get_lang_class('en').Defaults.create_parser()
 
 
-@pytest.fixture
+@pytest.fixture(scope='module')
 def es_tokenizer():
     return util.get_lang_class('es').Defaults.create_tokenizer()
 
 
-@pytest.fixture
+@pytest.fixture(scope='module')
 def de_tokenizer():
     return util.get_lang_class('de').Defaults.create_tokenizer()
 
@@ -73,31 +73,31 @@ def fr_tokenizer():
     return util.get_lang_class('fr').Defaults.create_tokenizer()
 
 
-@pytest.fixture
+@pytest.fixture(scope='module')
 def hu_tokenizer():
     return util.get_lang_class('hu').Defaults.create_tokenizer()
 
 
-@pytest.fixture
+@pytest.fixture(scope='module')
 def fi_tokenizer():
     return util.get_lang_class('fi').Defaults.create_tokenizer()
 
 
-@pytest.fixture
+@pytest.fixture(scope='module')
 def sv_tokenizer():
     return util.get_lang_class('sv').Defaults.create_tokenizer()
 
 
-@pytest.fixture
+@pytest.fixture(scope='module')
 def bn_tokenizer():
     return util.get_lang_class('bn').Defaults.create_tokenizer()
 
 
-@pytest.fixture
+@pytest.fixture(scope='module')
 def he_tokenizer():
     return util.get_lang_class('he').Defaults.create_tokenizer()
 
 
-@pytest.fixture
+@pytest.fixture(scope='module')
 def nb_tokenizer():
     return util.get_lang_class('nb').Defaults.create_tokenizer()
 
@@ -107,7 +107,7 @@ def stringstore():
     return StringStore()
 
 
-@pytest.fixture
+@pytest.fixture(scope='module')
 def en_entityrecognizer():
     return util.get_lang_class('en').Defaults.create_entity()
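The patch standardizes on `scope='session'` for the model-loading fixtures and `scope='module'` for the cheaper tokenizer/vocab/parser fixtures, so each object is built once and reused rather than recreated for every test. As a minimal standalone sketch of pytest's scoping behavior (illustrative only, not spaCy code; the fixture name and the `CREATED` list are made up for the example):

```python
import pytest

CREATED = []


# scope='module': pytest builds this fixture once per test module and hands
# the same object to every test in the module. The default (function scope)
# would rebuild it for each test; scope='session' would share one instance
# across the entire test run.
@pytest.fixture(scope='module')
def expensive_resource():
    CREATED.append(object())  # record each construction
    return CREATED[-1]


def test_first(expensive_resource):
    assert len(CREATED) == 1


def test_second(expensive_resource):
    # still exactly one construction: the module-scoped fixture was reused
    assert len(CREATED) == 1
```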