Mirror of https://github.com/explosion/spaCy.git
Merge conftests into one cohesive file

parent 909f24d7df
commit c682b8ca90
conftest.py (merged):

@@ -1,14 +1,64 @@
-import pytest
-import os
+# coding: utf-8
+from __future__ import unicode_literals
 
 from ..en import English
 from ..de import German
+from ..es import Spanish
+from ..it import Italian
+from ..fr import French
+from ..pt import Portuguese
+from ..nl import Dutch
+from ..sv import Swedish
+from ..hu import Hungarian
+from ..tokens import Doc
+from ..attrs import ORTH, TAG, HEAD, DEP
+
+from StringIO import StringIO
+import pytest
+
 
+LANGUAGES = [English, German, Spanish, Italian, French, Portuguese, Dutch,
+             Swedish, Hungarian]
+
+
+@pytest.fixture(params=LANGUAGES)
+def tokenizer(request):
+    lang = request.param
+    return lang.Defaults.create_tokenizer()
+
+
+@pytest.fixture
+def en_tokenizer():
+    return English.Defaults.create_tokenizer()
+
+
+@pytest.fixture
+def en_vocab():
+    return English.Defaults.create_vocab()
+
+
+@pytest.fixture
+def de_tokenizer():
+    return German.Defaults.create_tokenizer()
+
+
+@pytest.fixture
+def hu_tokenizer():
+    return Hungarian.Defaults.create_tokenizer()
+
+
+@pytest.fixture
+def text_file():
+    return StringIO()
+
+
+# deprecated, to be replaced with more specific instances
 @pytest.fixture(scope="session")
 def EN():
     return English()
 
 
+# deprecated, to be replaced with more specific instances
 @pytest.fixture(scope="session")
 def DE():
     return German()
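For context, a minimal sketch of how a test module would consume these merged fixtures. pytest injects conftest.py fixtures by argument name, so test files need no imports of the fixtures themselves. The module name, test names, and assertions below are illustrative, not part of the commit:

# hypothetical test module living under the tests directory

def test_en_tokenizer_splits_punctuation(en_tokenizer):
    # en_tokenizer comes from the merged conftest.py
    tokens = en_tokenizer(u"Hello world!")
    assert len(tokens) == 3  # "Hello", "world", "!"


def test_tokenizer_runs_for_every_language(tokenizer):
    # The tokenizer fixture is parametrized with params=LANGUAGES,
    # so pytest runs this test once per language class.
    tokens = tokenizer(u"a b c")
    assert len(tokens) > 0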
deleted per-directory conftest (German):

@@ -1,11 +0,0 @@
-# coding: utf-8
-from __future__ import unicode_literals
-
-import pytest
-
-from ...de import German
-
-
-@pytest.fixture
-def de_tokenizer():
-    return German.Defaults.create_tokenizer()
deleted per-directory conftest (English):

@@ -1,11 +0,0 @@
-# coding: utf-8
-from __future__ import unicode_literals
-
-import pytest
-
-from ...en import English
-
-
-@pytest.fixture
-def en_tokenizer():
-    return English.Defaults.create_tokenizer()
deleted per-directory conftest (Hungarian):

@@ -1,11 +0,0 @@
-# coding: utf-8
-from __future__ import unicode_literals
-
-import pytest
-
-from ...hu import Hungarian
-
-
-@pytest.fixture
-def hu_tokenizer():
-    return Hungarian.Defaults.create_tokenizer()
deleted per-directory conftest (tokenizer tests):

@@ -1,23 +0,0 @@
-# coding: utf-8
-from __future__ import unicode_literals
-
-import pytest
-
-from ...en import English
-from ...de import German
-from ...es import Spanish
-from ...it import Italian
-from ...fr import French
-from ...pt import Portuguese
-from ...nl import Dutch
-from ...sv import Swedish
-from ...hu import Hungarian
-
-
-LANGUAGES = [English, German, Spanish, Italian, French, Dutch, Swedish, Hungarian]
-
-
-@pytest.fixture(params=LANGUAGES)
-def tokenizer(request):
-    lang = request.param
-    return lang.Defaults.create_tokenizer()
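The per-directory conftests could be deleted because pytest makes fixtures defined in a parent directory's conftest.py visible to every test below it, so one definition now serves all the language test suites. A self-contained sketch of the params mechanism the merged tokenizer fixture relies on (stand-in values, not spaCy code):

import pytest

LANGUAGES = ["en", "de", "hu"]  # stand-ins for the language classes


@pytest.fixture(params=LANGUAGES)
def tokenizer(request):
    # request.param is one element of LANGUAGES; pytest collects a
    # separate test item for each element.
    return "tokenizer-for-{}".format(request.param)


def test_tokenizer(tokenizer):
    # Collected three times: test_tokenizer[en], [de], [hu].
    assert tokenizer.startswith("tokenizer-for-")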