Mirror of https://github.com/explosion/spaCy.git
Tidy up fixtures

parent b21b2e27e5
commit acd5bcb0b3
@@ -185,6 +185,12 @@ def ru_tokenizer():
     return get_lang_class("ru").Defaults.create_tokenizer()
 
 
+@pytest.fixture
+def ru_lemmatizer():
+    pytest.importorskip("pymorphy2")
+    return get_lang_class("ru").Defaults.create_lemmatizer()
+
+
 @pytest.fixture(scope="session")
 def sr_tokenizer():
     return get_lang_class("sr").Defaults.create_tokenizer()
@@ -2,17 +2,10 @@
 from __future__ import unicode_literals
 
 import pytest
-from spacy.lang.ru import Russian
 
 from ...util import get_doc
 
 
-@pytest.fixture
-def ru_lemmatizer():
-    pytest.importorskip("pymorphy2")
-    return Russian.Defaults.create_lemmatizer()
-
-
 def test_ru_doc_lemmatization(ru_tokenizer):
     words = ["мама", "мыла", "раму"]
     tags = [
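For context (not part of this commit): with ru_lemmatizer moved into the shared conftest, pytest injects it into any Russian test by parameter name, so the test module no longer needs to import Russian itself. A minimal usage sketch follows; the test name is hypothetical, and it assumes the base Lemmatizer's .noun() helper is available on the object the fixture returns.

# Hypothetical usage sketch: request the shared ru_lemmatizer fixture from
# conftest.py by naming it as a parameter. pytest resolves it, and the fixture
# itself skips the test when pymorphy2 is not installed.
def test_ru_lemmatizer_smoke(ru_lemmatizer):
    # Assumption: the lemmatizer exposes .noun(), returning a list of lemmas.
    lemmas = ru_lemmatizer.noun("гвоздики")
    assert isinstance(lemmas, list)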