Mirror of https://github.com/explosion/spaCy.git (synced 2025-11-04 01:48:04 +03:00)
* Auto-format tests with black
* Add flake8 config
* Tidy up and remove unused imports
* Fix redefinitions of test functions
* Replace orths_and_spaces with words and spaces
* Fix compatibility with pytest 4.0
* xfail test for now: the test was previously overwritten by the following test due to a naming conflict, so its failure wasn't reported
* Unfail passing test
* Only use fixture via arguments (fixes pytest 4.0 compatibility)
35 lines · 1.0 KiB · Python
# coding: utf8
from __future__ import unicode_literals

import pytest


SV_TOKEN_EXCEPTION_TESTS = [
    (
        "Smörsåsen används bl.a. till fisk",
        ["Smörsåsen", "används", "bl.a.", "till", "fisk"],
    ),
    (
        "Jag kommer först kl. 13 p.g.a. diverse förseningar",
        ["Jag", "kommer", "först", "kl.", "13", "p.g.a.", "diverse", "förseningar"],
    ),
    (
        "Anders I. tycker om ord med i i.",
        ["Anders", "I.", "tycker", "om", "ord", "med", "i", "i", "."],
    ),
]


@pytest.mark.parametrize("text,expected_tokens", SV_TOKEN_EXCEPTION_TESTS)
def test_sv_tokenizer_handles_exception_cases(sv_tokenizer, text, expected_tokens):
    tokens = sv_tokenizer(text)
    token_list = [token.text for token in tokens if not token.is_space]
    assert expected_tokens == token_list


@pytest.mark.parametrize("text", ["driveru", "hajaru", "Serru", "Fixaru"])
def test_sv_tokenizer_handles_verb_exceptions(sv_tokenizer, text):
    tokens = sv_tokenizer(text)
    assert len(tokens) == 2
    assert tokens[1].text == "u"
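The tests above take the sv_tokenizer fixture via arguments only (as noted in the commit message, this is what keeps them compatible with pytest 4.0); the fixture itself is defined elsewhere in the test suite, typically in a shared conftest.py. Below is a minimal sketch of such a fixture, assuming spacy.util.get_lang_class is used to build the Swedish tokenizer; the exact construction may differ between spaCy versions.

# conftest.py (sketch, not part of the file above): provides the sv_tokenizer
# fixture used by the Swedish tokenizer tests. Construction details here are an
# assumption and may vary between spaCy versions.
import pytest

from spacy.util import get_lang_class


@pytest.fixture
def sv_tokenizer():
    # Instantiate the Swedish language class and return its tokenizer.
    return get_lang_class("sv")().tokenizer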