Mirror of https://github.com/explosion/spaCy.git, synced 2025-10-31 16:07:41 +03:00.
			
		
		
		
	
		
			
				
	
	
		
			13 lines
		
	
	
		
			380 B
		
	
	
	
		
			Python
		
	
	
	
	
	
			
		
		
	
	
			13 lines
		
	
	
		
			380 B
		
	
	
	
		
			Python
		
	
	
	
	
	
| # coding: utf8
 | ||
| from __future__ import unicode_literals
 | ||
| 
 | ||
| import pytest
 | ||
| 
 | ||
| 
 | ||
| @pytest.mark.parametrize('text', ["au-delàs", "pair-programmâmes",
 | ||
|                                   "terra-formées", "σ-compacts"])
 | ||
| def test_issue852(fr_tokenizer, text):
 | ||
|     """Test that French tokenizer exceptions are imported correctly."""
 | ||
|     tokens = fr_tokenizer(text)
 | ||
|     assert len(tokens) == 1
 |