Mirror of https://github.com/explosion/spaCy.git, synced 2025-10-31 07:57:35 +03:00.
			
		
		
		
Make stable private modules public and adjust names:

* `spacy.ml._character_embed` -> `spacy.ml.character_embed`
* `spacy.ml._precomputable_affine` -> `spacy.ml.precomputable_affine`
* `spacy.tokens._serialize` -> `spacy.tokens.doc_bin`
* `spacy.tokens._retokenize` -> `spacy.tokens.retokenize`
* `spacy.tokens._dict_proxies` -> `spacy.tokens.span_groups`
* Skip `_precomputable_affine`
* `retokenize` -> `retokenizer`
* Fix imports
		
			
				
	
	
		
			22 lines
		
	
	
		
			702 B
		
	
	
	
		
			Python
		
	
	
	
	
	
			
		
		
	
	
			22 lines
		
	
	
		
			702 B
		
	
	
	
		
			Python
		
	
	
	
	
	
| from typing import Dict, Any, Union, List, Tuple
 | |
| from .doc import Doc
 | |
| from .span import Span
 | |
| from .token import Token
 | |
| from .. import Vocab
 | |
| 
 | |
class Retokenizer:
    """Type stub for the retokenization helper.

    Exposes a context-manager interface (``__enter__`` returns the
    Retokenizer itself; see ``__exit__``) plus ``merge``/``split``
    operations on a ``Doc``.

    NOTE(review): the stub alone does not show *when* queued merges and
    splits are applied — presumably on ``__exit__`` as in spaCy's
    ``doc.retokenize()`` docs; confirm against the ``.pyx`` implementation.
    """

    def __init__(self, doc: Doc) -> None: ...
    # attrs keys may be attribute names (str) or attribute IDs (int).
    def merge(self, span: Span, attrs: Dict[Union[str, int], Any] = ...) -> None: ...
    def split(
        self,
        token: Token,
        # One orthographic form per resulting subtoken.
        orths: List[str],
        # Each head is a Token, or a (Token, subtoken-index) pair.
        heads: List[Union[Token, Tuple[Token, int]]],
        # Per-attribute value lists, parallel to ``orths``.
        attrs: Dict[Union[str, int], List[Any]] = ...,
    ) -> None: ...
    def __enter__(self) -> Retokenizer: ...
    def __exit__(self, *args: Any) -> None: ...
 | |
| 
 | |
# Normalize a raw attribute mapping (str or int keys) for retokenization.
# NOTE(review): return type is unannotated in this stub and the value
# schema of ``attrs`` is not visible here — confirm against the ``.pyx``.
def normalize_token_attrs(vocab: Vocab, attrs: Dict): ...
 | |
# Apply a (presumably pre-normalized) attribute mapping to a single token.
# NOTE(review): return type is unannotated in this stub — confirm against
# the ``.pyx`` implementation.
def set_token_attrs(py_token: Token, attrs: Dict): ...
 |