Mirror of https://github.com/explosion/spaCy.git, synced 2025-10-29 06:57:49 +03:00
	Make stable private modules public and adjust names (#11353)
* Make stable private modules public and adjust names
* `spacy.ml._character_embed` -> `spacy.ml.character_embed`
* `spacy.ml._precomputable_affine` -> `spacy.ml.precomputable_affine`
* `spacy.tokens._serialize` -> `spacy.tokens.doc_bin`
* `spacy.tokens._retokenize` -> `spacy.tokens.retokenize`
* `spacy.tokens._dict_proxies` -> `spacy.tokens.span_groups`
* Skip _precomputable_affine
* retokenize -> retokenizer
* Fix imports
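For downstream code that imported these formerly private modules, a version-tolerant import is a reasonable migration path. A minimal sketch, assuming only the paths renamed in this commit:

    # Minimal compatibility sketch: prefer the new public module path and
    # fall back to the old private one on spaCy versions before this change.
    try:
        from spacy.tokens.span_groups import SpanGroups  # new public path
    except ImportError:
        from spacy.tokens._dict_proxies import SpanGroups  # old private path

    try:
        from spacy.ml import character_embed  # new public path
    except ImportError:
        from spacy.ml import _character_embed as character_embed  # old private path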
This commit is contained in:
parent 4bce8fa755
commit 98a916e01a

setup.py | 2 +-
@@ -60,7 +60,7 @@ MOD_NAMES = [
     "spacy.tokens.span_group",
     "spacy.tokens.graph",
     "spacy.tokens.morphanalysis",
-    "spacy.tokens._retokenize",
+    "spacy.tokens.retokenizer",
     "spacy.matcher.matcher",
     "spacy.matcher.phrasematcher",
     "spacy.matcher.dependencymatcher",
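The entry above lives in setup.py's MOD_NAMES, the list of Cython modules compiled at build time, so renaming a module also renames its build target. As a rough illustration only (not spaCy's actual setup.py), a MOD_NAMES-style list typically maps to Cython extensions like this:

    # Rough sketch (not spaCy's actual setup.py): dotted module names such as
    # those in MOD_NAMES become Cython Extension build targets.
    from setuptools import Extension

    MOD_NAMES = ["spacy.tokens.retokenizer"]  # was "spacy.tokens._retokenize"

    ext_modules = [
        Extension(name, sources=[name.replace(".", "/") + ".pyx"])
        for name in MOD_NAMES
    ]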
@@ -7,7 +7,7 @@ from thinc.api import expand_window, residual, Maxout, Mish, PyTorchLSTM
 from ...tokens import Doc
 from ...util import registry
 from ...errors import Errors
-from ...ml import _character_embed
+from ...ml import character_embed
 from ..staticvectors import StaticVectors
 from ..featureextractor import FeatureExtractor
 from ...pipeline.tok2vec import Tok2VecListener
@@ -226,7 +226,7 @@ def CharacterEmbed(
     if feature is None:
         raise ValueError(Errors.E911.format(feat=feature))
     char_embed = chain(
-        _character_embed.CharacterEmbed(nM=nM, nC=nC),
+        character_embed.CharacterEmbed(nM=nM, nC=nC),
         cast(Model[List[Floats2d], Ragged], list2ragged()),
     )
     feature_extractor: Model[List[Doc], Ragged] = chain(
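The CharacterEmbed architecture above chains the character embedding table with list2ragged. With the rename, the layer can also be constructed directly from the public module; a hedged sketch (the nM/nC values here are illustrative, not defaults):

    # Hedged sketch: building the character-embedding layer from the renamed
    # public module. nM is the embedding width per character, nC the number
    # of characters taken from each token, mirroring the call in the diff.
    from spacy.ml.character_embed import CharacterEmbed
    from spacy.tokens import Doc
    from spacy.vocab import Vocab

    model = CharacterEmbed(nM=64, nC=8)
    model.initialize()
    doc = Doc(Vocab(), words=["hello", "world"])
    outputs = model.predict([doc])  # one (n_tokens, nC * nM) array per Doc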
@@ -11,7 +11,7 @@ from ..matcher import Matcher
 from ..scorer import Scorer
 from ..symbols import IDS
 from ..tokens import Doc, Span
-from ..tokens._retokenize import normalize_token_attrs, set_token_attrs
+from ..tokens.retokenizer import normalize_token_attrs, set_token_attrs
 from ..vocab import Vocab
 from ..util import SimpleFrozenList, registry
 from .. import util
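normalize_token_attrs and set_token_attrs are the retokenizer helpers the AttributeRuler uses internally to apply attribute overrides; user code reaches them through the public pipe, which is unaffected by the rename:

    # The AttributeRuler applies token attribute overrides via the
    # retokenizer helpers imported above; its public usage is unchanged.
    import spacy

    nlp = spacy.blank("en")
    ruler = nlp.add_pipe("attribute_ruler")
    ruler.add(patterns=[[{"ORTH": "Bros"}]], attrs={"LEMMA": "brother"})
    doc = nlp("The Smith Bros arrived.")
    assert doc[2].lemma_ == "brother"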
@@ -9,7 +9,7 @@ from thinc.types import Array2d, Ragged

 from spacy.lang.en import English
 from spacy.ml import FeatureExtractor, StaticVectors
-from spacy.ml._character_embed import CharacterEmbed
+from spacy.ml.character_embed import CharacterEmbed
 from spacy.tokens import Doc


@@ -7,7 +7,7 @@ from spacy import util
 from spacy.lang.en import English
 from spacy.language import Language
 from spacy.tokens import SpanGroup
-from spacy.tokens._dict_proxies import SpanGroups
+from spacy.tokens.span_groups import SpanGroups
 from spacy.training import Example
 from spacy.util import fix_random_seed, registry, make_tempdir

@@ -1,7 +1,7 @@
 import pytest

 from spacy.tokens import Span, SpanGroup
-from spacy.tokens._dict_proxies import SpanGroups
+from spacy.tokens.span_groups import SpanGroups


 @pytest.mark.issue(10685)
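SpanGroups is the dict-like container behind Doc.spans, which is why both the serialization code and the tests above import it. Its role in the public API, for context:

    # SpanGroups backs the public doc.spans mapping of named span groups.
    import spacy

    nlp = spacy.blank("en")
    doc = nlp("Berlin is a city in Germany")
    doc.spans["cities"] = [doc[0:1], doc[5:6]]  # a list of Spans becomes a SpanGroup
    assert len(doc.spans["cities"]) == 2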
@@ -2,7 +2,7 @@ from .doc import Doc
 from .token import Token
 from .span import Span
 from .span_group import SpanGroup
-from ._serialize import DocBin
+from .doc_bin import DocBin
 from .morphanalysis import MorphAnalysis

 __all__ = ["Doc", "Token", "Span", "SpanGroup", "DocBin", "MorphAnalysis"]
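Because spacy.tokens re-exports DocBin, the public import path is unchanged; only the defining module moved from _serialize to doc_bin. A quick round trip for context:

    # DocBin remains importable from spacy.tokens; only its defining
    # module moved. A minimal serialize/deserialize round trip:
    import spacy
    from spacy.tokens import DocBin

    nlp = spacy.blank("en")
    doc_bin = DocBin()
    doc_bin.add(nlp("Serialize me"))
    data = doc_bin.to_bytes()
    docs = list(DocBin().from_bytes(data).get_docs(nlp.vocab))
    assert docs[0].text == "Serialize me"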
@@ -4,8 +4,8 @@ from cymem.cymem import Pool
 from thinc.types import Floats1d, Floats2d, Ints2d
 from .span import Span
 from .token import Token
-from ._dict_proxies import SpanGroups
-from ._retokenize import Retokenizer
+from .span_groups import SpanGroups
+from .retokenizer import Retokenizer
 from ..lexeme import Lexeme
 from ..vocab import Vocab
 from .underscore import Underscore
@@ -19,7 +19,7 @@ import warnings

 from .span cimport Span
 from .token cimport MISSING_DEP
-from ._dict_proxies import SpanGroups
+from .span_groups import SpanGroups
 from .token cimport Token
 from ..lexeme cimport Lexeme, EMPTY_LEXEME
 from ..typedefs cimport attr_t, flags_t
@@ -35,8 +35,8 @@ from .. import util
 from .. import parts_of_speech
 from .. import schemas
 from .underscore import Underscore, get_ext_args
-from ._retokenize import Retokenizer
-from ._serialize import ALL_ATTRS as DOCBIN_ALL_ATTRS
+from .retokenizer import Retokenizer
+from .doc_bin import ALL_ATTRS as DOCBIN_ALL_ATTRS
 from ..util import get_words_and_spaces

 DEF PADDING = 5
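The Doc implementation imports Retokenizer to back the doc.retokenize() context manager, which is the supported way to reach it rather than importing from the module directly:

    # Retokenizer is normally reached via the doc.retokenize() context
    # manager rather than imported from spacy.tokens.retokenizer directly.
    import spacy

    nlp = spacy.blank("en")
    doc = nlp("New York is big")
    with doc.retokenize() as retokenizer:
        retokenizer.merge(doc[0:2], attrs={"LEMMA": "New York"})
    assert doc[0].text == "New York"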
@@ -12,7 +12,7 @@ from ..compat import copy_reg
 from ..attrs import SPACY, ORTH, intify_attr, IDS
 from ..errors import Errors
 from ..util import ensure_path, SimpleFrozenList
-from ._dict_proxies import SpanGroups
+from .span_groups import SpanGroups

 # fmt: off
 ALL_ATTRS = ("ORTH", "NORM", "TAG", "HEAD", "DEP", "ENT_IOB", "ENT_TYPE", "ENT_KB_ID", "ENT_ID", "LEMMA", "MORPH", "POS", "SENT_START")
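ALL_ATTRS is the default set of token attributes DocBin serializes (the Doc implementation above imports it as DOCBIN_ALL_ATTRS). After this commit it is importable from the public module:

    # After this commit, ALL_ATTRS lives in the public spacy.tokens.doc_bin
    # module; it lists the token attributes DocBin stores by default.
    from spacy.tokens.doc_bin import ALL_ATTRS

    assert "LEMMA" in ALL_ATTRS and "SENT_START" in ALL_ATTRS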