Construct nlp from uninterpolated config before training

Ines Montani 2020-09-26 15:16:59 +02:00
parent e06ff8b71d
commit b2d07de786

@@ -78,6 +78,9 @@ def train(
     config = util.load_config(
         config_path, overrides=config_overrides, interpolate=True
     )
+    # Keep a second un-interpolated config so we can preserve variables in
+    # the final nlp object we train and serialize
+    raw_config = util.load_config(config_path, overrides=config_overrides)
     if config["training"]["seed"] is not None:
         fix_random_seed(config["training"]["seed"])
     allocator = config["training"]["gpu_allocator"]
@@ -86,7 +89,7 @@ def train(
     # Use original config here before it's resolved to functions
     sourced_components = get_sourced_components(config)
     with show_validation_error(config_path):
-        nlp, config = util.load_model_from_config(config)
+        nlp, config = util.load_model_from_config(raw_config)
     util.load_vocab_data_into_model(nlp, lookups=config["training"]["lookups"])
     if config["training"]["vectors"] is not None:
         util.load_vectors_into_model(nlp, config["training"]["vectors"])
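For context, a minimal sketch of the difference between the interpolated and un-interpolated config copies that this change relies on. It assumes thinc's Config API (from_str with the interpolate keyword), which util.load_config wraps; the [paths]/[corpus] sections and values below are made up for illustration, not taken from a real training config.

from thinc.api import Config

# Hypothetical toy config; real spaCy training configs are much larger.
CONFIG_STR = """
[paths]
train = "corpus/train.spacy"

[corpus]
path = "${paths.train}"
"""

# Interpolated copy: variable references are resolved to concrete values.
config = Config().from_str(CONFIG_STR, interpolate=True)
assert config["corpus"]["path"] == "corpus/train.spacy"

# Un-interpolated copy: the ${paths.train} reference is preserved, so an nlp
# object constructed (and later serialized) from this copy keeps the variable
# instead of the baked-in value.
raw_config = Config().from_str(CONFIG_STR, interpolate=False)
assert raw_config["corpus"]["path"] == "${paths.train}"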