# spaCy/spacy/cli/package.py

from typing import Optional, Union, Any, Dict, List, Tuple
import shutil
from pathlib import Path
from wasabi import Printer, MarkdownRenderer, get_raw_input
import srsly
import sys
from ._util import app, Arg, Opt, string_to_list, WHEEL_SUFFIX, SDIST_SUFFIX
from ..schemas import validate, ModelMetaSchema
from .. import util
from .. import about


@app.command("package")
def package_cli(
    # fmt: off
    input_dir: Path = Arg(..., help="Directory with pipeline data", exists=True, file_okay=False),
    output_dir: Path = Arg(..., help="Output parent directory", exists=True, file_okay=False),
    code_paths: str = Opt("", "--code", "-c", help="Comma-separated paths to Python files with additional code (registered functions) to be included in the package"),
    meta_path: Optional[Path] = Opt(None, "--meta-path", "--meta", "-m", help="Path to meta.json", exists=True, dir_okay=False),
    create_meta: bool = Opt(False, "--create-meta", "-C", help="Create meta.json, even if one exists"),
    name: Optional[str] = Opt(None, "--name", "-n", help="Package name to override meta"),
    version: Optional[str] = Opt(None, "--version", "-v", help="Package version to override meta"),
    build: str = Opt("sdist", "--build", "-b", help="Comma-separated formats to build: sdist and/or wheel, or none."),
    force: bool = Opt(False, "--force", "-f", "-F", help="Force overwriting existing data in output directory"),
    # fmt: on
):
    """
    Generate an installable Python package for a pipeline. Includes binary data,
    meta and required installation files. A new directory will be created in the
    specified output directory, and the data will be copied over. If
    --create-meta is set and a meta.json already exists in the input directory,
    the existing values will be used as the defaults in the command-line prompt.
    After packaging, "python setup.py sdist" is run in the package directory,
    which will create a .tar.gz archive that can be installed via "pip install".

    If additional code files are provided (e.g. Python files containing custom
    registered functions like pipeline components), they are copied into the
    package and imported in the __init__.py.

    DOCS: https://spacy.io/api/cli#package
    """
    create_sdist, create_wheel = get_build_formats(string_to_list(build))
    code_paths = [Path(p.strip()) for p in string_to_list(code_paths)]
    package(
        input_dir,
        output_dir,
        meta_path=meta_path,
        code_paths=code_paths,
        name=name,
        version=version,
        create_meta=create_meta,
        create_sdist=create_sdist,
        create_wheel=create_wheel,
        force=force,
        silent=False,
    )
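

# Example invocation (illustrative; the paths and the functions.py module are
# hypothetical placeholders, not files shipped with spaCy):
#
#   python -m spacy package ./training/model-best ./packages \
#       --code functions.py --build sdist,wheel --force
#
# Roughly the same thing can be done programmatically by calling package()
# directly with the same hypothetical paths:
#
#   package(
#       Path("./training/model-best"),
#       Path("./packages"),
#       code_paths=[Path("functions.py")],
#       create_sdist=True,
#       create_wheel=True,
#       force=True,
#       silent=False,
#   )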


def package(
    input_dir: Path,
    output_dir: Path,
    meta_path: Optional[Path] = None,
    code_paths: List[Path] = [],
    name: Optional[str] = None,
    version: Optional[str] = None,
    create_meta: bool = False,
    create_sdist: bool = True,
    create_wheel: bool = False,
    force: bool = False,
    silent: bool = True,
) -> None:
    msg = Printer(no_print=silent, pretty=not silent)
    input_path = util.ensure_path(input_dir)
    output_path = util.ensure_path(output_dir)
    meta_path = util.ensure_path(meta_path)
    if create_wheel and not has_wheel():
        err = "Generating a binary .whl file requires wheel to be installed"
        msg.fail(err, "pip install wheel", exits=1)
    if not input_path or not input_path.exists():
        msg.fail("Can't locate pipeline data", input_path, exits=1)
    if not output_path or not output_path.exists():
        msg.fail("Output directory not found", output_path, exits=1)
    if create_sdist or create_wheel:
        opts = ["sdist" if create_sdist else "", "wheel" if create_wheel else ""]
        msg.info(f"Building package artifacts: {', '.join(opt for opt in opts if opt)}")
    for code_path in code_paths:
        if not code_path.exists():
            msg.fail("Can't find code file", code_path, exits=1)
        # Import the code here so it's available when the model is loaded (via
        # the get_meta helper). This also verifies that everything works.
        util.import_file(code_path.stem, code_path)
    if code_paths:
        msg.good(f"Including {len(code_paths)} Python module(s) with custom code")
    if meta_path and not meta_path.exists():
        msg.fail("Can't find pipeline meta.json", meta_path, exits=1)
    meta_path = meta_path or input_dir / "meta.json"
    if not meta_path.exists() or not meta_path.is_file():
        msg.fail("Can't load pipeline meta.json", meta_path, exits=1)
    meta = srsly.read_json(meta_path)
    meta = get_meta(input_dir, meta)
    if name is not None:
        meta["name"] = name
    if version is not None:
        meta["version"] = version
    if not create_meta:  # only print if user doesn't want to overwrite
        msg.good("Loaded meta.json from file", meta_path)
    else:
        meta = generate_meta(meta, msg)
    errors = validate(ModelMetaSchema, meta)
    if errors:
        msg.fail("Invalid pipeline meta.json")
        print("\n".join(errors))
        sys.exit(1)
    model_name = meta["name"]
    if not model_name.startswith(meta["lang"] + "_"):
        model_name = f"{meta['lang']}_{model_name}"
    model_name_v = model_name + "-" + meta["version"]
    main_path = output_dir / model_name_v
    package_path = main_path / model_name
    if package_path.exists():
        if force:
            shutil.rmtree(str(package_path))
        else:
            msg.fail(
                "Package directory already exists",
                "Please delete the directory and try again, or use the "
                "`--force` flag to overwrite existing directories.",
                exits=1,
            )
    Path.mkdir(package_path, parents=True)
    shutil.copytree(str(input_dir), str(package_path / model_name_v))
    for file_name in FILENAMES_DOCS:
        file_path = package_path / model_name_v / file_name
        if file_path.exists():
            shutil.copy(str(file_path), str(main_path))
    readme_path = main_path / "README.md"
    if not readme_path.exists():
        readme = generate_readme(meta)
        create_file(readme_path, readme)
        create_file(package_path / model_name_v / "README.md", readme)
    imports = []
    for code_path in code_paths:
        imports.append(code_path.stem)
        shutil.copy(str(code_path), str(package_path))
    create_file(main_path / "meta.json", srsly.json_dumps(meta, indent=2))
    create_file(main_path / "setup.py", TEMPLATE_SETUP)
    create_file(main_path / "MANIFEST.in", TEMPLATE_MANIFEST)
    init_py = TEMPLATE_INIT.format(
        imports="\n".join(f"from . import {m}" for m in imports)
    )
    create_file(package_path / "__init__.py", init_py)
    msg.good(f"Successfully created package '{model_name_v}'", main_path)
    if create_sdist:
        with util.working_dir(main_path):
            util.run_command([sys.executable, "setup.py", "sdist"], capture=False)
        zip_file = main_path / "dist" / f"{model_name_v}{SDIST_SUFFIX}"
        msg.good("Successfully created zipped Python package", zip_file)
    if create_wheel:
        with util.working_dir(main_path):
            util.run_command([sys.executable, "setup.py", "bdist_wheel"], capture=False)
        wheel = main_path / "dist" / f"{model_name_v}{WHEEL_SUFFIX}"
        msg.good("Successfully created binary wheel", wheel)


def has_wheel() -> bool:
    try:
        import wheel  # noqa: F401
        return True
    except ImportError:
        return False


def get_build_formats(formats: List[str]) -> Tuple[bool, bool]:
    supported = ["sdist", "wheel", "none"]
    for form in formats:
        if form not in supported:
            msg = Printer()
            err = f"Unknown build format: {form}. Supported: {', '.join(supported)}"
            msg.fail(err, exits=1)
    if not formats or "none" in formats:
        return (False, False)
    return ("sdist" in formats, "wheel" in formats)
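

# For illustration, the mapping implemented above (any other value is rejected
# with an "Unknown build format" error):
#
#   get_build_formats(["sdist"])          -> (True, False)   # the CLI default
#   get_build_formats(["sdist", "wheel"]) -> (True, True)
#   get_build_formats(["none"])           -> (False, False)  # skip building
#   get_build_formats([])                 -> (False, False)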


def create_file(file_path: Path, contents: str) -> None:
    file_path.touch()
    file_path.open("w", encoding="utf-8").write(contents)


def get_meta(
    model_path: Union[str, Path], existing_meta: Dict[str, Any]
) -> Dict[str, Any]:
    meta = {
        "lang": "en",
        "name": "pipeline",
        "version": "0.0.0",
        "description": "",
        "author": "",
        "email": "",
        "url": "",
        "license": "MIT",
    }
    meta.update(existing_meta)
    nlp = util.load_model_from_path(Path(model_path))
    meta["spacy_version"] = util.get_model_version_range(about.__version__)
    meta["vectors"] = {
        "width": nlp.vocab.vectors_length,
        "vectors": len(nlp.vocab.vectors),
        "keys": nlp.vocab.vectors.n_keys,
        "name": nlp.vocab.vectors.name,
    }
    if about.__title__ != "spacy":
        meta["parent_package"] = about.__title__
    return meta


def generate_meta(existing_meta: Dict[str, Any], msg: Printer) -> Dict[str, Any]:
    meta = existing_meta or {}
    settings = [
        ("lang", "Pipeline language", meta.get("lang", "en")),
        ("name", "Pipeline name", meta.get("name", "pipeline")),
        ("version", "Package version", meta.get("version", "0.0.0")),
        ("description", "Package description", meta.get("description", None)),
        ("author", "Author", meta.get("author", None)),
        ("email", "Author email", meta.get("email", None)),
        ("url", "Author website", meta.get("url", None)),
        ("license", "License", meta.get("license", "MIT")),
    ]
    msg.divider("Generating meta.json")
    msg.text(
        "Enter the package settings for your pipeline. The following information "
        "will be read from your pipeline data: pipeline, vectors."
    )
    for setting, desc, default in settings:
        response = get_raw_input(desc, default)
        meta[setting] = default if response == "" and default else response
    return meta


def generate_readme(meta: Dict[str, Any]) -> str:
    """
    Generate a Markdown-formatted README text from a model meta.json. Used
    within the GitHub release notes and as content for the README.md file added
    to model packages.
    """
    md = MarkdownRenderer()
    lang = meta["lang"]
    name = f"{lang}_{meta['name']}"
    version = meta["version"]
    pipeline = ", ".join([md.code(p) for p in meta.get("pipeline", [])])
    components = ", ".join([md.code(p) for p in meta.get("components", [])])
    vecs = meta.get("vectors", {})
    vectors = f"{vecs.get('keys', 0)} keys, {vecs.get('vectors', 0)} unique vectors ({vecs.get('width', 0)} dimensions)"
    author = meta.get("author") or "n/a"
    notes = meta.get("notes", "")
    license_name = meta.get("license")
    sources = _format_sources(meta.get("sources"))
    description = meta.get("description")
    label_scheme = _format_label_scheme(meta.get("labels"))
    accuracy = _format_accuracy(meta.get("performance"))
    table_data = [
        (md.bold("Name"), md.code(name)),
        (md.bold("Version"), md.code(version)),
        (md.bold("spaCy"), md.code(meta["spacy_version"])),
        (md.bold("Default Pipeline"), pipeline),
        (md.bold("Components"), components),
        (md.bold("Vectors"), vectors),
        (md.bold("Sources"), sources or "n/a"),
        (md.bold("License"), md.code(license_name) if license_name else "n/a"),
        (md.bold("Author"), md.link(author, meta["url"]) if "url" in meta else author),
    ]
    # Put together the Markdown body
    if description:
        md.add(description)
    md.add(md.table(table_data, ["Feature", "Description"]))
    if label_scheme:
        md.add(md.title(3, "Label Scheme"))
        md.add(label_scheme)
    if accuracy:
        md.add(md.title(3, "Accuracy"))
        md.add(accuracy)
    if notes:
        md.add(notes)
    return md.text
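

# Illustrative sketch: with a minimal, hypothetical meta like the one below,
# generate_readme() returns a Markdown string containing a Feature/Description
# table, plus optional "Label Scheme" and "Accuracy" sections when "labels" and
# "performance" are present in the meta.
#
#   meta = {
#       "lang": "en",
#       "name": "pipeline",
#       "version": "0.0.0",
#       "spacy_version": ">=3.1.0,<3.2.0",
#       "pipeline": ["tok2vec", "ner"],
#       "description": "Example pipeline",
#   }
#   readme = generate_readme(meta)  # Markdown text, e.g. written to README.md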


def _format_sources(data: Any) -> str:
    if not data or not isinstance(data, list):
        return "n/a"
    sources = []
    for source in data:
        if not isinstance(source, dict):
            source = {"name": source}
        name = source.get("name")
        if not name:
            continue
        url = source.get("url")
        author = source.get("author")
        result = name if not url else "[{}]({})".format(name, url)
        if author:
            result += " ({})".format(author)
        sources.append(result)
    return "<br />".join(sources)


def _format_accuracy(data: Dict[str, Any], exclude: List[str] = ["speed"]) -> str:
    if not data:
        return ""
    md = MarkdownRenderer()
    scalars = [(k, v) for k, v in data.items() if isinstance(v, (int, float))]
    scores = [
        (md.code(acc.upper()), f"{score*100:.2f}")
        for acc, score in scalars
        if acc not in exclude
    ]
    md.add(md.table(scores, ["Type", "Score"]))
    return md.text


def _format_label_scheme(data: Dict[str, Any]) -> str:
    if not data:
        return ""
    md = MarkdownRenderer()
    n_labels = 0
    n_pipes = 0
    label_data = []
    for pipe, labels in data.items():
        if not labels:
            continue
        col1 = md.bold(md.code(pipe))
        col2 = ", ".join(
            [md.code(label.replace("|", "\\|")) for label in labels]
        )  # noqa: W605
        label_data.append((col1, col2))
        n_labels += len(labels)
        n_pipes += 1
    if not label_data:
        return ""
    label_info = f"View label scheme ({n_labels} labels for {n_pipes} components)"
    md.add("<details>")
    md.add(f"<summary>{label_info}</summary>")
    md.add(md.table(label_data, ["Component", "Labels"]))
    md.add("</details>")
    return md.text


TEMPLATE_SETUP = """
#!/usr/bin/env python
import io
import json
from os import path, walk
from shutil import copy
from setuptools import setup


def load_meta(fp):
    with io.open(fp, encoding='utf8') as f:
        return json.load(f)


def load_readme(fp):
    if path.exists(fp):
        with io.open(fp, encoding='utf8') as f:
            return f.read()
    return ""


def list_files(data_dir):
    output = []
    for root, _, filenames in walk(data_dir):
        for filename in filenames:
            if not filename.startswith('.'):
                output.append(path.join(root, filename))
    output = [path.relpath(p, path.dirname(data_dir)) for p in output]
    output.append('meta.json')
    return output


def list_requirements(meta):
    parent_package = meta.get('parent_package', 'spacy')
    requirements = [parent_package + meta['spacy_version']]
    if 'setup_requires' in meta:
        requirements += meta['setup_requires']
    if 'requirements' in meta:
        requirements += meta['requirements']
    return requirements


def setup_package():
    root = path.abspath(path.dirname(__file__))
    meta_path = path.join(root, 'meta.json')
    meta = load_meta(meta_path)
    readme_path = path.join(root, 'README.md')
    readme = load_readme(readme_path)
    model_name = str(meta['lang'] + '_' + meta['name'])
    model_dir = path.join(model_name, model_name + '-' + meta['version'])
    copy(meta_path, path.join(model_name))
    copy(meta_path, model_dir)
    setup(
        name=model_name,
        description=meta.get('description'),
        long_description=readme,
        author=meta.get('author'),
        author_email=meta.get('email'),
        url=meta.get('url'),
        version=meta['version'],
        license=meta.get('license'),
        packages=[model_name],
        package_data={model_name: list_files(model_dir)},
        install_requires=list_requirements(meta),
        zip_safe=False,
        entry_points={'spacy_models': ['{m} = {m}'.format(m=model_name)]}
    )


if __name__ == '__main__':
    setup_package()
""".lstrip()


TEMPLATE_MANIFEST = """
include meta.json
include LICENSE
include LICENSES_SOURCES
include README.md
""".strip()


TEMPLATE_INIT = """
from pathlib import Path
from spacy.util import load_model_from_init_py, get_model_meta

{imports}

__version__ = get_model_meta(Path(__file__).parent)['version']


def load(**overrides):
    return load_model_from_init_py(__file__, **overrides)
""".lstrip()
FILENAMES_DOCS = ["LICENSE", "LICENSES_SOURCES", "README.md"]
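

# Once the generated sdist or wheel is installed, the pipeline can be loaded
# either via spacy.load() using the package name, or by importing the package
# and calling the load() helper defined by TEMPLATE_INIT above. The package
# name below is hypothetical:
#
#   import spacy
#   nlp = spacy.load("en_pipeline")
#
#   import en_pipeline
#   nlp = en_pipeline.load()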