Use latest wasabi

This commit is contained in:
Ines Montani 2019-11-04 02:38:45 +01:00
parent d82630d7c1
commit cf4ec88b38
13 changed files with 13 additions and 27 deletions

View File

@@ -4,7 +4,7 @@ preshed>=3.0.2,<3.1.0
thinc>=7.3.0,<7.4.0
blis>=0.4.0,<0.5.0
murmurhash>=0.28.0,<1.1.0
wasabi>=0.3.0,<1.1.0
wasabi>=0.4.0,<1.1.0
srsly>=0.1.0,<1.1.0
# Third party dependencies
numpy>=1.15.0

View File

@@ -49,7 +49,7 @@ install_requires =
blis>=0.4.0,<0.5.0
plac>=0.9.6,<1.2.0
requests>=2.13.0,<3.0.0
wasabi>=0.3.0,<1.1.0
wasabi>=0.4.0,<1.1.0
srsly>=0.1.0,<1.1.0
pathlib==1.0.1; python_version < "3.4"
importlib_metadata>=0.20; python_version < "3.8"

View File

@@ -7,12 +7,10 @@ from __future__ import print_function
if __name__ == "__main__":
import plac
import sys
from wasabi import Printer
from wasabi import msg
from spacy.cli import download, link, info, package, train, pretrain, convert
from spacy.cli import init_model, profile, evaluate, validate, debug_data
msg = Printer()
commands = {
"download": download,
"link": link,

View File

@@ -6,16 +6,13 @@ import requests
import os
import subprocess
import sys
from wasabi import Printer
from wasabi import msg
from .link import link
from ..util import get_package_path
from .. import about
msg = Printer()
@plac.annotations(
model=("Model to download (shortcut or name)", "positional", None, str),
direct=("Force direct download of name + version", "flag", "d", bool),

View File

@@ -3,7 +3,7 @@ from __future__ import unicode_literals, division, print_function
import plac
from timeit import default_timer as timer
from wasabi import Printer
from wasabi import msg
from ..gold import GoldCorpus
from .. import util
@@ -32,7 +32,6 @@ def evaluate(
Evaluate a model. To render a sample of parses in a HTML file, set an
output directory as the displacy_path argument.
"""
msg = Printer()
util.fix_random_seed()
if gpu_id >= 0:
util.use_gpu(gpu_id)

View File

@@ -4,7 +4,7 @@ from __future__ import unicode_literals
import plac
import platform
from pathlib import Path
from wasabi import Printer
from wasabi import msg
import srsly
from ..compat import path2str, basestring_, unicode_
@@ -23,7 +23,6 @@ def info(model=None, markdown=False, silent=False):
specified as an argument, print model information. Flag --markdown
prints details in Markdown for easy copy-pasting to GitHub issues.
"""
msg = Printer()
if model:
if util.is_package(model):
model_path = util.get_package_path(model)

View File

@@ -11,7 +11,7 @@ import tarfile
import gzip
import zipfile
import srsly
from wasabi import Printer
from wasabi import msg
from ..vectors import Vectors
from ..errors import Errors, Warnings, user_warning
@@ -24,7 +24,6 @@ except ImportError:
DEFAULT_OOV_PROB = -20
msg = Printer()
@plac.annotations(

View File

@@ -3,7 +3,7 @@ from __future__ import unicode_literals
import plac
from pathlib import Path
from wasabi import Printer
from wasabi import msg
from ..compat import symlink_to, path2str
from .. import util
@@ -20,7 +20,6 @@ def link(origin, link_name, force=False, model_path=None):
either the name of a pip package, or the local path to the model data
directory. Linking models allows loading them via spacy.load(link_name).
"""
msg = Printer()
if util.is_package(origin):
model_path = util.get_package_path(origin)
else:

View File

@@ -4,7 +4,7 @@ from __future__ import unicode_literals
import plac
import shutil
from pathlib import Path
from wasabi import Printer, get_raw_input
from wasabi import msg, get_raw_input
import srsly
from ..compat import path2str
@@ -27,7 +27,6 @@ def package(input_dir, output_dir, meta_path=None, create_meta=False, force=Fals
set and a meta.json already exists in the output directory, the existing
values will be used as the defaults in the command-line prompt.
"""
msg = Printer()
input_path = util.ensure_path(input_dir)
output_path = util.ensure_path(output_dir)
meta_path = util.ensure_path(meta_path)

View File

@@ -11,7 +11,7 @@ from pathlib import Path
from thinc.v2v import Affine, Maxout
from thinc.misc import LayerNorm as LN
from thinc.neural.util import prefer_gpu
from wasabi import Printer
from wasabi import msg
import srsly
from ..errors import Errors
@@ -122,7 +122,6 @@ def pretrain(
for key in config:
if isinstance(config[key], Path):
config[key] = str(config[key])
msg = Printer()
util.fix_random_seed(seed)
has_gpu = prefer_gpu()

View File

@@ -9,7 +9,7 @@ import pstats
import sys
import itertools
import thinc.extra.datasets
from wasabi import Printer
from wasabi import msg
from ..util import load_model
@@ -26,7 +26,6 @@ def profile(model, inputs=None, n_texts=10000):
It can either be provided as a JSONL file, or be read from sys.stdin.
If no input file is specified, the IMDB dataset is loaded via Thinc.
"""
msg = Printer()
if inputs is not None:
inputs = _read_inputs(inputs, msg)
if inputs is None:

View File

@@ -8,7 +8,7 @@ from thinc.neural._classes.model import Model
from timeit import default_timer as timer
import shutil
import srsly
from wasabi import Printer
from wasabi import msg
import contextlib
import random
@@ -89,7 +89,6 @@ def train(
# temp fix to avoid import issues cf https://github.com/explosion/spaCy/issues/4200
import tqdm
msg = Printer()
util.fix_random_seed()
util.set_env_log(verbose)

View File

@@ -5,7 +5,7 @@ from pathlib import Path
import sys
import requests
import srsly
from wasabi import Printer
from wasabi import msg
from ..compat import path2str
from ..util import get_data_path
@@ -17,7 +17,6 @@ def validate():
Validate that the currently installed version of spaCy is compatible
with the installed models. Should be run after `pip install -U spacy`.
"""
msg = Printer()
with msg.loading("Loading compatibility table..."):
r = requests.get(about.__compatibility__)
if r.status_code != 200: