Merge pull request #2152 from explosion/feature/tidy-up-dependencies

💫 Tidy up dependencies

Commit a609a1ca29
@@ -9,6 +9,7 @@ coordinates. Can be extended with more details from the API.
 * Custom pipeline components: https://spacy.io//usage/processing-pipelines#custom-components
 
 Compatible with: spaCy v2.0.0+
+Prerequisites: pip install requests
 """
 from __future__ import unicode_literals, print_function
 
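Because requests is dropped from spaCy's own dependency list below, standalone examples that call a REST API have to bring it themselves. A hypothetical guard (not part of this diff) that such an example script could use:

    # Hypothetical guard, not part of the diff: requests is no longer installed
    # together with spaCy, so fail early with an actionable message.
    try:
        import requests  # noqa: F401
    except ImportError:
        raise ImportError("This example requires requests: pip install requests")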
@@ -8,9 +8,6 @@ murmurhash>=0.28,<0.29
 plac<1.0.0,>=0.9.6
 ujson>=1.35
 dill>=0.2,<0.3
-requests>=2.13.0,<3.0.0
 regex==2017.4.5
 pytest>=3.0.6,<4.0.0
 mock>=2.0.0,<3.0.0
-msgpack-python==0.5.4
-msgpack-numpy==0.4.1
setup.py (5 changed lines)
@@ -194,10 +194,7 @@ def setup_package():
             'pathlib',
             'ujson>=1.35',
             'dill>=0.2,<0.3',
-            'requests>=2.13.0,<3.0.0',
-            'regex==2017.4.5',
-            'msgpack-python==0.5.4',
-            'msgpack-numpy==0.4.1'],
+            'regex==2017.4.5'],
         classifiers=[
             'Development Status :: 5 - Production/Stable',
             'Environment :: Console',
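As an illustrative sanity check (assuming spaCy has been reinstalled from this setup.py and setuptools is available), the trimmed install_requires can be inspected through pkg_resources; requests, msgpack-python and msgpack-numpy should no longer be listed:

    # Illustrative check, not part of the diff: list spaCy's declared
    # requirements after reinstalling with the slimmed-down setup.py.
    import pkg_resources

    reqs = [str(r) for r in pkg_resources.get_distribution('spacy').requires()]
    print(reqs)
    assert not any(r.startswith(('requests', 'msgpack')) for r in reqs)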
@@ -2,13 +2,14 @@
 from __future__ import unicode_literals
 
 import plac
-import requests
 import os
 import subprocess
 import sys
+import ujson
 
 from .link import link
 from ..util import prints, get_package_path
+from ..compat import url_read, HTTPError
 from .. import about
 
 
@@ -56,13 +57,14 @@ def download(model, direct=False):
 
 
 def get_json(url, desc):
-    r = requests.get(url)
-    if r.status_code != 200:
+    try:
+        data = url_read(url)
+    except HTTPError as e:
         msg = ("Couldn't fetch %s. Please find a model for your spaCy "
                "installation (v%s), and download it manually.")
         prints(msg % (desc, about.__version__), about.__docs_models__,
-               title="Server error (%d)" % r.status_code, exits=1)
-    return r.json()
+               title="Server error (%d: %s)" % (e.code, e.reason), exits=1)
+    return ujson.loads(data)
 
 
 def get_compatibility():
@@ -1,12 +1,12 @@
 # coding: utf8
 from __future__ import unicode_literals, print_function
 
-import requests
 import pkg_resources
 from pathlib import Path
 import sys
+import ujson
 
-from ..compat import path2str, locale_escape
+from ..compat import path2str, locale_escape, url_read, HTTPError
 from ..util import prints, get_data_path, read_json
 from .. import about
 
@@ -15,11 +15,12 @@ def validate():
     """Validate that the currently installed version of spaCy is compatible
     with the installed models. Should be run after `pip install -U spacy`.
     """
-    r = requests.get(about.__compatibility__)
-    if r.status_code != 200:
+    try:
+        data = url_read(about.__compatibility__)
+    except HTTPError as e:
         prints("Couldn't fetch compatibility table.",
-               title="Server error (%d)" % r.status_code, exits=1)
-    compat = r.json()['spacy']
+               title="Server error (%d: %s)" % (e.code, e.reason), exits=1)
+    compat = ujson.loads(data)['spacy']
     current_compat = compat.get(about.__version__)
     if not current_compat:
         prints(about.__compatibility__, exits=1,
@@ -33,6 +33,16 @@ try:
 except ImportError:
     from thinc.neural.optimizers import Adam as Optimizer
 
+try:
+    import urllib.request
+except ImportError:
+    import urllib2 as urllib
+
+try:
+    from urllib.error import HTTPError
+except ImportError:
+    from urllib2 import HTTPError
+
 pickle = pickle
 copy_reg = copy_reg
 CudaStream = CudaStream
@@ -56,6 +66,7 @@ if is_python2:
     input_ = raw_input  # noqa: F821
     json_dumps = lambda data: ujson.dumps(data, indent=2, escape_forward_slashes=False).decode('utf8')
     path2str = lambda path: str(path).decode('utf8')
+    url_open = urllib.urlopen
 
 elif is_python3:
     bytes_ = bytes
@@ -64,6 +75,16 @@ elif is_python3:
     input_ = input
     json_dumps = lambda data: ujson.dumps(data, indent=2, escape_forward_slashes=False)
     path2str = lambda path: str(path)
+    url_open = urllib.request.urlopen
+
+
+def url_read(url):
+    file_ = url_open(url)
+    code = file_.getcode()
+    if code != 200:
+        raise HTTPError(url, code, "Cannot GET url", [], file_)
+    data = file_.read()
+    return data
 
 
 def b_to_str(b_str):
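A minimal usage sketch of the new helper, mirroring the updated download.py and validate.py code paths above (assumes a spaCy install that includes this change plus ujson):

    # Sketch only: fetch the compatibility table with the stdlib-based helper
    # instead of requests, then parse it with ujson as the CLI now does.
    import ujson

    from spacy import about
    from spacy.compat import url_read, HTTPError

    try:
        data = url_read(about.__compatibility__)
    except HTTPError as e:
        print("Server error (%d: %s)" % (e.code, e.reason))
    else:
        compat = ujson.loads(data)['spacy']
        current = compat.get(about.__version__)  # models compatible with this version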
@@ -8,7 +8,9 @@ cimport numpy as np
 import cytoolz
 from collections import OrderedDict
 import ujson
-import msgpack
+
+from .util import msgpack
+from .util import msgpack_numpy
 
 from thinc.api import chain
 from thinc.v2v import Affine, SELU, Softmax
@@ -23,9 +23,10 @@ from .symbols import ORTH
 from .compat import cupy, CudaStream, path2str, basestring_, input_, unicode_
 from .compat import import_file
 
-import msgpack
-import msgpack_numpy
-msgpack_numpy.patch()
+# Import these directly from Thinc, so that we're sure we always have the
+# same version.
+from thinc.neural._classes.model import msgpack
+from thinc.neural._classes.model import msgpack_numpy
 
 
 LANGUAGES = {}
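The point of routing msgpack through Thinc is that every spaCy module serialises with the exact msgpack/msgpack-numpy pair Thinc was built against. A small round-trip sketch under that assumption (illustrative, not part of the diff):

    # Illustrative round trip using the shared codec re-exported by spacy.util.
    import numpy
    from spacy.util import msgpack, msgpack_numpy

    msgpack_numpy.patch()  # make plain dumps/loads numpy-aware (idempotent)
    arr = numpy.arange(6, dtype='float32').reshape(2, 3)
    assert numpy.array_equal(arr, msgpack.loads(msgpack.dumps(arr)))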
@@ -4,9 +4,10 @@ from __future__ import unicode_literals
 import functools
 import numpy
 from collections import OrderedDict
-import msgpack
-import msgpack_numpy
-msgpack_numpy.patch()
+
+from .util import msgpack
+from .util import msgpack_numpy
+
 cimport numpy as np
 from thinc.neural.util import get_array_module
 from thinc.neural._classes.model import Model