Mirror of https://github.com/explosion/spaCy.git

Merge branch 'master' of https://github.com/honnibal/spaCy into develop
Commit 4f5b4a88f2

@@ -8,16 +8,24 @@ environment:
  matrix:

    # Python 2.7.10 is the latest version and is not pre-installed.

    - PYTHON: "C:\\Python27.10-x64"
      PYTHON_VERSION: "2.7.10"
      PYTHON_ARCH: "64"

    - PYTHON: "C:\\Python27.10-x32"
      PYTHON_VERSION: "2.7.10"
      PYTHON_ARCH: "32"

    # The lastest Python 3.4.
    - PYTHON: "C:\\Python34-x64"
      PYTHON_VERSION: "3.4.x" # currently 3.4.3
      PYTHON_ARCH: "64"

    #- PYTHON: "C:\\Python34-x32"
    #  PYTHON_VERSION: "3.4.x" # currently 3.4.3
    #  PYTHON_ARCH: "32"


install:
  # Install Python (from the official .msi of http://python.org) and pip when
  # not already installed.

@@ -30,10 +38,11 @@ install:
  - "SET PYTHONPATH=%CD%;%PYTHONPATH%"

  # Filesystem root
  # - ps: "ls \"C:/\""
  #- ps: "ls \"C:/\""
  #- SET

  # Installed SDKs
  # - ps: "ls \"C:/Program Files/Microsoft SDKs/Windows\""
  #- ps: "ls \"C:/Program Files/Microsoft SDKs/Windows\""

  # Checking stdint.h
  #- ps: "ls \"C:/projects/spacy/include/\""

@@ -1,3 +1,6 @@
<img src="https://ci.appveyor.com/api/projects/status/aoe3dtkep36rdaqf?svg=true" />

spaCy: Industrial-strength NLP
==============================

@@ -49,3 +52,6 @@ Difficult to support:
* PyPy 2.7
* PyPy 3.4

@@ -19,7 +19,7 @@ class Lemmatizer(object):
            index[pos] = read_index(path.join(data_dir, 'wordnet', 'index.%s' % pos))
            exc[pos] = read_exc(path.join(data_dir, 'wordnet', '%s.exc' % pos))
        if path.exists(path.join(data_dir, 'vocab', 'lemma_rules.json')):
            rules = json.load(open(path.join(data_dir, 'vocab', 'lemma_rules.json')))
            rules = json.load(codecs.open(path.join(data_dir, 'vocab', 'lemma_rules.json'), encoding='utf_8'))
        else:
            rules = {}
        return cls(index, exc, rules)

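Not part of the diff, but a hedged note on the lemmatizer change: the lemma_rules.json
read switches from the built-in open() to codecs.open() with an explicit encoding,
presumably so the file is decoded as UTF-8 regardless of the platform's default
encoding (relevant given the Windows CI configured in the same commit). A minimal
sketch of the same pattern, with a hypothetical path argument:

    import codecs
    import json

    def load_lemma_rules(rules_path):
        # Decode explicitly as UTF-8 instead of relying on the locale default.
        with codecs.open(rules_path, encoding='utf_8') as file_:
            return json.load(file_)
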
@@ -120,6 +120,9 @@ cdef class Doc:
    def __str__(self):
        return u''.join([t.string for t in self])

    def __repr__(self):
        return u''.join([t.string for t in self])

    def similarity(self, other):
        if self.vector_norm == 0 or other.vector_norm == 0:
            return 0.0

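Not part of the diff, but as an illustration: with __repr__ defined alongside __str__,
a Doc echoed at the interactive prompt, or shown inside a container, displays its text
rather than a bare object address. A hedged sketch, assuming an English pipeline is
already loaded as `nlp`:

    doc = nlp(u'Hello world.')
    repr(doc)     # now the document text itself
    print([doc])  # container display also goes through __repr__
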
@@ -46,6 +46,12 @@ cdef class Span:
            return 0
        return self.end - self.start

    def __repr__(self):
        text = self.text_with_ws
        if self[-1].whitespace_:
            text = text[:-1]
        return text

    def __getitem__(self, object i):
        if isinstance(i, slice):
            start, end = normalize_slice(len(self), i.start, i.stop, i.step)

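Not part of the diff, but a sketch of the trimming logic in Span.__repr__: text_with_ws
is the span text with each token's trailing whitespace attached, and the final token's
whitespace is stripped so the repr ends at the last visible character. The same trim,
written as a standalone helper with hypothetical arguments:

    def span_repr_text(text_with_ws, last_token_whitespace):
        # Drop the trailing space contributed by the final token, if any.
        if last_token_whitespace:
            return text_with_ws[:-1]
        return text_with_ws
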
@@ -43,6 +43,9 @@ cdef class Token:
    def __str__(self):
        return self.string

    def __repr__(self):
        return self.string

    cpdef bint check_flag(self, attr_id_t flag_id) except -1:
        return Lexeme.c_check_flag(self.c.lex, flag_id)