# spaCy/spacy/kb.pyx — knowledge base for entity linking
# cython: infer_types=True
# cython: profile=True
# coding: utf8
from collections import OrderedDict
from cpython.exc cimport PyErr_CheckSignals
from spacy import util
from spacy.errors import Errors, Warnings, user_warning
from cymem.cymem cimport Pool
from preshed.maps cimport PreshMap
from cpython.mem cimport PyMem_Malloc
from cpython.exc cimport PyErr_SetFromErrno
from libc.stdio cimport FILE, fopen, fclose, fread, fwrite, feof, fseek
from libc.stdint cimport int32_t, int64_t
from libc.stdlib cimport qsort
from .typedefs cimport hash_t
from os import path
from libcpp.vector cimport vector
cdef class Candidate:
    """One possible KB entity for a textual mention (alias), together with
    the entity's corpus frequency and the prior probability of this
    alias resolving to this entity."""

    def __init__(self, KnowledgeBase kb, entity_hash, entity_freq, alias_hash, prior_prob):
        self.kb = kb
        self.entity_hash = entity_hash
        self.entity_freq = entity_freq
        self.alias_hash = alias_hash
        self.prior_prob = prior_prob

    @property
    def entity(self):
        """RETURNS (uint64): hash of the entity's KB ID/name"""
        return self.entity_hash

    @property
    def entity_(self):
        """RETURNS (unicode): ID/name of this entity in the KB"""
        return self.kb.vocab.strings[self.entity_hash]

    @property
    def alias(self):
        """RETURNS (uint64): hash of the alias"""
        return self.alias_hash

    @property
    def alias_(self):
        """RETURNS (unicode): ID of the original alias"""
        return self.kb.vocab.strings[self.alias_hash]

    @property
    def entity_freq(self):
        # NOTE: inside a cdef class, `self.entity_freq` compiles to direct
        # C-struct access, so this does not recurse into the property.
        return self.entity_freq

    @property
    def prior_prob(self):
        # Prior probability of this alias referring to this entity.
        return self.prior_prob
cdef class KnowledgeBase:
    """Stores entities, aliases, and the prior probabilities linking them,
    and produces `Candidate` objects for a given textual mention (alias).

    Slot 0 of both `_entries` and `_aliases_table` is reserved for a dummy
    element keyed by the empty string; real records start at index 1.
    """

    def __init__(self, Vocab vocab):
        self.vocab = vocab
        self.mem = Pool()
        self._entry_index = PreshMap()
        self._alias_index = PreshMap()
        # TODO initialize self._entries and self._aliases_table ?
        # Reserve slot 0 of both tables for a dummy element keyed by "".
        self.vocab.strings.add("")
        self._create_empty_vectors(dummy_hash=self.vocab.strings[""])

    def __len__(self):
        return self.get_size_entities()

    def get_size_entities(self):
        """RETURNS (int): number of entities in the KB (dummy slot excluded)."""
        return len(self._entry_index)

    def get_entity_strings(self):
        """RETURNS (list): string IDs/names of all entities in the KB."""
        return [self.vocab.strings[x] for x in self._entry_index]

    def get_size_aliases(self):
        """RETURNS (int): number of aliases in the KB (dummy slot excluded)."""
        return len(self._alias_index)

    def get_alias_strings(self):
        """RETURNS (list): strings of all aliases in the KB."""
        return [self.vocab.strings[x] for x in self._alias_index]

    def add_entity(self, unicode entity, float prob=0.5, vectors=None, features=None):
        """
        Add an entity to the KB, optionally specifying its log probability
        based on corpus frequency. Return the hash of the entity ID/name,
        or None (with a warning) if the entity was already present.
        """
        cdef hash_t entity_hash = self.vocab.strings.add(entity)

        # Return if this entity was added before
        if entity_hash in self._entry_index:
            user_warning(Warnings.W018.format(entity=entity))
            return

        cdef int32_t dummy_value = 342
        new_index = self.c_add_entity(entity_hash=entity_hash, prob=prob,
                                      vector_rows=&dummy_value, feats_row=dummy_value)
        self._entry_index[entity_hash] = new_index

        # TODO self._vectors_table.get_pointer(vectors),
        # self._features_table.get(features))
        return entity_hash

    cpdef set_entities(self, entity_list, prob_list, vector_list, feature_list):
        """Bulk-set the entity table from parallel lists.
        Records are stored at indices 1..n; slot 0 stays the dummy element."""
        nr_entities = len(entity_list)
        self._entry_index = PreshMap(nr_entities+1)
        self._entries = entry_vec(nr_entities+1)

        i = 0
        cdef EntryC entry
        cdef int32_t dummy_value = 342
        while i < nr_entities:
            # TODO features and vectors
            entity_hash = self.vocab.strings.add(entity_list[i])
            entry.entity_hash = entity_hash
            entry.prob = prob_list[i]
            entry.vector_rows = &dummy_value
            entry.feats_row = dummy_value

            self._entries[i+1] = entry
            self._entry_index[entity_hash] = i+1
            i += 1

    cpdef set_aliases(self, alias_list, entities_list, probabilities_list):
        """Bulk-set the alias table from parallel lists. For each alias,
        `entities_list[i]` and `probabilities_list[i]` are parallel lists of
        candidate entity IDs and their prior probabilities.
        Records are stored at indices 1..n; slot 0 stays the dummy element."""
        nr_aliases = len(alias_list)
        self._alias_index = PreshMap(nr_aliases+1)
        self._aliases_table = alias_vec(nr_aliases+1)

        i = 0
        cdef AliasC alias
        cdef int32_t dummy_value = 342
        # BUG FIX: was `while i <= nr_aliases`, which read one element past
        # the end of alias_list and stored the first record in slot 0,
        # clobbering the dummy element.
        while i < nr_aliases:
            alias_hash = self.vocab.strings.add(alias_list[i])
            entities = entities_list[i]
            probabilities = probabilities_list[i]

            nr_candidates = len(entities)
            entry_indices = vector[int64_t](nr_candidates)
            probs = vector[float](nr_candidates)

            for j in range(nr_candidates):
                entity = entities[j]
                entity_hash = self.vocab.strings[entity]
                if entity_hash not in self._entry_index:
                    # BUG FIX: the message was formatted with the AliasC
                    # struct `alias` instead of the alias string.
                    raise ValueError(Errors.E134.format(alias=alias_list[i], entity=entity))

                entry_index = <int64_t>self._entry_index.get(entity_hash)
                entry_indices[j] = entry_index
                # BUG FIX: probs was allocated but never filled, so every
                # prior probability was silently stored as 0.0.
                probs[j] = probabilities[j]

            alias.entry_indices = entry_indices
            alias.probs = probs

            # Store at i+1: index 0 is the dummy slot, matching set_entities
            # and load_bulk.
            self._aliases_table[i+1] = alias
            self._alias_index[alias_hash] = i+1
            i += 1

    def add_alias(self, unicode alias, entities, probabilities):
        """
        For a given alias, add its potential entities and prior probabilities
        to the KB. Return the alias_hash at the end, or None (with a warning)
        if the alias was already present.
        """
        # Throw an error if the length of entities and probabilities are not the same
        if not len(entities) == len(probabilities):
            raise ValueError(Errors.E132.format(alias=alias,
                                                entities_length=len(entities),
                                                probabilities_length=len(probabilities)))

        # Throw an error if the probabilities sum up to more than 1 (allow for some rounding errors)
        prob_sum = sum(probabilities)
        if prob_sum > 1.00001:
            raise ValueError(Errors.E133.format(alias=alias, sum=prob_sum))

        cdef hash_t alias_hash = self.vocab.strings.add(alias)

        # Return if this alias was added before
        if alias_hash in self._alias_index:
            user_warning(Warnings.W017.format(alias=alias))
            return

        cdef vector[int64_t] entry_indices
        cdef vector[float] probs

        for entity, prob in zip(entities, probabilities):
            entity_hash = self.vocab.strings[entity]
            if entity_hash not in self._entry_index:
                raise ValueError(Errors.E134.format(alias=alias, entity=entity))

            entry_index = <int64_t>self._entry_index.get(entity_hash)
            entry_indices.push_back(int(entry_index))
            probs.push_back(float(prob))

        new_index = self.c_add_aliases(alias_hash=alias_hash, entry_indices=entry_indices, probs=probs)
        self._alias_index[alias_hash] = new_index

        return alias_hash

    def get_candidates(self, unicode alias):
        """ TODO: where to put this functionality ?"""
        cdef hash_t alias_hash = self.vocab.strings[alias]
        # An unknown alias resolves to index 0, the dummy slot, which yields
        # an empty candidate list.
        alias_index = <int64_t>self._alias_index.get(alias_hash)
        alias_entry = self._aliases_table[alias_index]

        return [Candidate(kb=self,
                          entity_hash=self._entries[entry_index].entity_hash,
                          entity_freq=self._entries[entry_index].prob,
                          alias_hash=alias_hash,
                          prior_prob=prob)
                for (entry_index, prob) in zip(alias_entry.entry_indices, alias_entry.probs)
                if entry_index != 0]

    def dump(self, loc):
        """Serialize the entity and alias tables to the binary file at `loc`."""
        cdef Writer writer = Writer(loc)
        writer.write_header(self.get_size_entities())

        # dumping the entry records in the order in which they are in the _entries vector.
        # index 0 is a dummy object not stored in the _entry_index and can be ignored.
        i = 1
        for entry_hash, entry_index in sorted(self._entry_index.items(), key=lambda x: x[1]):
            entry = self._entries[entry_index]
            assert entry.entity_hash == entry_hash
            assert entry_index == i
            writer.write_entry(entry.entity_hash, entry.prob)
            i = i+1

        writer.write_alias_length(self.get_size_aliases())

        # dumping the aliases in the order in which they are in the _alias_index vector.
        # index 0 is a dummy object not stored in the _aliases_table and can be ignored.
        i = 1
        for alias_hash, alias_index in sorted(self._alias_index.items(), key=lambda x: x[1]):
            alias = self._aliases_table[alias_index]
            assert alias_index == i

            candidate_length = len(alias.entry_indices)
            writer.write_alias_header(alias_hash, candidate_length)
            for j in range(0, candidate_length):
                writer.write_alias(alias.entry_indices[j], alias.probs[j])

            i = i+1

        writer.close()

    cpdef load_bulk(self, loc):
        """Load entities and aliases from the binary file at `loc`, replacing
        the current contents of the KB."""
        cdef hash_t entity_hash
        cdef hash_t alias_hash
        cdef int64_t entry_index
        cdef float prob

        cdef EntryC entry
        cdef AliasC alias
        cdef int32_t dummy_value = 342

        cdef Reader reader = Reader(loc)

        # Step 1: load entities
        cdef int64_t nr_entities
        reader.read_header(&nr_entities)
        self._entry_index = PreshMap(nr_entities+1)
        self._entries = entry_vec(nr_entities+1)

        # we assume that the entity data was written in sequence
        # index 0 is a dummy object not stored in the _entry_index and can be ignored.
        # TODO: should we initialize the dummy objects ?
        cdef int i = 1
        while i <= nr_entities:
            reader.read_entry(&entity_hash, &prob)

            # TODO features and vectors
            entry.entity_hash = entity_hash
            entry.prob = prob
            entry.vector_rows = &dummy_value
            entry.feats_row = dummy_value

            self._entries[i] = entry
            self._entry_index[entity_hash] = i
            i += 1

        # check that all entities were read in properly
        assert nr_entities == self.get_size_entities()

        # Step 2: load aliases
        cdef int64_t nr_aliases
        reader.read_alias_length(&nr_aliases)
        self._alias_index = PreshMap(nr_aliases+1)
        self._aliases_table = alias_vec(nr_aliases+1)

        cdef int64_t nr_candidates
        cdef vector[int64_t] entry_indices
        cdef vector[float] probs

        i = 1
        # we assume the alias data was written in sequence
        # index 0 is a dummy object not stored in the _alias_index and can be ignored.
        while i <= nr_aliases:
            reader.read_alias_header(&alias_hash, &nr_candidates)
            entry_indices = vector[int64_t](nr_candidates)
            probs = vector[float](nr_candidates)

            for j in range(0, nr_candidates):
                reader.read_alias(&entry_index, &prob)
                entry_indices[j] = entry_index
                probs[j] = prob

            alias.entry_indices = entry_indices
            alias.probs = probs

            self._aliases_table[i] = alias
            self._alias_index[alias_hash] = i
            i += 1

        # check that all aliases were read in properly
        assert nr_aliases == self.get_size_aliases()
cdef class Writer:
    """Writes the KB tables to a binary file: an entity header and entries,
    then an alias header and alias records."""

    def __init__(self, object loc):
        if path.exists(loc):
            assert not path.isdir(loc), "%s is directory." % loc
        cdef bytes bytes_loc = loc.encode('utf8') if type(loc) == unicode else loc
        self._fp = fopen(<char*>bytes_loc, 'wb')
        if not self._fp:
            # BUG FIX: was only `assert self._fp != NULL`, which is stripped
            # under -O and would let later writes crash on a NULL FILE*.
            # Raise from errno instead, matching Reader.__init__.
            PyErr_SetFromErrno(IOError)
        fseek(self._fp, 0, 0)

    def __dealloc__(self):
        # BUG FIX: close the file if the caller never called close(),
        # so the FILE* does not leak. close() NULLs _fp to avoid a
        # double fclose.
        if self._fp != NULL:
            fclose(self._fp)

    def close(self):
        """Flush and close the underlying file."""
        # BUG FIX: fclose() returns int (EOF == -1 on failure); storing it
        # in an unsigned size_t was misleading.
        cdef int status = fclose(self._fp)
        self._fp = NULL
        assert status == 0

    cdef int write_header(self, int64_t nr_entries) except -1:
        self._write(&nr_entries, sizeof(nr_entries))

    cdef int write_entry(self, hash_t entry_hash, float entry_prob) except -1:
        # TODO: feats_rows and vector rows
        self._write(&entry_hash, sizeof(entry_hash))
        self._write(&entry_prob, sizeof(entry_prob))

    cdef int write_alias_length(self, int64_t alias_length) except -1:
        self._write(&alias_length, sizeof(alias_length))

    cdef int write_alias_header(self, hash_t alias_hash, int64_t candidate_length) except -1:
        self._write(&alias_hash, sizeof(alias_hash))
        self._write(&candidate_length, sizeof(candidate_length))

    cdef int write_alias(self, int64_t entry_index, float prob) except -1:
        self._write(&entry_index, sizeof(entry_index))
        self._write(&prob, sizeof(prob))

    cdef int _write(self, void* value, size_t size) except -1:
        # fwrite returns the number of complete items written (expect 1).
        status = fwrite(value, size, 1, self._fp)
        assert status == 1, status
cdef class Reader:
    """Reads a KB binary file produced by Writer. Each read_* method returns
    1 on success, 0 at end of file, and raises IOError on a short read."""

    def __init__(self, object loc):
        assert path.exists(loc)
        assert not path.isdir(loc)
        cdef bytes bytes_loc = loc.encode('utf8') if type(loc) == unicode else loc
        self._fp = fopen(<char*>bytes_loc, 'rb')
        if not self._fp:
            PyErr_SetFromErrno(IOError)
        status = fseek(self._fp, 0, 0)  # this can be 0 if there is no header

    def __dealloc__(self):
        # BUG FIX: guard against a NULL FILE* (fopen failure in __init__);
        # fclose(NULL) is undefined behavior.
        if self._fp != NULL:
            fclose(self._fp)

    cdef int read_header(self, int64_t* nr_entries) except -1:
        status = self._read(nr_entries, sizeof(int64_t))
        if status < 1:
            if feof(self._fp):
                return 0  # end of file
            raise IOError("error reading header from input file")

    cdef int read_entry(self, hash_t* entity_hash, float* prob) except -1:
        status = self._read(entity_hash, sizeof(hash_t))
        if status < 1:
            if feof(self._fp):
                return 0  # end of file
            raise IOError("error reading entity hash from input file")

        status = self._read(prob, sizeof(float))
        if status < 1:
            if feof(self._fp):
                return 0  # end of file
            raise IOError("error reading entity prob from input file")

        if feof(self._fp):
            return 0
        else:
            return 1

    cdef int read_alias_length(self, int64_t* alias_length) except -1:
        status = self._read(alias_length, sizeof(int64_t))
        if status < 1:
            if feof(self._fp):
                return 0  # end of file
            raise IOError("error reading alias length from input file")

    cdef int read_alias_header(self, hash_t* alias_hash, int64_t* candidate_length) except -1:
        status = self._read(alias_hash, sizeof(hash_t))
        if status < 1:
            if feof(self._fp):
                return 0  # end of file
            raise IOError("error reading alias hash from input file")

        status = self._read(candidate_length, sizeof(int64_t))
        if status < 1:
            if feof(self._fp):
                return 0  # end of file
            raise IOError("error reading candidate length from input file")

    cdef int read_alias(self, int64_t* entry_index, float* prob) except -1:
        status = self._read(entry_index, sizeof(int64_t))
        if status < 1:
            if feof(self._fp):
                return 0  # end of file
            raise IOError("error reading entry index for alias from input file")

        status = self._read(prob, sizeof(float))
        if status < 1:
            if feof(self._fp):
                return 0  # end of file
            raise IOError("error reading prob for entity/alias from input file")

    cdef int _read(self, void* value, size_t size) except -1:
        # fread returns the number of complete items read (expect 1).
        status = fread(value, size, 1, self._fp)
        return status