diff --git a/spacy/syntax/_beam_utils.pyx b/spacy/syntax/_beam_utils.pyx
index b1085c762..93f5c6fdd 100644
--- a/spacy/syntax/_beam_utils.pyx
+++ b/spacy/syntax/_beam_utils.pyx
@@ -304,27 +304,3 @@ def get_gradient(nr_class, beam_maps, histories, losses):
                 grads[j][i, clas] += loss
                 key = key + tuple([clas])
     return grads
-
-
-def cleanup_beam(Beam beam):
-    cdef StateC* state
-    # Once parsing has finished, states in beam may not be unique. Is this
-    # correct?
-    seen = set()
-    for i in range(beam.width):
-        addr = <size_t>beam._parents[i].content
-        if addr not in seen:
-            state = <StateC*>addr
-            del state
-            seen.add(addr)
-        else:
-            raise ValueError(Errors.E023.format(addr=addr, i=i))
-        addr = <size_t>beam._states[i].content
-        if addr not in seen:
-            state = <StateC*>addr
-            del state
-            seen.add(addr)
-        else:
-            raise ValueError(Errors.E023.format(addr=addr, i=i))
-
-
diff --git a/spacy/syntax/nn_parser.pyx b/spacy/syntax/nn_parser.pyx
index 145c382a5..5a79ddee2 100644
--- a/spacy/syntax/nn_parser.pyx
+++ b/spacy/syntax/nn_parser.pyx
@@ -373,8 +373,6 @@ cdef class Parser:
             self.moves.finalize_doc(doc)
             for hook in self.postprocesses:
                 hook(doc)
-        for beam in beams:
-            _beam_utils.cleanup_beam(beam)
 
     def transition_states(self, states, float[:, ::1] scores):
         cdef StateClass state
@@ -527,9 +525,6 @@ cdef class Parser:
             else:
                 model.backprops.append((ids, d_vector, bp_vectors))
         model.make_updates(sgd)
-        cdef Beam beam
-        for beam in beams:
-            _beam_utils.cleanup_beam(beam)
 
     def _init_gold_batch(self, whole_docs, whole_golds, min_length=5, max_length=500):
         """Make a square batch, of length equal to the shortest doc. A long
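
Not part of the patch above: a pure-Python sketch of the logic that the removed cleanup_beam() implemented, for readers who don't want to parse the Cython original. The names beam_slots and free_state are hypothetical stand-ins for the beam's _parents/_states arrays and for the `del` on the cast StateC* pointer; only the structure mirrors the deleted function.

def cleanup_beam_sketch(beam_slots, free_state):
    """Free every state address held by the beam exactly once.

    Mirrors the removed cleanup_beam: a duplicate address is treated as an
    error (the original raised ValueError with Errors.E023), since freeing
    the same pointer twice would be a double free.
    """
    seen = set()
    for i, (parent_addr, state_addr) in enumerate(beam_slots):
        for addr in (parent_addr, state_addr):
            if addr in seen:
                raise ValueError(f"duplicate state address {addr} at beam slot {i}")
            free_state(addr)  # original: state = <StateC*>addr; del state
            seen.add(addr)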