mirror of
https://github.com/explosion/spaCy.git
synced 2025-01-26 01:04:34 +03:00
Fix .iob converter (closes #3620)
This commit is contained in:
parent
f6e9394aa5
commit
0bf6441863
|
@ -2,23 +2,17 @@
|
||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
import re
|
import re
|
||||||
|
from cytoolz import partition_all
|
||||||
|
|
||||||
from ...gold import iob_to_biluo
|
from ...gold import iob_to_biluo
|
||||||
from ...util import minibatch
|
|
||||||
|
|
||||||
|
|
||||||
def iob2json(input_data, n_sents=10, *args, **kwargs):
    """Convert IOB files into JSON format for use with the train cli.

    input_data (unicode): Raw IOB-formatted text, one sentence per line.
    n_sents (int): Number of sentences to merge into each output doc.
    RETURNS (list): JSON-serializable docs produced by merge_sentences().

    Extra *args/**kwargs are accepted (and ignored) so the converter
    matches the common CLI converter signature.
    """
    # Parse line-by-line, then group the resulting one-sentence docs.
    parsed = read_iob(input_data.split("\n"))
    return merge_sentences(parsed, n_sents)
|
||||||
|
|
||||||
|
|
||||||
|
@ -27,7 +21,6 @@ def read_iob(raw_sents):
|
||||||
for line in raw_sents:
|
for line in raw_sents:
|
||||||
if not line.strip():
|
if not line.strip():
|
||||||
continue
|
continue
|
||||||
# tokens = [t.split("|") for t in line.split()]
|
|
||||||
tokens = [re.split("[^\w\-]", line.strip())]
|
tokens = [re.split("[^\w\-]", line.strip())]
|
||||||
if len(tokens[0]) == 3:
|
if len(tokens[0]) == 3:
|
||||||
words, pos, iob = zip(*tokens)
|
words, pos, iob = zip(*tokens)
|
||||||
|
@ -49,3 +42,15 @@ def read_iob(raw_sents):
|
||||||
paragraphs = [{"sentences": [sent]} for sent in sentences]
|
paragraphs = [{"sentences": [sent]} for sent in sentences]
|
||||||
docs = [{"id": 0, "paragraphs": [para]} for para in paragraphs]
|
docs = [{"id": 0, "paragraphs": [para]} for para in paragraphs]
|
||||||
return docs
|
return docs
|
||||||
|
|
||||||
|
|
||||||
|
def merge_sentences(docs, n_sents):
    """Merge one-sentence docs into docs of up to n_sents sentences each.

    docs (iterable): Docs as produced by read_iob(), i.e. dicts shaped
        {"paragraphs": [{"sentences": [...]}]} with one sentence each.
    n_sents (int): Maximum number of sentences per merged doc.
    RETURNS (list): Merged docs; each keeps the first doc's structure with
        the other docs' sentences appended to its first paragraph.
    """
    # Stdlib batching instead of cytoolz.partition_all — same grouping
    # behavior (consecutive chunks, last one possibly short).
    from itertools import islice

    merged = []
    doc_iter = iter(docs)
    while True:
        group = list(islice(doc_iter, n_sents))
        if not group:
            break
        first = group.pop(0)
        to_extend = first["paragraphs"][0]["sentences"]
        # BUG FIX: was `for sent in group[1:]`, which skipped the second
        # doc of every group (group already excludes `first` after pop).
        for sent in group:
            to_extend.extend(sent["paragraphs"][0]["sentences"])
        merged.append(first)
    return merged
|
||||||
|
|
Loading…
Reference in New Issue
Block a user