mirror of https://github.com/explosion/spaCy.git
Update Keras deep learning tutorial
This commit is contained in:
parent f60cefc048
commit ca89fd0919
@@ -14,7 +14,7 @@ class SentimentAnalyser(object):
     def __call__(self, doc):
         X = get_features([doc], self.max_length)
-        y = self._keras_model.predict(X)
+        y = self._model.predict(X)
         self.set_sentiment(doc, y)
 
     def pipe(self, docs, batch_size=1000, n_threads=2):
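The rename only touches the attribute that holds the compiled Keras model. A minimal sketch of the surrounding class, assuming a constructor and a set_sentiment helper along these lines (neither is shown in this hunk):

class SentimentAnalyser(object):
    def __init__(self, model, max_length=100):
        # `model` is the compiled Keras model; the commit renames the
        # attribute from _keras_model to _model
        self._model = model
        self.max_length = max_length

    def __call__(self, doc):
        X = get_features([doc], self.max_length)
        y = self._model.predict(X)
        self.set_sentiment(doc, y)

    def set_sentiment(self, doc, y):
        # keep the score on the Doc, mirroring what pipe() does below
        doc.user_data['sentiment'] = float(y)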
@@ -28,6 +28,13 @@ class SentimentAnalyser(object):
             doc.user_data['sentiment'] = y
 
 
+def get_features(docs, max_length):
+    Xs = numpy.zeros(len(docs), max_length, dtype='int32')
+    for i, doc in enumerate(minibatch):
+        for j, token in enumerate(doc[:max_length]):
+            Xs[i, j] = token.rank if token.has_vector else 0
+    return Xs
+
 def compile_lstm(embeddings, shape, settings, optimizer):
     model = Sequential()
     model.add(
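Note that the moved get_features body, as committed, would not run: numpy.zeros expects the shape as a single tuple, and the loop refers to minibatch, a name that is not defined inside this function. A corrected sketch (the fix is an editorial assumption, not part of this commit):

import numpy

def get_features(docs, max_length):
    # the shape must be passed to numpy.zeros as one tuple
    Xs = numpy.zeros((len(docs), max_length), dtype='int32')
    for i, doc in enumerate(docs):  # iterate over `docs`, not `minibatch`
        for j, token in enumerate(doc[:max_length]):
            # use the token's vocabulary rank as its embedding row,
            # and 0 for tokens without a word vector
            Xs[i, j] = token.rank if token.has_vector else 0
    return Xs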
@@ -59,14 +66,6 @@ def get_embeddings(vocab):
     return vectors
 
 
-def get_features(docs, max_length):
-    Xs = numpy.zeros(len(docs), max_length, dtype='int32')
-    for i, doc in enumerate(minibatch):
-        for j, token in enumerate(doc[:max_length]):
-            Xs[i, j] = token.rank if token.has_vector else 0
-    return Xs
-
-
 def train(train_texts, train_labels, dev_texts, dev_labels,
           lstm_shape, lstm_settings, lstm_optimizer, batch_size=100, nb_epoch=5):
     nlp = spacy.load('en', parser=False, tagger=False, entity=False)
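The last hunk also shows the train signature and the spacy.load call its body starts with. A rough sketch of how these pieces presumably plug together inside train; the dictionary keys and hyperparameter values are assumptions, only the function signatures above come from the commit:

nlp = spacy.load('en', parser=False, tagger=False, entity=False)
embeddings = get_embeddings(nlp.vocab)
lstm_shape = {'nr_hidden': 64, 'max_length': 100, 'nr_class': 1}  # assumed keys
lstm_settings = {'dropout': 0.5, 'lr': 0.001}                     # assumed keys
model = compile_lstm(embeddings, lstm_shape, lstm_settings, 'adam')

# turn the raw texts into padded integer matrices and fit the model
train_X = get_features(list(nlp.pipe(train_texts)), lstm_shape['max_length'])
dev_X = get_features(list(nlp.pipe(dev_texts)), lstm_shape['max_length'])
model.fit(train_X, train_labels, validation_data=(dev_X, dev_labels),
          nb_epoch=nb_epoch, batch_size=batch_size)  # Keras 1.x keyword, matching nb_epoch above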