How to build and train an LSTM network in tensorflow.js

Posted: 2018-09-27 23:42:22

Tags: node.js lstm tensorflow.js

I'm trying to build and train an LSTM network with tensorflow.js. My dataset looks like:

Input: "I don't like these shoes, they are too yellow for me, so I returned them." Expected output (the reason): "color"

I can represent the text as vectors using a pre-trained word2vec model. I've tried reading the tensorflow.js documentation, but I couldn't make much sense of it, and I haven't found a good example anywhere else of how to implement and train such a network in tensorflow.js.

Could someone give a straightforward example of how to create a model with LSTM layers and train it using node.js and tensorflow.js?

1 Answer:

Answer 0 (score: 2)

Here is an example of an RNN with stacked LSTM layers that learns to predict the next word (note that the vectorization below is plain one-hot encoding rather than Word2Vec). I don't think you'd need much tweaking to match it to your use case; probably just the data source and the hyperparameters. You will, of course, need to write your own test function. And since you said you can supply your own pre-trained Word2Vec vectors, you could delete a large chunk of this code if you prefer.
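
For your classification use case specifically, a minimal sketch of how the model head might look (assumptions on my part: word2vec inputs of dimension embDim, reviews padded or truncated to maxlen words, and a hypothetical fixed label set reasons):

require('@tensorflow/tfjs-node');
const tf = require('@tensorflow/tfjs');

const maxlen = 30;   // words per review, padded or truncated to this length
const embDim = 300;  // hypothetical word2vec dimensionality
const reasons = ['color', 'size', 'price'];  // hypothetical label set

// input: [batch, maxlen, embDim] word2vec vectors; output: softmax over the reasons
const clf = tf.sequential();
clf.add(tf.layers.lstm({units: 128, inputShape: [maxlen, embDim]}));
clf.add(tf.layers.dense({units: reasons.length, activation: 'softmax'}));
clf.compile({loss: 'categoricalCrossentropy', optimizer: 'adam'});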

I should note that this example reflects my own preference: I use a library called numjs (a Numpy-like library) to build the arrays, and only then convert them to the tensors Tensorflow.js works with. That is not how Tensorflow.js is designed to be used, but I find its native tensor-creation methods cumbersome and unintuitive.
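
For comparison, the native route is tf.buffer, which lets you write individual elements and then freeze the result into a tensor; a minimal sketch of building a small one-hot tensor that way (the shapes are purely illustrative):

const tf = require('@tensorflow/tfjs');

// 2 samples, 3 timesteps, vocabulary of 4 (illustrative sizes only)
const buf = tf.buffer([2, 3, 4], 'float32');
buf.set(1, 0, 1, 2);             // value 1 at sample 0, timestep 1, word index 2
const xTensor = buf.toTensor();  // immutable tf.Tensor3D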

Anyway, here is my example (using node.js). I hope you find it useful.

require('@tensorflow/tfjs-node');  // native backend; speeds up training under node.js
const tf = require('@tensorflow/tfjs');
const fs = require('fs');
const nj = require('numjs');

const maxlen = 30;  // words per training sequence
const step = 3;     // stride between consecutive sequence windows
fs.readFile(<file>, 'utf8', function (error, data) {  // <file>: path to your training corpus
    if (error) throw error;
    var text = data.toString();
    create_model(text);
});


// uniqueness filter for Array.prototype.filter: keep the first occurrence only
function onlyUnique(value, index, self) {
  return self.indexOf(value) === index;
}

// argmax over a plain array
function indexOfMax(arr) {
    if (arr.length === 0) {
        return -1;
    }
    var max = arr[0];
    var maxIndex = 0;
    for (var i = 1; i < arr.length; i++) {
        if (arr[i] > max) {
            maxIndex = i;
            max = arr[i];
        }
    }
    return maxIndex;
}

// rescale the predicted distribution by temperature, then pick a word index
function sample(preds, temperature) {
  preds = nj.array(preds, 'float64');
  preds = nj.log(preds).divide(temperature);
  const exp_preds = nj.exp(preds);
  preds = exp_preds.divide(nj.sum(exp_preds));
  return indexOfMax(preds.tolist());
}
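
// Note: because indexOfMax is an argmax and the temperature rescaling above is
// monotonic, temperature never changes which index wins. A sketch of true
// temperature sampling would draw from the rescaled distribution instead, e.g.:
function sampleFromDistribution(probs) {
  var r = Math.random();
  var cumulative = 0;
  for (var i = 0; i < probs.length; i++) {
    cumulative += probs[i];
    if (r < cumulative) return i;  // index i is chosen with probability probs[i]
  }
  return probs.length - 1;  // fallback for floating-point rounding
}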

async function create_model(text) {
  /* data prep */
  text = text.toLowerCase();
  console.log('corpus length:', text.length);
  // build the vocabulary: unique, sorted, non-empty words
  var words = text.replace(/(\r\n\t|\n|\r\t)/gm, " ").split(" ");
  words = words.filter(onlyUnique);
  words = words.sort();
  words = words.filter(String);

  console.log("total number of unique words: " + words.length);

  // lookup tables: word -> index and index -> word
  var word_indices = {};
  var indices_word = {};
  for (let [idx, word] of words.entries()) {
    word_indices[word] = idx;
    indices_word[idx] = word;
  }

  console.log("maxlen: " + maxlen, " step: " + step);

  // slide a window of maxlen words across the corpus; the word that follows
  // each window is the prediction target
  var sentences = [];
  var next_words = [];
  var list_words = text.replace(/(\r\n\t|\n|\r\t)/gm, " ").split(" ").filter(String);
  console.log('list_words ' + list_words.length);

  for (var i = 0; i < (list_words.length - maxlen); i += step) {
    sentences.push(list_words.slice(i, i + maxlen).join(" "));
    next_words.push(list_words[i + maxlen]);
  }
  console.log('nb sequences (length of sentences):', sentences.length);
  console.log("length of next_words:", next_words.length);

  console.log('Vectorization...');
  // one-hot encode: X is [samples, maxlen, vocab_size], y is [samples, vocab_size]
  var X = nj.zeros([sentences.length, maxlen, words.length]);
  console.log('X shape ' + X.shape);
  var y = nj.zeros([sentences.length, words.length]);
  console.log('y shape ' + y.shape);
  for (let [i, sentence] of sentences.entries()) {
    for (let [t, word] of sentence.split(" ").entries()) {
      X.set(i, t, word_indices[word], 1);
    }
    y.set(i, word_indices[next_words[i]], 1);
  }

  console.log('Creating model... Please wait.');

  console.log("MAXLEN " + maxlen + ", words.length " + words.length);
  var model = tf.sequential();
  model.add(tf.layers.lstm({
    units: 128,
    returnSequences: true,  // pass the full sequence on to the next LSTM layer
    inputShape: [maxlen, words.length]
  }));
  model.add(tf.layers.dropout({rate: 0.2}));  // dropout takes a config object, not a bare number
  model.add(tf.layers.lstm({
    units: 128,
    returnSequences: false  // the last LSTM emits a single vector per sequence
  }));
  model.add(tf.layers.dropout({rate: 0.2}));
  model.add(tf.layers.dense({units: words.length, activation: 'softmax'}));

  model.compile({loss: 'categoricalCrossentropy', optimizer: tf.train.rmsprop(0.002)});

  // convert the numjs arrays to tensors; float32 is what the layers expect
  var x_tensor = tf.tensor3d(X.tolist(), null, 'float32');
  //x_tensor.print(true)
  var y_tensor = tf.tensor2d(y.tolist(), null, 'float32');
  //y_tensor.print(true)

  /* training */
  await model.fit(x_tensor, y_tensor, {
    epochs: 100,
    batchSize: 32,
    callbacks: {
      onEpochEnd: async (epoch, logs) => {
        console.log(logs.loss + ",");
      }
    }
  });
}
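
As for the test function mentioned above, here is a minimal sketch of what one might look like (assumptions: it is called after model.fit inside create_model, so model, words, word_indices, indices_word and maxlen are all in scope; seed_text is any string of at least maxlen words):

function predict_next_word(seed_text) {
  var seed_words = seed_text.toLowerCase().split(" ").filter(String).slice(-maxlen);
  var x = nj.zeros([1, maxlen, words.length]);
  for (let [t, word] of seed_words.entries()) {
    if (word in word_indices) x.set(0, t, word_indices[word], 1);  // skip out-of-vocab words
  }
  var preds = model.predict(tf.tensor3d(x.tolist(), null, 'float32'));
  var probs = Array.from(preds.dataSync());
  return indices_word[indexOfMax(probs)];  // or: indices_word[sample(probs, 0.8)]
}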