Perceptron JavaScript inconsistency

Date: 2018-02-24 21:58:43

Tags: javascript machine-learning neural-network perceptron

I'm building a basic perceptron. The results after training are very inconsistent, even after 1000 epochs. The weights seem to adjust appropriately, but the model can't predict accurately. A second pair of eyes on the structure would be greatly appreciated, as I'm struggling to find where I went wrong. Accuracy consistently tops out around 60%.

// Perceptron
class Perceptron {

    constructor (x_train, y_train, learn_rate = 0.1, epochs = 10) {
        this.epochs = epochs
        this.x_train = x_train
        this.y_train = y_train
        this.learn_rate = learn_rate
        this.weights = new Array(x_train[0].length)

        // initialize random weights
        for ( let n = 0; n < x_train[0].length; n++ ) {
            this.weights[n] = this.random()
        }
    }

    // generate random float between -1 and 1 (for generating weights)
    random () {
        return Math.random() * 2 - 1
    }

    // activation function
    activation (n) {
        return n < 0 ? 0 : 1
    }

    // y-hat output given an input tensor
    predict (input) {
        let total = 0
        this.weights.forEach((w, index) => { total += input[index] * w }) // multiply each weight by each input vector value
        return this.activation(total)
    }

    // training perceptron on data
    fit () {
        for ( let e = 0; e < this.epochs; e++) { // epochs loop
            for ( let i = 0; i < this.x_train.length; i++ ) { // iterate over each training sample
                let prediction = this.predict(this.x_train[i]) // predict sample output
                console.log('Expected: ' + this.y_train[i] + '    Model Output: ' + prediction) // log expected vs predicted
                let loss = this.y_train[i] - prediction // calculate loss
                for ( let w = 0; w < this.weights.length; w++ ) { // loop weights for update
                    this.weights[w] += loss * this.x_train[i][w] * this.learn_rate // update all weights to reduce loss
                }
            }
        }
    }
}

x = [[1, 1, 1], [0, 0, 0], [0, 0, 1], [1, 1, 0], [0, 0, 1]]
y = [1, 0, 0, 1, 0]

p = new Perceptron(x, y, epochs=5000, learn_rate=.1)
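
For reference, the model is driven roughly like this (a sketch; the fit() call and the test input are assumptions, not shown in the snippet above):

p.fit()                            // run the training loop
console.log(p.predict([1, 0, 0])) // expected 1 if the pattern (y matches the first feature) was learned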

Update:

// Perceptron
module.exports = class Perceptron {

    constructor (x_train, y_train, epochs = 1000, learn_rate = 0.1) {

        // used to generate percent accuracy
        this.accuracy = 0
        this.samples = 0
        this.x_train = x_train
        this.y_train = y_train
        this.epochs = epochs
        this.learn_rate = learn_rate
        this.weights = new Array(x_train[0].length)
        this.bias = 0

        // initialize random weights
        for ( let n = 0; n < x_train[0].length; n++ ) {
            this.weights[n] = this.random()
        }
    }

    // returns percent accuracy
    current_accuracy () {
        return this.accuracy/this.samples
    }

    // generate random float between -1 and 1 (for generating weights)
    random () {
        return Math.random() * 2 - 1
    }

    // activation function
    activation (n) {
        return n < 0 ? 0 : 1
    }

    // y-hat output given an input tensor
    predict (input) {
        let total = this.bias
        this.weights.forEach((w, index) => { total += input[index] * w }) // multiply each weight by each input vector value
        return this.activation(total)
    }

    // training perceptron on data
    fit () {
        // epochs loop
        for ( let e = 0; e < this.epochs; e++) {

            // for each training sample
            for ( let i = 0; i < this.x_train.length; i++ ) {

                // get prediction
                let prediction = this.predict(this.x_train[i])
                console.log('Expected: ' + this.y_train[i] + '    Model Output: ' + prediction)

                // update accuracy measures
                this.y_train[i] === prediction ? this.accuracy += 1 : this.accuracy -= 1
                this.samples++

                // calculate loss
                let loss = this.y_train[i] - prediction

                // update all weights
                for ( let w = 0; w < this.weights.length; w++ ) {
                    this.weights[w] += loss * this.x_train[i][w] * this.learn_rate
                }

                this.bias += loss * this.learn_rate
            }

            // accuracy post epoch
            console.log(this.current_accuracy())
        }
    }
}
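
For completeness, here's a sketch of how I'd drive the exported class (the file name perceptron.js and the test vector below are assumptions, not part of the module above):

// assumed usage of the exported class
const Perceptron = require('./perceptron')

const x = [[1, 1, 1], [0, 0, 0], [0, 0, 1], [1, 1, 0], [0, 0, 1]]
const y = [1, 0, 0, 1, 0]

const p = new Perceptron(x, y, 1000, 0.1) // positional arguments: epochs, then learn_rate
p.fit()                                   // trains and logs accuracy after each epoch
console.log(p.predict([1, 0, 0]))         // expected 1 once training has converged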

1 Answer:

Answer 0 (score: 2)

It's just a syntax mistake :)

Switch the order of the last two arguments, like this:

p = new Perceptron(x, y, learn_rate=.1, epochs=5000)

Now everything should work.
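
To see why: JavaScript has no keyword arguments, so writing epochs=5000 inside the call simply assigns to a global variable, and the resulting value is passed positionally. A sketch of what the original call actually did:

// the original call is equivalent to this, because arguments are matched by position:
epochs = 5000                        // global assignment, evaluates to 5000
learn_rate = .1                      // global assignment, evaluates to 0.1
p = new Perceptron(x, y, 5000, .1)   // constructor signature is (x_train, y_train, learn_rate, epochs),
                                     // so learn_rate becomes 5000 and epochs becomes 0.1

With epochs equal to 0.1, the loop for (let e = 0; e < this.epochs; e++) makes only a single pass over the data, which would explain the inconsistent results.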

However, a more serious problem lies in your implementation:

You forgot the bias term.

With a perceptron you are trying to learn a linear function, something of the form

y = wx + b

but what you are currently computing is only

y = wx

That's fine if all you want to learn is the identity function of a single input (as in your case). But it won't work as soon as you try something slightly more complicated, like learning the AND function, which can be represented like this:

y = x1 + x2 - 1.5
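
As a quick sanity check (a sketch, not part of the code above), a predict() that includes a bias can represent AND with hand-picked parameters:

// hand-picked weights and bias encoding AND: y = x1 + x2 - 1.5, thresholded at 0
const weights = [1, 1]
const bias = -1.5
const predictAND = (input) =>
    (weights[0] * input[0] + weights[1] * input[1] + bias) < 0 ? 0 : 1

console.log(predictAND([0, 0])) // 0
console.log(predictAND([0, 1])) // 0
console.log(predictAND([1, 0])) // 0
console.log(predictAND([1, 1])) // 1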

How to fix it?

Quite simple: just initialize this.bias = 0 in the constructor. Then, in predict(), initialize let total = this.bias, and in fit(), add this.bias += loss * this.learn_rate after the innermost loop.