Backpropagation doesn't work in TensorFlow

Asked: 2017-05-23 02:40:55

Tags: tensorflow neural-network backpropagation

I'm new to TensorFlow. Recently I wanted to fit the nonlinear function "y = 1 + sin(x * pi / 4)" with a two-layer neural network model. The program code is as follows:

    #!/usr/bin/python

    import tensorflow as tf
    import numpy as np
    import math
    import matplotlib.pyplot as plt

    def check_mode():
        x_data = np.linspace(-2,2,100)
        y_data = [1 + math.sin(x  * math.pi/4) for x in x_data]
        w_1 = tf.Variable(tf.random_uniform([1,2],0,0.5))
        b_1 = tf.Variable(tf.random_uniform([1,2],0,0.5))
        w_2 = tf.Variable(tf.random_uniform([2,1],0,0.5))
        b_2 = tf.Variable(tf.random_uniform([1,1],0,0.5))
        saver = tf.train.Saver()
        with tf.Session() as sess:
            saver.restore(sess,"mode.ckpt")
            print("lay1: ",sess.run(w_1),sess.run(b_1))
            print("lay2: ",sess.run(w_2),sess.run(b_2))
            a = []
            for x_i in x_data:
                # cast to a Python float so the input is float32, matching the variables
                w_plus_b = tf.matmul([[float(x_i)]], w_1) + b_1
                a_1 = sigma(w_plus_b)
                a_2 = tf.matmul(a_1, w_2) + b_2
                a.append(sess.run(a_2)[0][0])
        print(a)
        draw_point(a,x_data,y_data)
        return
    def draw_point(a,x_data,y_data):
        fig, ax = plt.subplots()
        ax.plot(x_data, y_data, 'o-')  # target curve y = 1 + sin(x*pi/4)
        ax.plot(x_data, a, 'k-')       # network output
        plt.show()


    def sigma(x):
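        # logistic sigmoid 1/(1 + e^(-x)); equivalent to tf.sigmoid(x)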
        return tf.div(tf.constant(1.0),tf.add(tf.constant(1.0),tf.exp(tf.negative(x))))

    def first_function():
        x_data = np.linspace(-2,2,100)
        y_data = [1 + math.sin(x  * math.pi/4) for x in x_data]

        x_i = tf.placeholder(tf.float32,[1,1])
        y_data_i = tf.placeholder(tf.float32,[1,1])

        w_1 = tf.Variable(tf.random_uniform([1,2],0,0.5))
        b_1 = tf.Variable(tf.random_uniform([1,2],0,0.5))

        w_2 = tf.Variable(tf.random_uniform([2,1],0,0.5))
        b_2 = tf.Variable(tf.random_uniform([1,1],0,0.5))

        z_1 = tf.add(tf.matmul(x_i,w_1), b_1)
        a_1 = sigma(z_1)
        a_2 = tf.add(tf.matmul(a_1,w_2),b_2)
        diff = tf.subtract(a_2,y_data_i)    

        loss = tf.multiply(diff,diff)

        optimizer = tf.train.GradientDescentOptimizer(0.1)
        train = optimizer.minimize(loss)
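        # minimize() adds the gradient (backprop) ops to the graph automatically,
        # so no manual backpropagation code is needed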

        init = tf.global_variables_initializer()  # initialize_all_variables() is deprecated

        sess = tf.Session()
        sess.run(init)
        saver = tf.train.Saver()
        for step in range(100):
            sess.run(train,feed_dict={x_i:[[x_data[step]]],y_data_i:[[y_data[step]]]})

        print("lay1: ",sess.run(w_1),sess.run(b_1))
        print("lay2: ",sess.run(w_2),sess.run(b_2))
        saver.save(sess,"mode.ckpt")
        return

    def main():
        #first_function()   # run this once first to train and save mode.ckpt
        check_mode()
        return

    if __name__ == '__main__':
        main()

I'm not sure whether, or how, TensorFlow performs backpropagation automatically. I really don't want to implement backpropagation myself. Is there anything wrong with my code? Thanks a lot for your help!!

1 Answer:

Answer 0 (score: 1)

It looks like the backpropagation itself is working correctly, but it doesn't look like you are actually training the network very much. Specifically, your training loop only goes through each data point once. Try many loops (epochs) over the data.
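
As a minimal sketch (the epoch count and the per-epoch shuffling here are illustrative assumptions, not part of your original code), the single pass at the end of `first_function` could become:

    # Sketch: many epochs instead of a single pass over the 100 points.
    # n_epochs and the shuffling are assumed values for illustration.
    n_epochs = 1000
    for epoch in range(n_epochs):
        # visit the points in a fresh random order each pass
        for idx in np.random.permutation(len(x_data)):
            sess.run(train, feed_dict={x_i: [[x_data[idx]]],
                                       y_data_i: [[y_data[idx]]]})

A single pass gives gradient descent only 100 updates, which is rarely enough; thousands of updates give the two-layer network a real chance to fit the sine curve.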