I tried to implement a three-layer neural network using TensorFlow, but it doesn't work. Code:
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data

# load MNIST with one-hot labels
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)

# hyperparameters
learning_rate = 0.5
epochs = 10
batch_size = 100

# input images (flattened 28x28 pixels) and one-hot labels
x = tf.placeholder(tf.float32, [None, 784])
y = tf.placeholder(tf.float32, [None, 10])

# weights and biases for the three layers: 784 -> 500 -> 100 -> 10
w1 = tf.Variable(tf.random_normal([784, 500]))
b1 = tf.Variable(tf.random_normal([500]))
w2 = tf.Variable(tf.random_normal([500, 100]))
b2 = tf.Variable(tf.random_normal([100]))
w3 = tf.Variable(tf.random_normal([100, 10]))
b3 = tf.Variable(tf.random_normal([10]))

# forward pass: two hidden ReLU layers, softmax on the output
layer1 = tf.add(tf.matmul(x, w1), b1)
layer1 = tf.nn.relu(layer1)
layer2 = tf.add(tf.matmul(layer1, w2), b2)
layer2 = tf.nn.relu(layer2)
output_layer = tf.add(tf.matmul(layer2, w3), b3)
output_layer = tf.nn.softmax(output_layer)

# clip to avoid log(0), then hand-rolled cross-entropy on the softmax output
y_clipped = tf.clip_by_value(output_layer, 1e-10, 0.9999999)
cross_entropy = -tf.reduce_mean(
    tf.reduce_sum(y * tf.log(y_clipped) + (1 - y) * tf.log(1 - y_clipped), axis=1))

optimiser = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(cross_entropy)
init_op = tf.global_variables_initializer()

# accuracy: fraction of samples whose predicted class matches the label
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(output_layer, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
with tf.Session() as sess:
    sess.run(init_op)
    total_batch = int(len(mnist.train.labels) / batch_size)
    for epoch in range(epochs):
        avg_cost = 0
        for i in range(total_batch):
            batch_x, batch_y = mnist.train.next_batch(batch_size=batch_size)
            _, c = sess.run([optimiser, cross_entropy], feed_dict={x: batch_x, y: batch_y})
            avg_cost += c / total_batch
        print("Epoch:", (epoch + 1), "cost=", "{:.3f}".format(avg_cost))
    print(sess.run(accuracy, feed_dict={x: mnist.test.images, y: mnist.test.labels}))
The cost gets stuck at 34.974 and I can't find the mistake. The accuracy is no better than random guessing. I also tried reducing the network to two layers, but it still doesn't work.
Answer 0 (score: 0)
If you remove the final softmax layer, compute the loss with softmax_cross_entropy_with_logits (which applies the softmax internally and is numerically stable, unlike taking tf.log of a clipped softmax output), and switch to the Adam optimizer, you get the results below.
# the fused op applies the softmax itself, so output_layer must be raw logits
cross_entropy = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(logits=output_layer, labels=y))
optimiser = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cross_entropy)
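For context, a minimal sketch of the surrounding lines after this change, assuming the rest of the question's script is kept as-is:

# keep the raw logits: delete the output_layer = tf.nn.softmax(output_layer) line
output_layer = tf.add(tf.matmul(layer2, w3), b3)

# y_clipped and the hand-rolled cross-entropy are no longer needed; the fused
# loss above handles both the softmax and the log in a numerically stable way

# the accuracy computation is unchanged: argmax over logits selects the same
# class as argmax over softmax probabilities, since softmax preserves order
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(output_layer, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))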
Cost per epoch, followed by the final test accuracy:
Epoch: 1 cost= 127.026
Epoch: 2 cost= 26.847
Epoch: 3 cost= 16.122
Epoch: 4 cost= 10.895
Epoch: 5 cost= 7.779
Epoch: 6 cost= 5.635
Epoch: 7 cost= 4.110
Epoch: 8 cost= 3.103
Epoch: 9 cost= 2.304
Epoch: 10 cost= 1.698
0.9309