I implemented a stacked autoencoder using the TensorFlow library, and it works fine. Now I am trying to see the hidden-layer activation values (y1, y2, y3, y4, y5), but I have not found any way to do this. Here is my code.
import tensorflow as tf

# Input images (flattened 28x28 = 784 pixels) and one-hot labels for 6 classes
x = tf.placeholder(tf.float32, [None, 784])
y_ = tf.placeholder(tf.float32, [None, 6])

# Sizes of the five hidden layers
k = 190
l = 180
m = 150
n = 130
o = 100
num_of_epoch = 10
# Weights and biases for the five hidden layers and the output layer
w1 = tf.Variable(tf.truncated_normal([784, k], stddev=0.1))
b1 = tf.Variable(tf.zeros([k]))
w2 = tf.Variable(tf.truncated_normal([k, l], stddev=0.1))
b2 = tf.Variable(tf.zeros([l]))
w3 = tf.Variable(tf.truncated_normal([l, m], stddev=0.1))
b3 = tf.Variable(tf.zeros([m]))
w4 = tf.Variable(tf.truncated_normal([m, n], stddev=0.1))
b4 = tf.Variable(tf.zeros([n]))
w5 = tf.Variable(tf.truncated_normal([n, o], stddev=0.1))
b5 = tf.Variable(tf.zeros([o]))
w6 = tf.Variable(tf.truncated_normal([o, 6], stddev=0.1))
b6 = tf.Variable(tf.zeros([6]))
# Hidden-layer activations I want to inspect (y1 ... y5) and the softmax output
y1 = tf.nn.relu(tf.matmul(x, w1) + b1)
y2 = tf.nn.relu(tf.matmul(y1, w2) + b2)
y3 = tf.nn.relu(tf.matmul(y2, w3) + b3)
y4 = tf.nn.relu(tf.matmul(y3, w4) + b4)
y5 = tf.nn.relu(tf.matmul(y4, w5) + b5)
y = tf.nn.softmax(tf.matmul(y5, w6) + b6)
# Cross-entropy loss and gradient-descent training step
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y),
                                              reduction_indices=[1]))
train_step = tf.train.GradientDescentOptimizer(0.1).minimize(cross_entropy)
init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)
    for i in range(num_of_epoch):
        # x_train / y_train are my training data, loaded elsewhere
        train_data = {x: x_train, y_: y_train}
        sess.run(train_step, feed_dict=train_data)

        # Accuracy on the training set
        correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
        accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
        sess.run(accuracy, feed_dict={x: x_train, y_: y_train})

    # Accuracy on the test set
    correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
    sess.run(accuracy, feed_dict={x: x_test, y_: y_test})
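What I think I need is something like the sketch below, where I fetch the hidden-layer tensors directly with sess.run inside the session above (this is only my guess, not something I have verified; x_train is the same training data I feed in during training):

# Sketch of what I want: evaluate the hidden-layer tensors for some input batch.
# This would have to run inside the "with tf.Session() as sess:" block above.
h1, h2, h3, h4, h5 = sess.run([y1, y2, y3, y4, y5],
                              feed_dict={x: x_train})
print(h1.shape)  # e.g. (number_of_examples, 190) for the first hidden layer

Is calling sess.run on those tensors the right way to see the activations, or is there a better mechanism I should be using?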
If you know a way, please share it with me, or point me to any useful link where I can find the right answer. Thanks in advance.