我是Tensorflow的新手。现在,我正在尝试创建一个简单的4层全连接神经网络,以对CIFAR-10数据集进行分类。但是,我的神经网络在测试集上的准确率完全静止不变,一直停留在11%。
我知道全连接神经网络可能不适合完成此任务,但是奇怪的是该网络根本没有改善/改变。所以我想知道是否有人知道我的问题的解决方案。我已在下面复制了我的代码,感谢您的帮助!非常感谢。
import tensorflow as tf
import numpy as np
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.cifar10.load_data()
y_train_one_hot = np.zeros((y_train.shape[0], 10))
for i in range(y_train.shape[0]):
y_train_one_hot[i][y_train[i]] = 1
y_test_one_hot = np.zeros((y_test.shape[0], 10))
for i in range(y_test.shape[0]):
y_test_one_hot[i][y_test[i]] = 1
# --- Model graph (TF1-style) ---
x = tf.placeholder(dtype=tf.float32, shape=(None, 32, 32, 3), name='X')
y = tf.placeholder(dtype=tf.float32, shape=(None, 10), name='Y')
keep_prob = tf.placeholder(tf.float32)  # dropout keep probability (1.0 at eval)

# Flatten each 32x32x3 image into a 3072-dim vector for the dense layers.
x_flatten = tf.reshape(x, [-1, 32 * 32 * 3])
nn = tf.layers.dense(x_flatten, 1028, activation=tf.nn.relu)
nn = tf.nn.dropout(nn, keep_prob)
nn = tf.layers.dense(nn, 1028, activation=tf.nn.relu)
nn = tf.nn.dropout(nn, keep_prob)
nn = tf.layers.dense(nn, 512, activation=tf.nn.relu)
nn = tf.nn.dropout(nn, keep_prob)

# BUG FIX: the output layer must emit raw logits (activation=None).
# tf.losses.softmax_cross_entropy applies softmax internally; putting a ReLU
# here clamps every negative logit to 0, killing its gradient, so the network
# never learns and accuracy stays frozen at chance (~10-11%).
prediction = tf.layers.dense(nn, 10, activation=None)

cross_entropy = tf.losses.softmax_cross_entropy(onehot_labels=y, logits=prediction)
loss = tf.reduce_mean(cross_entropy)
train_step = tf.train.AdamOptimizer().minimize(loss)

# Accuracy: fraction of samples whose argmax logit matches the true class.
correct_prediction = tf.equal(tf.argmax(prediction, 1), tf.argmax(y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
init = tf.global_variables_initializer()
# Training/evaluation loop. Indentation restored — as pasted, the with/for/if
# bodies were flush-left, which is a SyntaxError in Python.
with tf.Session() as sess:
    sess.run(init)
    for iteration in range(1000):
        # NOTE(review): this trains on the same fixed 1000-example slice every
        # step; shuffled minibatches over the full 50k training set would
        # generalize far better — confirm intent.
        sess.run(train_step,
                 feed_dict={x: x_train[:1000],
                            y: y_train_one_hot[:1000],
                            keep_prob: 0.5})
        if iteration % 10 == 0:
            # Evaluate with dropout disabled (keep_prob=1.0).
            acc = sess.run(accuracy,
                           feed_dict={x: x_test[:100],
                                      y: y_test_one_hot[:100],
                                      keep_prob: 1.0})
            print(acc)