Here is the code for my model, including the convolutional layers and the training function, written with TensorFlow. After coding all of the model's layers, the flatten layer built with tf.reshape() does not give the expected output. Please help me.
def model(self, inp):
    # Layer 3
    layer3_conv1 = tf.nn.conv2d(layer2_max_pool1, self.__W[3], strides=[1, 1, 1, 1], padding=self.padding)
    layer3_relu1 = tf.nn.relu(tf.nn.bias_add(layer3_conv1, self.__b[3]))
    layer3_max_pool1 = tf.nn.max_pool(layer3_relu1, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding=self.padding)
    w4 = layer3_max_pool1.get_shape()[1:].as_list()
    print("layer3maxpool shape is ", layer3_max_pool1.shape)
    print("w4 shape is ", w4)
    # Flatten
    print("x is :", np.prod(w4))
    flatten = tf.reshape(layer3_max_pool1, [-1, np.prod(w4)])
    print("Flatten shape is", flatten.shape)
    # Fully Connected Network
    fc1 = tf.nn.relu(tf.matmul(flatten, self.__W[4]) + self.__b[4])
    print(fc1.shape)
    out = tf.nn.relu(tf.matmul(fc1, self.__W[5]) + self.__b[5])
    # print(out.get_shape().as_list())
    return out
The output is:
layer3maxpool shape is  (?, 45, 58, 256)
w4 shape is  [45, 58, 256]
x is : 668160
Flatten shape is (?, 668160)
(?, 1024)
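
For reference, here is a minimal standalone sketch of the flatten-to-fully-connected step, assuming TensorFlow 1.x and the shapes printed above. The 1024 width and the weight names w4/b4 are assumptions taken from the (?, 1024) print of fc1, not from the original class.

import numpy as np
import tensorflow as tf

# Placeholder standing in for layer3_max_pool1, using the printed shape (?, 45, 58, 256).
x = tf.placeholder(tf.float32, [None, 45, 58, 256])

# Features per example after layer 3: 45 * 58 * 256 = 668160.
flat_dim = int(np.prod(x.get_shape().as_list()[1:]))
flatten = tf.reshape(x, [-1, flat_dim])

# The first fully connected weight matrix needs flat_dim rows to match the flatten output.
w4 = tf.Variable(tf.truncated_normal([flat_dim, 1024], stddev=0.1))
b4 = tf.Variable(tf.zeros([1024]))
fc1 = tf.nn.relu(tf.matmul(flatten, w4) + b4)

print(flatten.shape)  # (?, 668160)
print(fc1.shape)      # (?, 1024)

In this sketch the reshape itself yields (?, 668160), so if the actual self.__W[4] was created with a first dimension other than 668160, tf.matmul will raise a shape mismatch even though the flatten step is correct.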