import tensorflow as tf
import numpy as np
import glob
# Queue up every JPEG in the working directory, then decode and resize each image.
fq = glob.glob("*.jpg")
filename_queue = tf.train.string_input_producer(fq)
reader = tf.WholeFileReader()
key, value = reader.read(filename_queue)
my_img = tf.image.decode_jpeg(value, channels=3)
my_img = tf.cast(my_img, tf.float32)
resized_image = tf.image.resize_images(my_img, [50, 50])
labels = [1, 1, 1, 0, 0]
onehot = tf.one_hot(labels, depth=2)
image_batch = tf.train.batch([resized_image], batch_size=2)
# layer 1
w1 = tf.Variable(tf.truncated_normal([2, 2, 3, 52], stddev=0.01))
b1 = tf.Variable(tf.constant(0.01, shape=[52]))
layer1 = tf.nn.conv2d(image_batch, w1, [1, 1, 1, 1], padding='SAME')
act1 = tf.nn.relu(tf.nn.bias_add(layer1, b1))
pool1 = tf.layers.max_pooling2d(act1, 2, 2)
# layer 2
b2 = tf.Variable(tf.constant(0.01, shape=[104]))
w2 = tf.Variable(tf.truncated_normal([2, 2, 52, 104], stddev=0.01))
layer2 = tf.nn.conv2d(pool1, w2, [1, 1, 1, 1], padding='SAME')
act2 = tf.nn.relu(tf.nn.bias_add(layer2, b2))
pool2 = tf.layers.max_pooling2d(act2, 2, 2)
# fully connected layer (pool2 is 12x12x104 after two 2x2 poolings of a 50x50 input)
b3 = tf.Variable(tf.constant(0.01, shape=[300]))
w3 = tf.Variable(tf.truncated_normal([12 * 12 * 104, 300], stddev=0.01))
fcl1 = tf.reshape(pool2, [-1, 12 * 12 * 104])
fcl1 = tf.add(tf.matmul(fcl1, w3), b3)
fcl1 = tf.nn.relu(fcl1)
fcl1 = tf.nn.dropout(fcl1, 0.5)
# output layer
b_out = tf.Variable(tf.constant(0.01, shape=[2]))
w_out = tf.Variable(tf.truncated_normal([300, 2], stddev=0.01))
ans = tf.add(tf.matmul(fcl1, w_out), b_out)
# training, loss, optimizer
logits = ans
prediction = tf.nn.softmax(logits)
loss_op = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(
    logits=logits, labels=onehot))
optimizer = tf.train.AdamOptimizer(learning_rate=0.001)
train_op = optimizer.minimize(loss_op)
with tf.Session() as s:
    s.run(tf.global_variables_initializer())
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(coord=coord)
    for step in range(1, 40):
        s.run(train_op)
        print(step, s.run(loss_op))
    coord.request_stop()
    coord.join(threads)
Traceback (most recent call last):
  File "test.py", line 56, in <module>
    logits=logits, labels=onehot))
ValueError: Dimension 0 in both shapes must be equal, but are 2 and 5 for 'SoftmaxCrossEntropyWithLogits' (op: 'SoftmaxCrossEntropyWithLogits') with input shapes: [2,2], [5,2].
Please tell me what my mistake is. I think I need to feed 2 labels at a time, but all 5 are going in at once. How do I batch the labels separately?
Answer 0 (score: 0)
You are batching your inputs but not your labels: logits comes from an image batch of 2, while onehot still covers all 5 labels, hence the [2,2] vs. [5,2] mismatch. Try feeding label batches too. Make a placeholder for your labels:
labels = tf.placeholder(tf.int32, [None])
# Keep the tf.one_hot op.
onehot = tf.one_hot(labels, depth=2)
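With the placeholder, onehot now has shape [None, 2], so it will match the logits for whatever batch size you actually feed.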
Then feed it a two-label slice at each training step:

all_labels = [1, 1, 1, 0, 0]
start = 0
with tf.Session() as s:
    # ... (initialize variables and start the queue runners as before)
    for step in range(40):
        if start + 2 > len(all_labels):
            start = 0  # wrap around before we run off the end of the list
        t = all_labels[start:start+2]  # grab the next label batch
        start += 2
        s.run(train_op, feed_dict={labels: t})
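One caveat (not in the original answer): tf.train.string_input_producer shuffles filenames by default, so in-order label slices like this only line up with the dequeued images if the filename queue is created with shuffle=False.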
Honestly, I would handle your data batches and your label batches the same way, but I couldn't figure out how to make tf.one_hot work on the output of tf.train.batch, and I wasn't able to run tf.one_hot on the raw labels array first and then pass the result to tf.train.batch.
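For what it's worth, one common TF 1.x pattern for keeping images and labels paired is tf.train.slice_input_producer, which slices a filename list and a label list in lockstep so that tf.train.batch dequeues matching pairs. Below is a minimal sketch, assuming the same fq file list and five labels as in the question, with tf.read_file standing in for the WholeFileReader:

# Sketch: batch images and labels together so they stay paired.
filenames = tf.convert_to_tensor(fq, dtype=tf.string)
label_list = tf.convert_to_tensor([1, 1, 1, 0, 0], dtype=tf.int32)

# shuffle=False keeps files and labels aligned; set it True to shuffle pairs.
filename, label = tf.train.slice_input_producer([filenames, label_list],
                                                shuffle=False)
value = tf.read_file(filename)  # replaces the WholeFileReader
img = tf.cast(tf.image.decode_jpeg(value, channels=3), tf.float32)
img = tf.image.resize_images(img, [50, 50])

image_batch, label_batch = tf.train.batch([img, label], batch_size=2)
onehot = tf.one_hot(label_batch, depth=2)  # shape [2, 2], matches the logits

With this, no placeholder or feed_dict is needed: onehot always has the same batch size as image_batch, and the rest of the network can stay unchanged.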