I am trying to translate this pix2pix GAN script to TensorFlow 1.0.0, as described in the TensorFlow documentation, but I keep getting the following error:
ValueError: Variable d_h0_conv/w/Adam/ does not exist, or was not created with tf.get_variable(). Did you mean to set reuse=None in VarScope?
Here is the Adam optimizer part:
d_optim = tf.train.AdamOptimizer(args.lr, beta1=args.beta1) \
                  .minimize(self.d_loss, var_list=self.d_vars)
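For what it's worth, in TF 1.x AdamOptimizer.minimize() creates its slot variables (e.g. d_h0_conv/w/Adam) through tf.get_variable(), so this exact error appears whenever the surrounding variable scope already has reuse switched on at that point. A minimal standalone sketch that reproduces the same message (not taken from the pix2pix script; the names are only chosen to mirror the error):

import tensorflow as tf  # TF 1.x

# create a weight the same way the discriminator would
with tf.variable_scope('d_h0_conv'):
    w = tf.get_variable('w', shape=[5, 5, 3, 64])

loss = tf.reduce_sum(tf.square(w))

# once reuse is turned on for the root scope ...
tf.get_variable_scope().reuse_variables()

# ... Adam can no longer create its slot variable d_h0_conv/w/Adam
# and raises the ValueError quoted above
d_optim = tf.train.AdamOptimizer(0.0002, beta1=0.5).minimize(loss, var_list=[w])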
And d_vars is:
t_vars = tf.trainable_variables()
self.d_vars = [var for var in t_vars if 'd_' in var.name]
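To double-check that the name filter picks up the intended weights, a quick debug snippet (hypothetical, not part of the original script) is to print what it selects:

# hypothetical check: list what the 'd_' filter collects from tf.trainable_variables()
for var in self.d_vars:
    print(var.name)  # expected entries like d_h0_conv/w:0, d_h0_conv/biases:0, ...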
The discriminator code:
def discriminator(self, image, y=None, reuse=False):
    # image is 256 x 256 x (input_c_dim + output_c_dim)
    if reuse:
        tf.get_variable_scope().reuse_variables()
    else:
        assert tf.get_variable_scope().reuse == False

    h0 = lrelu(conv2d(image, self.df_dim, name='d_h0_conv'))
    # h0 is (128 x 128 x self.df_dim)
    h1 = lrelu(self.d_bn1(conv2d(h0, self.df_dim*2, name='d_h1_conv')))
    # h1 is (64 x 64 x self.df_dim*2)
    h2 = lrelu(self.d_bn2(conv2d(h1, self.df_dim*4, name='d_h2_conv')))
    # h2 is (32 x 32 x self.df_dim*4)
    h3 = lrelu(self.d_bn3(conv2d(h2, self.df_dim*8, d_h=1, d_w=1, name='d_h3_conv')))
    # h3 is (32 x 32 x self.df_dim*8), since d_h=1, d_w=1 keeps the spatial size
    h4 = linear(tf.reshape(h3, [self.batch_size, -1]), 1, 'd_h3_lin')
    return tf.nn.sigmoid(h4), h4
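For context, the discriminator is presumably called twice, once on the real image pair and once on the generated pair, roughly along these lines (a hedged sketch; real_AB, fake_AB and the attribute names are placeholders for whatever the actual script uses):

# sketch of the presumed call sites, placeholder names only
self.D, self.D_logits = self.discriminator(real_AB, reuse=False)
self.D_, self.D_logits_ = self.discriminator(fake_AB, reuse=True)
# note: the second call executes tf.get_variable_scope().reuse_variables(),
# which leaves reuse=True on the root scope for everything built after it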