ValueError: Variable bcnn1/W_conv2d already exists, disallowed. Did you mean to set reuse=True or reuse=tf.AUTO_REUSE in VarScope?

Time: 2018-12-26 07:37:43

Tags: tensorflow deep-learning artificial-intelligence tensorlayer

The problem is: "ValueError: Variable bcnn1/W_conv2d already exists, disallowed. Did you mean to set reuse=True or reuse=tf.AUTO_REUSE in VarScope? Originally defined at: "

I am trying to build a binarized CNN network for the MNIST dataset, and I get the above error.

import tensorflow as tf
import tensorlayer as tl
sess = tf.InteractiveSession()
X_train, y_train, X_val, y_val, X_test, y_test = \
                                        tl.files.load_mnist_dataset(shape=(-1,784))
x = tf.placeholder(tf.float32, shape=[None, 784], name='x')
y_ = tf.placeholder(tf.int64, shape=[None, ], name='y_')
X_train = X_train.reshape((X_train.shape[0], 1, X_train.shape[1], 1))
X_test = X_test.reshape((X_test.shape[0],1,X_test.shape[1],1))
tl.layers.set_name_reuse(True)
network = tl.layers.InputLayer(x, name='input')
network = tl.layers.BinaryConv2d(network, 32, (5, 5), (1, 1), padding='SAME', name='bcnn1')
network = tl.layers.MaxPool2d(network, (2, 2), (2, 2), padding='SAME', name='pool1')
network = tl.layers.BatchNormLayer(network, act=tl.act.htanh, is_train=True, name='bn1')
network = tl.layers.SignLayer(network)
network = tl.layers.BinaryConv2d(network, 64, (5, 5), (1, 1), padding='SAME', name='bcnn2')
network = tl.layers.MaxPool2d(network, (2, 2), (2, 2), padding='SAME', name='pool2')
network = tl.layers.BatchNormLayer(network, act=tl.act.htanh, is_train=True, name='bn2')
network = tl.layers.FlattenLayer(network)
network = tl.layers.DenseLayer(network, n_units=10, act=tf.identity, name='output_layer')
# define cost function and metric.
y = network.outputs
cost = tl.cost.cross_entropy(y, y_, 'cost')
correct_prediction = tf.equal(tf.argmax(y, 1), y_)
acc = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
y_op = tf.argmax(tf.nn.softmax(y), 1)

# define the optimizer
train_params = network.all_params
train_op = tf.train.AdamOptimizer(learning_rate=0.0001, beta1=0.9, beta2=0.999,
                                  epsilon=1e-08, use_locking=False).minimize(cost, var_list=train_params)

# initialize all variables in the session
tl.layers.initialize_global_variables(sess)

# print network information
network.print_params()
network.print_layers()

# train the network
tl.utils.fit(sess, network, train_op, cost, X_train, y_train, x, y_,
             acc=acc, batch_size=500, n_epoch=5, print_freq=5,
             X_val=X_val, y_val=y_val, eval_train=False)

# evaluation
tl.utils.test(sess, network, acc, X_test, y_test, x, y_, batch_size=None, cost=cost)

# save the network to .npz file
tl.files.save_npz(network.all_params, name='model.npz')
sess.close()
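
As far as I understand, the error itself is about tf.get_variable being asked for a name that already exists in a variable scope. Below is a minimal sketch of that behaviour with no TensorLayer involved (the scope and variable names are taken from the error message; the shape is only for illustration):

import tensorflow as tf

def weight(reuse):
    # create (or fetch) bcnn1/W_conv2d inside a variable scope
    with tf.variable_scope('bcnn1', reuse=reuse):
        return tf.get_variable('W_conv2d', shape=[5, 5, 1, 32])

tf.reset_default_graph()    # start from an empty graph
w1 = weight(False)          # first call creates bcnn1/W_conv2d
# weight(False)             # a second call like this raises the ValueError above
w2 = weight(tf.AUTO_REUSE)  # reuse=tf.AUTO_REUSE returns the existing variable
print(w1 is w2)             # True: the same variable object is reused

My script builds each layer only once, so I suspect the duplicate comes from re-running the graph-building code against the same default graph in the InteractiveSession; calling tf.reset_default_graph() before rebuilding the network (or restarting the kernel) might be enough, since tl.layers.set_name_reuse(True) does not appear to touch TensorFlow's variable scopes.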

0 Answers:

No answers yet.