I am trying to restore a trained model and retrain it with some additional operations added on top.
I have two Python files, roughly like this:
train.py
import tensorflow as tf

# X1, X2, X1label, X2label and merged_summary_op are defined elsewhere in my script

def train():
    # 1st NN
    Xinp1 = tf.placeholder("float", [None, 2], name="Xinp1")
    Xhidden1 = tf.layers.dense(Xinp1, units=16,
                               kernel_initializer=tf.initializers.he_uniform(),
                               activation=tf.nn.relu, name="X_hidden1")
    Xout1 = tf.layers.dense(Xhidden1, units=1,
                            kernel_initializer=tf.initializers.he_uniform(),
                            activation=tf.nn.sigmoid, name="X_out")
    Xout1 = tf.identity(Xout1, name="Xout1")

    # 2nd NN
    Xinp2 = tf.placeholder("float", [None, 2], name="Xinp2")
    Xhidden2 = tf.layers.dense(Xinp2, units=16,
                               kernel_initializer=tf.initializers.he_uniform(),
                               activation=tf.nn.relu, name="X_hidden2")
    Xout2 = tf.layers.dense(Xhidden2, units=1,
                            kernel_initializer=tf.initializers.he_uniform(),
                            activation=tf.nn.sigmoid, name="X_out2")
    Xout2 = tf.identity(Xout2, name="Xout2")

    Xout1_label = tf.placeholder("float", [None, 1], name="Xout1_label")
    Xout2_label = tf.placeholder("float", [None, 1], name="Xout2_label")

    learning_rate = 1e-2
    # Define losses and optimizer
    loss_op1 = tf.losses.absolute_difference(Xout1_label, Xout1)
    loss_op2 = tf.losses.absolute_difference(Xout2_label, Xout2)
    # debug gradients
    trainables = tf.trainable_variables()
    print("trainables", trainables)
    optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate, epsilon=0.1)
    train_op1 = optimizer.minimize(loss_op1)
    train_op2 = optimizer.minimize(loss_op2)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        saver = tf.train.Saver()
        for _ in range(100):
            _, c1, summary = sess.run([train_op1, loss_op1, merged_summary_op], feed_dict={
                Xinp1: X1,
                Xinp2: X2,
                Xout1_label: X1label,
                Xout2_label: X2label
            })
            _, c2, summary = sess.run([train_op2, loss_op2, merged_summary_op], feed_dict={
                Xinp1: X1,
                Xinp2: X2,
                Xout1_label: X1label,
                Xout2_label: X2label
            })
        saver.save(sess, 'Model/trained.ckpt')
    sess.close()  # redundant: the with-block already closes the session
This produces the checkpoint files under Model/ (written by the saver.save call above).
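For reference, a minimal sketch (my own addition, assuming the checkpoint path from saver.save above) that lists what is stored in the checkpoint; it should show the layer kernels and biases together with the Adam slot variables such as X_hidden1/kernel/Adam:

import tensorflow as tf

# list every variable saved in the checkpoint, with its shape
for name, shape in tf.train.list_variables('Model/trained.ckpt'):
    print(name, shape)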
retrain.py
import tensorflow as tf

# NewX1, NewX2, T1inp, T2inp, T3inp, Addtarget, radial_length and
# merged_summary_op are defined elsewhere in my script

def retrain():
    learning_rate = 1e-2
    with tf.Session() as sess:
        saver = tf.train.import_meta_graph('Model/trained.ckpt.meta')
        saver.restore(sess, 'Model/trained.ckpt')
        graph = tf.get_default_graph()
        Xinp1 = graph.get_tensor_by_name('Xinp1:0')
        Xout1 = graph.get_tensor_by_name('Xout1:0')
        Xinp2 = graph.get_tensor_by_name('Xinp2:0')
        Xout2 = graph.get_tensor_by_name('Xout2:0')

        # I want to add some additional nodes
        T1 = tf.placeholder("float", [None, 1], name="T1")
        T2 = tf.placeholder("float", [None, 1], name="T2")
        Add1 = tf.add(tf.multiply(Xout1, tf.subtract(T1, T2)), T2, name="Add1_out")
        T3 = tf.placeholder("float", [None, 1], name="T3")
        Add2 = tf.multiply(tf.multiply(T3, tf.subtract(Add1, 300.0)),
                           tf.multiply(radial_length, 0.000001), name="Add2_out")

        Addlabel = tf.placeholder("float", [None, 1], name="Addlabel")
        loss_op = tf.losses.mean_squared_error(Addlabel, Add2)
        # debug gradients
        trainables = tf.trainable_variables()
        print("trainables", trainables)
        optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate, epsilon=0.1)
        train_op = optimizer.minimize(loss_op)
        sess.run(tf.global_variables_initializer())

        # training starts
        # here I expect the weights of the 1st NN and 2nd NN to be learned during training
        for _ in range(100):
            _, c, summary = sess.run([train_op, loss_op, merged_summary_op], feed_dict={
                Xinp1: NewX1,
                Xinp2: NewX2,
                T1: T1inp,
                T2: T2inp,
                T3: T3inp,
                Addlabel: Addtarget
            })
I want retrain.py to also adjust the weights belonging to the 1st NN and the 2nd NN during this training.
But when I run retrain.py I now get the following error:
Traceback (most recent call last):
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/framework/ops.py", line 1659, in _create_c_op
c_op = c_api.TF_FinishOperation(op_desc)
tensorflow.python.framework.errors_impl.InvalidArgumentError: Duplicate node name in graph: 'X_hidden1/kernel/Adam'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/itmsec/Documents/tipclearance/src/TTG_tensorflowv14.py", line 493, in <module>
restore_and_retrain(BDD)
File "/home/itmsec/Documents/tipclearance/src/TTG_tensorflowv14.py", line 244, in restore_and_retrain
train_op = optimizer.minimize(loss_op)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/training/optimizer.py", line 413, in minimize
name=name)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/training/optimizer.py", line 595, in apply_gradients
self._create_slots(var_list)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/training/adam.py", line 135, in _create_slots
self._zeros_slot(v, "m", self._name)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/training/optimizer.py", line 1153, in _zeros_slot
new_slot_variable = slot_creator.create_zeros_slot(var, op_name)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/training/slot_creator.py", line 183, in create_zeros_slot
colocate_with_primary=colocate_with_primary)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/training/slot_creator.py", line 157, in create_slot_with_initializer
dtype)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/training/slot_creator.py", line 65, in _create_slot_var
validate_shape=validate_shape)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py", line 1479, in get_variable
aggregation=aggregation)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py", line 1220, in get_variable
aggregation=aggregation)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py", line 547, in get_variable
aggregation=aggregation)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py", line 499, in _true_getter
aggregation=aggregation)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py", line 911, in _get_single_variable
aggregation=aggregation)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/variables.py", line 213, in __call__
return cls._variable_v1_call(*args, **kwargs)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/variables.py", line 176, in _variable_v1_call
aggregation=aggregation)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/variables.py", line 155, in <lambda>
previous_getter = lambda **kwargs: default_variable_creator(None, **kwargs)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py", line 2495, in default_variable_creator
expected_shape=expected_shape, import_scope=import_scope)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/variables.py", line 217, in __call__
return super(VariableMetaclass, cls).__call__(*args, **kwargs)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/variables.py", line 1395, in __init__
constraint=constraint)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/variables.py", line 1509, in _init_from_args
name=name)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/state_ops.py", line 79, in variable_op_v2
shared_name=shared_name)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/gen_state_ops.py", line 1425, in variable_v2
shared_name=shared_name, name=name)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/framework/op_def_library.py", line 788, in _apply_op_helper
op_def=op_def)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/util/deprecation.py", line 507, in new_func
return func(*args, **kwargs)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/framework/ops.py", line 3300, in create_op
op_def=op_def)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/framework/ops.py", line 1823, in __init__
control_input_ops)
File "/home/itmsec/anaconda3/lib/python3.6/site-packages/tensorflow/python/framework/ops.py", line 1662, in _create_c_op
raise ValueError(str(e))
ValueError: Duplicate node name in graph: 'X_hidden1/kernel/Adam'
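If I read the traceback correctly, the meta graph imported from Model/trained.ckpt.meta already contains the Adam slot variables created by the optimizer in train.py (e.g. X_hidden1/kernel/Adam), and the new tf.train.AdamOptimizer in retrain.py (default name 'Adam') then tries to create slot variables with exactly the same names. Below is a minimal sketch of the variant I am considering, purely an assumption on my part and not verified: give the second optimizer a different name and initialize only the variables that were not restored, so the trained weights are kept. Is this the right way to do it, or is there a better approach?

# inside retrain(), replacing the optimizer / initializer lines above
# (my own untested idea, names like 'Adam_retrain' are just placeholders)
vars_before = set(tf.global_variables())  # variables that came from the checkpoint

optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate, epsilon=0.1,
                                   name='Adam_retrain')  # avoid clashing with the saved 'Adam' slots
train_op = optimizer.minimize(loss_op)

# initialize only the newly created variables (the new optimizer's slots),
# so the restored weights of the 1st and 2nd NN are not overwritten
new_vars = [v for v in tf.global_variables() if v not in vars_before]
sess.run(tf.variables_initializer(new_vars))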