I have defined the following function to create a NN:
import tensorflow as tf

def nn_layer(input_tensor, output_dim, layer_name, act=tf.nn.relu):
    # Adding a name scope ensures logical grouping of the layers in the graph.
    with tf.name_scope(layer_name):
        # This Variable will hold the state of the weights for the layer
        with tf.name_scope('weights'):
            weights = weight_variable([input_tensor.get_shape().as_list()[1], output_dim])
            variable_summaries(weights)
        with tf.name_scope('biases'):
            biases = bias_variable([output_dim])
            variable_summaries(biases)
        with tf.name_scope('Wx_plus_b'):
            preactivate = tf.matmul(input_tensor, weights) + biases
            tf.summary.histogram('pre_activations', preactivate)
        activations = act(preactivate, name='activation')
        tf.summary.histogram('activations', activations)
        return activations
def neural_net(features):
    '''Model function for the NN.'''
    input_layer = tf.cast(features, tf.float32)
    hidden_layer = nn_layer(input_layer, 2, 'hidden_layer', act=tf.nn.relu)
    # The output layer produces logits, so it must stay linear: a ReLU here
    # would clip negative logits before the softmax cross-entropy.
    out_layer = nn_layer(hidden_layer, 2, 'out_layer', act=tf.identity)
    return out_layer
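For reference, weight_variable, bias_variable and variable_summaries are not shown here; I assume the usual helpers from the TensorFlow tutorials, roughly:

def weight_variable(shape):
    # Small random noise breaks the symmetry between units.
    initial = tf.truncated_normal(shape, stddev=0.1)
    return tf.Variable(initial)

def bias_variable(shape):
    initial = tf.constant(0.1, shape=shape)
    return tf.Variable(initial)

def variable_summaries(var):
    # Attach mean/stddev/min/max scalars and a histogram for TensorBoard.
    with tf.name_scope('summaries'):
        mean = tf.reduce_mean(var)
        tf.summary.scalar('mean', mean)
        with tf.name_scope('stddev'):
            stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean)))
        tf.summary.scalar('stddev', stddev)
        tf.summary.scalar('max', tf.reduce_max(var))
        tf.summary.scalar('min', tf.reduce_min(var))
        tf.summary.histogram('histogram', var)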
Then I can compute the logits like this:
logits = neural_net(x)
loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=y))
train_step = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(loss)

sess = tf.Session()
sess.run(tf.global_variables_initializer())  # initialize the variables once, before training
for epoch in range(epochs):
    # Fetch the loss under a different name: rebinding `loss` would shadow
    # the tensor and break the next iteration.
    # x_data / y_data stand for the actual training arrays.
    loss_value, _ = sess.run([loss, train_step], feed_dict={x: x_data, y: y_data})
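For completeness, x and y above are placeholders and learning_rate / epochs are plain Python values; something like the following is assumed (num_features is just an example dimension):

num_features = 2  # example input width
x = tf.placeholder(tf.float32, shape=[None, num_features], name='x')  # input features
y = tf.placeholder(tf.int64, shape=[None], name='y')  # integer class labels for the sparse loss
learning_rate = 0.01
epochs = 100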
I would like to know whether a new NN is created every time the run method is called. If that is the case, how can I build the network once so that it persists throughout training?
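For instance, this is the kind of check I have in mind to see whether the same network persists across calls (just a sketch; x_data / y_data again stand for my real training arrays):

w = tf.trainable_variables()[0]  # the first weight matrix in the graph
before = sess.run(w)
sess.run(train_step, feed_dict={x: x_data, y: y_data})
after = sess.run(w)
# If the network is created only once, `after` should be `before` plus one
# gradient-descent update of the same variable, not a re-initialized tensor.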
Keep in mind that I have tried to simplify the code as much as possible.