import tensorflow as tf


def weight_variable(shape, l2_reg_lambda=None, l1_reg_lambda=None):
    """Create a weight variable, optionally with L2 or L1 regularization."""
    regularizer = None
    if l2_reg_lambda:
        regularizer = tf.contrib.layers.l2_regularizer(l2_reg_lambda)
    elif l1_reg_lambda:
        regularizer = tf.contrib.layers.l1_regularizer(l1_reg_lambda)
    return tf.get_variable('weight', shape,
                           initializer=tf.random_normal_initializer(stddev=0.1),
                           regularizer=regularizer)


def bias_variable(shape):
    """Create a bias variable initialized to a small constant."""
    return tf.get_variable('bias', shape, initializer=tf.constant_initializer(0.1))


def full_connect(inputs, num_units, activation=None, name='full_connect'):
    """Fully connected layer: outputs = activation(inputs @ weight + bias)."""
    with tf.variable_scope(name):
        shape = [inputs.get_shape()[-1], num_units]
        weight = weight_variable(shape)
        bias = bias_variable([num_units])  # shape must be a list, not a bare int
        outputs = tf.matmul(inputs, weight) + bias
        if activation == 'relu':
            outputs = tf.nn.relu(outputs)
        elif activation == 'tanh':
            outputs = tf.tanh(outputs)
        elif activation == 'sigmoid':
            outputs = tf.nn.sigmoid(outputs)
        return outputs
This is my fully connected layer; here is how I use it in my graph:
nn_layers.full_connect(self.wide_deep_embed, config.num_classes, activation='sigmoid', name='output_layer')
When the next batch of data comes in, will the weights created in weight_variable be initialized again, or are they drawn from the random normal distribution only the first time? Thanks.
Answer 0 (score: 1)

Variable initialization has nothing to do with the input data.

Variables are allocated when the graph is built; after that, the graph is static and does not change. Initialization itself runs only once, when you execute the initializer op in a session, not once per batch.
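A minimal sketch of this point (assuming the full_connect layer above is in scope; the placeholder shape and feed data are made up for illustration). Fetching the weight tensor before and after feeding two different batches shows identical values:

import numpy as np
import tensorflow as tf

tf.reset_default_graph()
inputs = tf.placeholder(tf.float32, [None, 4])
logits = full_connect(inputs, 2, activation='sigmoid', name='output_layer')
weight = tf.get_default_graph().get_tensor_by_name('output_layer/weight:0')

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())  # runs once, before any batch
    w_before = sess.run(weight)
    # Two forward passes with different batches; no reinitialization occurs.
    sess.run(logits, feed_dict={inputs: np.random.rand(8, 4).astype(np.float32)})
    sess.run(logits, feed_dict={inputs: np.random.rand(8, 4).astype(np.float32)})
    w_after = sess.run(weight)
    assert np.array_equal(w_before, w_after)  # weights unchanged between batches

(A training op would of course update the weights via gradients, but that is an update to the existing values, not a fresh draw from the initializer.)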
Answer 1 (score: 0)
Variable values live in the session (tf.Session()), not in the graph. As long as you keep the same session across batches, the weights are not reinitialized.

As E_net4 mentioned, variables must be explicitly initialized inside the session.
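A short sketch of that requirement (the variable name v is hypothetical). In TF1.x, reading a variable before its initializer has run in the current session raises FailedPreconditionError, and the initializer only needs to run once per session:

import tensorflow as tf

tf.reset_default_graph()
v = tf.get_variable('v', [2], initializer=tf.constant_initializer(0.1))

with tf.Session() as sess:
    try:
        sess.run(v)  # fails: v is not yet initialized in this session
    except tf.errors.FailedPreconditionError:
        sess.run(tf.global_variables_initializer())  # explicit, one-time init
    print(sess.run(v))  # value now lives in the session state and persists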