I need some simple examples from which I can learn how to use tf.nn.fused_batch_norm. (I could not find any through a Google search.)
Specifically, I want to understand exactly what the input arguments mean=None, variance=None are for. At inference time, do I compute the population mean and variance with tf.nn.moments and then pass them into the function through these arguments?
Do I use this function like any other activation, such as ReLU?
Answer 0 (score: 0)
You can look at how tf.keras.layers.BatchNormalization is defined in its source file:
def _fused_batch_norm(self, inputs, training):
  """Returns the output of fused batch norm."""

  def _fused_batch_norm_training():
    return nn.fused_batch_norm(
        inputs,
        self.gamma,
        self.beta,
        epsilon=self.epsilon)

  def _fused_batch_norm_inference():
    return nn.fused_batch_norm(
        inputs,
        self.gamma,
        self.beta,
        mean=self.moving_mean,
        variance=self.moving_variance,
        epsilon=self.epsilon,
        is_training=False)

  output, mean, variance = tf_utils.smart_cond(
      training, _fused_batch_norm_training, _fused_batch_norm_inference)
  if not self._bessels_correction_test_only:
    # Remove Bessel's correction to be consistent with non-fused batch norm.
    # Note that the variance computed by fused batch norm is
    # with Bessel's correction.
    sample_size = math_ops.cast(
        array_ops.size(inputs) / array_ops.size(variance), variance.dtype)
    factor = (sample_size - math_ops.cast(1.0, variance.dtype)) / sample_size
    variance *= factor

  training_value = tf_utils.constant_value(training)
  if training_value is None:
    momentum = tf_utils.smart_cond(training,
                                   lambda: self.momentum,
                                   lambda: 1.0)
  else:
    momentum = ops.convert_to_tensor(self.momentum)
  if training_value or training_value is None:
    mean_update = self._assign_moving_average(self.moving_mean, mean,
                                              momentum)
    variance_update = self._assign_moving_average(self.moving_variance,
                                                  variance, momentum)
    self.add_update(mean_update, inputs=True)
    self.add_update(variance_update, inputs=True)

  return output
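
As the layer code above shows, during training mean and variance are left as None (the op computes batch statistics and returns them), and at inference the accumulated moving averages are passed in via mean= and variance= with is_training=False; you do not call tf.nn.moments yourself. Below is a minimal standalone sketch of calling tf.nn.fused_batch_norm directly, not taken from the answer; it assumes TensorFlow 1.x graph mode, NHWC input, and illustrative variable names (x, scale, offset, moving_mean, moving_var).

import tensorflow as tf  # assumes TF 1.x (or tf.compat.v1)

x = tf.placeholder(tf.float32, [None, 32, 32, 64])          # NHWC input
scale = tf.Variable(tf.ones([64]))                           # gamma
offset = tf.Variable(tf.zeros([64]))                         # beta
moving_mean = tf.Variable(tf.zeros([64]), trainable=False)
moving_var = tf.Variable(tf.ones([64]), trainable=False)

# Training: leave mean/variance as None; the op computes batch statistics
# and returns them so you can update the moving averages yourself.
y_train, batch_mean, batch_var = tf.nn.fused_batch_norm(
    x, scale, offset, epsilon=1e-3, is_training=True)

momentum = 0.99
update_mean = tf.assign(
    moving_mean, moving_mean * momentum + batch_mean * (1.0 - momentum))
update_var = tf.assign(
    moving_var, moving_var * momentum + batch_var * (1.0 - momentum))

# Inference: pass the accumulated moving statistics via mean= and variance=
# and set is_training=False; no new statistics are computed.
y_infer, _, _ = tf.nn.fused_batch_norm(
    x, scale, offset,
    mean=moving_mean, variance=moving_var,
    epsilon=1e-3, is_training=False)

So it is not used like a stateless activation such as ReLU: it carries trainable parameters (gamma, beta) and running statistics, which is why wrapping it in a layer like tf.keras.layers.BatchNormalization is usually the more convenient option.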