I'm using TensorFlow to implement a forward-propagation demo. The same block of code runs fine before it is wrapped in a function, but once I wrap it, it fails with an error.
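For reference, everything below runs on TensorFlow 1.x (tf.placeholder and tf.contrib.layers are used), and the snippets assume only this import:

import tensorflow as tf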
def create_placeholders(n_x, n_y):
    X = tf.placeholder(tf.float32, name='X', shape=[n_x, None])
    Y = tf.placeholder(tf.float32, name='Y', shape=[n_y, None])
    return X, Y
def initialize_parameters():
    tf.set_random_seed(1)
    W1 = tf.get_variable(
        'W1', [25, 12288], initializer=tf.contrib.layers.xavier_initializer(seed=1))
    b1 = tf.get_variable('b1', [25, 1], initializer=tf.zeros_initializer())
    W2 = tf.get_variable(
        'W2', [12, 25], initializer=tf.contrib.layers.xavier_initializer(seed=1))
    b2 = tf.get_variable('b2', [12, 1], initializer=tf.zeros_initializer())
    W3 = tf.get_variable(
        'W3', [6, 12], initializer=tf.contrib.layers.xavier_initializer(seed=1))
    b3 = tf.get_variable('b3', [6, 1], initializer=tf.zeros_initializer())
    parameters = {
        "W1": W1,
        "b1": b1,
        "W2": W2,
        "b2": b2,
        "W3": W3,
        "b3": b3
    }
    return parameters
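Called on its own, initialize_parameters behaves as expected; this is just a small sanity sketch I added for illustration, not part of the failing case:

tf.reset_default_graph()
with tf.Session() as sess:
    parameters = initialize_parameters()
    # prints something like: <tf.Variable 'W1:0' shape=(25, 12288) dtype=float32_ref>
    print("W1 = " + str(parameters["W1"]))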
def forward_propagation(X, paramters):
    W1 = parameters['W1']
    b1 = parameters['b1']
    W2 = parameters['W2']
    b2 = parameters['b2']
    W3 = parameters['W3']
    b3 = parameters['b3']

    Z1 = tf.add(tf.matmul(W1, X), b1)
    A1 = tf.nn.relu(Z1)
    Z2 = tf.add(tf.matmul(W2, A1), b2)
    A2 = tf.nn.relu(Z2)
    Z3 = tf.add(tf.matmul(W3, A2), b3)
    return Z3
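For clarity, the shape flow I intend through the three LINEAR -> RELU layers (m being the batch size, i.e. the None dimension) is:

X: (12288, m) -> Z1, A1: (25, m) -> Z2, A2: (12, m) -> Z3: (6, m)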
Running the following code works fine:
tf.reset_default_graph()
with tf.Session() as sess:
    X, Y = create_placeholders(12288, 6)
    parameters = initialize_parameters()
    Z3 = forward_propagation(X, parameters)
    print("Z3 = " + str(Z3))
and I get the following output:
Z3 = Tensor("Add_2:0", shape=(6, ?), dtype=float32)
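Going one step further and actually feeding data in this unwrapped form also evaluates for me; this sketch is only something I added for completeness (the random input and the initializer call are mine):

import numpy as np

tf.reset_default_graph()
with tf.Session() as sess:
    X, Y = create_placeholders(12288, 6)
    parameters = initialize_parameters()
    Z3 = forward_propagation(X, parameters)
    sess.run(tf.global_variables_initializer())
    # feed a random mini-batch of 2 examples just to check the graph evaluates
    out = sess.run(Z3, feed_dict={X: np.random.randn(12288, 2)})
    print(out.shape)  # (6, 2)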
But when I run the following code, I get an error.
def model():
    tf.reset_default_graph()
    with tf.Session() as sess:
        X, Y = create_placeholders(12288, 6)
        parameters = initialize_parameters()
        Z3 = forward_propagation(X, parameters)
        print("Z3 = " + str(Z3))

model()
The error is:
ValueError Traceback (most recent call last)
<ipython-input-11-e6d854a03121> in <module>
7 print("Z3 = " + str(Z3))
8
----> 9 model()
<ipython-input-11-e6d854a03121> in model()
4 X, Y = create_placeholders(12288, 6)
5 parameters = initialize_parameters()
----> 6 Z3 = forward_propagation(X, parameters)
7 print("Z3 = " + str(Z3))
8
<ipython-input-3-d758b2f33eff> in forward_propagation(X, paramters)
19 b3 = parameters['b3']
20
---> 21 Z1 = tf.add(tf.matmul(W1, X), b1)
22 A1 = tf.nn.relu(Z1)
23
G:\Anaconda3.7\lib\site-packages\tensorflow\python\ops\math_ops.py in matmul(a, b, transpose_a, transpose_b, adjoint_a, adjoint_b, a_is_sparse, b_is_sparse, name)
2385 are both set to True.
2386 """
-> 2387 with ops.name_scope(name, "MatMul", [a, b]) as name:
2388 if transpose_a and adjoint_a:
2389 raise ValueError("Only one of transpose_a and adjoint_a can be True.")
G:\Anaconda3.7\lib\site-packages\tensorflow\python\framework\ops.py in __enter__(self)
6081 if self._values is None:
6082 self._values = []
-> 6083 g = _get_graph_from_inputs(self._values)
6084 self._g_manager = g.as_default()
6085 self._g_manager.__enter__()
G:\Anaconda3.7\lib\site-packages\tensorflow\python\framework\ops.py in _get_graph_from_inputs(op_input_list, graph)
5711 graph = graph_element.graph
5712 elif original_graph_element is not None:
-> 5713 _assert_same_graph(original_graph_element, graph_element)
5714 elif graph_element.graph is not graph:
5715 raise ValueError("%s is not from the passed-in graph." % graph_element)
G:\Anaconda3.7\lib\site-packages\tensorflow\python\framework\ops.py in _assert_same_graph(original_item, item)
5647 if original_item.graph is not item.graph:
5648 raise ValueError("%s must be from the same graph as %s." % (item,
-> 5649 original_item))
5650
5651
ValueError: Tensor("X:0", shape=(12288, ?), dtype=float32) must be from the same graph as Tensor("W1:0", shape=(25, 12288), dtype=float32_ref).
This has been bothering me for a long time; I would really appreciate any help!