parameters = L_layer_model(X, y, layer_dimensions, learning_rate = 0.01, num_iterations = 3000, print_cost=True)
---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-560-2a4285d15cab> in <module>
----> 1 parameters = L_layer_model(X, y, layer_dimensions, learning_rate = 0.01, num_iterations = 3000, print_cost=True)

<ipython-input-556-cd86d2dbfd35> in L_layer_model(X, y, layer_dimensions, learning_rate, num_iterations, print_cost, hidden_layers_activation_fn)
     23     # iterate over L-layers to get the final output and the cache
     24     AL, caches = L_model_forward(
---> 25         X, parameters, hidden_layers_activation_fn)
     26
     27     # compute cost to plot it

<ipython-input-548-b2be18768ac9> in L_model_forward(X, parameters, hidden_layers_activation_fn)
      9         A, cache = linear_activation_forward(
     10             A_prev, parameters["W" + str(l)], parameters["b" + str(l)],
---> 11             activation_fn=hidden_layers_activation_fn)
     12         caches.append(cache)
     13

<ipython-input-547-9eb57809ddd5> in linear_activation_forward(A_prev, W, b, activation_fn)
      8
      9     elif activation_fn == "relu":
---> 10         Z, linear_cache = linear_forward(A_prev, W, b)
     11         A, activation_cache = relu(Z)
     12

<ipython-input-546-e0684360f96c> in linear_forward(A_prev, W, b)
     15     cache -- a python dictionary with "A", "W" and "b" for backprop
     16     """
---> 17     Z = np.dot(W, A_prev) + b
     18     cache = (A_prev, W, b)
     19     return Z, cache

ValueError: shapes (10,5) and (4870,32,32,3) not aligned: 5 (dim 1) != 32 (dim 2)
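
The last frame points to the cause: np.dot(W, A_prev) receives X still in its raw image layout (4870, 32, 32, 3), i.e. (examples, height, width, channels), while linear_forward expects a 2-D matrix of shape (n_features, n_examples) whose row count matches the first entry of layer_dimensions (apparently 5, judging from W1 having shape (10, 5), rather than 32 * 32 * 3 = 3072). Below is a minimal sketch of the kind of reshape that removes the mismatch; the name X_flat, the pixel scaling, and the hidden/output layer sizes are illustrative assumptions, not taken from the original notebook.

import numpy as np

# X is assumed to arrive as (4870, 32, 32, 3): (examples, height, width, channels).
# Flatten each image into a column so the design matrix is (features, examples),
# which is the layout np.dot(W, A_prev) in linear_forward expects.
X_flat = X.reshape(X.shape[0], -1).T          # (3072, 4870), since 32 * 32 * 3 = 3072
X_flat = X_flat / 255.0                       # optional pixel scaling (assumption)

# The first entry of layer_dimensions must equal the flattened feature count;
# the remaining sizes here are placeholders, not the original architecture.
layer_dimensions = [X_flat.shape[0], 10, 5, 1]

parameters = L_layer_model(X_flat, y, layer_dimensions,
                           learning_rate=0.01, num_iterations=3000, print_cost=True)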