So I am following the book Deep Learning with Python, and I am trying to reproduce the results of the author's code, which takes individual filter images like the one below (I was able to reproduce that image successfully)
and stitches them into a larger grid like this.
However, even when I copy and paste his code for the large grid, I get a different result.
Below is the code that produces the image above; below that is the supporting code used by the main code.
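For completeness, everything below assumes roughly the setup used earlier in the book (the VGG16 convolutional base plus the Keras backend); this is my reconstruction of that setup, not code quoted from the book:

import numpy as np
import matplotlib.pyplot as plt
from keras.applications import VGG16
from keras import backend as K

# VGG16 pretrained on ImageNet, without the classifier head
model = VGG16(weights='imagenet', include_top=False)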
for layer_name in ['block3_conv1']:
    size = 64
    margin = 5

    # This is an empty (black) image where we will store our results.
    results = np.zeros((8 * size + 7 * margin, 8 * size + 7 * margin, 3))

    for i in range(8):  # iterate over the rows of our results grid
        for j in range(8):  # iterate over the columns of our results grid
            # Generate the pattern for filter `i + (j * 8)` in `layer_name`
            filter_img = generate_pattern(layer_name, i + (j * 8), size=size)

            # Put the result in the square `(i, j)` of the results grid
            horizontal_start = i * size + i * margin
            horizontal_end = horizontal_start + size
            vertical_start = j * size + j * margin
            vertical_end = vertical_start + size
            results[horizontal_start: horizontal_end, vertical_start: vertical_end, :] = filter_img

    # Display the results grid
    plt.figure(figsize=(20, 20))
    plt.imshow(results)
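One thing I suspect, but am not sure about: results is created with np.zeros, so it is a float array holding values in the 0–255 range, while plt.imshow interprets float input as lying in [0, 1]. If that is the cause, casting before display might be all that is needed (this is my guess, not something from the book):

plt.figure(figsize=(20, 20))
plt.imshow(results.astype('uint8'))  # or equivalently: plt.imshow(results / 255.)
plt.show()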
def generate_pattern(layer_name, filter_index, size=150):
    # Build a loss function that maximizes the activation
    # of the nth filter of the layer considered.
    layer_output = model.get_layer(layer_name).output
    loss = K.mean(layer_output[:, :, :, filter_index])

    # Compute the gradient of the input picture wrt this loss
    grads = K.gradients(loss, model.input)[0]

    # Normalization trick: we normalize the gradient
    grads /= (K.sqrt(K.mean(K.square(grads))) + 1e-5)

    # This function returns the loss and grads given the input picture
    iterate = K.function([model.input], [loss, grads])

    # We start from a gray image with some noise
    input_img_data = np.random.random((1, size, size, 3)) * 20 + 128.

    # Run gradient ascent for 40 steps
    step = 1.
    for i in range(40):
        loss_value, grads_value = iterate([input_img_data])
        input_img_data += grads_value * step

    img = input_img_data[0]
    return deprocess_image(img)
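For reference, the single-filter image that did work for me is produced with a call along these lines:

plt.imshow(generate_pattern('block3_conv1', 0, size=150))
plt.show()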
def deprocess_image(x):
    # Normalize the tensor: center on 0 with a standard deviation of 0.1
    x -= x.mean()
    x /= (x.std() + 1e-5)
    x *= 0.1

    # Shift to be centered on 0.5 and clip to [0, 1]
    x += 0.5
    x = np.clip(x, 0, 1)

    # Convert to an RGB array with integer values in [0, 255]
    x *= 255
    x = np.clip(x, 0, 255).astype('uint8')
    return x
Answer (score: 0)
This solved the problem for me (you will notice that the pictures in the grid are different; that is because the layer I reproduced is not the same layer shown in the code above):
count = 0
n_rows = 8
n_columns = 8

f, axarr = plt.subplots(nrows=n_rows, ncols=n_columns,
                        sharex=True, sharey=True,
                        figsize=(20, 20))

for column in range(n_columns):
    for row in range(n_rows):
        axarr[row, column].imshow(generate_pattern('block3_conv1', count, size=150))
        f.subplots_adjust(hspace=0.01, wspace=0.01)
        count += 1
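As far as I can tell this works because each subplot gets the uint8 array returned by deprocess_image directly, so imshow displays it without any value-range issues. If you also want to hide the tick marks, you can add something like:

for ax in axarr.ravel():
    ax.axis('off')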