While training VGG16 I see spikes in the training and validation curves for both loss and accuracy. I am using transfer learning and have replaced the classifier head for a binary-class gender classification problem. Can someone suggest why these spikes occur and how I can reduce them?
The code is given below:
from keras.layers import Input, Lambda, Dense, Flatten, Dropout
from keras.models import Model
from keras.applications.vgg16 import VGG16
from keras.applications.vgg16 import preprocess_input
from keras.preprocessing import image
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
import numpy as np
from glob import glob
import matplotlib.pyplot as plt
# re-size all the images to this
IMAGE_SIZE = [224, 224]
train_path = 'E:/decompressed_images/data_set/train'
valid_path = 'E:/decompressed_images/data_set/validation'
# add preprocessing layer to the front of VGG
vgg = VGG16(input_shape=IMAGE_SIZE + [3], weights='imagenet', include_top=False)
# don't train existing weights
for layer in vgg.layers:
    layer.trainable = False
# useful for getting number of classes
folders = glob('E:/decompressed_images/data_set/train*')
x = Flatten()(vgg.output)
# x = Dense(1000, activation='relu')(x)
prediction = Dense(len(folders), activation='sigmoid')(x)
# create a model object
model = Model(inputs=vgg.input, outputs=prediction)
# view the structure of the model
model.summary()
# tell the model what cost and optimization method to use
model.compile(
    loss='binary_crossentropy',
    optimizer='adam',
    metrics=['accuracy']
)
from keras.preprocessing.image import ImageDataGenerator
train_datagen = ImageDataGenerator(rescale = 1./255,
                                   horizontal_flip = True,
                                   vertical_flip = True,
                                   width_shift_range = 0.1,
                                   height_shift_range = 0.1,
                                   zoom_range = 0.1,
                                   rotation_range = 10)
test_datagen = ImageDataGenerator(rescale = 1./255)
training_set = train_datagen.flow_from_directory('E:/Ullu/new_trial__/balanced_dataset/train',
                                                 target_size = (224, 224),
                                                 batch_size = 64,
                                                 class_mode = 'binary')
test_set = test_datagen.flow_from_directory('E:/Ullu/new_trial__/balanced_dataset/test',
                                            target_size = (224, 224),
                                            batch_size = 64,
                                            class_mode = 'binary')
r = model.fit_generator(
    training_set,
    validation_data=test_set,
    epochs=100,
    steps_per_epoch=len(training_set),
    validation_steps=len(test_set)
)
# losses
plt.plot(r.history['loss'], label='train loss')
plt.plot(r.history['val_loss'], label='val loss')
plt.legend()
plt.savefig('E:/Model_128_30/LossVal_loss.png')  # save before show so the figure is not blank
plt.show()
# accuracies
plt.figure()  # start a new figure for the accuracy curves
plt.plot(r.history['accuracy'], label='train acc')
plt.plot(r.history['val_accuracy'], label='val acc')
plt.legend()
plt.savefig('E:/Model_128_30/AccVal_acc.png')
plt.show()
import tensorflow as tf
from keras.models import load_model
model.save('E:/Model_128_30/128_30_wt.h5')
[Image: high and fluctuating training and validation accuracy curves]
[Image: high and fluctuating training and validation loss curves]
I tried adding a dropout layer (0.5) before the last layer (a rough sketch of that variant is shown below), but the accuracy and the training and validation loss stayed the same as before. Can someone suggest where I am going wrong? Thank you.
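For reference, the dropout variant I tried looks roughly like the sketch below; only the classifier head changes and the rest of the code above stays the same (the exact placement of the Dropout layer may have differed slightly):

x = Flatten()(vgg.output)
x = Dropout(0.5)(x)  # dropout added before the final classification layer
prediction = Dense(len(folders), activation='sigmoid')(x)
model = Model(inputs=vgg.input, outputs=prediction)
model.compile(
    loss='binary_crossentropy',
    optimizer='adam',
    metrics=['accuracy']
)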