I am training a U-Net, and from the very first epoch both my accuracy and val_accuracy are very high. I suspect I have made a mistake with my folders.
What could be the reason?
The way the folders are split does not look right to me, but if I do not create a subfolder with the same name (for example, an images folder inside the images folder), flow_from_directory does not find any images.
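For reference, this is roughly how the folders are laid out right now (a simplified sketch; the file names are omitted). The inner subfolder with the repeated name is the one I had to create so that flow_from_directory finds any images at all:

Indian DR_flow_from_dir/
    Training/
        images/
            images/        <- extra subfolder with the same name
                (image files)
        masks/
            masks/
                (mask files)
    Testing/
        images/
            images/
                (image files)
        masks/
            masks/
                (mask files)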
Here is my code:
import os
import matplotlib.pyplot as plt
from keras.models import Model
from keras.layers import Input, Conv2D, MaxPooling2D, Dropout, UpSampling2D, concatenate
from keras.preprocessing.image import ImageDataGenerator
from keras.callbacks import EarlyStopping, ModelCheckpoint

def unet(input_size):
    inputs = Input(input_size)
    conv1 = Conv2D(64, 3, activation='relu', padding='same', kernel_initializer='he_normal')(inputs)
    conv1 = Conv2D(64, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv1)
    pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
    conv2 = Conv2D(128, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool1)
    conv2 = Conv2D(128, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv2)
    pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
    conv3 = Conv2D(256, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool2)
    conv3 = Conv2D(256, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv3)
    pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
    conv4 = Conv2D(512, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool3)
    conv4 = Conv2D(512, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv4)
    drop4 = Dropout(0.5)(conv4)
    pool4 = MaxPooling2D(pool_size=(2, 2))(drop4)
    conv5 = Conv2D(1024, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool4)
    conv5 = Conv2D(1024, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv5)
    drop5 = Dropout(0.5)(conv5)
    up6 = Conv2D(512, 2, activation='relu', padding='same', kernel_initializer='he_normal')(UpSampling2D(size=(2, 2))(drop5))
    merge6 = concatenate([drop4, up6], axis=3)
    conv6 = Conv2D(512, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge6)
    conv6 = Conv2D(512, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv6)
    up7 = Conv2D(256, 2, activation='relu', padding='same', kernel_initializer='he_normal')(UpSampling2D(size=(2, 2))(conv6))
    merge7 = concatenate([conv3, up7], axis=3)
    conv7 = Conv2D(256, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge7)
    conv7 = Conv2D(256, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv7)
    up8 = Conv2D(128, 2, activation='relu', padding='same', kernel_initializer='he_normal')(UpSampling2D(size=(2, 2))(conv7))
    merge8 = concatenate([conv2, up8], axis=3)
    conv8 = Conv2D(128, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge8)
    conv8 = Conv2D(128, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv8)
    up9 = Conv2D(64, 2, activation='relu', padding='same', kernel_initializer='he_normal')(UpSampling2D(size=(2, 2))(conv8))
    merge9 = concatenate([conv1, up9], axis=3)
    conv9 = Conv2D(64, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge9)
    conv9 = Conv2D(64, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv9)
    conv9 = Conv2D(2, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv9)
    conv10 = Conv2D(1, 1, activation='sigmoid')(conv9)
    model = Model(inputs=inputs, outputs=conv10)
    # model.compile(optimizer=Adam(lr=1e-4), loss='binary_crossentropy', metrics=['accuracy'])
    # model.summary()
    return model
seed = 100
width=512
def my_image_mask_generator(image_data_generator, mask_data_generator):
    # Pair the image and mask generators; both were created with the same seed,
    # so the n-th image batch and the n-th mask batch should belong together.
    train_generator = zip(image_data_generator, mask_data_generator)
    for (img, mask) in train_generator:
        yield (img, mask)
################################################################################### Image Datagenerator
train_datagen = ImageDataGenerator(
    rescale=1./255,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True)
################################ train
image_train_generator = train_datagen.flow_from_directory('/content/drive/My Drive/Datasets/Indian DR_flow_from_dir/Training/images',target_size=(512,512),class_mode=None,batch_size=1,seed=seed)
mask_train_generator = train_datagen.flow_from_directory(
    '/content/drive/My Drive/Datasets/Indian DR_flow_from_dir/Training/masks',
    target_size=(512,512), color_mode='grayscale', class_mode=None, batch_size=1,
    seed=seed)
train_generator = my_image_mask_generator(image_train_generator, mask_train_generator)
################# test
test_datagen = ImageDataGenerator(rescale=1./255)
image_test_generator = test_datagen.flow_from_directory('/content/drive/My Drive/Datasets/Indian DR_flow_from_dir/Testing/images',target_size=(512,512),class_mode=None,batch_size=1,seed=seed)
mask_test_generator = test_datagen.flow_from_directory(
    '/content/drive/My Drive/Datasets/Indian DR_flow_from_dir/Testing/masks',
    target_size=(512,512), color_mode='grayscale', class_mode=None, batch_size=1,
    seed=seed)
test_generator = my_image_mask_generator(image_test_generator, mask_test_generator)
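To check that the two generators are really paired up, I also pull one batch and look at the shapes and value ranges (a quick sanity check I run separately, not part of the training itself; next() just takes a single batch from the paired generator):

import numpy as np

# Pull one paired batch from the combined generator and inspect it.
img_batch, mask_batch = next(train_generator)
print(img_batch.shape, mask_batch.shape)   # expecting (1, 512, 512, 3) and (1, 512, 512, 1)
print(img_batch.min(), img_batch.max())    # expecting values roughly in 0.0 .. 1.0 after rescale
print(np.unique(mask_batch))               # for a binary mask I would expect only 0.0 and 1.0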
################################################################################ MODEL TRAINING
model=unet((width,width,3))
print("There are :",len(model.layers),"layers in model!!")
from keras import optimizers
adam=optimizers.Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, amsgrad=False)
model.compile(loss='binary_crossentropy',optimizer=adam,metrics=['accuracy'])
earlystopper = EarlyStopping(patience=10, verbose=1)
checkpointer = ModelCheckpoint('unet.{epoch:02d}-{val_loss:.2f}.h5',monitor='accuracy', verbose=1, save_best_only=True)
history = model.fit_generator(train_generator,
                              steps_per_epoch=250,
                              epochs=100, callbacks=[earlystopper, checkpointer],
                              validation_data=test_generator,
                              validation_steps=250)
path_dir='/content/drive/My Drive/Datasets/Indian DR'
os.chdir(path_dir)
os.mkdir(os.path.join(path_dir,'Folder with Unet Results'))
os.chdir(os.path.join(path_dir,'Folder with Unet Results'))
model.save('Custom_Unet.h5')
# summarize history for accuracy
plt.plot(history.history['accuracy'])
plt.plot(history.history['val_accuracy'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.savefig('Custom_Unet_acc.png')  # save before show(), otherwise the saved file can end up blank
plt.show()
# summarize history for loss
plt.figure()  # start a fresh figure so the loss curves are not drawn over the accuracy plot
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.savefig('Custom_Unet_loss.png')
plt.show()