Is my data being preprocessed correctly? My network is not learning, it is only guessing

Date: 2019-02-12 09:26:41

Tags: python tensorflow keras deep-learning

I am currently writing an ML vision program to classify images. My CNN does not learn at all; it only guesses the output.

import os
import shutil
import random

import numpy as np
import pandas as pd
from sklearn.utils import shuffle
from openpyxl import load_workbook

random.seed(40)
np.random.seed(40)  # for reproducibility

from keras.models import Sequential
from keras.layers import Dense, Dropout
import tensorflow as tf


path_labels = '/Users/felix/Desktop/Daten/Labels.xlsx'


# import the label data and construct the label data frame
workbook = load_workbook(path_labels)
features = []
labels = []
for row in range(2, workbook['Sheet1'].max_row + 1):
    cell_Bezeichnung = workbook['Sheet1']["{}{}".format('A', row)].value
    cell_Label = workbook['Sheet1']["{}{}".format('B', row)].value

    features.append(str(cell_Bezeichnung) + '.JPG')
    labels.append(str(cell_Label))

data = pd.DataFrame(data={'Datei': features, 'Label': labels})
data = shuffle(data)
data = data.reset_index(drop=True)
# one-hot encode the labels
targets = data.drop('Datei', axis=1)
targets = pd.get_dummies(targets)

print(targets.head())

files = data.drop('Label', axis=1)

print(files.head())

from keras.preprocessing import image                  
from tqdm import tqdm
os.environ['KMP_DUPLICATE_LIB_OK']='True'
path_images = '/Users/felix/Desktop/Daten/Bilddaten_zugeschnitten/'
def path_to_tensor(img_path):
    # loads RGB image as PIL.Image.Image type
    img_path = path_images+img_path

    img = image.load_img(img_path.item(0), target_size=(224, 224))  # also tried variants with more than 100x100 pixels
    #tf.image.rgb_to_grayscale(img, name=None)

    # convert PIL.Image.Image type to 3D tensor with shape (224, 224, 3)
    x = image.img_to_array(img)
    # convert 3D tensor to 4D tensor with shape (1, 224, 224, 3) and return 4D tensor
    return np.expand_dims(x, axis=0)


def paths_to_tensor(img_paths):
    list_of_tensors = [path_to_tensor(img_path) for img_path in tqdm(img_paths)]
    return np.vstack(list_of_tensors)

#ImageFile.LOAD_TRUNCATED_IMAGES = True
from IPython.display import display
from PIL import Image

# pre-process the data for Keras
tensors = paths_to_tensor(files.values).astype('float32')/255

testing_share = 0.3 #0.01
testing_index = tensors.shape[0] - round(tensors.shape[0]*testing_share)
x_test, y_test = tensors[testing_index:], targets[testing_index:]
x_train, y_train = tensors[:testing_index], targets[:testing_index]

x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
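# NOTE: the tensors were already divided by 255 when they were built above,
# so this second division shrinks the inputs to roughly [0, 0.004]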
x_train /= 255
x_test /= 255

from keras.layers import Conv2D, AveragePooling2D, GlobalMaxPooling2D

model = Sequential()

### Define architecture.
model.add(Conv2D(32, 3, strides=(1, 1), padding="same", input_shape=(224, 224, 3), activation="relu"))
model.add(Conv2D(64, 3, strides=(2, 2), padding="same", activation="relu"))
model.add(Conv2D(128, 3, strides=(3, 3), padding="same", activation="relu"))
model.add(AveragePooling2D(pool_size=(2, 2)))
model.add(GlobalMaxPooling2D())
model.add(Dropout(0.2))
model.add(Dense(128, activation="relu"))
model.add(Dense(y_test.shape[1], activation="softmax"))

# dropout layers randomly deactivate units
# the network should learn to keep working when kernels die

model.summary()


# compile the model

from keras import optimizers

# optimizer = optimizers.sgd(lr=0.0001)  # also tried 0.1
model.compile(loss='categorical_crossentropy', optimizer='adam',
              metrics=['accuracy'])

error = model.predict(x_test)

print(error)

[[0.25006285 0.24996501 0.25002083 0.24995136]
 [0.25008076 0.24997567 0.2500124  0.24993122]
 [0.2500582  0.24996252 0.2500291  0.2499501 ]
 ...
 [0.2500883  0.2499763  0.2500102  0.24992523]
 [0.2500711  0.24995194 0.25002965 0.24994734]
 [0.25008234 0.2499697  0.25001442 0.24993351]]

(output truncated: 180 rows in total, each essentially the uniform distribution of ~0.25 per class)

new_model = model.fit(x_train, y_train,
                      batch_size=32, epochs=20, verbose=1)

Fixes I have already tried:

  • Changed the LR from 0.01 to 0.00000001 (see the optimizer sketch after this list)

  • Changed the pixels from 20x20 to 1000x1000

  • Changed the batch_size

  • Changed the epochs

  • Changed the optimizer

  • Changed the activation from sigmoid to softmax

  • Changed the layers

  • Added batch_normalization

  • Added pooling layers

  • Added augmentation

  • Used different test sizes
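
For reference, a minimal sketch of setting the learning rate explicitly in standalone Keras (the SGD choice and the 0.0001 value are illustrative, echoing the commented-out line in the question):

from keras.optimizers import SGD

# pass an optimizer object instead of the string 'adam'
# so the learning rate can be tuned directly
sgd = SGD(lr=0.0001)
model.compile(loss='categorical_crossentropy', optimizer=sgd,
              metrics=['accuracy'])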

Any help is greatly appreciated!

Thanks for your time and help!

2 answers:

Answer 0 (score: 1)

You are calling model.predict() before you call model.fit().

Training happens in model.fit(), so you are asking an untrained model to make predictions. The fit call is the last line of your code; instead, run it right after calling model.compile(), and only call model.predict() once training has finished.
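
A minimal sketch of the corrected ordering, reusing the question's code (the names history and predictions are illustrative):

model.compile(loss='categorical_crossentropy', optimizer='adam',
              metrics=['accuracy'])

# train first: fit() is what actually updates the weights
history = model.fit(x_train, y_train,
                    batch_size=32, epochs=20, verbose=1)

# predict only after training, so the output reflects the learned weights
predictions = model.predict(x_test)
print(predictions)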

Answer 1 (score: 0)

Your model does not seem to have an input layer, so it may not even be seeing the images.
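
For reference, a standalone Keras Sequential model takes its input shape from the first layer; a minimal sketch of declaring it explicitly, with sizes mirroring the question's code and its four output classes:

from keras.models import Sequential
from keras.layers import Conv2D, GlobalMaxPooling2D, Dense

model = Sequential()
# input_shape on the first layer defines the model input: 224x224 RGB images
model.add(Conv2D(32, 3, padding="same", activation="relu",
                 input_shape=(224, 224, 3)))
model.add(GlobalMaxPooling2D())
model.add(Dense(4, activation="softmax"))  # 4 classes, as in the question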