Siamese network gives the same prediction after several predictions

Date: 2019-10-17 18:45:47

Tags: python tensorflow machine-learning keras neural-network

I implemented a Siamese neural network using Keras; the network is supposed to predict whether given online signatures come from the same genuine person or from a forger trying to spoof the system. I used three files of training examples and another three files for testing. A small sketch of the pairing convention comes first, then my full code:
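Each example is a pair of signatures: label 1 when the second signature is genuine, label 0 when it is a forgery. A minimal toy sketch of this convention (the feature size 8 is arbitrary, just for illustration):

import numpy as np

# toy "signatures": a single row of features each
enrolled = np.random.rand(1, 8)  # reference signature of the enrolled user
genuine = np.random.rand(1, 8)   # another genuine signature by the same user
forgery = np.random.rand(1, 8)   # a forged signature

genuine_pair = np.stack([enrolled, genuine])  # shape (2, 1, 8)
forgery_pair = np.stack([enrolled, forgery])  # shape (2, 1, 8)
y = np.array([1, 0])  # 1 = genuine pair, 0 = forgery pair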

import numpy as np

from fnmatch import fnmatch
import os

from keras.models import Model
from keras.layers import Input, Dense, GRU, concatenate, Dropout
from keras.optimizers import RMSprop


BATCH_SIZE = 4096
EPOCHS = 40
UNITS_FIRST_GRU = 40
UNITS_SECOND_GRU = 20


def getData():
    forgeries_train = np.loadtxt('/content/forgery_train.txt', delimiter=",")
    genuines_train1 = np.loadtxt('/content/genuines_train1.txt', delimiter=",")
    genuines_train2 = np.loadtxt('/content/genuines_train2.txt', delimiter=",")
    forgeries_test = np.loadtxt('/content/forgery_test.txt', delimiter=",")
    genuine_test1 = np.loadtxt('/content/genuine_test_1.txt', delimiter=",")
    genuine_test2 = np.loadtxt('/content/genuine_test_2.txt', delimiter=",")
    genuine_enrolled = np.loadtxt('/content/genuines_enrolled.txt', delimiter=",")


    count = 0
    total_genuines_sample_size = int(genuines_train1.shape[0]) 
    total_forgery_sample_size = int(forgeries_train.shape[0]) 


    dim1 = 1  # a single row
    dim2 = genuines_train1.shape[1]
    x_genuine_pair_train_1 = np.zeros([total_genuines_sample_size, 2, dim1, dim2]) # 2 is because we use pairs of signatures 
    x_genuine_pair_train_2 = np.zeros([total_genuines_sample_size, 2, dim1, dim2])

    y_genuine_train_1 = np.zeros([total_genuines_sample_size, 1])
    y_genuine_train_2 = np.zeros([total_genuines_sample_size, 1])

    x_imposite_pair_train = np.zeros([total_forgery_sample_size, 2, dim1, dim2])
    y_imposite_train = np.zeros([total_forgery_sample_size, 1])


    #------TRAINING--------
    for i in range(0, total_genuines_sample_size, 1):
      signature_enroll = genuine_enrolled[i]
      signature_train_1 = genuines_train1[i]
      signature_train_2 = genuines_train2[i]
      signature_forgery = forgeries_train[i]

      x_genuine_pair_train_1[count, 0, :, :] = signature_enroll
      x_genuine_pair_train_1[count, 1, :, :] = signature_train_1

      x_genuine_pair_train_2[count, 0, :, :] = signature_enroll
      x_genuine_pair_train_2[count, 1, :, :] = signature_train_2

      x_imposite_pair_train[count, 0, :, :] = signature_enroll
      x_imposite_pair_train[count, 1, :, :] = signature_forgery

      y_genuine_train_1[count] = 1
      y_genuine_train_2[count] = 1
      y_imposite_train[count] = 0

      count += 1

    #----TESTING------
    count = 0

    total_genuines_sample_size = int(genuine_test1.shape[0]) 
    total_forgery_sample_size = int(forgeries_test.shape[0]) 


    dim1 = 1  # a single row
    dim2 = genuines_train1.shape[1]
    x_genuine_pair_test_1 = np.zeros([total_genuines_sample_size, 2, dim1, dim2]) # 2 is because we use pairs of signatures 
    x_genuine_pair_test_2 = np.zeros([total_genuines_sample_size, 2, dim1, dim2])

    y_genuine_test_1 = np.zeros([total_genuines_sample_size, 1])
    y_genuine_test_2 = np.zeros([total_genuines_sample_size, 1])

    x_imposite_pair_test = np.zeros([total_forgery_sample_size, 2, dim1, dim2])
    y_imposite_test = np.zeros([total_forgery_sample_size, 1])


    for i in range(0, total_genuines_sample_size+1, 4):
      signature_enroll = genuine_enrolled[i]
      signature_test1 = genuine_test1[i]
      signature_test2 = genuine_test2[i]
      signature_forgery = forgeries_test[i]

      x_genuine_pair_test_1[count, 0, :, :] = signature_enroll
      x_genuine_pair_test_1[count, 1, :, :] = signature_test1

      x_genuine_pair_test_2[count, 0, :, :] = signature_enroll
      x_genuine_pair_test_2[count, 1, :, :] = signature_test2

      x_imposite_pair_test[count, 0, :, :] = signature_enroll
      x_imposite_pair_test[count, 1, :, :] = signature_forgery

      y_genuine_test_1[count] = 1
      y_genuine_test_2[count] = 1
      y_imposite_test[count] = 0

      count += 1

    X_train = np.concatenate([x_genuine_pair_train_1, x_genuine_pair_train_2], axis=0)
    # X_train_2 = np.concatenate([x_genuine_pair_train_2, x_imposite_pair_train], axis=0)
    X_test = np.concatenate([x_genuine_pair_test_1, x_genuine_pair_test_2, x_imposite_pair_test], axis=0)
    Y_train = np.concatenate([y_genuine_train_1, y_genuine_train_2], axis=0)
    Y_test = np.concatenate([y_genuine_test_1, y_genuine_test_2, y_imposite_test], axis=0)


    return X_train, X_test, Y_train, Y_test


def save_data(X_train, X_test, Y_train, Y_test):
  # save file into current directory
  np.save('X_train', X_train)
  np.save('X_test', X_test)
  np.save('Y_train', Y_train)
  np.save('Y_test', Y_test)

def load_data():
  X_train = np.load('X_train.npy')
  X_test = np.load('X_test.npy')
  Y_train = np.load('Y_train.npy')
  Y_test = np.load('Y_test.npy')

  return X_train, X_test, Y_train, Y_test


def compute_accuracy(y_true, y_pred):
    '''Compute classification accuracy with a fixed threshold on distances.
    '''
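    # e.g. compute_accuracy(np.array([1, 0]), np.array([[0.2], [0.7]])) -> 1.0;
    # note that `< 0.5` treats *small* outputs as the positive class, as in the
    # Keras contrastive-loss example where y_pred is a distance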
    pred = y_pred.ravel() < 0.5
    return np.mean(pred == y_true)


def first_GRU(input_shape):
    '''Base network to be shared (eq. to feature extraction).
    '''

    model_gru = GRU(UNITS_FIRST_GRU, input_shape=input_shape, return_sequences=True)
    model_gru = Dropout(0.2)

    return model_gru


def second_GRU(merged_values):
  '''
  Create a second GRU layer
  '''
  model_gru = GRU(UNITS_SECOND_GRU, return_sequences=False)(merged_values)
  prediction = Dense(1, activation='sigmoid')(model_gru)

  return prediction


def siamese_model(X_train, X_test, Y_train, Y_test):
  input_shape = X_train.shape[2:]

  # Create first layer GRU
  base_network = first_GRU(input_shape)

  input_a = Input(shape=input_shape)
  input_b = Input(shape=input_shape)

  # because we re-use the same instance `base_network`,
  # the weights of the network
  # will be shared across the two branches
  processed_a = base_network(input_a)
  processed_b = base_network(input_b)

  # It takes a list of tensors and returns a single tensor
  merged = concatenate([processed_a, processed_b], axis=-1)

  # Create second layer GRU
  prediction = second_GRU(merged)

  # Build the final model
  model = Model([input_a, input_b], prediction)

  #--------FIT MODEL--------
  rms = RMSprop()
  model.compile(loss='binary_crossentropy', optimizer=rms, metrics=['accuracy'])
  model.summary()
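  # X_train[:, 0] and X_train[:, 1] select the first and the second signature
  # of every pair; each siamese branch receives one of them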
  model.fit([X_train[:, 0], X_train[:, 1]], Y_train, batch_size=BATCH_SIZE, epochs=EPOCHS)

  #--------PREDICT MODEL--------
  y_pred = model.predict([X_train[:, 0], X_train[:, 1]])
  tr_acc = compute_accuracy(Y_train, y_pred)
  y_pred = model.predict([X_test[:, 0], X_test[:, 1]])
  print(y_pred)
  te_acc = compute_accuracy(Y_test, y_pred)

  print('* Accuracy on training set: %0.2f%%' % (100 * tr_acc))
  print('* Accuracy on test set: %0.2f%%' % (100 * te_acc))


def main():
  X_train, X_test, Y_train, Y_test = [], [], [], []

  # check whether the four .npy files exist in the main directory;
  # if they don't, build the data and save it to .npy files
  check_npy = []
  for file in os.listdir('.'):
    if fnmatch(file, '*.npy'):
      check_npy.append(file)  
  if len(check_npy) == 4:
      X_train, X_test, Y_train, Y_test = load_data()
  else:
      X_train, X_test, Y_train, Y_test = getData()
      save_data(X_train, X_test, Y_train, Y_test)

  siamese_model(X_train, X_test, Y_train, Y_test)



if __name__ == '__main__':
  main()
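For reference, a quick shape check after loading the saved arrays (the shapes in the comments are what I get with my data; they match the 188 samples per epoch and the 282 predictions in the output below):

X_train, X_test, Y_train, Y_test = load_data()
print(X_train.shape, Y_train.shape)  # (188, 2, 1, 200000) (188, 1)
print(X_test.shape, Y_test.shape)    # (282, 2, 1, 200000) (282, 1)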

Here is the output:

Model: "model_19"
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_37 (InputLayer)           (None, 1, 200000)    0                                            
__________________________________________________________________________________________________
input_38 (InputLayer)           (None, 1, 200000)    0                                            
__________________________________________________________________________________________________
dropout_19 (Dropout)            (None, 1, 200000)    0           input_37[0][0]                   
                                                                 input_38[0][0]                   
__________________________________________________________________________________________________
concatenate_19 (Concatenate)    (None, 1, 400000)    0           dropout_19[0][0]                 
                                                                 dropout_19[1][0]                 
__________________________________________________________________________________________________
gru_38 (GRU)                    (None, 20)           24001260    concatenate_19[0][0]             
__________________________________________________________________________________________________
dense_19 (Dense)                (None, 1)            21          gru_38[0][0]                     
==================================================================================================
Total params: 24,001,281
Trainable params: 24,001,281
Non-trainable params: 0
__________________________________________________________________________________________________
Epoch 1/40
188/188 [==============================] - 4s 22ms/step - loss: 0.6875 - acc: 0.5479
Epoch 2/40
188/188 [==============================] - 0s 3ms/step - loss: 0.1593 - acc: 1.0000
Epoch 3/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0676 - acc: 1.0000
Epoch 4/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0469 - acc: 1.0000
Epoch 5/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0363 - acc: 1.0000
Epoch 6/40
188/188 [==============================] - 0s 2ms/step - loss: 0.0322 - acc: 1.0000
Epoch 7/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0272 - acc: 1.0000
Epoch 8/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0240 - acc: 1.0000
Epoch 9/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0231 - acc: 1.0000
Epoch 10/40
188/188 [==============================] - 0s 2ms/step - loss: 0.0209 - acc: 1.0000
Epoch 11/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0195 - acc: 1.0000
Epoch 12/40
188/188 [==============================] - 0s 2ms/step - loss: 0.0184 - acc: 1.0000
Epoch 13/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0175 - acc: 1.0000
Epoch 14/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0170 - acc: 1.0000
Epoch 15/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0172 - acc: 1.0000
Epoch 16/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0155 - acc: 1.0000
Epoch 17/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0151 - acc: 1.0000
Epoch 18/40
188/188 [==============================] - 0s 2ms/step - loss: 0.0147 - acc: 1.0000
Epoch 19/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0145 - acc: 1.0000
Epoch 20/40
188/188 [==============================] - 0s 2ms/step - loss: 0.0140 - acc: 1.0000
Epoch 21/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0138 - acc: 1.0000
Epoch 22/40
188/188 [==============================] - 0s 2ms/step - loss: 0.0137 - acc: 1.0000
Epoch 23/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0132 - acc: 1.0000
Epoch 24/40
188/188 [==============================] - 0s 2ms/step - loss: 0.0127 - acc: 1.0000
Epoch 25/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0126 - acc: 1.0000
Epoch 26/40
188/188 [==============================] - 0s 2ms/step - loss: 0.0122 - acc: 1.0000
Epoch 27/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0123 - acc: 1.0000
Epoch 28/40
188/188 [==============================] - 0s 2ms/step - loss: 0.0117 - acc: 1.0000
Epoch 29/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0114 - acc: 1.0000
Epoch 30/40
188/188 [==============================] - 0s 2ms/step - loss: 0.0112 - acc: 1.0000
Epoch 31/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0114 - acc: 1.0000
Epoch 32/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0107 - acc: 1.0000
Epoch 33/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0106 - acc: 1.0000
Epoch 34/40
188/188 [==============================] - 0s 2ms/step - loss: 0.0103 - acc: 1.0000
Epoch 35/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0104 - acc: 1.0000
Epoch 36/40
188/188 [==============================] - 0s 2ms/step - loss: 0.0099 - acc: 1.0000
Epoch 37/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0097 - acc: 1.0000
Epoch 38/40
188/188 [==============================] - 0s 2ms/step - loss: 0.0097 - acc: 1.0000
Epoch 39/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0094 - acc: 1.0000
Epoch 40/40
188/188 [==============================] - 0s 3ms/step - loss: 0.0092 - acc: 1.0000
[[0.9912311 ]
 [0.9917059 ]
 [0.98248976]
 [0.9905562 ]
 [0.9824898 ]
 [0.9904194 ]
 [0.99151504]
 [0.99102247]
 [0.99090576]
 [0.9905489 ]
 [0.9910957 ]
 [0.9869518 ]
 [0.9903573 ]
 [0.9872961 ]
 [0.9913367 ]
 [0.9909198 ]
 [0.99122727]
 [0.91190183]
 [0.9903209 ]
 [0.95386165]
 [0.9913297 ]
 [0.9914758 ]
 [0.9911514 ]
 [0.99090004]
 [0.5070313 ]
 [0.5070313 ]
 ... (0.5070313 repeated, 70 identical rows in total)
 [0.9911184 ]
 [0.99143684]
 [0.97831845]
 [0.93539655]
 [0.9817464 ]
 [0.9915248 ]
 [0.9913036 ]
 [0.9888044 ]
 [0.9674153 ]
 [0.9734842 ]
 [0.99110806]
 [0.9912795 ]
 [0.9907231 ]
 [0.99099576]
 [0.9912624 ]
 [0.9908354 ]
 [0.99075764]
 [0.98947716]
 [0.9904523 ]
 [0.9360013 ]
 [0.98419285]
 [0.9916017 ]
 [0.9828788 ]
 [0.9881275 ]
 [0.5070313 ]
 [0.5070313 ]
 ... (0.5070313 repeated, 70 identical rows in total)
 [0.9379822 ]
 [0.98724085]
 [0.97812414]
 [0.91363364]
 [0.9911182 ]
 [0.9870437 ]
 [0.9911665 ]
 [0.9909804 ]
 [0.94045985]
 [0.99055743]
 [0.9840276 ]
 [0.98697484]
 [0.99114954]
 [0.9001497 ]
 [0.95655084]
 [0.9635279 ]
 [0.99159086]
 [0.98143566]
 [0.9100242 ]
 [0.990397  ]
 [0.9907225 ]
 [0.99139476]
 [0.95371103]
 [0.9839037 ]
 [0.5070313 ]
 [0.5070313 ]
 ... (0.5070313 repeated, 70 identical rows in total)
 [0.5070313 ]]
* Accuracy on training set: 0.00%
* Accuracy on test set: 82.98%

Can someone explain why this strange behavior appears after the 23rd prediction? I am new to deep learning and not very experienced yet.

0 Answers:

No answers yet