Why is my neural network's sensitivity (recall) not improving

Asked: 2017-03-21 20:16:56

Tags: machine-learning

My neural network's sensitivity is stuck at 70% and is not improving. Please help me. My code is below.

import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.decomposition import PCA
from sklearn.preprocessing import MinMaxScaler

data = pd.read_csv("data.csv", sep='|')
nonlinearitiesactFunction = {'sigmoid':tf.sigmoid, 'tanh':tf.tanh, 'elu':tf.nn.elu, 'softplus':tf.nn.softplus, 'softsign':tf.nn.softsign}
linearitiesactFunction = {'relu':tf.nn.relu, 'relu6':tf.nn.relu6}
clFunction = {'sigmoidCrossEntropy':tf.nn.sigmoid_cross_entropy_with_logits, 'softmax':tf.nn.softmax, 'softmaxCrossEntropy':tf.nn.softmax_cross_entropy_with_logits}
opt = {'Adam':tf.train.AdamOptimizer, 'GradientDescent':tf.train.GradientDescentOptimizer}

def retrieveXAndY():
    X = data.drop(['Name', 'md5', 'legitimate'], axis=1 ).as_matrix()
    y = data['legitimate'].as_matrix()  
    return X, y

def addLayer(input, inSize, outSize, activationFunction=None):
    # one fully connected layer: input * W + b, with optional nonlinearity
    hiddenlay = {'weights': tf.Variable(tf.random_normal([inSize, outSize])),
                 'biases': tf.Variable(tf.random_normal([outSize]))}
    hl = tf.add(tf.matmul(input, hiddenlay['weights']), hiddenlay['biases'])

    if activationFunction is None:
        output = hl
    else:
        output = activationFunction(hl)
    return output

def costAndOpt(y, output, n_samples, learning_rate, optFunction=None):
    # mean squared error cost and a minimization step with the chosen optimizer
    cost = tf.reduce_sum(tf.pow(y - output, 2)) / (2 * n_samples)
    optimizer = optFunction(learning_rate).minimize(cost)

    return cost, optimizer

def fiveLayer(x, n_nodes_hl1, n_nodes_hl2, n_nodes_hl3, n_nodes_hl4, n_nodes_hl5, n_output, activationFunction, optFunction):
    # five hidden layers; note the output layer gets the same activation as the
    # hidden layers (n_feature, y, n_samples, learning_rate are module globals)
    l1 = addLayer(x, n_feature, n_nodes_hl1, activationFunction)
    l2 = addLayer(l1, n_nodes_hl1, n_nodes_hl2, activationFunction)
    l3 = addLayer(l2, n_nodes_hl2, n_nodes_hl3, activationFunction)
    l4 = addLayer(l3, n_nodes_hl3, n_nodes_hl4, activationFunction)
    l5 = addLayer(l4, n_nodes_hl4, n_nodes_hl5, activationFunction)
    output = addLayer(l5, n_nodes_hl5, n_output, activationFunction)

    cost, optimizer = costAndOpt(y, output, n_samples, learning_rate, optFunction)

    return cost, optimizer

if __name__ == '__main__':

    inputX, inputY = retrieveXAndY()

    # Normalization: PCA down to half the features, then scale to [0, 1]
    pca_input = PCA(n_components=int(inputX.shape[1] / 2))
    inputX = pca_input.fit_transform(inputX)
    inputX = MinMaxScaler().fit_transform(inputX)

    inputY = inputY.reshape([-1, 1])
    n_samples = inputY.size
    n_feature = inputX.shape[1]
    n_node = 40
    display_step = 50
    learning_rate = 0.01

    n_nodes_hl1 = n_node
    n_nodes_hl2 = n_node
    n_nodes_hl3 = n_node
    n_nodes_hl4 = n_node
    n_nodes_hl5 = n_node
    n_output = 1  # number of outputs

    # input placeholders
    x = tf.placeholder(tf.float32, [None, n_feature])  # features
    y = tf.placeholder(tf.float32, [None, n_output])   # labels

# one graph per activation, matching the five training loops below
cost, optimizer = fiveLayer(x, n_nodes_hl1, n_nodes_hl2, n_nodes_hl3, n_nodes_hl4, n_nodes_hl5, n_output, clFunction['softmax'], opt['Adam'])
cost1, optimizer1 = fiveLayer(x, n_nodes_hl1, n_nodes_hl2, n_nodes_hl3, n_nodes_hl4, n_nodes_hl5, n_output, nonlinearitiesactFunction['softsign'], opt['Adam'])
cost2, optimizer2 = fiveLayer(x, n_nodes_hl1, n_nodes_hl2, n_nodes_hl3, n_nodes_hl4, n_nodes_hl5, n_output, nonlinearitiesactFunction['sigmoid'], opt['Adam'])
cost3, optimizer3 = fiveLayer(x, n_nodes_hl1, n_nodes_hl2, n_nodes_hl3, n_nodes_hl4, n_nodes_hl5, n_output, nonlinearitiesactFunction['tanh'], opt['Adam'])
cost4, optimizer4 = fiveLayer(x, n_nodes_hl1, n_nodes_hl2, n_nodes_hl3, n_nodes_hl4, n_nodes_hl5, n_output, nonlinearitiesactFunction['elu'], opt['Adam'])

hm_epochs = 2000

init = tf.global_variables_initializer()

with tf.Session() as sess:
    sess.run(init)
    epoch_loss = 0
    for i in range(hm_epochs):
        # Take a gradient descent step using our inputs and labels;
        # epoch_loss accumulates the cost over training
        _, c = sess.run([optimizer, cost], feed_dict={x: inputX, y: inputY})
        epoch_loss += c
        if i % display_step == 0:
            print "Softmax Training step:", i, "cost=", c, "loss=", epoch_loss

    # note: this 'accuracy' is 1 - RMSE on the training data, not sensitivity
    mse = cost.eval({x: inputX, y: inputY})
    acc = 1 - np.sqrt(mse)
    print "Acc = ", acc, "\n"

    epoch_loss1 = 0
    for i in range(hm_epochs):
        # same loop for the softsign network
        _, c1 = sess.run([optimizer1, cost1], feed_dict={x: inputX, y: inputY})
        epoch_loss1 += c1
        if i % display_step == 0:
            print "Softsign Training step:", i, "cost=", c1, "loss=", epoch_loss1

    mse1 = cost1.eval({x: inputX, y: inputY})
    acc1 = 1 - np.sqrt(mse1)
    print "Acc = ", acc1, "\n"

    epoch_loss2 = 0
    for i in range(hm_epochs):
        # same loop for the sigmoid network
        _, c2 = sess.run([optimizer2, cost2], feed_dict={x: inputX, y: inputY})
        epoch_loss2 += c2
        if i % display_step == 0:
            print "Sigmoid Training step:", i, "cost=", c2, "loss=", epoch_loss2

    mse2 = cost2.eval({x: inputX, y: inputY})
    acc2 = 1 - np.sqrt(mse2)
    print "Acc = ", acc2, "\n"

    epoch_loss3 = 0
    for i in range(hm_epochs):
        # same loop for the tanh network
        _, c3 = sess.run([optimizer3, cost3], feed_dict={x: inputX, y: inputY})
        epoch_loss3 += c3
        if i % display_step == 0:
            print "tanh Training step:", i, "cost=", c3, "loss=", epoch_loss3

    mse3 = cost3.eval({x: inputX, y: inputY})
    acc3 = 1 - np.sqrt(mse3)
    print "Acc = ", acc3, "\n"

    epoch_loss4 = 0
    for i in range(hm_epochs):
        # same loop for the elu network
        _, c4 = sess.run([optimizer4, cost4], feed_dict={x: inputX, y: inputY})
        epoch_loss4 += c4
        if i % display_step == 0:
            print "elu Training step:", i, "cost=", c4, "loss=", epoch_loss4

    mse4 = cost4.eval({x: inputX, y: inputY})
    acc4 = 1 - np.sqrt(mse4)
    print "Acc = ", acc4, "\n"

I am using a dataset of 60,000 records. A sample of the dataset is shown below, or it can be downloaded from https://archive.ics.uci.edu/ml/datasets/Detect+Malacious+Executable(AntiVirus):

inputX = array([[  3.32000000e+02,   2.24000000e+02,   8.45000000e+03,
          8.00000000e+00,   0.00000000e+00,   5.32480000e+04, 
          1.63840000e+04,   0.00000000e+00,   5.40480000e+04,
          4.09600000e+03,   5.73440000e+04,   2.08594534e+09,
          4.09600000e+03,   4.09600000e+03,   4.00000000e+00,
          0.00000000e+00,   8.00000000e+00,   0.00000000e+00,
          4.00000000e+00,   0.00000000e+00,   7.37280000e+04,
          4.09600000e+03,   1.20607000e+05,   2.00000000e+00,
          3.20000000e+02,   1.04857600e+06,   4.09600000e+03,
          1.04857600e+06,   4.09600000e+03,   0.00000000e+00,
          1.60000000e+01,   4.00000000e+00,   2.70373594e+00,
          1.05637876e+00,   6.22819008e+00,   1.63840000e+04,
          4.09600000e+03,   5.32480000e+04,   1.59390000e+04,
          9.92000000e+02,   5.28640000e+04,   6.00000000e+00,
          1.37000000e+02,   8.10000000e+01,   2.50000000e+01,
          1.00000000e+00,   3.52426821e+00,   3.52426821e+00,
          3.52426821e+00,   8.92000000e+02,   8.92000000e+02,
          8.92000000e+02,   7.20000000e+01,   1.60000000e+01], 
       [  3.32000000e+02,   2.24000000e+02,   8.45000000e+03,
          8.00000000e+00,   0.00000000e+00,   5.27360000e+04,
          1.12640000e+04,   0.00000000e+00,   5.35300000e+04,
          4.09600000e+03,   5.73440000e+04,   2.08699392e+09,
          4.09600000e+03,   5.12000000e+02,   4.00000000e+00,
          0.00000000e+00,   8.00000000e+00,   0.00000000e+00,
          4.00000000e+00,   0.00000000e+00,   7.37280000e+04,
          1.02400000e+03,   8.92300000e+04,   2.00000000e+00,
          3.20000000e+02,   1.04857600e+06,   4.09600000e+03,
          1.04857600e+06,   4.09600000e+03,   0.00000000e+00,
          1.60000000e+01,   4.00000000e+00,   4.31899422e+00,
          3.30769150e+00,   6.15499505e+00,   1.42080000e+04,
          1.02400000e+03,   5.27360000e+04,   1.57382500e+04,
          9.92000000e+02,   5.22730000e+04,   6.00000000e+00,
          1.33000000e+02,   8.10000000e+01,   2.50000000e+01,
          1.00000000e+00,   3.54207119e+00,   3.54207119e+00,
          3.54207119e+00,   8.92000000e+02,   8.92000000e+02,
          8.92000000e+02,   7.20000000e+01,   1.60000000e+01],
   [  3.32000000e+02,   2.24000000e+02,   8.45000000e+03,
      8.00000000e+00,   0.00000000e+00,   4.09600000e+04,
      2.04800000e+04,   0.00000000e+00,   2.66080000e+04,
      4.09600000e+03,   4.50560000e+04,   1.92151552e+09,
      4.09600000e+03,   4.09600000e+03,   4.00000000e+00,
      0.00000000e+00,   0.00000000e+00,   0.00000000e+00,
      4.00000000e+00,   0.00000000e+00,   6.55360000e+04,
      4.09600000e+03,   1.21734000e+05,   2.00000000e+00,
      3.20000000e+02,   1.04857600e+06,   4.09600000e+03,
      1.04857600e+06,   4.09600000e+03,   0.00000000e+00,
      1.60000000e+01,   5.00000000e+00,   3.58061262e+00,
      8.04176679e-02,   6.23193618e+00,   1.22880000e+04,
      4.09600000e+03,   4.09600000e+04,   1.04442000e+04,
      9.64000000e+02,   3.76480000e+04,   2.00000000e+00,
      6.80000000e+01,   0.00000000e+00,   1.12000000e+02,
      6.00000000e+00,   3.00438262e+00,   2.40651198e+00,
      3.59262288e+00,   6.10333333e+02,   1.24000000e+02,
      1.41200000e+03,   7.20000000e+01,   1.60000000e+01],
   [  3.32000000e+02,   2.24000000e+02,   2.58000000e+02,
      1.10000000e+01,   0.00000000e+00,   3.54816000e+05,
      2.57024000e+05,   0.00000000e+00,   1.83632000e+05,
      4.09600000e+03,   3.60448000e+05,   4.19430400e+06,
      4.09600000e+03,   5.12000000e+02,   5.00000000e+00,
      1.00000000e+00,   0.00000000e+00,   0.00000000e+00,
      5.00000000e+00,   1.00000000e+00,   6.26688000e+05,
      1.02400000e+03,   0.00000000e+00,   2.00000000e+00,
      3.30880000e+04,   1.04857600e+06,   4.09600000e+03,
      1.04857600e+06,   4.09600000e+03,   0.00000000e+00,
      1.60000000e+01,   5.00000000e+00,   4.59039653e+00,
      2.37894684e+00,   6.29682587e+00,   1.20524800e+05,
      7.68000000e+03,   3.54816000e+05,   1.22148600e+05,
      1.64680000e+04,   3.54799000e+05,   7.00000000e+00,
      1.38000000e+02,   0.00000000e+00,   0.00000000e+00,
      7.00000000e+00,   3.91441476e+00,   1.44168828e+00,
      7.67709054e+00,   7.29842857e+03,   1.60000000e+01,
      2.84380000e+04,   7.20000000e+01,   0.00000000e+00],
   [  3.32000000e+02,   2.24000000e+02,   2.71000000e+02,
      6.00000000e+00,   0.00000000e+00,   2.40640000e+04,
      1.64864000e+05,   1.02400000e+03,   1.25380000e+04,
      4.09600000e+03,   2.86720000e+04,   4.19430400e+06,
      4.09600000e+03,   5.12000000e+02,   4.00000000e+00,
      0.00000000e+00,   6.00000000e+00,   0.00000000e+00,
      4.00000000e+00,   0.00000000e+00,   2.41664000e+05,
      1.02400000e+03,   0.00000000e+00,   2.00000000e+00,
      3.27680000e+04,   1.04857600e+06,   4.09600000e+03,
      1.04857600e+06,   4.09600000e+03,   0.00000000e+00,
      1.60000000e+01,   5.00000000e+00,   4.10454072e+00,
      0.00000000e+00,   6.44010555e+00,   6.75840000e+03,
      0.00000000e+00,   2.40640000e+04,   4.62608000e+04,
      3.14400000e+03,   1.54712000e+05,   8.00000000e+00,
      1.55000000e+02,   1.00000000e+00,   0.00000000e+00,
      6.00000000e+00,   3.19910735e+00,   1.97133529e+00,
      5.21481585e+00,   4.52000000e+02,   3.40000000e+01,
      9.58000000e+02,   0.00000000e+00,   1.50000000e+01],
   [  3.32000000e+02,   2.24000000e+02,   2.58000000e+02,
      1.00000000e+01,   0.00000000e+00,   1.18784000e+05,
      3.81952000e+05,   0.00000000e+00,   5.99140000e+04,
      4.09600000e+03,   1.22880000e+05,   4.19430400e+06,
      4.09600000e+03,   5.12000000e+02,   5.00000000e+00,
      1.00000000e+00,   0.00000000e+00,   0.00000000e+00,
      5.00000000e+00,   1.00000000e+00,   5.20192000e+05,
      1.02400000e+03,   5.58287000e+05,   2.00000000e+00,
      3.30880000e+04,   1.04857600e+06,   4.09600000e+03,
      1.04857600e+06,   4.09600000e+03,   0.00000000e+00,
      1.60000000e+01,   5.00000000e+00,   5.66240790e+00,
      4.18369159e+00,   7.96187140e+00,   1.00147200e+05,
      9.21600000e+03,   3.34848000e+05,   1.01559800e+05,
      9.36800000e+03,   3.34440000e+05,   7.00000000e+00,
      1.14000000e+02,   0.00000000e+00,   0.00000000e+00,
      1.80000000e+01,   6.53094643e+00,   2.45849223e+00,
      7.99268848e+00,   1.85234444e+04,   4.80000000e+01,
      3.39450000e+04,   7.20000000e+01,   1.40000000e+01],
   [  3.32000000e+02,   2.24000000e+02,   2.58000000e+02,
      1.00000000e+01,   0.00000000e+00,   1.74592000e+05,
      3.00032000e+05,   0.00000000e+00,   1.17140000e+05,
      4.09600000e+03,   1.80224000e+05,   4.19430400e+06,
      4.09600000e+03,   5.12000000e+02,   5.00000000e+00,
      1.00000000e+00,   0.00000000e+00,   0.00000000e+00,
      5.00000000e+00,   1.00000000e+00,   4.87424000e+05,
      1.02400000e+03,   5.13173000e+05,   2.00000000e+00,
      3.30880000e+04,   1.04857600e+06,   4.09600000e+03,
      1.04857600e+06,   4.09600000e+03,   0.00000000e+00,
      1.60000000e+01,   5.00000000e+00,   5.73547047e+00,
      4.75826034e+00,   7.36431335e+00,   9.30816000e+04,
      1.53600000e+04,   1.92000000e+05,   9.46988000e+04,
      2.15000000e+04,   1.91664000e+05,   1.10000000e+01,
      2.54000000e+02,   1.50000000e+01,   0.00000000e+00,
      1.50000000e+01,   5.73239307e+00,   2.85236422e+00,
      7.98772639e+00,   1.27061333e+04,   1.18000000e+02,
      6.05000000e+04,   7.20000000e+01,   1.40000000e+01],
   [  3.32000000e+02,   2.24000000e+02,   2.58000000e+02,
      9.00000000e+00,   0.00000000e+00,   4.75648000e+05,
      3.48672000e+05,   0.00000000e+00,   3.19769000e+05,
      4.09600000e+03,   4.83328000e+05,   4.19430400e+06,
      4.09600000e+03,   5.12000000e+02,   5.00000000e+00,
      0.00000000e+00,   0.00000000e+00,   0.00000000e+00,
      5.00000000e+00,   0.00000000e+00,   8.56064000e+05,
      1.02400000e+03,   1.82072586e+09,   2.00000000e+00,
      3.30880000e+04,   1.04857600e+06,   4.09600000e+03,
      1.04857600e+06,   4.09600000e+03,   0.00000000e+00,
      1.60000000e+01,   5.00000000e+00,   5.13993423e+00,
      4.48079036e+00,   6.55814891e+00,   1.64864000e+05,
      1.38240000e+04,   4.75648000e+05,   1.68145200e+05,
      3.08400000e+04,   4.75580000e+05,   1.40000000e+01,
      4.21000000e+02,   1.50000000e+01,   0.00000000e+00,
      5.90000000e+01,   2.82782573e+00,   9.60953136e-01,
      7.21232881e+00,   2.63703390e+03,   2.00000000e+01,
      6.76240000e+04,   7.20000000e+01,   0.00000000e+00],
   [  3.32000000e+02,   2.24000000e+02,   2.59000000e+02,
      9.00000000e+00,   0.00000000e+00,   1.57696000e+05,
      6.24640000e+04,   0.00000000e+00,   6.70150000e+04,
      4.09600000e+03,   1.63840000e+05,   4.19430400e+06,
      4.09600000e+03,   5.12000000e+02,   5.00000000e+00,
      0.00000000e+00,   0.00000000e+00,   0.00000000e+00,
      5.00000000e+00,   0.00000000e+00,   2.33472000e+05,
      1.02400000e+03,   2.72988000e+05,   2.00000000e+00,
      3.30240000e+04,   1.04857600e+06,   4.09600000e+03,
      1.04857600e+06,   4.09600000e+03,   0.00000000e+00,
      1.60000000e+01,   4.00000000e+00,   4.81988481e+00,
      2.97736539e+00,   6.48512410e+00,   5.50400000e+04,
      3.58400000e+03,   1.57696000e+05,   5.56267500e+04,
      6.70000000e+03,   1.57297000e+05,   2.00000000e+00,
      7.60000000e+01,   0.00000000e+00,   0.00000000e+00,
      1.30000000e+01,   3.94329633e+00,   1.81444345e+00,
      6.12204520e+00,   2.70815385e+03,   1.32000000e+02,
      9.64000000e+03,   7.20000000e+01,   1.40000000e+01],
   [  3.32000000e+02,   2.24000000e+02,   2.59000000e+02,
      8.30000000e+01,   8.20000000e+01,   7.24992000e+05,
      2.30604800e+06,   0.00000000e+00,   4.24345600e+06,
      3.52256000e+06,   4.30899200e+06,   4.19430400e+06,
      4.09600000e+03,   4.09600000e+03,   5.00000000e+00,
      0.00000000e+00,   0.00000000e+00,   0.00000000e+00,
      5.00000000e+00,   0.00000000e+00,   6.70924800e+06,
      4.09600000e+03,   3.07704700e+06,   2.00000000e+00,
      3.27680000e+04,   1.04857600e+06,   4.09600000e+03,
      1.04857600e+06,   4.09600000e+03,   0.00000000e+00,
      1.60000000e+01,   9.00000000e+00,   3.78312500e+00,
      0.00000000e+00,   7.99951830e+00,   3.36782222e+05,
      0.00000000e+00,   1.88416000e+06,   7.44182333e+05,
      2.27200000e+03,   3.06129900e+06,   4.00000000e+00,
      2.43000000e+02,   0.00000000e+00,   0.00000000e+00,
      2.10000000e+01,   3.98746295e+00,   2.64215931e+00,
      6.47369968e+00,   1.42880000e+04,   7.60000000e+01,
      2.70376000e+05,   0.00000000e+00,   0.00000000e+00],
   [  3.32000000e+02,   2.24000000e+02,   2.58000000e+02,
      1.00000000e+01,   0.00000000e+00,   1.20320000e+05,
      3.85024000e+05,   0.00000000e+00,   6.15780000e+04,
      4.09600000e+03,   1.26976000e+05,   4.19430400e+06,
      4.09600000e+03,   5.12000000e+02,   5.00000000e+00,
      1.00000000e+00,   0.00000000e+00,   0.00000000e+00,
      5.00000000e+00,   1.00000000e+00,   5.28384000e+05,
      1.02400000e+03,   5.66330000e+05,   2.00000000e+00,
      3.30880000e+04,   1.04857600e+06,   4.09600000e+03,
      1.04857600e+06,   4.09600000e+03,   0.00000000e+00,
      1.60000000e+01,   5.00000000e+00,   5.64644365e+00,
      4.11726412e+00,   7.96277585e+00,   1.01068800e+05,
      9.72800000e+03,   3.30752000e+05,   1.02623800e+05,
      9.40400000e+03,   3.39652000e+05,   3.00000000e+00,
      8.90000000e+01,   0.00000000e+00,   0.00000000e+00,
      6.00000000e+00,   3.72982391e+00,   2.45849223e+00,
      5.31755236e+00,   2.73950000e+03,   4.80000000e+01,
      9.64000000e+03,   7.20000000e+01,   1.50000000e+01],
   [  3.32000000e+02,   2.24000000e+02,   2.59000000e+02,
      1.00000000e+01,   0.00000000e+00,   2.33984000e+05,
      1.37779200e+06,   0.00000000e+00,   9.31200000e+04,
      4.09600000e+03,   2.41664000e+05,   4.19430400e+06,
      4.09600000e+03,   5.12000000e+02,   5.00000000e+00,
      1.00000000e+00,   0.00000000e+00,   0.00000000e+00,
      5.00000000e+00,   1.00000000e+00,   1.63020800e+06,
      1.02400000e+03,   1.66150900e+06,   2.00000000e+00,
      3.32800000e+04,   1.04857600e+06,   4.09600000e+03,
      1.04857600e+06,   4.09600000e+03,   0.00000000e+00,
      1.60000000e+01,   3.00000000e+00,   5.46068132e+00,
      3.13962777e+00,   7.09009944e+00,   5.37258667e+05,
      5.63200000e+03,   1.37216000e+06,   5.39602667e+05,
      1.33160000e+04,   1.37185600e+06,   1.00000000e+00,
      8.00000000e+01,   0.00000000e+00,   0.00000000e+00,
      1.80000000e+01,   4.32832189e+00,   2.32321967e+00,
      7.06841290e+00,   7.61582778e+04,   9.00000000e+00,
      1.34273500e+06,   7.20000000e+01,   1.90000000e+01],
   [  3.32000000e+02,   2.24000000e+02,   2.71000000e+02,
      6.00000000e+00,   0.00000000e+00,   4.91520000e+04,
      5.61152000e+05,   0.00000000e+00,   3.38800000e+04,
      4.09600000e+03,   5.32480000e+04,   4.19430400e+06,
      4.09600000e+03,   4.09600000e+03,   4.00000000e+00,
      0.00000000e+00,   0.00000000e+00,   0.00000000e+00,
      4.00000000e+00,   0.00000000e+00,   6.14400000e+05,
      4.09600000e+03,   0.00000000e+00,   2.00000000e+00,
      0.00000000e+00,   1.04857600e+06,   4.09600000e+03,
      1.04857600e+06,   4.09600000e+03,   0.00000000e+00,
      1.60000000e+01,   4.00000000e+00,   3.69925758e+00,
      0.00000000e+00,   6.48297395e+00,   1.94600000e+04,
      1.60000000e+01,   4.91520000e+04,   1.50074000e+05,
      1.60000000e+01,   5.48460000e+05,   4.00000000e+00,
      1.19000000e+02,   1.00000000e+01,   0.00000000e+00,
      0.00000000e+00,   0.00000000e+00,   0.00000000e+00,
      0.00000000e+00,   0.00000000e+00,   0.00000000e+00,
      0.00000000e+00,   0.00000000e+00,   0.00000000e+00],
   [  3.32000000e+02,   2.24000000e+02,   2.58000000e+02,
      1.00000000e+01,   0.00000000e+00,   2.91840000e+04,
      4.45952000e+05,   1.68960000e+04,   1.48190000e+04,
      4.09600000e+03,   3.68640000e+04,   4.19430400e+06,
      4.09600000e+03,   5.12000000e+02,   5.00000000e+00,
      0.00000000e+00,   6.00000000e+00,   0.00000000e+00,
      5.00000000e+00,   0.00000000e+00,   1.76537600e+06,
      1.02400000e+03,   5.94294000e+05,   2.00000000e+00,
      3.41120000e+04,   1.04857600e+06,   4.09600000e+03,
      1.04857600e+06,   4.09600000e+03,   0.00000000e+00,
      1.60000000e+01,   6.00000000e+00,   3.76419176e+00,
      0.00000000e+00,   6.47970818e+00,   7.93600000e+03,
      0.00000000e+00,   2.91840000e+04,   2.92339333e+05,
      2.53600000e+03,   1.28204800e+06,   8.00000000e+00,
      1.71000000e+02,   1.00000000e+00,   0.00000000e+00,
      6.00000000e+00,   3.15203588e+00,   2.16096405e+00,
      5.21367450e+00,   3.54333333e+02,   2.00000000e+01,
      7.44000000e+02,   0.00000000e+00,   0.00000000e+00],
   [  3.32000000e+02,   2.24000000e+02,   3.31670000e+04,
      2.00000000e+00,   2.50000000e+01,   3.78880000e+04,
      1.53600000e+04,   0.00000000e+00,   4.00000000e+04,
      4.09600000e+03,   4.50560000e+04,   4.19430400e+06,
      4.09600000e+03,   5.12000000e+02,   1.00000000e+00,
      0.00000000e+00,   0.00000000e+00,   0.00000000e+00,
      4.00000000e+00,   0.00000000e+00,   8.19200000e+04,
      1.02400000e+03,   6.78554440e+07,   2.00000000e+00,
      0.00000000e+00,   1.04857600e+06,   1.63840000e+04,
      1.04857600e+06,   4.09600000e+03,   0.00000000e+00,
      1.60000000e+01,   8.00000000e+00,   2.33301385e+00,
      0.00000000e+00,   6.63664803e+00,   6.65600000e+03,
      0.00000000e+00,   3.78880000e+04,   7.19800000e+03,
      8.00000000e+00,   3.77320000e+04,   8.00000000e+00,
      9.60000000e+01,   0.00000000e+00,   0.00000000e+00,
      1.40000000e+01,   3.42918455e+00,   2.41356665e+00,
      5.05007355e+00,   7.17142857e+02,   4.40000000e+01,
      2.21600000e+03,   0.00000000e+00,   1.50000000e+01]])

inputY = array([ 1.,  1.,  1.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0., 0.,  0.])
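
One more note on the setup: the code above trains and evaluates on the same inputX/inputY. For reference, a sketch of how a held-out split could be added with scikit-learn's train_test_split (not in my code above), which would give a more honest estimate of sensitivity:

from sklearn.model_selection import train_test_split

# hypothetical split; stratify keeps the class ratio the same in both halves
X_train, X_test, y_train, y_test = train_test_split(
    inputX, inputY, test_size=0.3, stratify=inputY.ravel(), random_state=42)

# train only on the training half:
#   sess.run([optimizer, cost], feed_dict={x: X_train, y: y_train})
# and evaluate cost / sensitivity only on the held-out half:
#   mse = cost.eval({x: X_test, y: y_test})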

0 Answers