我的神经网络不起作用[XOR问题]

时间:2018-11-04 18:32:20

标签: c++ neural-network xor backpropagation feed-forward

我正在尝试创建一个用于解决 XOR 问题的神经网络,但一直没有成功:网络无法学习,四种输入的输出总是错误且彼此相似。也许我在数学推导上犯了某个错误。

我没有使用BIAS。

Note: execute function = (feed-forward + backpropagation)
ALPHA = 0.5

代码如下:

// main.cpp

#include <cstdlib>   // std::rand, std::srand
#include <ctime>     // std::time
#include <iostream>

#include "neural_network.h"

int main(int argc, char const *argv[])
{
    // Seed the PRNG used to pick random training samples below.
    srand(time(NULL));

    // XOR training set: {input1, input2, expected output}.
    double array[][3] = {{0.0, 0.0, 0.0},
                         {0.0, 1.0, 1.0},
                         {1.0, 0.0, 1.0},
                         {1.0, 1.0, 0.0}};

    // Stack allocation instead of `new` — the original heap-allocated the
    // network and never `delete`d it, leaking it for the program's lifetime.
    // NOTE(review): constructor argument meanings (3, 2, 2, 1, 1.0) are
    // declared in neural_network.h — presumably layer sizes plus a rate;
    // confirm against the header.
    neural_network nn(3, 2, 2, 1, 1.0);
    nn.create_network();

    // Train: 15000 iterations, each on one randomly drawn XOR pattern.
    // execute() = feed-forward + backpropagation (per the question text).
    for (int i = 0; i < 15000; i++)
    {
        int index = rand() % 4;
#if DEBUG
        std::cout<<"Inputs :"<<array[index][0]<<" , "<<array[index][1]<<std::endl;
        std::cout<<"Outputs :"<<array[index][2]<<std::endl;
#endif
        nn.execute(array[index], &array[index][2]);
    }

    nn.print_weight();

    // Evaluate and print the network's output for all four XOR patterns
    // (replaces four copy-pasted execute/print stanzas).
    for (int i = 0; i < 4; i++)
    {
        nn.execute(array[i], &array[i][2]);
        nn.print_output();
    }

    return 0;
}

//前馈功能

void neural_network::feed_forward(double* inputs)
{
int index = 0;

for(int i = 0; i < neural_network::input_layer_size; i++)
    neural_network::input_neuron[i] = inputs[i];

for(int i = 0; i < neural_network::hidden_layer_size; i++)
{
    for(int j = 0; j < neural_network::input_layer_size; j++)
    {
        neural_network::hidden_neuron[i] += neural_network::input_neuron[j] * weight_I_H[index++];
    }
    neural_network::hidden_neuron[i] = neural_network::activation_func(neural_network::hidden_neuron[i]);
}    

index = 0;

for(int i = 0; i < neural_network::output_layer_size; i++)
{
    for(int j = 0; j < neural_network::hidden_layer_size; j++)
    {
        neural_network::output_neuron[i] += neural_network::hidden_neuron[j] * weight_H_O[index++];
    }
    neural_network::output_neuron[i] = neural_network::activation_func(neural_network::output_neuron[i]);
}

}

//反向传播功能

void neural_network::back_propagation(double* outputs)
{
    int index;

for(int i = 0; i < neural_network::output_layer_size; i++)
    neural_network::err_output[i] = (outputs[i] - neural_network::output_neuron[i]);

for(int i = 0; i < neural_network::hidden_layer_size; i++)
{
    index = i;
    for(int j = 0; j < neural_network::output_layer_size; j++)
    {
        neural_network::err_hidden[i] += neural_network::weight_H_O[index] * neural_network::err_output[j] * neural_network::derivative_act_func(neural_network::output_neuron[j]);
        neural_network::weight_H_O[index] += ALPHA * neural_network::err_output[j] * neural_network::derivative_act_func(neural_network::output_neuron[j]) * neural_network::hidden_neuron[i];
        index += neural_network::hidden_layer_size;
    }
}

for(int i = 0; i < neural_network::input_layer_size; i++)
{
    index = i;
    for(int j = 0; j < neural_network::hidden_layer_size; j++)
    {
        neural_network::weight_I_H[index] += ALPHA * neural_network::err_hidden[j] * neural_network::derivative_act_func(neural_network::hidden_neuron[j]) * neural_network::input_neuron[i];
        index += neural_network::input_layer_size;
    }
}
}

//输出

Input To Hidden :
H-1 :
    Weight :-13.269
    Weight :-13.2705
H-2 :
    Weight :-12.5172
    Weight :-12.5195

Hidden To Output :
O-1 :
    Weight :-5.37707
    Weight :-2.93218


Outputs for (0,0):
    O-1 :0.0294265

Outputs for (0,1):
    O-1 :0.507348

Outputs for (1,0):
    O-1 :0.62418

Outputs for (1,1):
    O-1 :0.651169

1 个答案:

答案 0 :(得分:0)

没有偏置(bias)的网络确实无法真正解决这个 XOR 任务。我用自己基于傅里叶(Fourier)变换开发的网络(功能比 keras 更强)和普通 ANN 都做过非常仔细的测试:4 个样本最多只能正确识别 3 个(acc = 0.75,即 75%),没有一个网络能答对 1 xor 1 = 0。看来提问者确实认真测试过这种情况。(这里的 ANN 是多层网络)