FANN XOR training

Date: 2013-12-25 19:42:07

Tags: c++ artificial-intelligence neural-network fann

I am developing a piece of software that uses FANN (the Fast Artificial Neural Network library). After trying to write my own ANN code, I attempted to compile the FANN example program, in this case the C++ XOR approximation program. Here is the source.

#include "../include/floatfann.h"
#include "../include/fann_cpp.h"


#include <ios>
#include <iostream>
#include <iomanip>
using std::cout;
using std::cerr;
using std::endl;
using std::setw;
using std::left;
using std::right;
using std::showpos;
using std::noshowpos;


// Callback function that simply prints the information to cout
int print_callback(FANN::neural_net &net, FANN::training_data &train,
    unsigned int max_epochs, unsigned int epochs_between_reports,
    float desired_error, unsigned int epochs, void *user_data)
{
    cout << "Epochs     " << setw(8) << epochs << ". "
         << "Current Error: " << left << net.get_MSE() << right << endl;
    return 0;
}

// Test function that demonstrates usage of the fann C++ wrapper
void xor_test()
{
    cout << endl << "XOR test started." << endl;

    const float learning_rate = 0.7f;
    const unsigned int num_layers = 3;
    const unsigned int num_input = 2;
    const unsigned int num_hidden = 3;
    const unsigned int num_output = 1;
    const float desired_error = 0.001f;
    const unsigned int max_iterations = 300000;
    const unsigned int iterations_between_reports = 10000;

    // Layer-size array for the create_standard() workaround (prevents "FANN Error 11: Unable to allocate memory.")
    const unsigned int num_input_num_hidden_num_output__array[3] = {num_input, num_hidden, num_output};
    cout << endl << "Creating network." << endl;

    FANN::neural_net net;
//    cout<<"Debug 1"<<endl;
    //net.create_standard(num_layers, num_input, num_hidden, num_output);//doesn't work
    net.create_standard_array(num_layers, num_input_num_hidden_num_output__array);//this might work -- create_standard() workaround

    net.set_learning_rate(learning_rate);

    net.set_activation_steepness_hidden(1.0);
    net.set_activation_steepness_output(1.0);

    //Sample Code, changed below
    net.set_activation_function_hidden(FANN::SIGMOID_SYMMETRIC_STEPWISE);
    net.set_activation_function_output(FANN::SIGMOID_SYMMETRIC_STEPWISE);
    //changed above to sigmoid
    //net.set_activation_function_hidden(FANN::SIGMOID);
    //net.set_activation_function_output(FANN::SIGMOID);

    // Set additional properties such as the training algorithm
    //net.set_training_algorithm(FANN::TRAIN_QUICKPROP);

    // Output network type and parameters
    cout << endl << "Network Type                         :  ";
    switch (net.get_network_type())
    {
    case FANN::LAYER://only connected to next layer
        cout << "LAYER" << endl;
        break;
    case FANN::SHORTCUT://connected to all other layers
        cout << "SHORTCUT" << endl;
        break;
    default:
        cout << "UNKNOWN" << endl;
        break;
    }
    net.print_parameters();

    cout << endl << "Training network." << endl;

    FANN::training_data data;
    if (data.read_train_from_file("xor.data"))
    {
        // Initialize and train the network with the data
        net.init_weights(data);

        cout << "Max Epochs " << setw(8) << max_iterations << ". "
            << "Desired Error: " << left << desired_error << right << endl;
        net.set_callback(print_callback, NULL);
        net.train_on_data(data, max_iterations,
            iterations_between_reports, desired_error);

        cout << endl << "Testing network. (not really)" << endl;

        //I don't really get this code --- the funny for loop. Whatever. I'll skip it.
        for (unsigned int i = 0; i < data.length_train_data(); ++i)
        {
            // Run the network on the test data
            fann_type *calc_out = net.run(data.get_input()[i]);

            cout << "XOR test (" << showpos << data.get_input()[i][0] << ", "
                 << data.get_input()[i][1] << ") -> " << *calc_out
                 << ", should be " << data.get_output()[i][0] << ", "
                 << "difference = " << noshowpos
                 << fann_abs(*calc_out - data.get_output()[i][0]) << endl;
        }

        cout << endl << "Saving network." << endl;

        // Save the network in floating point and fixed point
        net.save("xor_float.net");
        unsigned int decimal_point = net.save_to_fixed("xor_fixed.net");
        data.save_train_to_fixed("xor_fixed.data", decimal_point);

        cout << endl << "XOR test completed." << endl;
    }
}

/* Startup function. Synchronizes C and C++ output, calls the test function
   and reports any exceptions */
int main(int argc, char **argv)
{
    try
    {
        std::ios::sync_with_stdio(); // Synchronize cout and printf output
        xor_test();
    }
    catch (...)
    {
        cerr << endl << "Abnormal exception." << endl;
    }
    return 0;
}

Here is my output.

XOR test started.

Creating network.

Network Type                         :  LAYER
Input layer                          :   2 neurons, 1 bias
  Hidden layer                       :   3 neurons, 1 bias
Output layer                         :   1 neurons
Total neurons and biases             :   8
Total connections                    :  13
Connection rate                      :   1.000
Network type                         :   FANN_NETTYPE_LAYER
Training algorithm                   :   FANN_TRAIN_RPROP
Training error function              :   FANN_ERRORFUNC_TANH
Training stop function               :   FANN_STOPFUNC_MSE
Bit fail limit                       :   0.350
Learning rate                        :   0.700
Learning momentum                    :   0.000
Quickprop decay                      :  -0.000100
Quickprop mu                         :   1.750
RPROP increase factor                :   1.200
RPROP decrease factor                :   0.500
RPROP delta min                      :   0.000
RPROP delta max                      :  50.000
Cascade output change fraction       :   0.010000
Cascade candidate change fraction    :   0.010000
Cascade output stagnation epochs     :  12
Cascade candidate stagnation epochs  :  12
Cascade max output epochs            : 150
Cascade min output epochs            :  50
Cascade max candidate epochs         : 150
Cascade min candidate epochs         :  50
Cascade weight multiplier            :   0.400
Cascade candidate limit              :1000.000
Cascade activation functions[0]      :   FANN_SIGMOID
Cascade activation functions[1]      :   FANN_SIGMOID_SYMMETRIC
Cascade activation functions[2]      :   FANN_GAUSSIAN
Cascade activation functions[3]      :   FANN_GAUSSIAN_SYMMETRIC
Cascade activation functions[4]      :   FANN_ELLIOT
Cascade activation functions[5]      :   FANN_ELLIOT_SYMMETRIC
Cascade activation functions[6]      :   FANN_SIN_SYMMETRIC
Cascade activation functions[7]      :   FANN_COS_SYMMETRIC
Cascade activation functions[8]      :   FANN_SIN
Cascade activation functions[9]      :   FANN_COS
Cascade activation steepnesses[0]    :   0.250
Cascade activation steepnesses[1]    :   0.500
Cascade activation steepnesses[2]    :   0.750
Cascade activation steepnesses[3]    :   1.000
Cascade candidate groups             :   2
Cascade no. of candidates            :  80

Training network.
Max Epochs   300000. Desired Error: 0.001
Epochs            1. Current Error: 0.25
Epochs        10000. Current Error: 0.25
Epochs        20000. Current Error: 0.25
Epochs        30000. Current Error: 0.25
Epochs        40000. Current Error: 0.25
Epochs        50000. Current Error: 0.25
Epochs        60000. Current Error: 0.25
Epochs        70000. Current Error: 0.25
Epochs        80000. Current Error: 0.25
Epochs        90000. Current Error: 0.25
Epochs       100000. Current Error: 0.25
Epochs       110000. Current Error: 0.25
Epochs       120000. Current Error: 0.25
Epochs       130000. Current Error: 0.25
Epochs       140000. Current Error: 0.25
Epochs       150000. Current Error: 0.25
Epochs       160000. Current Error: 0.25
Epochs       170000. Current Error: 0.25
Epochs       180000. Current Error: 0.25
Epochs       190000. Current Error: 0.25
Epochs       200000. Current Error: 0.25
Epochs       210000. Current Error: 0.25
Epochs       220000. Current Error: 0.25
Epochs       230000. Current Error: 0.25
Epochs       240000. Current Error: 0.25
Epochs       250000. Current Error: 0.25
Epochs       260000. Current Error: 0.25
Epochs       270000. Current Error: 0.25
Epochs       280000. Current Error: 0.25
Epochs       290000. Current Error: 0.25
Epochs       300000. Current Error: 0.25

Testing network. (not really)
XOR test (+0, -1.875) -> +0, should be +0, difference = -0
XOR test (+0, -1.875) -> +0, should be +0, difference = -0
XOR test (+0, +1.875) -> +0, should be +0, difference = -0
XOR test (+0, +1.875) -> +0, should be +0, difference = -0

Saving network.

XOR test completed.

The training data (xor.data) is shown below; the first line "4 2 1" declares 4 training pairs, each with 2 inputs and 1 output (a read-back check follows the listing):

4 2 1    
-1 -1    
-1    
-1 1    
1    
1 -1    
1
1 1    
-1
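
As a quick sanity check, here is a minimal read-back sketch (it reuses only the headers and calls already present in the program above, nothing new). With a consistent build it should print (-1, -1) -> -1, (-1, 1) -> 1, (1, -1) -> 1, (1, 1) -> -1; distorted values like those in the test output above would point to the file being parsed with the wrong precision rather than to a training problem.

#include "../include/floatfann.h"
#include "../include/fann_cpp.h"

#include <iostream>

int main()
{
    FANN::training_data data;
    if (!data.read_train_from_file("xor.data"))
        return 1;

    // Print every pair exactly as the library parsed it from xor.data
    for (unsigned int i = 0; i < data.length_train_data(); ++i)
    {
        std::cout << "(" << data.get_input()[i][0] << ", "
                  << data.get_input()[i][1] << ") -> "
                  << data.get_output()[i][0] << std::endl;
    }
    return 0;
}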

What explains this eerie lack of learning in the network? I am fairly convinced I have misconfigured something somewhere, especially considering that this is the example program. ANN experts, any suggestions?

1 Answer:

Answer 0 (score: 2):

Apply the FANN patches and make sure that all references to floatfann, doublefann, etc. are consistent.
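
For context, a minimal sketch of what that consistency looks like in practice (the library name and build command below are assumptions based on a typical FANN install, not taken from the answer): the header you include fixes the precision of fann_type, and the library you link against must have been built with the same precision, otherwise calls such as read_train_from_file() misinterpret the data, which would explain inputs like "+0, -1.875" in the test output above.

#include "floatfann.h"   // floatfann.h defines fann_type as float

#include <iostream>

int main()
{
    // With floatfann.h this should print 4; doublefann.h would give 8.
    // The linked library must use the same fann_type, or the data read
    // by the library will be reinterpreted at the wrong width.
    std::cout << "sizeof(fann_type) = " << sizeof(fann_type) << std::endl;
    return 0;
}

// Assumed build command (adjust paths and library names to your installation):
//   g++ check_precision.cpp -lfloatfann -o check_precision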