使用 delete[] 删除双精度数组时发生 SIGABRT

时间:2018-12-25 21:50:59

标签: c++ pointers

我写了一个神经网络类。我对 delete[] 运算符有疑问。执行 "delete [] weights;" 时会触发 SIGABRT。怎么回事?对 inputX 执行同样的操作却没有问题。下面是我的类的文件:在 .h 文件中有 weights 指针的定义;在 .cpp 文件中使用了 new 运算符和 delete[] 运算符。

文件perceptron.h

#ifndef PERCEPTRON_H
#define PERCEPTRON_H

class Perceptron
{
public:
    Perceptron();
    ~Perceptron();
    // NOTE(review): this class owns two heap arrays but declares no copy
    // constructor / copy assignment, so copying a Perceptron would double
    // delete in ~Perceptron(). Prefer std::vector<double> for the arrays,
    // or delete the copy operations once callers are audited.
    int Nperceptrons = 0;      // fan-in: perceptron count of the previous layer
    int numberlayer = 0;       // index of the layer this perceptron belongs to
    int Nlayers = 0;           // total number of layers in the network
    double sum = 0.0;          // weighted input sum (pre-activation)
    double n = 0.01;           // learning rate
    double sigma = 0.0;        // sigmoid activation of `sum`
    double delta = 0.0;        // back-propagated error term
    double bias = 0.0;
    // Initialize the owning pointers to nullptr: the destructor calls
    // delete[] on them unconditionally, and delete[] on an *uninitialized*
    // pointer is undefined behavior (the reported SIGABRT). delete[] on a
    // null pointer is a well-defined no-op.
    double *weights = nullptr; // allocated by setWeights()
    double *inputX = nullptr;  // allocated by countSum()
    double answer = 0.0;       // training target (read by changeDelta())
    double result = 0.0;       // forward-pass output written by check()
    void setWeights();
    void countSum(double *input = nullptr);
    void changeWeights();
    void countSigma();
    void changeDelta(Perceptron *tab = nullptr);
    void check(double *input);
};

#endif // PERCEPTRON_H

文件perceptron.cpp

#include "perceptron.h"

#include <cmath>    // exp
#include <cstdlib>  // rand, RAND_MAX
#include <cstring>  // memcpy

#include "mainwindow.h"

Perceptron::Perceptron()
{
    // Start with no allocations. ~Perceptron() unconditionally delete[]s
    // these pointers; running delete[] on an uninitialized (garbage)
    // pointer is undefined behavior -- the cause of the reported SIGABRT.
    // delete[] on nullptr is well defined and does nothing.
    weights = nullptr;
    inputX = nullptr;
}

Perceptron::~Perceptron()
{
    // Releases the arrays allocated by countSum() and setWeights().
    // NOTE(review): the reported SIGABRT fires when one of these pointers
    // was never assigned -- the constructor leaves them uninitialized, so
    // delete[] runs on a garbage address (undefined behavior). The fix is
    // to initialize both pointers to nullptr at construction; delete[] on
    // a null pointer is a safe no-op.
    delete [] inputX;
    delete [] weights;
}

void Perceptron::setWeights()
{
    // Randomly initializes the weight array and the bias, each in [-1, 1].
    // The first layer (numberlayer == 0) has a fixed fan-in of 10 inputs;
    // every other layer has one weight per perceptron of the previous layer.
    // NOTE(review): calling this a second time leaks the previous array --
    // the old `weights` block is never freed. std::vector<double> would
    // remove that hazard entirely.
    const int count = (numberlayer == 0) ? 10 : Nperceptrons;
    weights = new double[count];
    for (int i = 0; i < count; ++i) {
        // rand()/RAND_MAX lies in [0, 1]; scale and shift to [-1, 1].
        weights[i] = 2 * static_cast<double>(rand()) / RAND_MAX - 1;
    }

    bias = 2 * static_cast<double>(rand()) / RAND_MAX - 1;
}

void Perceptron::countSum(double *input)
{
    // Computes the pre-activation value sum = dot(input, weights) + bias,
    // keeps a private copy of the input (read later by changeWeights()),
    // then updates sigma via countSigma().
    // The input width is 10 for the first layer, Nperceptrons otherwise.
    // NOTE(review): every call allocates a fresh inputX without freeing the
    // previous block, so this leaks on each call after the first. A safe
    // standalone fix requires inputX to be null-initialized at construction.
    const int count = (numberlayer == 0) ? 10 : Nperceptrons;
    inputX = new double[count];

    double acc = 0.0;
    for (int i = 0; i < count; ++i)
        acc += input[i] * weights[i];
    memcpy(inputX, input, static_cast<size_t>(count) * sizeof *input);

    acc += bias;
    sum = acc;
    countSigma();
}

void Perceptron::changeWeights()
{
    // Gradient-descent step on the weights and the bias, driven by the
    // delta produced by changeDelta(). The update rule depends on where
    // this perceptron sits in the network.
    if (Nlayers - 1 == numberlayer) {
        // Last layer: fixed fan-in of 5; every weight (and the bias) moves
        // by the same amount, scaled by this perceptron's activation.
        const double step = n * delta * sigma;
        for (int w = 0; w < 5; ++w)
            weights[w] -= step;
        bias -= step;
    } else {
        // First layer (fan-in 10) and hidden layers (fan-in Nperceptrons)
        // share one rule: each weight is scaled by its stored input.
        const int fanIn = (numberlayer == 0) ? 10 : Nperceptrons;
        for (int w = 0; w < fanIn; ++w)
            weights[w] -= n * delta * inputX[w];
        bias -= n * delta;
    }
}

void Perceptron::countSigma()
{
    // Logistic (sigmoid) activation of the current weighted sum.
    const double e = exp(-sum);
    sigma = 1.0 / (1.0 + e);
}

void Perceptron::changeDelta(Perceptron *tab)
{
    // Back-propagates the error term into `delta`.
    // Last layer: derivative of the squared error through the sigmoid.
    if (Nlayers - 1 == numberlayer) {
        delta = (sigma - answer) * sigma * (1.0 - sigma);
        return;
    }

    // Hidden layers: weighted sum of the next layer's deltas. `tab` points
    // at the next layer's perceptrons -- 5 entries by default, or 3 when
    // this is the layer just before the output (numberlayer == Nlayers - 2).
    const int nextCount = (numberlayer == Nlayers - 2) ? 3 : 5;
    double acc = 0.0;
    for (int i = 0; i < Nperceptrons; ++i) {
        for (int k = 0; k < nextCount; ++k)
            acc += tab[k].delta * tab[k].weights[i];
    }
    delta = acc * sigma * (1.0 - sigma);
}

void Perceptron::check(double *input)
{
    // Inference-only forward pass:
    //   result = sigmoid(dot(input, weights) + bias)
    // Unlike countSum(), this neither copies the input nor updates
    // sum/sigma; only `result` is written.
    // The input width is 10 for the first layer, Nperceptrons otherwise.
    const int count = (numberlayer == 0) ? 10 : Nperceptrons;
    double acc = 0.0;
    for (int i = 0; i < count; ++i)
        acc += input[i] * weights[i];
    acc += bias;
    result = 1.0 / (1.0 + exp(-acc));
}

0 个答案:

没有答案