Why are most of the prediction results 0 when I use a Caffe BP regression model?

Date: 2016-11-03 07:23:19

Tags: machine-learning deep-learning caffe logistic-regression non-linear-regression

I converted my input data to HDF5 format. Each input sample is a 309-dimensional vector with a single label. (Part of the input data was shown as an image in the original post.)
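
For reference, a minimal sketch of how 309-dimensional samples with scalar labels can be written to HDF5 for Caffe's HDF5Data layer (the file names, sample count, and random placeholder values below are assumptions; the dataset names have to match the layer's top blobs, "data" and "label"):

import h5py
import numpy as np

# Placeholder data: N samples, each a 309-dimensional feature vector with one label.
X = np.random.rand(1000, 309).astype(np.float32)
y = np.random.rand(1000, 1).astype(np.float32)

# The dataset names must match the HDF5Data layer's top blob names ("data", "label").
with h5py.File('train.h5', 'w') as f:
    f.create_dataset('data', data=X)
    f.create_dataset('label', data=y)

# The source file referenced in the prototxt lists one HDF5 file path per line.
with open('train_data_list.txt', 'w') as f:
    f.write('train.h5\n')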

My network structure is as follows:

 name: "RegressionNet"
layer {
    name: "framert"
    type: "HDF5Data"
    top: "data"
    top: "label"
    include {
        phase: TRAIN
    }
    hdf5_data_param {
        source: "train_data_list.txt"
        batch_size: 100
    }
}
layer {
    name: "framert"
    type: "HDF5Data"
    top: "data"
    top: "label"
    include {
        phase: TEST
    }
    hdf5_data_param {
        source: "test_data_list.txt"
        batch_size: 100
    }
}
layer {
    name: "inner1"
    type: "InnerProduct"
    bottom: "data"
    top: "inner1"
    param {
        lr_mult: 1
        decay_mult: 1.5
    }
    param {
        lr_mult: 2
        decay_mult: 0
    }
    inner_product_param {
        num_output: 500
        weight_filler {
            type: "xavier"
        }
        bias_filler {
            type: "constant"
            value: 0.1
        }
    }
}
layer {
    name: "inner2"
    type: "InnerProduct"
    bottom: "inner1"
    top: "inner2"
    param {
        lr_mult: 1
        decay_mult: 1.0
    }
    param {
        lr_mult: 2
        decay_mult: 0
    }
    inner_product_param {
        num_output: 400 
        weight_filler {
            type: "xavier"
        }
        bias_filler {
            type: "constant"
            value: 0.1
        }
    }
}
layer {
    name: "inner3"
    type: "InnerProduct"
    bottom: "inner2"
    top: "inner3"
    param {
        lr_mult: 1
        decay_mult: 1.0
    }
    param {
        lr_mult: 2
        decay_mult: 0
    }
    inner_product_param {
        num_output: 300 
        weight_filler {
            type: "xavier"
        }
        bias_filler {
            type: "constant"
            value: 0.1
        }
    }
}
layer {
    name: "inner4"
    type: "InnerProduct"
    bottom: "inner3"
    top: "inner4"
    param {
        lr_mult: 1
        decay_mult: 1.0
    }
    param {
        lr_mult: 2
        decay_mult: 0
    }
    inner_product_param {
        num_output: 200 
        weight_filler {
            type: "xavier"
        }
        bias_filler {
            type: "constant"
            value: 0.1
        }
    }
}
layer {
    name: "inner5"
    type: "InnerProduct"
    bottom: "inner4"
    top: "inner5"
    param {
        lr_mult: 1
        decay_mult: 1.0
    }
    param {
        lr_mult: 2
        decay_mult: 0
    }
    inner_product_param {
        num_output: 100 
        weight_filler {
            type: "xavier"
        }
        bias_filler {
            type: "constant"
            value: 0.1
        }
    }
}
layer {
    name: "inner6"
    type: "InnerProduct"
    bottom: "inner5"
    top: "inner6"
    param {
        lr_mult: 1
        decay_mult: 1.0
    }
    param {
        lr_mult: 2
        decay_mult: 0
    }
    inner_product_param {
        num_output: 1   
        weight_filler {
            type: "xavier"
        }
        bias_filler {
            type: "constant"
            value: 0.1
        }
    }
}
layer {
    name: "relu1"
    type: "ReLU"
    bottom: "inner6"
    top: "inner6"
    relu_param {
        engine: CAFFE
    }
}
layer {
    name: "accuracy"
    type: "Accuracy"
    bottom: "inner6"
    bottom: "label"
    top: "accuracy"
    include {
        phase: TEST
    }
}
layer {
    name: "loss"
    type: "EuclideanLoss"
    bottom: "inner6"
    bottom: "label"
    top: "loss"
}

My solver is as follows:

net: "net_csv_hdf5.prototxt"

test_iter: 100

test_interval: 100

base_lr: 0.001

momentum: 0.9

weight_decay: 0.0005

lr_policy: "inv"

gamma: 0.0001

power: 0.75

display: 50

max_iter: 5000

snapshot: 2500

snapshot_prefix: "/examples"

solver_mode: CPU

After finishing the training phase, I used the test data to make predictions, and the prediction results are as follows (there are far too many zeros):

0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0.000122316
0.00318826
0.00606083
0.0092759
0.0124592
0.015264
0.0181027
0.021088
0.0237832
0.027108
0.0306765
0.0345342
0.0379068
0.0409781
0.044281
0.0478444
0.0509017
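
For context, predictions like the values above are typically produced with pycaffe roughly as follows (a minimal sketch: the deploy prototxt, the weights file name, and the test sample are assumptions; the deploy net would contain the same InnerProduct stack, with an Input layer in place of the HDF5Data, Accuracy, and EuclideanLoss layers):

import caffe
import numpy as np

caffe.set_mode_cpu()

# Assumed file names: a deploy-style prototxt plus the snapshot written during training.
net = caffe.Net('deploy.prototxt', 'regression_iter_5000.caffemodel', caffe.TEST)

# One 309-dimensional test sample (placeholder values).
x = np.random.rand(309).astype(np.float32)

net.blobs['data'].reshape(1, 309)
net.blobs['data'].data[...] = x
output = net.forward()

# "inner6" is the single-output layer of the network above.
print(output['inner6'])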

1 Answer:

Answer 0 (score: 1):

You forgot to add activation functions in your net.prototxt, like this:

layer {
    name: "Sigmoid1"
    type: "Sigmoid"
    bottom: "inner1"
    top: "Sigmoid1"
}
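
Note that the new layer's output also has to be wired into the rest of the network: either change the bottom of inner2 from "inner1" to "Sigmoid1", or apply the activation in place by giving the Sigmoid layer top: "inner1". Presumably the same pattern is then repeated after inner2 through inner5.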