Why does nan appear in the loss layer after the first few iterations?

Date: 2019-05-23 03:07:05

Tags: deep-learning caffe

In the first iterations the loss decreases as expected, but once training reaches a certain iteration count, nan appears in the loss layer, while the hidden layers before it still look normal.
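
A way to pin down exactly where the first non-finite value shows up is to step the solver from pycaffe and scan every blob after each update. This is only a sketch: it assumes pycaffe is built, and the solver filename is a placeholder for whatever file holds the solver text below.

import numpy as np
import caffe

caffe.set_mode_gpu()
# Placeholder path: point this at the solver definition shown below.
solver = caffe.get_solver('/home/ubuntu/caffe/examples/CaffeTrain/solver.prototxt')

for it in range(300):
    solver.step(1)
    # Check every activation blob for nan/inf after this step.
    for name, blob in solver.net.blobs.items():
        if not np.isfinite(blob.data).all():
            print('iteration %d: blob "%s" is non-finite' % (it, name))
            raise SystemExit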

The images are fed in as HDF5 data and the loss type is Euclidean. The framework is Caffe.
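
With a Euclidean loss, a single nan or inf value in the HDF5 data is enough to poison the loss, so the data is worth ruling out first. A quick scan (a sketch, assuming each line of train1.txt names one .h5 file containing 'data' and 'label' datasets, as the HDF5Data layer below expects):

import h5py
import numpy as np

with open('/home/ubuntu/caffe/examples/CaffeTrain/train1.txt') as f:
    h5_files = [line.strip() for line in f if line.strip()]

for path in h5_files:
    with h5py.File(path, 'r') as h5:
        for name in ('data', 'label'):  # must match the net's top blob names
            arr = np.asarray(h5[name])
            print(path, name, arr.shape,
                  'nan=%s' % np.isnan(arr).any(),
                  'inf=%s' % np.isinf(arr).any(),
                  'range=[%g, %g]' % (arr.min(), arr.max()))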

The solver and the net are as follows.

net: "/home/ubuntu/caffe/examples/CaffeTrain/DCSCN_net.prototxt"
test_iter: 250
# Carry out testing every 500 training iterations.
test_interval: 500
# All parameters are from the cited paper above
base_lr: 0.0002
momentum: 0.9
momentum2: 0.999
# Set the learning rate to a fixed value
lr_policy: "fixed"
# Display every 100 iterations
display: 100
# The maximum number of iterations
max_iter: 60000
# snapshot intermediate results
snapshot: 1500
snapshot_prefix: "/home/ubuntu/caffe/examples/CaffeTrain/DCSCN_net"
type: "Adam"
# solver mode: CPU or GPU
solver_mode: GPU
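
For reference, and if I recall Caffe's Adam implementation correctly: with type: "Adam", momentum and momentum2 are used as the Adam β1 and β2, and the ε term (delta in the solver prototxt, not set here) defaults to 1e-8. The update applied is

\[
m_t = \beta_1 m_{t-1} + (1-\beta_1)\,g_t,\qquad
v_t = \beta_2 v_{t-1} + (1-\beta_2)\,g_t^{2},\qquad
\theta_{t+1} = \theta_t - \alpha\,\frac{\sqrt{1-\beta_2^{\,t}}}{1-\beta_1^{\,t}}\cdot\frac{m_t}{\sqrt{v_t}+\varepsilon},
\]

with α = base_lr.
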
name: "DCSRCN"
layer {
  name: "data"
  type: "HDF5Data"
  top: "data"
  top: "label"
  hdf5_data_param {
    source: "/home/ubuntu/caffe/examples/CaffeTrain/train1.txt"
    batch_size: 64
  }
  include: { phase: TRAIN }
}
layer {
  name: "data"
  type: "HDF5Data"
  top: "data"
  top: "label"
  hdf5_data_param {
    source: "/home/ubuntu/caffe/examples/CaffeTrain/test1.txt"
    batch_size: 2
  }
  include: { phase: TEST }
}

layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 0.1
  }
  convolution_param {
    num_output: 32
    kernel_size: 3
    stride: 1
    pad: 1
    weight_filler {
      type: "constant"
    }
    bias_filler {
      type: "constant"
    }
  }
}


layer {
  name: "relu1"
  type: "PReLU"
  bottom: "conv1"
  top: "conv1"
}


layer {
  name: "conv2"
  type: "Convolution"
  bottom: "conv1"
  top: "conv2"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 0.1
  }
  convolution_param {
    num_output: 26
    kernel_size: 3
    stride: 1
    pad: 1
    weight_filler {
      type: "constant"
    }
    bias_filler {
      type: "constant"
    }
  }
}

layer {
  name: "relu2"
  type: "PReLU"
  bottom: "conv2"
  top: "conv2"
}


layer {
  name: "drop"
  type: "Dropout"
  bottom: "conv2"
  top: "conv2"
  dropout_param {
    dropout_ratio: 0.8
  }
}

layer {
  name: "conv3"
  type: "Convolution"
  bottom: "conv2"
  top: "conv3"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 0.1
  }
  convolution_param {
    num_output: 22
    kernel_size: 3
    stride: 1
    pad: 1
    weight_filler {
      type: "constant"
    }
    bias_filler {
      type: "constant"
    }
  }
}


layer {
  name: "relu3"
  type: "PReLU"
  bottom: "conv3"
  top: "conv3"
}

layer {
  name: "drop"
  type: "Dropout"
  bottom: "conv3"
  top: "conv3"
  dropout_param {
    dropout_ratio: 0.8
  }
}
layer {
  name: "conv4"
  type: "Convolution"
  bottom: "conv3"
  top: "conv4"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 0.1
  }
  convolution_param {
    num_output: 18
    kernel_size: 3
    stride: 1
    pad: 1
    weight_filler {
      type: "constant"
    }
    bias_filler {
      type: "constant"
    }
  }
}

layer {
  name: "relu4"
  type: "PReLU"
  bottom: "conv4"
  top: "conv4"
}

layer {
  name: "drop"
  type: "Dropout"
  bottom: "conv4"
  top: "conv4"
  dropout_param {
    dropout_ratio: 0.8
  }
}
layer {
  name: "conv5"
  type: "Convolution"
  bottom: "conv4"
  top: "conv5"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 0.1
  }
  convolution_param {
    num_output: 14
    kernel_size: 3
    stride: 1
    pad: 1
    weight_filler {
      type: "constant"
    }
    bias_filler {
      type: "constant"
    }
  }
}


layer {
  name: "relu5"
  type: "PReLU"
  bottom: "conv5"
  top: "conv5"
}

layer {
  name: "drop"
  type: "Dropout"
  bottom: "conv5"
  top: "conv5"
  dropout_param {
    dropout_ratio: 0.8
  }
}

layer {
  name: "conv6"
  type: "Convolution"
  bottom: "conv5"
  top: "conv6"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 0.1
  }
  convolution_param {
    num_output: 11 
    kernel_size: 3
    stride: 1
    pad: 1
    weight_filler {
      type: "constant"
    }
    bias_filler {
      type: "constant"
    }
  }
}

layer {
  name: "relu6"
  type: "PReLU"
  bottom: "conv6"
  top: "conv6"
}

layer {
  name: "drop"
  type: "Dropout"
  bottom: "conv6"
  top: "conv6"
  dropout_param {
    dropout_ratio: 0.8
  }
}

layer {
  name: "conv7"
  type: "Convolution"
  bottom: "conv6"
  top: "conv7"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 0.1
  }
  convolution_param {
    num_output: 8
    kernel_size: 3
    stride: 1
    pad: 1
    weight_filler {
      type: "constant"
    }
    bias_filler {
      type: "constant"
    }
  }
}


layer {
  name: "relu7"
  type: "PReLU"
  bottom: "conv7"
  top: "conv7"
}

layer {
  name: "drop"
  type: "Dropout"
  bottom: "conv7"
  top: "conv7"
  dropout_param {
    dropout_ratio: 0.8
  }
}

layer {
  name: "concatenate1"
  bottom: "conv2"
  bottom: "conv3"
  bottom: "conv4"
  bottom: "conv5"
  bottom: "conv6"
  bottom: "conv7"
  top: "concatenate1"
  type: "Concat"
  concat_param {
    axis: 1
  }
}

layer {
  name: "A1"
  type: "Convolution"
  bottom: "concatenate1"
  top: "A1"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 0.1
  }
  convolution_param {
    num_output: 24
    kernel_size: 1
    stride: 1
    pad: 0
    weight_filler {
      type: "constant"
    }
    bias_filler {
      type: "constant"
    }
  }
}

layer {
  name: "reluA1"
  type: "PReLU"
  bottom: "A1"
  top: "A1"
}

layer {
  name: "drop"
  type: "Dropout"
  bottom: "A1"
  top: "A1"
  dropout_param {
    dropout_ratio: 0.8
  }
}

layer {
  name: "B1"
  type: "Convolution"
  bottom: "concatenate1"
  top: "B1"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 0.1
  }
  convolution_param {
    num_output: 8
    kernel_size: 1
    stride: 1
    pad: 0
    weight_filler {
      type: "constant"
    }
    bias_filler {
      type: "constant"
    }
  }
}

layer {
  name: "reluB1"
  type: "PReLU"
  bottom: "B1"
  top: "B1"
}

layer {
  name: "drop"
  type: "Dropout"
  bottom: "B1"
  top: "B1"
  dropout_param {
    dropout_ratio: 0.8
  }
}

layer {
  name: "B2"
  type: "Convolution"
  bottom: "B1"
  top: "B2"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 0.1
  }
  convolution_param {
    num_output: 8
    kernel_size: 3
    stride: 1
    pad: 1
    weight_filler {
      type: "constant"
    }
    bias_filler {
      type: "constant"
    }
  }
}

layer {
  name: "reluB2"
  type: "PReLU"
  bottom: "B2"
  top: "B2"
}
layer {
  name: "drop"
  type: "Dropout"
  bottom: "B2"
  top: "B2"
  dropout_param {
    dropout_ratio: 0.8
  }
}

layer {
  name: "concatenate2"
  bottom: "A1"
  bottom: "B2"
  top: "concatenate2"
  type: "Concat"
  concat_param {
    axis: 1
  }
}
layer {
  name: "conv8"
  type: "Convolution"
  bottom: "concatenate2"
  top: "conv8"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 0.1
  }
  convolution_param {
    num_output: 4
    kernel_size: 1
    stride: 1
    pad: 0
    weight_filler {
      type: "constant"
    }
    bias_filler {
      type: "constant"
    }
  }
}

layer {
  name: "slicer_label"
  type: "Slice"
  bottom: "label"
  top: "label1"
  top: "label2"
  slice_param {
    axis: 1
    slice_point: 4
  }
}

layer {
  name: "diff"
  type: "Eltwise"
  bottom: "label1"
  bottom: "label2"
  top: "diff"
  eltwise_param {
    operation: SUM
    coeff: 1
    coeff: -1
  }
}


layer {
  name: "loss"
  type: "EuclideanLoss"
  bottom: "diff"
  bottom: "conv8"
  top: "loss"
}
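
If I am reading the tail of the net correctly: the Slice layer splits the label blob at channel 4 into label1 and label2 (label2 must have 4 channels to match conv8, so label carries 8 channels), the Eltwise layer forms diff = label1 - label2, and EuclideanLoss then computes

\[
E = \frac{1}{2N}\sum_{n=1}^{N}\bigl\lVert \mathrm{diff}_n - \mathrm{conv8}_n \bigr\rVert_2^2,
\]

where N is the batch size. A single nan in either label half or in conv8 is therefore enough to make the whole loss nan.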

Once nan appears in the loss layer, every subsequent iteration shows the same behavior. Here is part of the log. (The per-blob [Forward]/[Backward] lines are what Caffe prints when the solver has debug_info: true; each value is the mean absolute value of that blob's data or diff.)

I0522 10:10:04.978370   950 solver.cpp:239] Iteration 201 (9.82038 iter/s, 0.101829s/1 iters), loss = 0.412089
I0522 10:10:04.978469   950 solver.cpp:258]     Train net output #0: loss = 0.412088 (* 1 = 0.412088 loss)
I0522 10:10:04.978521   950 sgd_solver.cpp:112] Iteration 201, lr = 0.002
I0522 10:10:04.980057   950 sgd_solver.cpp:99] Gradient clipping: scaling down gradients (L2 norm 1.80722 > 1) by scale factor 0.553335
I0522 10:10:04.984403   950 net.cpp:591]     [Forward] Layer data, top blob data data: 0.44866
I0522 10:10:04.984561   950 net.cpp:591]     [Forward] Layer data, top blob label data: 0.0258211
I0522 10:10:04.985262   950 net.cpp:591]     [Forward] Layer conv1, top blob conv1 data: 0.415502
I0522 10:10:04.985370   950 net.cpp:603]     [Forward] Layer conv1, param blob 0 data: 0.374245
I0522 10:10:04.985461   950 net.cpp:603]     [Forward] Layer conv1, param blob 1 data: 0.00277251
I0522 10:10:05.029992   950 net.cpp:591]     [Forward] Layer relu1, top blob conv1 data: 0.167436
I0522 10:10:05.030158   950 net.cpp:603]     [Forward] Layer relu1, param blob 0 data: 0.230917
I0522 10:10:05.030987   950 net.cpp:591]     [Forward] Layer conv2, top blob conv2 data: 0.213457
I0522 10:10:05.031092   950 net.cpp:603]     [Forward] Layer conv2, param blob 0 data: 0.065087
I0522 10:10:05.031244   950 net.cpp:603]     [Forward] Layer conv2, param blob 1 data: 0.00234184
I0522 10:10:05.031761   950 net.cpp:591]     [Forward] Layer relu2, top blob conv2 data: 0.0338422
I0522 10:10:05.031859   950 net.cpp:603]     [Forward] Layer relu2, param blob 0 data: 0.15821
I0522 10:10:05.032014   950 net.cpp:591]     [Forward] Layer conv2_relu2_0_split, top blob conv2_relu2_0_split_0 data: 0.0338422
I0522 10:10:05.032164   950 net.cpp:591]     [Forward] Layer conv2_relu2_0_split, top blob conv2_relu2_0_split_1 data: 0.0338422
I0522 10:10:05.032908   950 net.cpp:591]     [Forward] Layer conv3, top blob conv3 data: 0.013239
I0522 10:10:05.033012   950 net.cpp:603]     [Forward] Layer conv3, param blob 0 data: 0.0709371
I0522 10:10:05.033102   950 net.cpp:603]     [Forward] Layer conv3, param blob 1 data: 0.00266559
I0522 10:10:05.033550   950 net.cpp:591]     [Forward] Layer relu3, top blob conv3 data: 0.00230486
I0522 10:10:05.033643   950 net.cpp:603]     [Forward] Layer relu3, param blob 0 data: 0.167923
I0522 10:10:05.034057   950 net.cpp:591]     [Forward] Layer drop, top blob conv3 data: 0.00230195
I0522 10:10:05.034206   950 net.cpp:591]     [Forward] Layer conv3_drop_0_split, top blob conv3_drop_0_split_0 data: 0.00230195
I0522 10:10:05.034346   950 net.cpp:591]     [Forward] Layer conv3_drop_0_split, top blob conv3_drop_0_split_1 data: 0.00230195
I0522 10:10:05.034997   950 net.cpp:591]     [Forward] Layer conv4, top blob conv4 data: 0.00917583
I0522 10:10:05.035099   950 net.cpp:603]     [Forward] Layer conv4, param blob 0 data: 0.0726901
I0522 10:10:05.035188   950 net.cpp:603]     [Forward] Layer conv4, param blob 1 data: 0.00386205
I0522 10:10:05.035567   950 net.cpp:591]     [Forward] Layer relu4, top blob conv4 data: 0.00157258
I0522 10:10:05.035658   950 net.cpp:603]     [Forward] Layer relu4, param blob 0 data: 0.169566
I0522 10:10:05.035984   950 net.cpp:591]     [Forward] Layer drop, top blob conv4 data: 0.00157394
I0522 10:10:05.036109   950 net.cpp:591]     [Forward] Layer conv4_drop_0_split, top blob conv4_drop_0_split_0 data: 0.00157394
I0522 10:10:05.036231   950 net.cpp:591]     [Forward] Layer conv4_drop_0_split, top blob conv4_drop_0_split_1 data: 0.00157394
I0522 10:10:05.036847   950 net.cpp:591]     [Forward] Layer conv5, top blob conv5 data: 0.00580002
I0522 10:10:05.036949   950 net.cpp:603]     [Forward] Layer conv5, param blob 0 data: 0.0789717
I0522 10:10:05.037036   950 net.cpp:603]     [Forward] Layer conv5, param blob 1 data: 0.00541544
I0522 10:10:05.037344   950 net.cpp:591]     [Forward] Layer relu5, top blob conv5 data: 0.0010077
I0522 10:10:05.037436   950 net.cpp:603]     [Forward] Layer relu5, param blob 0 data: 0.171593
I0522 10:10:05.037714   950 net.cpp:591]     [Forward] Layer drop, top blob conv5 data: 0.00100811
I0522 10:10:05.037840   950 net.cpp:591]     [Forward] Layer conv5_drop_0_split, top blob conv5_drop_0_split_0 data: 0.00100811
I0522 10:10:05.037955   950 net.cpp:591]     [Forward] Layer conv5_drop_0_split, top blob conv5_drop_0_split_1 data: 0.00100811
I0522 10:10:05.038496   950 net.cpp:591]     [Forward] Layer conv6, top blob conv6 data: 0.00340542
I0522 10:10:05.038599   950 net.cpp:603]     [Forward] Layer conv6, param blob 0 data: 0.0900255
I0522 10:10:05.038691   950 net.cpp:603]     [Forward] Layer conv6, param blob 1 data: 0.00595053
I0522 10:10:05.038954   950 net.cpp:591]     [Forward] Layer relu6, top blob conv6 data: 0.000693495
I0522 10:10:05.039048   950 net.cpp:603]     [Forward] Layer relu6, param blob 0 data: 0.196659
I0522 10:10:05.039283   950 net.cpp:591]     [Forward] Layer drop, top blob conv6 data: 0.000695216
I0522 10:10:05.039396   950 net.cpp:591]     [Forward] Layer conv6_drop_0_split, top blob conv6_drop_0_split_0 data: 0.000695216
I0522 10:10:05.039502   950 net.cpp:591]     [Forward] Layer conv6_drop_0_split, top blob conv6_drop_0_split_1 data: 0.000695216
I0522 10:10:05.040009   950 net.cpp:591]     [Forward] Layer conv7, top blob conv7 data: 0.00464197
I0522 10:10:05.040110   950 net.cpp:603]     [Forward] Layer conv7, param blob 0 data: 0.100864
I0522 10:10:05.040400   950 net.cpp:603]     [Forward] Layer conv7, param blob 1 data: 0.00521081
I0522 10:10:05.040623   950 net.cpp:591]     [Forward] Layer relu7, top blob conv7 data: 0.000884805
I0522 10:10:05.040717   950 net.cpp:603]     [Forward] Layer relu7, param blob 0 data: 0.190243
I0522 10:10:05.040915   950 net.cpp:591]     [Forward] Layer drop, top blob conv7 data: 0.000885196
I0522 10:10:05.041829   950 net.cpp:591]     [Forward] Layer concatenate1, top blob concatenate1 data: 0.00997691
I0522 10:10:05.042186   950 net.cpp:591]     [Forward] Layer concatenate1_concatenate1_0_split, top blob concatenate1_concatenate1_0_split_0 data: 0.00997691
I0522 10:10:05.042533   950 net.cpp:591]     [Forward] Layer concatenate1_concatenate1_0_split, top blob concatenate1_concatenate1_0_split_1 data: 0.00997691
I0522 10:10:05.043359   950 net.cpp:591]     [Forward] Layer A1, top blob A1 data: 0.00780515
I0522 10:10:05.043488   950 net.cpp:603]     [Forward] Layer A1, param blob 0 data: 0.096412
I0522 10:10:05.043576   950 net.cpp:603]     [Forward] Layer A1, param blob 1 data: 0.00323188
I0522 10:10:05.044054   950 net.cpp:591]     [Forward] Layer reluA1, top blob A1 data: 0.000746234
I0522 10:10:05.044150   950 net.cpp:603]     [Forward] Layer reluA1, param blob 0 data: 0.0983066
I0522 10:10:05.044556   950 net.cpp:591]     [Forward] Layer drop, top blob A1 data: 0.00074668
I0522 10:10:05.045150   950 net.cpp:591]     [Forward] Layer B1, top blob B1 data: 0.00828996
I0522 10:10:05.045250   950 net.cpp:603]     [Forward] Layer B1, param blob 0 data: 0.0985074
I0522 10:10:05.045353   950 net.cpp:603]     [Forward] Layer B1, param blob 1 data: 0.00344047
I0522 10:10:05.045567   950 net.cpp:591]     [Forward] Layer reluB1, top blob B1 data: 0.00125219
I0522 10:10:05.045658   950 net.cpp:603]     [Forward] Layer reluB1, param blob 0 data: 0.147279
I0522 10:10:05.045874   950 net.cpp:591]     [Forward] Layer drop, top blob B1 data: 0.00125118
I0522 10:10:05.046331   950 net.cpp:591]     [Forward] Layer B2, top blob B2 data: 0.00503351
I0522 10:10:05.046430   950 net.cpp:603]     [Forward] Layer B2, param blob 0 data: 0.106241
I0522 10:10:05.046519   950 net.cpp:603]     [Forward] Layer B2, param blob 1 data: 0.00502009
I0522 10:10:05.046782   950 net.cpp:591]     [Forward] Layer reluB2, top blob B2 data: 0.000425956
I0522 10:10:05.046880   950 net.cpp:603]     [Forward] Layer reluB2, param blob 0 data: 0.0860905
I0522 10:10:05.047076   950 net.cpp:591]     [Forward] Layer drop, top blob B2 data: 0.000426802
I0522 10:10:05.047433   950 net.cpp:591]     [Forward] Layer concatenate2, top blob concatenate2 data: 0.000666711
I0522 10:10:05.047777   950 net.cpp:591]     [Forward] Layer conv8, top blob conv8 data: 0.000599165
I0522 10:10:05.047876   950 net.cpp:603]     [Forward] Layer conv8, param blob 0 data: 0.132703
I0522 10:10:05.047969   950 net.cpp:603]     [Forward] Layer conv8, param blob 1 data: 0.000346347
I0522 10:10:05.049504   950 net.cpp:591]     [Forward] Layer loss, top blob loss data: nan
I0522 10:10:05.050093   950 net.cpp:619]     [Backward] Layer loss, bottom blob conv8 diff: 0.000404195
I0522 10:10:05.051265   950 net.cpp:619]     [Backward] Layer conv8, bottom blob concatenate2 diff: 0.000129882
I0522 10:10:05.051373   950 net.cpp:630]     [Backward] Layer conv8, param blob 0 diff: 0.000461379
I0522 10:10:05.051470   950 net.cpp:630]     [Backward] Layer conv8, param blob 1 diff: 0.572021
I0522 10:10:05.051812   950 net.cpp:619]     [Backward] Layer concatenate2, bottom blob A1 diff: 0.00012783
I0522 10:10:05.051916   950 net.cpp:619]     [Backward] Layer concatenate2, bottom blob B2 diff: 0.000136037
I0522 10:10:05.052081   950 net.cpp:619]     [Backward] Layer drop, bottom blob B2 diff: 0.000135995
I0522 10:10:05.052304   950 net.cpp:619]     [Backward] Layer reluB2, bottom blob B2 diff: 1.31521e-05
I0522 10:10:05.052402   950 net.cpp:630]     [Backward] Layer reluB2, param blob 0 diff: 0.00102617
I0522 10:10:05.053320   950 net.cpp:619]     [Backward] Layer B2, bottom blob B1 diff: 2.517e-05
I0522 10:10:05.053484   950 net.cpp:630]     [Backward] Layer B2, param blob 0 diff: 2.97466e-05
I0522 10:10:05.053581   950 net.cpp:630]     [Backward] Layer B2, param blob 1 diff: 0.0198924
I0522 10:10:05.053745   950 net.cpp:619]     [Backward] Layer drop, bottom blob B1 diff: 2.53463e-05
I0522 10:10:05.053974   950 net.cpp:619]     [Backward] Layer reluB1, bottom blob B1 diff: 4.42646e-06
I0522 10:10:05.054075   950 net.cpp:630]     [Backward] Layer reluB1, param blob 0 diff: 0.000282126
I0522 10:10:05.055465   950 net.cpp:619]     [Backward] Layer B1, bottom blob concatenate1_concatenate1_0_split_1 diff: 2.11766e-06
I0522 10:10:05.055578   950 net.cpp:630]     [Backward] Layer B1, param blob 0 diff: 5.33014e-05
I0522 10:10:05.055784   950 net.cpp:630]     [Backward] Layer B1, param blob 1 diff: 0.0042211
I0522 10:10:05.056212   950 net.cpp:619]     [Backward] Layer drop, bottom blob A1 diff: 0.000128165
I0522 10:10:05.056720   950 net.cpp:619]     [Backward] Layer reluA1, bottom blob A1 diff: 1.4212e-05
I0522 10:10:05.056843   950 net.cpp:630]     [Backward] Layer reluA1, param blob 0 diff: 0.00145511
I0522 10:10:05.058341   950 net.cpp:619]     [Backward] Layer A1, bottom blob concatenate1_concatenate1_0_split_0 diff: 1.19995e-05
I0522 10:10:05.058472   950 net.cpp:630]     [Backward] Layer A1, param blob 0 diff: 0.000175081
I0522 10:10:05.058590   950 net.cpp:630]     [Backward] Layer A1, param blob 1 diff: 0.0165867
I0522 10:10:05.059746   950 net.cpp:619]     [Backward] Layer concatenate1_concatenate1_0_split, bottom blob concatenate1 diff: 1.24789e-05
I0522 10:10:05.060521   950 net.cpp:619]     [Backward] Layer concatenate1, bottom blob conv2_relu2_0_split_1 diff: 1.26877e-05
I0522 10:10:05.060714   950 net.cpp:619]     [Backward] Layer concatenate1, bottom blob conv3_drop_0_split_1 diff: 1.22685e-05
I0522 10:10:05.060895   950 net.cpp:619]     [Backward] Layer concatenate1, bottom blob conv4_drop_0_split_1 diff: 1.26432e-05
I0522 10:10:05.061036   950 net.cpp:619]     [Backward] Layer concatenate1, bottom blob conv5_drop_0_split_1 diff: 1.21001e-05
I0522 10:10:05.061169   950 net.cpp:619]     [Backward] Layer concatenate1, bottom blob conv6_drop_0_split_1 diff: 1.27764e-05
I0522 10:10:05.061295   950 net.cpp:619]     [Backward] Layer concatenate1, bottom blob conv7 diff: 1.22628e-05
I0522 10:10:05.061476   950 net.cpp:619]     [Backward] Layer drop, bottom blob conv7 diff: 1.23225e-05
I0522 10:10:05.061727   950 net.cpp:619]     [Backward] Layer relu7, bottom blob conv7 diff: 2.36587e-06
I0522 10:10:05.061851   950 net.cpp:630]     [Backward] Layer relu7, param blob 0 diff: 6.1144e-05
I0522 10:10:05.062788   950 net.cpp:619]     [Backward] Layer conv7, bottom blob conv6_drop_0_split_0 diff: 4.13267e-06
I0522 10:10:05.062914   950 net.cpp:630]     [Backward] Layer conv7, param blob 0 diff: 2.07931e-06
I0522 10:10:05.063030   950 net.cpp:630]     [Backward] Layer conv7, param blob 1 diff: 0.0026707
I0522 10:10:05.063244   950 net.cpp:619]     [Backward] Layer conv6_drop_0_split, bottom blob conv6 diff: 1.36793e-05
I0522 10:10:05.063462   950 net.cpp:619]     [Backward] Layer drop, bottom blob conv6 diff: 1.38275e-05
I0522 10:10:05.063757   950 net.cpp:619]     [Backward] Layer relu6, bottom blob conv6 diff: 3.1853e-06
I0522 10:10:05.063877   950 net.cpp:630]     [Backward] Layer relu6, param blob 0 diff: 4.92523e-05
I0522 10:10:05.064877   950 net.cpp:619]     [Backward] Layer conv6, bottom blob conv5_drop_0_split_0 diff: 7.0532e-06
I0522 10:10:05.065002   950 net.cpp:630]     [Backward] Layer conv6, param blob 0 diff: 4.40123e-06
I0522 10:10:05.065122   950 net.cpp:630]     [Backward] Layer conv6, param blob 1 diff: 0.00308829
I0522 10:10:05.065369   950 net.cpp:619]     [Backward] Layer conv5_drop_0_split, bottom blob conv5 diff: 1.46071e-05
I0522 10:10:05.065619   950 net.cpp:619]     [Backward] Layer drop, bottom blob conv5 diff: 1.48998e-05
I0522 10:10:05.065992   950 net.cpp:619]     [Backward] Layer relu5, bottom blob conv5 diff: 2.80906e-06
I0522 10:10:05.066118   950 net.cpp:630]     [Backward] Layer relu5, param blob 0 diff: 8.71506e-05
I0522 10:10:05.067418   950 net.cpp:619]     [Backward] Layer conv5, bottom blob conv4_drop_0_split_0 diff: 5.48825e-06
I0522 10:10:05.067548   950 net.cpp:630]     [Backward] Layer conv5, param blob 0 diff: 7.13034e-06
I0522 10:10:05.067664   950 net.cpp:630]     [Backward] Layer conv5, param blob 1 diff: 0.00280526
I0522 10:10:05.067955   950 net.cpp:619]     [Backward] Layer conv4_drop_0_split, bottom blob conv4 diff: 1.41262e-05
I0522 10:10:05.068244   950 net.cpp:619]     [Backward] Layer drop, bottom blob conv4 diff: 1.41677e-05
I0522 10:10:05.068647   950 net.cpp:619]     [Backward] Layer relu4, bottom blob conv4 diff: 2.68117e-06
I0522 10:10:05.068766   950 net.cpp:630]     [Backward] Layer relu4, param blob 0 diff: 0.000180559
I0522 10:10:05.070330   950 net.cpp:619]     [Backward] Layer conv4, bottom blob conv3_drop_0_split_0 diff: 5.59543e-06
I0522 10:10:05.070489   950 net.cpp:630]     [Backward] Layer conv4, param blob 0 diff: 1.33325e-05
I0522 10:10:05.070636   950 net.cpp:630]     [Backward] Layer conv4, param blob 1 diff: 0.00274354
I0522 10:10:05.070966   950 net.cpp:619]     [Backward] Layer conv3_drop_0_split, bottom blob conv3 diff: 1.38212e-05
I0522 10:10:05.071302   950 net.cpp:619]     [Backward] Layer drop, bottom blob conv3 diff: 1.3831e-05
I0522 10:10:05.071768   950 net.cpp:619]     [Backward] Layer relu3, bottom blob conv3 diff: 2.75353e-06
I0522 10:10:05.071887   950 net.cpp:630]     [Backward] Layer relu3, param blob 0 diff: 0.000372754
I0522 10:10:05.073459   950 net.cpp:619]     [Backward] Layer conv3, bottom blob conv2_relu2_0_split_0 diff: 6.72983e-06
I0522 10:10:05.073590   950 net.cpp:630]     [Backward] Layer conv3, param blob 0 diff: 0.000126346
I0522 10:10:05.073721   950 net.cpp:630]     [Backward] Layer conv3, param blob 1 diff: 0.00375996
I0522 10:10:05.074102   950 net.cpp:619]     [Backward] Layer conv2_relu2_0_split, bottom blob conv2 diff: 1.49227e-05
I0522 10:10:05.074637   950 net.cpp:619]     [Backward] Layer relu2, bottom blob conv2 diff: 2.39557e-06
I0522 10:10:05.074760   950 net.cpp:630]     [Backward] Layer relu2, param blob 0 diff: 0.00293634
I0522 10:10:05.076663   950 net.cpp:619]     [Backward] Layer conv2, bottom blob conv1 diff: 3.39956e-06
I0522 10:10:05.076791   950 net.cpp:630]     [Backward] Layer conv2, param blob 0 diff: 0.000364487
I0522 10:10:05.076907   950 net.cpp:630]     [Backward] Layer conv2, param blob 1 diff: 0.00226531
I0522 10:10:05.077531   950 net.cpp:619]     [Backward] Layer relu1, bottom blob conv1 diff: 1.472e-06
I0522 10:10:05.077652   950 net.cpp:630]     [Backward] Layer relu1, param blob 0 diff: 0.000822316
I0522 10:10:05.078137   950 net.cpp:630]     [Backward] Layer conv1, param blob 0 diff: 0.000404451
I0522 10:10:05.078263   950 net.cpp:630]     [Backward] Layer conv1, param blob 1 diff: 0.00096731
E0522 10:10:05.084373   950 net.cpp:719]     [Backward] All net params (data, diff): L1 norm = (2017.62, 7.4594); L2 norm = (17.3085, 1.26868)
I0522 10:10:05.084547   950 solver.cpp:239] Iteration 202 (9.42539 iter/s, 0.106096s/1 iters), loss = nan
I0522 10:10:05.084625   950 solver.cpp:258]     Train net output #0: loss = nan (* 1 = nan loss)
I0522 10:10:05.084668   950 sgd_solver.cpp:112] Iteration 202, lr = 0.002

I am quite confused now; any help would be appreciated.

0 Answers