When I test Deeplab-ver2 on the PASCAL VOC 2012 dataset, the test net only produces a large log file as output [see the log below], but it never writes any .mat files to the features/deeplab_largeFOV/val/fc8 folder. The net runs without any errors, and it does not terminate even when I let it run for more than 24 hours. Any help would be highly appreciated.
P.S. I checked the test_val.prototxt file generated by the run_pascal.sh script, and all the paths in it look fine.
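For context, run_pascal.sh launches the test phase through the stock caffe test interface; stripped of its shell variables, the command it issues looks roughly like the sketch below. The binary and prototxt locations are placeholders for whatever the script fills in on my machine; only the weights path and the iteration count are taken from the log.

```sh
# Rough sketch of the test invocation assembled by run_pascal.sh (paths are placeholders).
CAFFE_BIN=./code/.build_release/tools/caffe.bin   # DeepLab's patched Caffe binary

${CAFFE_BIN} test \
    --model=voc12/config/deeplab_largeFOV/test_val.prototxt \
    --weights=voc12/model/deeplab_largeFOV/train_iter_8000.caffemodel \
    --gpu=0 \
    --iterations=1449   # one iteration per image in voc12/list/val.txt (batch_size: 1)
```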
```
Log file created at: 2016/09/20 12:57:35
Running on machine: CECS50P7PJ1
Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
I0920 12:57:35.378067 12793 caffe.cpp:237] Use GPU with device ID 0
I0920 12:57:35.460089 12793 caffe.cpp:241] GPU device name: GeForce GTX TITAN X
I0920 12:57:35.947268 12793 net.cpp:49] Initializing net from parameters:
name: "deeplab_largeFOV"
state {
phase: TEST
}
layer {
name: "data"
type: "ImageSegData"
top: "data"
top: "label"
include {
phase: TEST
}
transform_param {
mirror: false
crop_size: 513
mean_value: 104.008
mean_value: 116.669
mean_value: 122.675
}
image_data_param {
source: "voc12/list/val.txt"
batch_size: 1
root_folder: "/home/aisha/VOCdevkit/VOC2012"
label_type: NONE
}
}
layer {
name: "conv1_1"
type: "Convolution"
bottom: "data"
top: "conv1_1"
convolution_param {
num_output: 64
pad: 1
kernel_size: 3
}
}
layer {
name: "relu1_1"
type: "ReLU"
bottom: "conv1_1"
top: "conv1_1"
}
layer {
name: "conv1_2"
type: "Convolution"
bottom: "conv1_1"
top: "conv1_2"
convolution_param {
num_output: 64
pad: 1
kernel_size: 3
}
}
layer {
name: "relu1_2"
type: "ReLU"
bottom: "conv1_2"
top: "conv1_2"
}
layer {
name: "pool1"
type: "Pooling"
bottom: "conv1_2"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
pad: 1
}
}
layer {
name: "conv2_1"
type: "Convolution"
bottom: "pool1"
top: "conv2_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "relu2_1"
type: "ReLU"
bottom: "conv2_1"
top: "conv2_1"
}
layer {
name: "conv2_2"
type: "Convolution"
bottom: "conv2_1"
top: "conv2_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "relu2_2"
type: "ReLU"
bottom: "conv2_2"
top: "conv2_2"
}
layer {
name: "pool2"
type: "Pooling"
bottom: "conv2_2"
top: "pool2"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
pad: 1
}
}
layer {
name: "conv3_1"
type: "Convolution"
bottom: "pool2"
top: "conv3_1"
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
}
}
layer {
name: "relu3_1"
type: "ReLU"
bottom: "conv3_1"
top: "conv3_1"
}
layer {
name: "conv3_2"
type: "Convolution"
bottom: "conv3_1"
top: "conv3_2"
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
}
}
layer {
name: "relu3_2"
type: "ReLU"
bottom: "conv3_2"
top: "conv3_2"
}
layer {
name: "conv3_3"
type: "Convolution"
bottom: "conv3_2"
top: "conv3_3"
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
}
}
layer {
name: "relu3_3"
type: "ReLU"
bottom: "conv3_3"
top: "conv3_3"
}
layer {
name: "pool3"
type: "Pooling"
bottom: "conv3_3"
top: "pool3"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
pad: 1
}
}
layer {
name: "conv4_1"
type: "Convolution"
bottom: "pool3"
top: "conv4_1"
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
}
}
layer {
name: "relu4_1"
type: "ReLU"
bottom: "conv4_1"
top: "conv4_1"
}
layer {
name: "conv4_2"
type: "Convolution"
bottom: "conv4_1"
top: "conv4_2"
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
}
}
layer {
name: "relu4_2"
type: "ReLU"
bottom: "conv4_2"
top: "conv4_2"
}
layer {
name: "conv4_3"
type: "Convolution"
bottom: "conv4_2"
top: "conv4_3"
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
}
}
layer {
name: "relu4_3"
type: "ReLU"
bottom: "conv4_3"
top: "conv4_3"
}
layer {
name: "pool4"
type: "Pooling"
bottom: "conv4_3"
top: "pool4"
pooling_param {
pool: MAX
kernel_size: 3
stride: 1
pad: 1
}
}
layer {
name: "conv5_1"
type: "Convolution"
bottom: "pool4"
top: "conv5_1"
convolution_param {
num_output: 512
pad: 2
kernel_size: 3
dilation: 2
}
}
layer {
name: "relu5_1"
type: "ReLU"
bottom: "conv5_1"
top: "conv5_1"
}
layer {
name: "conv5_2"
type: "Convolution"
bottom: "conv5_1"
top: "conv5_2"
convolution_param {
num_output: 512
pad: 2
kernel_size: 3
dilation: 2
}
}
layer {
name: "relu5_2"
type: "ReLU"
bottom: "conv5_2"
top: "conv5_2"
}
layer {
name: "conv5_3"
type: "Convolution"
bottom: "conv5_2"
top: "conv5_3"
convolution_param {
num_output: 512
pad: 2
kernel_size: 3
dilation: 2
}
}
layer {
name: "relu5_3"
type: "ReLU"
bottom: "conv5_3"
top: "conv5_3"
}
layer {
name: "pool5"
type: "Pooling"
bottom: "conv5_3"
top: "pool5"
pooling_param {
pool: MAX
kernel_size: 3
stride: 1
pad: 1
}
}
layer {
name: "pool5a"
type: "Pooling"
bottom: "pool5"
top: "pool5a"
pooling_param {
pool: AVE
kernel_size: 3
stride: 1
pad: 1
}
}
layer {
name: "fc6"
type: "Convolution"
bottom: "pool5a"
top: "fc6"
param {
name: "fc6_w"
lr_mult: 1
decay_mult: 1
}
param {
name: "fc6_b"
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 1024
pad: 12
kernel_size: 3
dilation: 12
}
}
layer {
name: "relu6"
type: "ReLU"
bottom: "fc6"
top: "fc6"
}
layer {
name: "drop6"
type: "Dropout"
bottom: "fc6"
top: "fc6"
dropout_param {
dropout_ratio: 0.5
}
}
layer {
name: "fc7"
type: "Convolution"
bottom: "fc6"
top: "fc7"
param {
name: "fc7_w"
lr_mult: 1
decay_mult: 1
}
param {
name: "fc7_b"
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 1024
kernel_size: 1
}
}
layer {
name: "relu7"
type: "ReLU"
bottom: "fc7"
top: "fc7"
}
layer {
name: "drop7"
type: "Dropout"
bottom: "fc7"
top: "fc7"
dropout_param {
dropout_ratio: 0.5
}
}
layer {
name: "fc8_voc12"
type: "Convolution"
bottom: "fc7"
top: "fc8_voc12"
param {
name: "fc8_w"
lr_mult: 10
decay_mult: 1
}
param {
name: "fc8_b"
lr_mult: 20
decay_mult: 0
}
convolution_param {
num_output: 21
kernel_size: 1
}
}
layer {
name: "fc8_interp"
type: "Interp"
bottom: "fc8_voc12"
top: "fc8_interp"
interp_param {
zoom_factor: 8
}
}
layer {
name: "fc8_mat"
type: "MatWrite"
include {
phase: TEST
}
mat_write_param {
prefix: "voc12/features/deeplab_largeFOV/val/fc8/"
source: "voc12/list/val_id.txt"
strip: 0
period: 1
}
}
layer {
name: "silence"
type: "Silence"
bottom: "label"
include {
phase: TEST
}
}
I0920 12:57:35.947854 12793 layer_factory.hpp:77] Creating layer data
I0920 12:57:35.947927 12793 net.cpp:106] Creating Layer data
I0920 12:57:35.947945 12793 net.cpp:411] data -> data
I0920 12:57:35.947999 12793 net.cpp:411] data -> label
I0920 12:57:35.948024 12793 net.cpp:411] data -> (automatic)
I0920 12:57:35.948052 12793 image_seg_data_layer.cpp:46] Opening file voc12/list/val.txt
I0920 12:57:35.950197 12793 image_seg_data_layer.cpp:68] A total of 1449 images.
I0920 12:57:35.971616 12793 image_seg_data_layer.cpp:137] output data size: 1,3,513,513
I0920 12:57:35.971668 12793 image_seg_data_layer.cpp:141] output label size: 1,1,513,513
```
Answer 0 (score: 2)
I figured out the problem: the MatWrite layer in my prototxt file did not take any input from the previous layer. I added the line `bottom: "fc8_interp"` and now it works fine:
```
layer {
  name: "fc8_mat"
  type: "MatWrite"
  bottom: "fc8_interp"
  mat_write_param {
    prefix: "${FEATURE_DIR}/${TEST_SET}/fc8/"
    source: "${EXP}/list/${TEST_SET}_id.txt"
    strip: 0
    period: 1
  }
  include: { phase: TEST }
}
```
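After rerunning the test phase with the extra bottom, a quick way to confirm that MatWrite is actually writing its output (using the directory layout from the question) is to count the files in the fc8 folder; there should be one .mat per entry in the val list:

```sh
# Expect 1449 files, one per entry in voc12/list/val_id.txt.
ls voc12/features/deeplab_largeFOV/val/fc8/*.mat | wc -l
```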
Answer 1 (score: 1)
Here is the rest of the log file:

```
I0920 12:57:35.971684 12793 image_seg_data_layer.cpp:145] output data_dim size: 1,1,1,2
I0920 12:57:35.997220 12793 net.cpp:150] Setting up data
I0920 12:57:35.997285 12793 net.cpp:157] Top shape: 1 3 513 513 (789507)
I0920 12:57:35.997301 12793 net.cpp:157] Top shape: 1 1 513 513 (263169)
I0920 12:57:35.997314 12793 net.cpp:157] Top shape: 1 1 1 2 (2)
I0920 12:57:35.997325 12793 net.cpp:165] Memory required for data: 4210712
I0920 12:57:35.997349 12793 layer_factory.hpp:77] Creating layer conv1_1
I0920 12:57:35.997405 12793 net.cpp:106] Creating Layer conv1_1
I0920 12:57:35.997421 12793 net.cpp:454] conv1_1 <- data
I0920 12:57:35.997447 12793 net.cpp:411] conv1_1 -> conv1_1
I0920 12:57:35.999809 12793 net.cpp:150] Setting up conv1_1
I0920 12:57:35.999832 12793 net.cpp:157] Top shape: 1 64 513 513 (16842816)
I0920 12:57:35.999840 12793 net.cpp:165] Memory required for data: 71581976
I0920 12:57:35.999869 12793 layer_factory.hpp:77] Creating layer relu1_1
I0920 12:57:35.999887 12793 net.cpp:106] Creating Layer relu1_1
I0920 12:57:35.999897 12793 net.cpp:454] relu1_1 <- conv1_1
I0920 12:57:35.999908 12793 net.cpp:397] relu1_1 -> conv1_1 (in-place)
I0920 12:57:35.999977 12793 net.cpp:150] Setting up relu1_1
I0920 12:57:35.999989 12793 net.cpp:157] Top shape: 1 64 513 513 (16842816)
I0920 12:57:35.999995 12793 net.cpp:165] Memory required for data: 138953240
I0920 12:57:36.000003 12793 layer_factory.hpp:77] Creating layer conv1_2
I0920 12:57:36.000018 12793 net.cpp:106] Creating Layer conv1_2
I0920 12:57:36.000026 12793 net.cpp:454] conv1_2 <- conv1_1
I0920 12:57:36.000038 12793 net.cpp:411] conv1_2 -> conv1_2
I0920 12:57:36.002727 12793 net.cpp:150] Setting up conv1_2
I0920 12:57:36.002753 12793 net.cpp:157] Top shape: 1 64 513 513 (16842816)
I0920 12:57:36.002763 12793 net.cpp:165] Memory required for data: 206324504
I0920 12:57:36.002785 12793 layer_factory.hpp:77] Creating layer relu1_2
I0920 12:57:36.002810 12793 net.cpp:106] Creating Layer relu1_2
I0920 12:57:36.002821 12793 net.cpp:454] relu1_2 <- conv1_2
I0920 12:57:36.002835 12793 net.cpp:397] relu1_2 -> conv1_2 (in-place)
I0920 12:57:36.002851 12793 net.cpp:150] Setting up relu1_2
I0920 12:57:36.002866 12793 net.cpp:157] Top shape: 1 64 513 513 (16842816)
I0920 12:57:36.002876 12793 net.cpp:165] Memory required for data: 273695768
I0920 12:57:36.002887 12793 layer_factory.hpp:77] Creating layer pool1
I0920 12:57:36.002904 12793 net.cpp:106] Creating Layer pool1
I0920 12:57:36.002920 12793 net.cpp:454] pool1 <- conv1_2
I0920 12:57:36.002934 12793 net.cpp:411] pool1 -> pool1
I0920 12:57:36.003037 12793 net.cpp:150] Setting up pool1
I0920 12:57:36.003053 12793 net.cpp:157] Top shape: 1 64 257 257 (4227136)
I0920 12:57:36.003063 12793 net.cpp:165] Memory required for data: 290604312
I0920 12:57:36.003074 12793 layer_factory.hpp:77] Creating layer conv2_1
I0920 12:57:36.003092 12793 net.cpp:106] Creating Layer conv2_1
I0920 12:57:36.003101 12793 net.cpp:454] conv2_1 <- pool1
I0920 12:57:36.003121 12793 net.cpp:411] conv2_1 -> conv2_1
I0920 12:57:36.004442 12793 net.cpp:150] Setting up conv2_1
I0920 12:57:36.004462 12793 net.cpp:157] Top shape: 1 128 257 257 (8454272)
I0920 12:57:36.004472 12793 net.cpp:165] Memory required for data: 324421400
I0920 12:57:36.004490 12793 layer_factory.hpp:77] Creating layer relu2_1
I0920 12:57:36.004505 12793 net.cpp:106] Creating Layer relu2_1
I0920 12:57:36.004516 12793 net.cpp:454] relu2_1 <- conv2_1
I0920 12:57:36.004528 12793 net.cpp:397] relu2_1 -> conv2_1 (in-place)
I0920 12:57:36.004541 12793 net.cpp:150] Setting up relu2_1
I0920 12:57:36.004551 12793 net.cpp:157] Top shape: 1 128 257 257 (8454272)
I0920 12:57:36.004559 12793 net.cpp:165] Memory required for data: 358238488
I0920 12:57:36.004570 12793 layer_factory.hpp:77] Creating layer conv2_2
I0920 12:57:36.004586 12793 net.cpp:106] Creating Layer conv2_2
I0920 12:57:36.004595 12793 net.cpp:454] conv2_2 <- conv2_1
I0920 12:57:36.004608 12793 net.cpp:411] conv2_2 -> conv2_2
I0920 12:57:36.006110 12793 net.cpp:150] Setting up conv2_2
I0920 12:57:36.006130 12793 net.cpp:157] Top shape: 1 128 257 257 (8454272)
I0920 12:57:36.006141 12793 net.cpp:165] Memory required for data: 392055576
I0920 12:57:36.006157 12793 layer_factory.hpp:77] Creating layer relu2_2
I0920 12:57:36.006172 12793 net.cpp:106] Creating Layer relu2_2
I0920 12:57:36.006184 12793 net.cpp:454] relu2_2 <- conv2_2
I0920 12:57:36.006196 12793 net.cpp:397] relu2_2 -> conv2_2 (in-place)
I0920 12:57:36.006208 12793 net.cpp:150] Setting up relu2_2
I0920 12:57:36.006218 12793 net.cpp:157] Top shape: 1 128 257 257 (8454272)
I0920 12:57:36.006225 12793 net.cpp:165] Memory required for data: 425872664
I0920 12:57:36.006233 12793 layer_factory.hpp:77] Creating layer pool2
I0920 12:57:36.006245 12793 net.cpp:106] Creating Layer pool2
I0920 12:57:36.006253 12793 net.cpp:454] pool2 <- conv2_2
I0920 12:57:36.006264 12793 net.cpp:411] pool2 -> pool2
I0920 12:57:36.006325 12793 net.cpp:150] Setting up pool2
I0920 12:57:36.006337 12793 net.cpp:157] Top shape: 1 128 129 129 (2130048)
I0920 12:57:36.006345 12793 net.cpp:165] Memory required for data: 434392856
I0920 12:57:36.006352 12793 layer_factory.hpp:77] Creating layer conv3_1
I0920 12:57:36.006367 12793 net.cpp:106] Creating Layer conv3_1
I0920 12:57:36.006410 12793 net.cpp:454] conv3_1 <- pool2
I0920 12:57:36.006423 12793 net.cpp:411] conv3_1 -> conv3_1
I0920 12:57:36.008404 12793 net.cpp:150] Setting up conv3_1
I0920 12:57:36.008435 12793 net.cpp:157] Top shape: 1 256 129 129 (4260096)
I0920 12:57:36.008452 12793 net.cpp:165] Memory required for data: 451433240
I0920 12:57:36.008476 12793 layer_factory.hpp:77] Creating layer relu3_1
I0920 12:57:36.008492 12793 net.cpp:106] Creating Layer relu3_1
I0920 12:57:36.008502 12793 net.cpp:454] relu3_1 <- conv3_1
I0920 12:57:36.008517 12793 net.cpp:397] relu3_1 -> conv3_1 (in-place)
I0920 12:57:36.008533 12793 net.cpp:150] Setting up relu3_1
I0920 12:57:36.008543 12793 net.cpp:157] Top shape: 1 256 129 129 (4260096)
I0920 12:57:36.008551 12793 net.cpp:165] Memory required for data: 468473624
I0920 12:57:36.008560 12793 layer_factory.hpp:77] Creating layer conv3_2
I0920 12:57:36.008579 12793 net.cpp:106] Creating Layer conv3_2
I0920 12:57:36.008589 12793 net.cpp:454] conv3_2 <- conv3_1
I0920 12:57:36.008602 12793 net.cpp:411] conv3_2 -> conv3_2
I0920 12:57:36.011996 12793 net.cpp:150] Setting up conv3_2
I0920 12:57:36.012034 12793 net.cpp:157] Top shape: 1 256 129 129 (4260096)
I0920 12:57:36.012044 12793 net.cpp:165] Memory required for data: 485514008
I0920 12:57:36.012060 12793 layer_factory.hpp:77] Creating layer relu3_2
I0920 12:57:36.012079 12793 net.cpp:106] Creating Layer relu3_2
I0920 12:57:36.012089 12793 net.cpp:454] relu3_2 <- conv3_2
I0920 12:57:36.012102 12793 net.cpp:397] relu3_2 -> conv3_2 (in-place)
I0920 12:57:36.012120 12793 net.cpp:150] Setting up relu3_2
I0920 12:57:36.012130 12793 net.cpp:157] Top shape: 1 256 129 129 (4260096)
I0920 12:57:36.012137 12793 net.cpp:165] Memory required for data: 502554392
I0920 12:57:36.012145 12793 layer_factory.hpp:77] Creating layer conv3_3
I0920 12:57:36.012161 12793 net.cpp:106] Creating Layer conv3_3
I0920 12:57:36.012169 12793 net.cpp:454] conv3_3 <- conv3_2
I0920 12:57:36.012182 12793 net.cpp:411] conv3_3 -> conv3_3
I0920 12:57:36.015485 12793 net.cpp:150] Setting up conv3_3
I0920 12:57:36.015522 12793 net.cpp:157] Top shape: 1 256 129 129 (4260096)
I0920 12:57:36.015532 12793 net.cpp:165] Memory required for data: 519594776
I0920 12:57:36.015548 12793 layer_factory.hpp:77] Creating layer relu3_3
I0920 12:57:36.015584 12793 net.cpp:106] Creating Layer relu3_3
I0920 12:57:36.015596 12793 net.cpp:454] relu3_3 <- conv3_3
I0920 12:57:36.015611 12793 net.cpp:397] relu3_3 -> conv3_3 (in-place)
I0920 12:57:36.015630 12793 net.cpp:150] Setting up relu3_3
I0920 12:57:36.015641 12793 net.cpp:157] Top shape: 1 256 129 129 (4260096)
I0920 12:57:36.015651 12793 net.cpp:165] Memory required for data: 536635160
I0920 12:57:36.015664 12793 layer_factory.hpp:77] Creating layer pool3
I0920 12:57:36.015681 12793 net.cpp:106] Creating Layer pool3
I0920 12:57:36.015691 12793 net.cpp:454] pool3 <- conv3_3
I0920 12:57:36.015714 12793 net.cpp:411] pool3 -> pool3
I0920 12:57:36.015780 12793 net.cpp:150] Setting up pool3
I0920 12:57:36.015799 12793 net.cpp:157] Top shape: 1 256 65 65 (1081600)
I0920 12:57:36.015807 12793 net.cpp:165] Memory required for data: 540961560
I0920 12:57:36.015820 12793 layer_factory.hpp:77] Creating layer conv4_1
I0920 12:57:36.015841 12793 net.cpp:106] Creating Layer conv4_1
I0920 12:57:36.015853 12793 net.cpp:454] conv4_1 <- pool3
I0920 12:57:36.015868 12793 net.cpp:411] conv4_1 -> conv4_1
I0920 12:57:36.023669 12793 net.cpp:150] Setting up conv4_1
I0920 12:57:36.023726 12793 net.cpp:157] Top shape: 1 512 65 65 (2163200)
I0920 12:57:36.023739 12793 net.cpp:165] Memory required for data: 549614360
I0920 12:57:36.023766 12793 layer_factory.hpp:77] Creating layer relu4_1
I0920 12:57:36.023789 12793 net.cpp:106] Creating Layer relu4_1
I0920 12:57:36.023802 12793 net.cpp:454] relu4_1 <- conv4_1
I0920 12:57:36.023825 12793 net.cpp:397] relu4_1 -> conv4_1 (in-place)
I0920 12:57:36.023850 12793 net.cpp:150] Setting up relu4_1
I0920 12:57:36.023866 12793 net.cpp:157] Top shape: 1 512 65 65 (2163200)
I0920 12:57:36.023924 12793 net.cpp:165] Memory required for data: 558267160
I0920 12:57:36.023941 12793 layer_factory.hpp:77] Creating layer conv4_2
I0920 12:57:36.023962 12793 net.cpp:106] Creating Layer conv4_2
I0920 12:57:36.023973 12793 net.cpp:454] conv4_2 <- conv4_1
I0920 12:57:36.023996 12793 net.cpp:411] conv4_2 -> conv4_2
I0920 12:57:36.039988 12793 net.cpp:150] Setting up conv4_2
I0920 12:57:36.040055 12793 net.cpp:157] Top shape: 1 512 65 65 (2163200)
I0920 12:57:36.040066 12793 net.cpp:165] Memory required for data: 566919960
I0920 12:57:36.040099 12793 layer_factory.hpp:77] Creating layer relu4_2
I0920 12:57:36.040125 12793 net.cpp:106] Creating Layer relu4_2
I0920 12:57:36.040136 12793 net.cpp:454] relu4_2 <- conv4_2
I0920 12:57:36.040153 12793 net.cpp:397] relu4_2 -> conv4_2 (in-place)
I0920 12:57:36.040172 12793 net.cpp:150] Setting up relu4_2
I0920 12:57:36.040182 12793 net.cpp:157] Top shape: 1 512 65 65 (2163200)
I0920 12:57:36.040190 12793 net.cpp:165] Memory required for data: 575572760
I0920 12:57:36.040199 12793 layer_factory.hpp:77] Creating layer conv4_3
I0920 12:57:36.040236 12793 net.cpp:106] Creating Layer conv4_3
I0920 12:57:36.040251 12793 net.cpp:454] conv4_3 <- conv4_2
I0920 12:57:36.040277 12793 net.cpp:411] conv4_3 -> conv4_3
I0920 12:57:36.056390 12793 net.cpp:150] Setting up conv4_3
I0920 12:57:36.056465 12793 net.cpp:157] Top shape: 1 512 65 65 (2163200)
I0920 12:57:36.056478 12793 net.cpp:165] Memory required for data: 584225560
I0920 12:57:36.056500 12793 layer_factory.hpp:77] Creating layer relu4_3
I0920 12:57:36.056524 12793 net.cpp:106] Creating Layer relu4_3
I0920 12:57:36.056540 12793 net.cpp:454] relu4_3 <- conv4_3
I0920 12:57:36.056557 12793 net.cpp:397] relu4_3 -> conv4_3 (in-place)
I0920 12:57:36.056579 12793 net.cpp:150] Setting up relu4_3
I0920 12:57:36.056591 12793 net.cpp:157] Top shape: 1 512 65 65 (2163200)
I0920 12:57:36.056602 12793 net.cpp:165] Memory required for data: 592878360
I0920 12:57:36.056612 12793 layer_factory.hpp:77] Creating layer pool4
I0920 12:57:36.056628 12793 net.cpp:106] Creating Layer pool4
I0920 12:57:36.056639 12793 net.cpp:454] pool4 <- conv4_3
I0920 12:57:36.056654 12793 net.cpp:411] pool4 -> pool4
I0920 12:57:36.056733 12793 net.cpp:150] Setting up pool4
I0920 12:57:36.056751 12793 net.cpp:157] Top shape: 1 512 65 65 (2163200)
I0920 12:57:36.056761 12793 net.cpp:165] Memory required for data: 601531160
I0920 12:57:36.056772 12793 layer_factory.hpp:77] Creating layer conv5_1
I0920 12:57:36.056793 12793 net.cpp:106] Creating Layer conv5_1
I0920 12:57:36.056804 12793 net.cpp:454] conv5_1 <- pool4
I0920 12:57:36.056818 12793 net.cpp:411] conv5_1 -> conv5_1
I0920 12:57:36.075142 12793 net.cpp:150] Setting up conv5_1
I0920 12:57:36.075218 12793 net.cpp:157] Top shape: 1 512 65 65 (2163200)
I0920 12:57:36.075232 12793 net.cpp:165] Memory required for data: 610183960
I0920 12:57:36.075259 12793 layer_factory.hpp:77] Creating layer relu5_1
I0920 12:57:36.075287 12793 net.cpp:106] Creating Layer relu5_1
I0920 12:57:36.075302 12793 net.cpp:454] relu5_1 <- conv5_1
I0920 12:57:36.075322 12793 net.cpp:397] relu5_1 -> conv5_1 (in-place)
I0920 12:57:36.075348 12793 net.cpp:150] Setting up relu5_1
I0920 12:57:36.075361 12793 net.cpp:157] Top shape: 1 512 65 65 (2163200)
I0920 12:57:36.075371 12793 net.cpp:165] Memory required for data: 618836760
I0920 12:57:36.075381 12793 layer_factory.hpp:77] Creating layer conv5_2
I0920 12:57:36.075403 12793 net.cpp:106] Creating Layer conv5_2
I0920 12:57:36.075415 12793 net.cpp:454] conv5_2 <- conv5_1
I0920 12:57:36.075430 12793 net.cpp:411] conv5_2 -> conv5_2
I0920 12:57:36.093725 12793 net.cpp:150] Setting up conv5_2
I0920 12:57:36.093797 12793 net.cpp:157] Top shape: 1 512 65 65 (2163200)
I0920 12:57:36.093812 12793 net.cpp:165] Memory required for data: 627489560
I0920 12:57:36.093837 12793 layer_factory.hpp:77] Creating layer relu5_2
I0920 12:57:36.093863 12793 net.cpp:106] Creating Layer relu5_2
I0920 12:57:36.093881 12793 net.cpp:454] relu5_2 <- conv5_2
I0920 12:57:36.093902 12793 net.cpp:397] relu5_2 -> conv5_2 (in-place)
I0920 12:57:36.094008 12793 net.cpp:150] Setting up relu5_2
I0920 12:57:36.094022 12793 net.cpp:157] Top shape: 1 512 65 65 (2163200)
I0920 12:57:36.094033 12793 net.cpp:165] Memory required for data: 636142360
I0920 12:57:36.094043 12793 layer_factory.hpp:77] Creating layer conv5_3
I0920 12:57:36.094066 12793 net.cpp:106] Creating Layer conv5_3
I0920 12:57:36.094077 12793 net.cpp:454] conv5_3 <- conv5_2
I0920 12:57:36.094094 12793 net.cpp:411] conv5_3 -> conv5_3
I0920 12:57:36.116233 12793 net.cpp:150] Setting up conv5_3
I0920 12:57:36.116317 12793 net.cpp:157] Top shape: 1 512 65 65 (2163200)
I0920 12:57:36.116329 12793 net.cpp:165] Memory required for data: 644795160
I0920 12:57:36.116353 12793 layer_factory.hpp:77] Creating layer relu5_3
I0920 12:57:36.116377 12793 net.cpp:106] Creating Layer relu5_3
I0920 12:57:36.116392 12793 net.cpp:454] relu5_3 <- conv5_3
I0920 12:57:36.116410 12793 net.cpp:397] relu5_3 -> conv5_3 (in-place)
I0920 12:57:36.116432 12793 net.cpp:150] Setting up relu5_3
I0920 12:57:36.116444 12793 net.cpp:157] Top shape: 1 512 65 65 (2163200)
I0920 12:57:36.116457 12793 net.cpp:165] Memory required for data: 653447960
I0920 12:57:36.116467 12793 layer_factory.hpp:77] Creating layer pool5
I0920 12:57:36.116499 12793 net.cpp:106] Creating Layer pool5
I0920 12:57:36.116518 12793 net.cpp:454] pool5 <- conv5_3
I0920 12:57:36.116539 12793 net.cpp:411] pool5 -> pool5
I0920 12:57:36.116619 12793 net.cpp:150] Setting up pool5
I0920 12:57:36.116634 12793 net.cpp:157] Top shape: 1 512 65 65 (2163200)
I0920 12:57:36.116647 12793 net.cpp:165] Memory required for data: 662100760
I0920 12:57:36.116658 12793 layer_factory.hpp:77] Creating layer pool5a
I0920 12:57:36.116695 12793 net.cpp:106] Creating Layer pool5a
I0920 12:57:36.116706 12793 net.cpp:454] pool5a <- pool5
I0920 12:57:36.116720 12793 net.cpp:411] pool5a -> pool5a
I0920 12:57:36.116766 12793 net.cpp:150] Setting up pool5a
I0920 12:57:36.116781 12793 net.cpp:157] Top shape: 1 512 65 65 (2163200)
I0920 12:57:36.116788 12793 net.cpp:165] Memory required for data: 670753560
I0920 12:57:36.116804 12793 layer_factory.hpp:77] Creating layer fc6
I0920 12:57:36.116832 12793 net.cpp:106] Creating Layer fc6
I0920 12:57:36.116840 12793 net.cpp:454] fc6 <- pool5a
I0920 12:57:36.116852 12793 net.cpp:411] fc6 -> fc6
I0920 12:57:36.156725 12793 net.cpp:150] Setting up fc6
I0920 12:57:36.156795 12793 net.cpp:157] Top shape: 1 1024 65 65 (4326400)
I0920 12:57:36.156808 12793 net.cpp:165] Memory required for data: 688059160
I0920 12:57:36.156838 12793 layer_factory.hpp:77] Creating layer relu6
I0920 12:57:36.156864 12793 net.cpp:106] Creating Layer relu6
I0920 12:57:36.156878 12793 net.cpp:454] relu6 <- fc6
I0920 12:57:36.156900 12793 net.cpp:397] relu6 -> fc6 (in-place)
I0920 12:57:36.157707 12793 net.cpp:150] Setting up relu6
I0920 12:57:36.157738 12793 net.cpp:157] Top shape: 1 1024 65 65 (4326400)
I0920 12:57:36.157749 12793 net.cpp:165] Memory required for data: 705364760
I0920 12:57:36.157763 12793 layer_factory.hpp:77] Creating layer drop6
I0920 12:57:36.157820 12793 net.cpp:106] Creating Layer drop6
I0920 12:57:36.157836 12793 net.cpp:454] drop6 <- fc6
I0920 12:57:36.157852 12793 net.cpp:397] drop6 -> fc6 (in-place)
I0920 12:57:36.157995 12793 net.cpp:150] Setting up drop6
I0920 12:57:36.158010 12793 net.cpp:157] Top shape: 1 1024 65 65 (4326400)
I0920 12:57:36.158020 12793 net.cpp:165] Memory required for data: 722670360
I0920 12:57:36.158031 12793 layer_factory.hpp:77] Creating layer fc7
I0920 12:57:36.158071 12793 net.cpp:106] Creating Layer fc7
I0920 12:57:36.158084 12793 net.cpp:454] fc7 <- fc6
I0920 12:57:36.158099 12793 net.cpp:411] fc7 -> fc7
I0920 12:57:36.167103 12793 net.cpp:150] Setting up fc7
I0920 12:57:36.167170 12793 net.cpp:157] Top shape: 1 1024 65 65 (4326400)
I0920 12:57:36.167181 12793 net.cpp:165] Memory required for data: 739975960
I0920 12:57:36.167203 12793 layer_factory.hpp:77] Creating layer relu7
I0920 12:57:36.167227 12793 net.cpp:106] Creating Layer relu7
I0920 12:57:36.167240 12793 net.cpp:454] relu7 <- fc7
I0920 12:57:36.167269 12793 net.cpp:397] relu7 -> fc7 (in-place)
I0920 12:57:36.168635 12793 net.cpp:150] Setting up relu7
I0920 12:57:36.168705 12793 net.cpp:157] Top shape: 1 1024 65 65 (4326400)
I0920 12:57:36.168717 12793 net.cpp:165] Memory required for data: 757281560
I0920 12:57:36.168732 12793 layer_factory.hpp:77] Creating layer drop7
I0920 12:57:36.168759 12793 net.cpp:106] Creating Layer drop7
I0920 12:57:36.168773 12793 net.cpp:454] drop7 <- fc7
I0920 12:57:36.168793 12793 net.cpp:397] drop7 -> fc7 (in-place)
I0920 12:57:36.168932 12793 net.cpp:150] Setting up drop7
I0920 12:57:36.168947 12793 net.cpp:157] Top shape: 1 1024 65 65 (4326400)
I0920 12:57:36.168957 12793 net.cpp:165] Memory required for data: 774587160
I0920 12:57:36.168967 12793 layer_factory.hpp:77] Creating layer fc8_voc12
I0920 12:57:36.168993 12793 net.cpp:106] Creating Layer fc8_voc12
I0920 12:57:36.169016 12793 net.cpp:454] fc8_voc12 <- fc7
I0920 12:57:36.169034 12793 net.cpp:411] fc8_voc12 -> fc8_voc12
I0920 12:57:36.170449 12793 net.cpp:150] Setting up fc8_voc12
I0920 12:57:36.170548 12793 net.cpp:157] Top shape: 1 21 65 65 (88725)
I0920 12:57:36.170564 12793 net.cpp:165] Memory required for data: 774942060
I0920 12:57:36.170589 12793 layer_factory.hpp:77] Creating layer fc8_interp
I0920 12:57:36.170630 12793 net.cpp:106] Creating Layer fc8_interp
I0920 12:57:36.170644 12793 net.cpp:454] fc8_interp <- fc8_voc12
I0920 12:57:36.170668 12793 net.cpp:411] fc8_interp -> fc8_interp
I0920 12:57:36.170732 12793 net.cpp:150] Setting up fc8_interp
I0920 12:57:36.170747 12793 net.cpp:157] Top shape: 1 21 513 513 (5526549)
I0920 12:57:36.170760 12793 net.cpp:165] Memory required for data: 797048256
I0920 12:57:36.170770 12793 layer_factory.hpp:77] Creating layer fc8_mat
I0920 12:57:36.170790 12793 net.cpp:106] Creating Layer fc8_mat
I0920 12:57:36.173951 12793 mat_write_layer.cpp:30] MatWrite will save a maximum of 1449 files.
I0920 12:57:36.174042 12793 net.cpp:150] Setting up fc8_mat
I0920 12:57:36.174057 12793 net.cpp:165] Memory required for data: 797048256
I0920 12:57:36.174078 12793 layer_factory.hpp:77] Creating layer silence
I0920 12:57:36.174113 12793 net.cpp:106] Creating Layer silence
I0920 12:57:36.174132 12793 net.cpp:454] silence <- label
I0920 12:57:36.174155 12793 net.cpp:150] Setting up silence
I0920 12:57:36.174166 12793 net.cpp:165] Memory required for data: 797048256
I0920 12:57:36.174178 12793 net.cpp:228] silence does not need backward computation.
I0920 12:57:36.174191 12793 net.cpp:228] fc8_mat does not need backward computation.
I0920 12:57:36.174202 12793 net.cpp:228] fc8_interp does not need backward computation.
I0920 12:57:36.174214 12793 net.cpp:228] fc8_voc12 does not need backward computation.
I0920 12:57:36.174232 12793 net.cpp:228] drop7 does not need backward computation.
I0920 12:57:36.174244 12793 net.cpp:228] relu7 does not need backward computation.
I0920 12:57:36.174257 12793 net.cpp:228] fc7 does not need backward computation.
I0920 12:57:36.174273 12793 net.cpp:228] drop6 does not need backward computation.
I0920 12:57:36.174285 12793 net.cpp:228] relu6 does not need backward computation.
I0920 12:57:36.174299 12793 net.cpp:228] fc6 does not need backward computation.
I0920 12:57:36.174310 12793 net.cpp:228] pool5a does not need backward computation.
I0920 12:57:36.174326 12793 net.cpp:228] pool5 does not need backward computation.
I0920 12:57:36.174340 12793 net.cpp:228] relu5_3 does not need backward computation.
I0920 12:57:36.174352 12793 net.cpp:228] conv5_3 does not need backward computation.
I0920 12:57:36.174363 12793 net.cpp:228] relu5_2 does not need backward computation.
I0920 12:57:36.174376 12793 net.cpp:228] conv5_2 does not need backward computation.
I0920 12:57:36.174392 12793 net.cpp:228] relu5_1 does not need backward computation.
I0920 12:57:36.174406 12793 net.cpp:228] conv5_1 does not need backward computation.
I0920 12:57:36.174418 12793 net.cpp:228] pool4 does not need backward computation.
I0920 12:57:36.174432 12793 net.cpp:228] relu4_3 does not need backward computation.
I0920 12:57:36.174444 12793 net.cpp:228] conv4_3 does not need backward computation.
I0920 12:57:36.174568 12793 net.cpp:228] relu4_2 does not need backward computation.
I0920 12:57:36.174582 12793 net.cpp:228] conv4_2 does not need backward computation.
I0920 12:57:36.174593 12793 net.cpp:228] relu4_1 does not need backward computation.
I0920 12:57:36.174607 12793 net.cpp:228] conv4_1 does not need backward computation.
I0920 12:57:36.174618 12793 net.cpp:228] pool3 does not need backward computation.
I0920 12:57:36.174631 12793 net.cpp:228] relu3_3 does not need backward computation.
I0920 12:57:36.174643 12793 net.cpp:228] conv3_3 does not need backward computation.
I0920 12:57:36.174655 12793 net.cpp:228] relu3_2 does not need backward computation.
I0920 12:57:36.174666 12793 net.cpp:228] conv3_2 does not need backward computation.
I0920 12:57:36.174677 12793 net.cpp:228] relu3_1 does not need backward computation.
I0920 12:57:36.174690 12793 net.cpp:228] conv3_1 does not need backward computation.
I0920 12:57:36.174705 12793 net.cpp:228] pool2 does not need backward computation.
I0920 12:57:36.174716 12793 net.cpp:228] relu2_2 does not need backward computation.
I0920 12:57:36.174728 12793 net.cpp:228] conv2_2 does not need backward computation.
I0920 12:57:36.174741 12793 net.cpp:228] relu2_1 does not need backward computation.
I0920 12:57:36.174751 12793 net.cpp:228] conv2_1 does not need backward computation.
I0920 12:57:36.174763 12793 net.cpp:228] pool1 does not need backward computation.
I0920 12:57:36.174775 12793 net.cpp:228] relu1_2 does not need backward computation.
I0920 12:57:36.174787 12793 net.cpp:228] conv1_2 does not need backward computation.
I0920 12:57:36.174798 12793 net.cpp:228] relu1_1 does not need backward computation.
I0920 12:57:36.174820 12793 net.cpp:228] conv1_1 does not need backward computation.
I0920 12:57:36.174836 12793 net.cpp:228] data does not need backward computation.
I0920 12:57:36.174847 12793 net.cpp:270] This network produces output fc8_interp
I0920 12:57:36.174891 12793 net.cpp:283] Network initialization done.
I0920 12:57:36.685827 12793 upgrade_proto.cpp:51] Attempting to upgrade input file specified using deprecated V1LayerParameter: voc12/model/deeplab_largeFOV/train_iter_8000.caffemodel
I0920 12:57:37.033308 12793 upgrade_proto.cpp:59] Successfully upgraded file specified using deprecated V1LayerParameter
I0920 12:57:37.099608 12793 net.cpp:816] Ignoring source layer fc8_voc12_fc8_voc12_0_split
I0920 12:57:37.099673 12793 net.cpp:816] Ignoring source layer label_shrink
I0920 12:57:37.099685 12793 net.cpp:816] Ignoring source layer label_shrink_label_shrink_0_split
I0920 12:57:37.099701 12793 net.cpp:816] Ignoring source layer loss
I0920 12:57:37.099715 12793 net.cpp:816] Ignoring source layer accuracy
I0920 12:57:37.120515 12793 caffe.cpp:252] Running for 1449 iterations.
I0920 12:57:37.247792 12793 caffe.cpp:276] Batch 0, fc8_interp = 13.2258
I0920 12:57:37.247889 12793 caffe.cpp:276] Batch 0, fc8_interp = 13.3482
I0920 12:57:37.247908 12793 caffe.cpp:276] Batch 0, fc8_interp = 13.4705
I0920 12:57:37.247921 12793 caffe.cpp:276] Batch 0, fc8_interp = 13.5928
I0920 12:57:37.247934 12793 caffe.cpp:276] Batch 0, fc8_interp = 13.7151
I0920 12:57:37.247947 12793 caffe.cpp:276] Batch 0, fc8_interp = 13.8374
I0920 12:57:37.247967 12793 caffe.cpp:276] Batch 0, fc8_interp = 13.9597
I0920 12:57:37.247987 12793 caffe.cpp:276] Batch 0, fc8_interp = 14.0821
I0920 12:57:37.248010 12793 caffe.cpp:276] Batch 0, fc8_interp = 14.2044
I0920 12:57:37.248025 12793 caffe.cpp:276] Batch 0, fc8_interp = 14.2195
I0920 12:57:37.248037 12793 caffe.cpp:276] Batch 0, fc8_interp = 14.2346
```