简体   繁体   中英

I add a new layer in caffe and there is an error about “layer_param_”

I'm new to Caffe and I want to make a test about deconvolution. I added two new layers (an unpooling layer and a BN layer). There is an error during the test, and this is the error information:

F0715 16:53:43.956820 5838 unpooling_layer.cpp:29] Check failed: !unpool_param.has_kernel_size() != !(unpool_param.has_kernel_h() && unpool_param.has_kernel_w()) Filter size is kernel_size OR kernel_h and kernel_w; not both * Check failure stack trace: * Aborted (core dumped)

This is the whole script:

I0715 16:53:43.953850  5838 upgrade_proto.cpp:53] Attempting to upgrade input file specified using deprecated V1LayerParameter: deconv.prototxt
I0715 16:53:43.954031  5838 upgrade_proto.cpp:61] Successfully upgraded file specified using deprecated V1LayerParameter
I0715 16:53:43.954062  5838 upgrade_proto.cpp:67] Attempting to upgrade input file specified using deprecated input fields: deconv.prototxt
I0715 16:53:43.954092  5838 upgrade_proto.cpp:70] Successfully upgraded file specified using deprecated input fields.
W0715 16:53:43.954098  5838 upgrade_proto.cpp:72] Note that future Caffe releases will only support input layers and not input fields.
I0715 16:53:43.954301  5838 net.cpp:51] Initializing net from parameters: 
name: "Deconv_test"
state {
  phase: TEST
  level: 0
}
layer {
  name: "input"
  type: "Input"
  top: "data"
  input_param {
    shape {
      dim: 1
      dim: 3
      dim: 224
      dim: 224
    }
  }
}
layer {
  name: "conv1_1"
  type: "Convolution"
  bottom: "data"
  top: "conv1_1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "bn1_1"
  type: "BN"
  bottom: "conv1_1"
  top: "conv1_1"
}
layer {
  name: "relu1_1"
  type: "ReLU"
  bottom: "conv1_1"
  top: "conv1_1"
}
layer {
  name: "conv1_2"
  type: "Convolution"
  bottom: "conv1_1"
  top: "conv1_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "bn1_2"
  type: "BN"
  bottom: "conv1_2"
  top: "conv1_2"
}
layer {
  name: "relu1_2"
  type: "ReLU"
  bottom: "conv1_2"
  top: "conv1_2"
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1_2"
  top: "pool1"
  top: "pool1_mask"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "unpool1"
  type: "Unpooling"
  bottom: "pool1"
  bottom: "pool1_mask"
  top: "unpool1"
}
layer {
  name: "deconv1_1"
  type: "Deconvolution"
  bottom: "unpool1"
  top: "deconv1_1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "debn1_1"
  type: "BN"
  bottom: "deconv1_1"
  top: "deconv1_1"
}
layer {
  name: "derelu1_1"
  type: "ReLU"
  bottom: "deconv1_1"
  top: "deconv1_1"
}
layer {
  name: "deconv1_2"
  type: "Deconvolution"
  bottom: "deconv1_1"
  top: "deconv1_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "debn1_2"
  type: "BN"
  bottom: "deconv1_2"
  top: "deconv1_2"
}
layer {
  name: "derelu1_2"
  type: "ReLU"
  bottom: "deconv1_2"
  top: "deconv1_2"
}
layer {
  name: "seg-score-voc"
  type: "Convolution"
  bottom: "deconv1_2"
  top: "seg-score"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 21
    kernel_size: 1
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
I0715 16:53:43.954690  5838 layer_factory.hpp:77] Creating layer input
I0715 16:53:43.954740  5838 net.cpp:84] Creating Layer input
I0715 16:53:43.954752  5838 net.cpp:380] input -> data
I0715 16:53:43.954807  5838 net.cpp:122] Setting up input
I0715 16:53:43.954823  5838 net.cpp:129] Top shape: 1 3 224 224 (150528)
I0715 16:53:43.954828  5838 net.cpp:137] Memory required for data: 602112
I0715 16:53:43.954839  5838 layer_factory.hpp:77] Creating layer conv1_1
I0715 16:53:43.954865  5838 net.cpp:84] Creating Layer conv1_1
I0715 16:53:43.954876  5838 net.cpp:406] conv1_1 <- data
I0715 16:53:43.954898  5838 net.cpp:380] conv1_1 -> conv1_1
I0715 16:53:43.955159  5838 net.cpp:122] Setting up conv1_1
I0715 16:53:43.955174  5838 net.cpp:129] Top shape: 1 64 224 224 (3211264)
I0715 16:53:43.955179  5838 net.cpp:137] Memory required for data: 13447168
I0715 16:53:43.955215  5838 layer_factory.hpp:77] Creating layer bn1_1
I0715 16:53:43.955237  5838 net.cpp:84] Creating Layer bn1_1
I0715 16:53:43.955246  5838 net.cpp:406] bn1_1 <- conv1_1
I0715 16:53:43.955265  5838 net.cpp:367] bn1_1 -> conv1_1 (in-place)
I0715 16:53:43.955569  5838 net.cpp:122] Setting up bn1_1
I0715 16:53:43.955579  5838 net.cpp:129] Top shape: 1 64 224 224 (3211264)
I0715 16:53:43.955585  5838 net.cpp:137] Memory required for data: 26292224
I0715 16:53:43.955611  5838 layer_factory.hpp:77] Creating layer relu1_1
I0715 16:53:43.955628  5838 net.cpp:84] Creating Layer relu1_1
I0715 16:53:43.955649  5838 net.cpp:406] relu1_1 <- conv1_1
I0715 16:53:43.955665  5838 net.cpp:367] relu1_1 -> conv1_1 (in-place)
I0715 16:53:43.955680  5838 net.cpp:122] Setting up relu1_1
I0715 16:53:43.955688  5838 net.cpp:129] Top shape: 1 64 224 224 (3211264)
I0715 16:53:43.955693  5838 net.cpp:137] Memory required for data: 39137280
I0715 16:53:43.955699  5838 layer_factory.hpp:77] Creating layer conv1_2
I0715 16:53:43.955723  5838 net.cpp:84] Creating Layer conv1_2
I0715 16:53:43.955730  5838 net.cpp:406] conv1_2 <- conv1_1
I0715 16:53:43.955749  5838 net.cpp:380] conv1_2 -> conv1_2
I0715 16:53:43.956133  5838 net.cpp:122] Setting up conv1_2
I0715 16:53:43.956148  5838 net.cpp:129] Top shape: 1 64 224 224 (3211264)
I0715 16:53:43.956153  5838 net.cpp:137] Memory required for data: 51982336
I0715 16:53:43.956182  5838 layer_factory.hpp:77] Creating layer bn1_2
I0715 16:53:43.956198  5838 net.cpp:84] Creating Layer bn1_2
I0715 16:53:43.956207  5838 net.cpp:406] bn1_2 <- conv1_2
I0715 16:53:43.956223  5838 net.cpp:367] bn1_2 -> conv1_2 (in-place)
I0715 16:53:43.956513  5838 net.cpp:122] Setting up bn1_2
I0715 16:53:43.956524  5838 net.cpp:129] Top shape: 1 64 224 224 (3211264)
I0715 16:53:43.956528  5838 net.cpp:137] Memory required for data: 64827392
I0715 16:53:43.956544  5838 layer_factory.hpp:77] Creating layer relu1_2
I0715 16:53:43.956558  5838 net.cpp:84] Creating Layer relu1_2
I0715 16:53:43.956567  5838 net.cpp:406] relu1_2 <- conv1_2
I0715 16:53:43.956583  5838 net.cpp:367] relu1_2 -> conv1_2 (in-place)
I0715 16:53:43.956598  5838 net.cpp:122] Setting up relu1_2
I0715 16:53:43.956604  5838 net.cpp:129] Top shape: 1 64 224 224 (3211264)
I0715 16:53:43.956609  5838 net.cpp:137] Memory required for data: 77672448
I0715 16:53:43.956615  5838 layer_factory.hpp:77] Creating layer pool1
I0715 16:53:43.956630  5838 net.cpp:84] Creating Layer pool1
I0715 16:53:43.956637  5838 net.cpp:406] pool1 <- conv1_2
I0715 16:53:43.956655  5838 net.cpp:380] pool1 -> pool1
I0715 16:53:43.956674  5838 net.cpp:380] pool1 -> pool1_mask
I0715 16:53:43.956704  5838 net.cpp:122] Setting up pool1
I0715 16:53:43.956715  5838 net.cpp:129] Top shape: 1 64 112 112 (802816)
I0715 16:53:43.956723  5838 net.cpp:129] Top shape: 1 64 112 112 (802816)
I0715 16:53:43.956727  5838 net.cpp:137] Memory required for data: 84094976
I0715 16:53:43.956734  5838 layer_factory.hpp:77] Creating layer unpool1
I0715 16:53:43.956753  5838 net.cpp:84] Creating Layer unpool1
I0715 16:53:43.956760  5838 net.cpp:406] unpool1 <- pool1
I0715 16:53:43.956775  5838 net.cpp:406] unpool1 <- pool1_mask
I0715 16:53:43.956789  5838 net.cpp:380] unpool1 -> unpool1
kernel_size:0has_kernel_h:0has_kernel_w:0
F0715 16:53:43.956820  5838 unpooling_layer.cpp:29] Check failed: !unpool_param.has_kernel_size() != !(unpool_param.has_kernel_h() && unpool_param.has_kernel_w()) Filter size is kernel_size OR kernel_h and kernel_w; not both
*** Check failure stack trace: ***
Aborted (core dumped)

I printed the values of kernel_size, has_kernel_h and has_kernel_w, which are all 0.

This is my deploy.prototxt file.

Prototxt file

name: "Deconv_test"
input: "data"
input_dim: 1
input_dim: 3
input_dim: 224
input_dim: 224
#data:224*224
layers 
{
    bottom: "data"  
    top: "conv1_1"  
    name: "conv1_1"  
    type: CONVOLUTION
    blobs_lr: 1 blobs_lr: 2 
    weight_decay: 1 weight_decay: 0
    convolution_param {    
        num_output: 64    
        pad: 1    
        kernel_size: 3  }
}
#conv1_1
layers 
{ 
    bottom: 'conv1_1' 
    top: 'conv1_1' 
    name: 'bn1_1' 
    type: BN
    bn_param {
        scale_filler { type: 'constant' value: 1 }
        shift_filler { type: 'constant' value: 0.001 }
        bn_mode: INFERENCE} 
}
layers {  
     bottom: "conv1_1"  
     top: "conv1_1"  
     name: "relu1_1"  
     type: RELU}
# conv1_2
layers 
{  
    bottom: "conv1_1"  
    top: "conv1_2"  
    name: "conv1_2"  
    type: CONVOLUTION
    blobs_lr: 1 blobs_lr: 2 
    weight_decay: 1 weight_decay: 0
    convolution_param {    
        num_output: 64    
        pad: 1    
        kernel_size: 3 }
}
layers 
{ 
    bottom: 'conv1_2' 
    top: 'conv1_2' 
    name: 'bn1_2' 
    type: BN
    bn_param { 
        scale_filler { type: 'constant' value: 1 }
        shift_filler { type: 'constant' value: 0.001 }
        bn_mode: INFERENCE } 
}
layers {  
     bottom: "conv1_2"  
     top: "conv1_2"  
     name: "relu1_2"  
     type: RELU}
# pool1
layers 
{
    bottom: "conv1_2"  
    top: "pool1" 
    top:"pool1_mask"  
    name: "pool1"  
    type: POOLING
    pooling_param {    
         pool: MAX    
         kernel_size: 2    
         stride: 2  }
}
# unpool1
layers 
{ 
    type: UNPOOLING  
    bottom: "pool1"  
    bottom: "pool1_mask"  
    top: "unpool1"  
    name: "unpool1"
    unpooling_param {   
        unpool: MAX   
        kernel_size: 2    
        stride: 2   
        unpool_size: 224 
    }
}
# deconv1_1
layers { 
    bottom: 'unpool1' 
    top: 'deconv1_1' 
    name: 'deconv1_1' 
    type: DECONVOLUTION
    blobs_lr: 1 blobs_lr: 2 
    weight_decay: 1 weight_decay: 0
    convolution_param { 
        num_output:64   
        pad:1   
        kernel_size: 3
        weight_filler {      type: "gaussian"      std: 0.01    }
        bias_filler {      type: "constant"      value: 0    }
    } 
}
layers 
{ 
    bottom: 'deconv1_1' 
    top: 'deconv1_1' 
    name: 'debn1_1' 
    type: BN
    bn_param { 
        scale_filler { type: 'constant' value: 1 }
        shift_filler { type: 'constant' value: 0.001 }
        bn_mode: INFERENCE } 
}
layers { 
    bottom: 'deconv1_1' 
    top: 'deconv1_1' 
    name: 'derelu1_1' 
    type: RELU 
}
# deconv1_2
layers 
{ 
    bottom: 'deconv1_1' 
    top: 'deconv1_2' 
    name: 'deconv1_2' 
    type: DECONVOLUTION
    blobs_lr: 1 
    blobs_lr: 2 
    weight_decay: 1 
    weight_decay: 0
    convolution_param {
        num_output:64     
        pad:1   
        kernel_size: 3
        weight_filler {      type: "gaussian"      std: 0.01    }
        bias_filler {      type: "constant"      value: 0    }
    } 
}
layers 
{ 
    bottom: 'deconv1_2' 
    top: 'deconv1_2' 
    name: 'debn1_2' 
    type: BN
    bn_param { scale_filler { type: 'constant' value: 1 }
               shift_filler { type: 'constant' value: 0.001 }
               bn_mode: INFERENCE } }
layers {
    bottom: 'deconv1_2' 
    top: 'deconv1_2' 
    name: 'derelu1_2' 
    type: RELU }
# seg-score
layers 
{ 
    name: 'seg-score-voc' type: CONVOLUTION bottom: 'deconv1_2' top: 'seg-score'
    blobs_lr: 1 blobs_lr: 2 weight_decay: 1 weight_decay: 0
    convolution_param {
        num_output: 21 kernel_size: 1
        weight_filler {
            type: "gaussian"
            std: 0.01 }
        bias_filler {
            type: "constant"
            value: 0 }
    } 
}

I have been searching the net for a long time, but to no avail. Please help or try to give some ideas on how to solve this.

You need to define kernel_size for your layer! kernel_size cannot be zero.
Caffe allows you to define kernel_size in two ways:

  1. Using kernel_size once to use the same value for all spatial dimensions, or once per spatial dimension.
  2. Alternatively, for 2D Blobs, you can specify kernel_h and kernel_w for height and width of the kernel respectively.

See "Deconvolution" layer help for more information.

The technical post webpages of this site follow the CC BY-SA 4.0 protocol. If you need to reprint, please indicate the site URL or the original address.Any question please contact:yoyou2525@163.com.

 
粤ICP备18138465号  © 2020-2024 STACKOOM.COM