
Commit e8c0695

update comments
chwilms authored Feb 25, 2019
1 parent e8628c8 commit e8c0695
Showing 1 changed file with 5 additions and 5 deletions.
10 changes: 5 additions & 5 deletions in models/attentionMask-8-128.test.prototxt
@@ -3482,7 +3482,7 @@ layer {
top: "res4f_div3"
}

-########## shared deep_mask feature on VGG16 48s ##########
+########## shared neck at scale 48 ##########

layer { name: "conv5_4_div3" type:"Convolution" bottom: "res4f_div3" top: "conv5_4_div3"
param { name: 'rnn_conv_1_weight' lr_mult: 10} param { name: 'rnn_conv_1_bias' lr_mult: 20}
@@ -3506,7 +3506,7 @@ layer { name: "sum_48s" type: "Eltwise" bottom: "pool5a_div3" bottom: "pool5b_di
layer { name: "sum_48s_relu" type: "ReLU" bottom: "sum_48s" top: "sum_48s" }


-########## shared deep_mask feature on VGG16 96s ##########
+########## shared neck at scale 96 ##########

layer { name: "conv6_4_div3" type:"Convolution" bottom: "sum_48s" top: "conv6_4_div3"
param { name: 'rnn_conv_1_weight' lr_mult: 10} param { name: 'rnn_conv_1_bias' lr_mult: 20}
@@ -3530,7 +3530,7 @@ layer { name: "sum_96s" type: "Eltwise" bottom: "pool6a_div3" bottom: "pool6b_di
layer { name: "sum_96s_relu" type: "ReLU" bottom: "sum_96s" top: "sum_96s" }


-########## shared deep_mask feature on VGG16 32s ##########
+########## shared neck at scale 32 ##########

layer { name: "conv5_4" type:"Convolution" bottom: "res4f" top: "conv5_4"
param { name: 'rnn_conv_1_weight' } param { name: 'rnn_conv_1_bias' }
@@ -3553,7 +3553,7 @@ layer { name: "pool5a" type: "Pooling" bottom: "res4f" top: "pool5a"
layer { name: "sum_32s" type: "Eltwise" bottom: "pool5a" bottom: "pool5b" top: "sum_32s" }
layer { name: "sum_32s_relu" type: "ReLU" bottom: "sum_32s" top: "sum_32s" }

-########## shared deep_mask feature on VGG16 64s ##########
+########## shared neck at scale 64 ##########

layer { name: "conv6_4" type:"Convolution" bottom: "sum_32s" top: "conv6_4"
param { name: 'rnn_conv_1_weight' } param { name: 'rnn_conv_1_bias' }
@@ -3577,7 +3577,7 @@ layer { name: "sum_64s" type: "Eltwise" bottom: "pool6a" bottom: "pool6b" top: "
layer { name: "sum_64s_relu" type: "ReLU" bottom: "sum_64s" top: "sum_64s" }


-########## shared deep_mask feature on VGG16 128s ##########
+########## shared neck at scale 128 ##########

layer { name: "conv7_4" type:"Convolution" bottom: "sum_64s" top: "conv7_4"
param { name: 'rnn_conv_1_weight' } param { name: 'rnn_conv_1_bias' }
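Context for the renamed blocks: each "shared neck" convolution reuses one filter set across scales by declaring param entries with identical names (rnn_conv_1_weight, rnn_conv_1_bias), while the lr_mult overrides (10/20 in the *_div3 branch, defaults elsewhere) differ per layer. A minimal prototxt sketch of that weight-sharing idiom follows; the layer names, blob names, and filter sizes are illustrative only and are not taken from this file.

# Sketch: two convolutions at different scales sharing one filter set.
# Identical param names tie both layers to the same underlying weight blobs;
# lr_mult can still be set independently per layer.
layer {
  name: "neck_conv_fine"
  type: "Convolution"
  bottom: "feat_fine"
  top: "neck_fine"
  param { name: "shared_neck_weight" lr_mult: 1 }   # weights shared by name
  param { name: "shared_neck_bias" lr_mult: 2 }     # bias shared by name
  convolution_param { num_output: 128 kernel_size: 1 }
}
layer {
  name: "neck_conv_coarse"
  type: "Convolution"
  bottom: "feat_coarse"
  top: "neck_coarse"
  param { name: "shared_neck_weight" lr_mult: 10 }  # same blob, larger learning-rate multiplier
  param { name: "shared_neck_bias" lr_mult: 20 }
  convolution_param { num_output: 128 kernel_size: 1 }
}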
