fixes to dropout layer

cheeyos committed Jan 27, 2015
1 parent a6d13f0 commit 2a01554

Showing 4 changed files with 19 additions and 18 deletions.
4 changes: 2 additions & 2 deletions models/brody/solver_driving_softmax.prototxt

@@ -1,4 +1,4 @@
-net: "models/brody/train_val_driving_softmax.prototxt"
+net: "models/brody/train_val_driving_softmax_norm.prototxt"
 test_iter: 20
 test_interval: 5000
 test_compute_loss: true
@@ -11,5 +11,5 @@ max_iter: 1450000
 momentum: 0.9
 weight_decay: 0.00005
 snapshot: 1000
-snapshot_prefix: "models/brody/driving_softmax_8x8"
+snapshot_prefix: "models/brody/driving_softmax_8x8_norm"
 solver_mode: GPU
4 changes: 2 additions & 2 deletions models/brody/solver_normalization.prototxt

@@ -2,14 +2,14 @@ net: "models/brody/train_val_driving_normalization.prototxt"
 test_iter: 20
 test_interval: 5000
 test_compute_loss: true
-base_lr: 0.0000001
+base_lr: 0.002
 lr_policy: "step"
 gamma: 0.1
 stepsize: 100000
 display: 20
 max_iter: 1450000
 momentum: 0.9
-weight_decay: 0.00005
+weight_decay: 0.0005
 snapshot: 1000
 snapshot_prefix: "models/brody/driving_normalization"
 solver_mode: GPU
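
Note on the solver change: this commit raises the base learning rate from 1e-7 to 0.002 and the solver-level weight decay from 5e-5 to 5e-4, keeping the "step" decay policy. With Caffe's "step" policy the effective rate is base_lr * gamma^floor(iter / stepsize). A minimal standalone sketch of that schedule, using the values from this solver:

#include <cmath>
#include <cstdio>

// Sketch of Caffe's "step" learning-rate policy with the values
// from solver_normalization.prototxt after this commit.
double step_lr(int iter) {
  const double base_lr = 0.002;   // raised from 1e-7 here
  const double gamma = 0.1;
  const int stepsize = 100000;
  // integer division gives the floor, so the rate drops 10x every 100k iters
  return base_lr * std::pow(gamma, iter / stepsize);
}

int main() {
  // prints 0.002, 0.0002, 2e-05, 2e-06
  for (int iter = 0; iter <= 300000; iter += 100000)
    std::printf("iter %7d: lr = %g\n", iter, step_lr(iter));
  return 0;
}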
26 changes: 13 additions & 13 deletions models/brody/train_val_driving_normalization.prototxt

@@ -118,7 +118,7 @@ layers {
   bottom: "data"
   top: "L0"
   blobs_lr: 1
-  blobs_lr: 2
+  blobs_lr: 1
   weight_decay: 1
   weight_decay: 0
   convolution_param {
@@ -169,7 +169,7 @@ layers {
   bottom: "pool1"
   top: "L1"
   blobs_lr: 1
-  blobs_lr: 2
+  blobs_lr: 1
   weight_decay: 1
   weight_decay: 0
   convolution_param {
@@ -221,7 +221,7 @@ layers {
   bottom: "pool2"
   top: "L2"
   blobs_lr: 1
-  blobs_lr: 2
+  blobs_lr: 1
   weight_decay: 1
   weight_decay: 0
   convolution_param {
@@ -250,7 +250,7 @@ layers {
   bottom: "L2"
   top: "L3"
   blobs_lr: 1
-  blobs_lr: 2
+  blobs_lr: 1
   weight_decay: 1
   weight_decay: 0
   convolution_param {
@@ -280,7 +280,7 @@ layers {
   bottom: "L3"
   top: "L4"
   blobs_lr: 1
-  blobs_lr: 2
+  blobs_lr: 1
   weight_decay: 1
   weight_decay: 0
   convolution_param {
@@ -322,7 +322,7 @@ layers {
   bottom: "pool5"
   top: "L5"
   blobs_lr: 1
-  blobs_lr: 2
+  blobs_lr: 1
   weight_decay: 1
   weight_decay: 0
   convolution_param {
@@ -360,7 +360,7 @@ layers {
   bottom: "L5"
   top: "L6"
   blobs_lr: 1
-  blobs_lr: 2
+  blobs_lr: 1
   weight_decay: 1
   weight_decay: 0
   convolution_param {
@@ -398,19 +398,19 @@ layers {
   bottom: "L6"
   top: "bb-output"
   blobs_lr: 1
-  blobs_lr: 2
+  blobs_lr: 1
   weight_decay: 1
   weight_decay: 0
   convolution_param {
     num_output: 256
     kernel_size: 1
     weight_filler {
       type: "gaussian"
-      std: 0.005
+      std: 0.01
     }
     bias_filler {
       type: "constant"
-      value: 1.0
+      value: 0.0
     }
   }
 }
@@ -421,19 +421,19 @@ layers {
   bottom: "L6"
   top: "pixel-conv"
   blobs_lr: 1
-  blobs_lr: 2
+  blobs_lr: 1
   weight_decay: 1
   weight_decay: 0
   convolution_param {
     num_output: 128
     kernel_size: 1
     weight_filler {
       type: "gaussian"
-      std: 0.005
+      std: 0.01
     }
     bias_filler {
       type: "constant"
-      value: 1.0
+      value: 0.0
     }
   }
 }
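
In Caffe's layer definitions, repeated blobs_lr and weight_decay entries are per-parameter-blob multipliers: the first applies to the weight blob, the second to the bias blob. This commit drops the bias learning-rate multiplier from 2 (the common AlexNet-style convention) to 1 in every convolution layer, and switches the two output heads (bb-output and pixel-conv) from a 0.005-std Gaussian weight init with bias 1.0 to a 0.01-std init with bias 0.0. A minimal sketch (assumed names, not Caffe's actual solver code) of how the per-blob multipliers combine with the solver-level settings:

#include <cstdio>

// Hypothetical illustration of Caffe's per-blob hyperparameters:
// effective value = solver-level value * per-blob multiplier.
struct BlobHyper { double lr_mult; double decay_mult; };

int main() {
  const double base_lr = 0.002;      // from solver_normalization.prototxt
  const double base_decay = 0.0005;  // ditto
  const BlobHyper weights = {1.0, 1.0};  // blobs_lr: 1, weight_decay: 1
  const BlobHyper bias    = {1.0, 0.0};  // blobs_lr: 1 (was 2), weight_decay: 0
  std::printf("weights: lr=%g decay=%g\n",
              base_lr * weights.lr_mult, base_decay * weights.decay_mult);
  std::printf("bias:    lr=%g decay=%g (biases are not regularized)\n",
              base_lr * bias.lr_mult, base_decay * bias.decay_mult);
  return 0;
}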
3 changes: 2 additions & 1 deletion src/caffe/layers/dropout_fixed_layer.cpp

@@ -17,7 +17,8 @@ void DropoutFixedLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
   threshold_ = this->layer_param_.dropout_param().dropout_ratio();
   DCHECK(threshold_ > 0.);
   DCHECK(threshold_ < 1.);
-  scale_ = 1. / (1. - threshold_);
+  //scale_ = 1. / (1. - threshold_);
+  scale_ = 1.;
   uint_thres_ = static_cast<unsigned int>(UINT_MAX * threshold_);
 }
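
This is the change the commit message refers to. Caffe's stock DropoutLayer uses inverted dropout: surviving activations are scaled by 1/(1 - ratio) at training time so that the test-time forward pass is an identity. Setting scale_ = 1. switches this DropoutFixedLayer to the classic formulation, where activations keep their raw magnitude during training. A minimal sketch of the two conventions (a hypothetical helper, not the layer's actual forward code, which draws its mask with Caffe's RNG):

#include <cstdlib>

// Illustrative element-wise dropout forward pass contrasting the two
// scaling conventions; "inverted" is stock Caffe, scale == 1 is this layer.
void dropout_forward(const float* bottom, float* top, int n,
                     float ratio, bool inverted) {
  const float scale = inverted ? 1.0f / (1.0f - ratio) : 1.0f;
  for (int i = 0; i < n; ++i) {
    const bool keep = (std::rand() / (float)RAND_MAX) >= ratio;
    // dropped units output 0; survivors are scaled (or left as-is)
    top[i] = keep ? bottom[i] * scale : 0.0f;
  }
}

In the classic formulation, test-time outputs are meant to be scaled by (1 - ratio) instead; whether this layer applies that correction elsewhere is not shown in this diff.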
