updated Caffe template models for external testing
beniz committed Mar 9, 2015
1 parent fbb91c0 commit 8218120
Showing 6 changed files with 85 additions and 82 deletions.
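Across the six templates the change follows one pattern: the TEST-phase Data layers that read LMDBs are replaced (or commented out) in favour of MemoryData layers with explicit batch_size/channels/height/width, and the Accuracy layers are replaced by plain Softmax layers ("losst", or "probt" for GoogLeNet), so an external caller can push images into the net from memory and read back class probabilities. As a rough illustration only, a pycaffe sketch of driving such a template is given below; the weights file, the label values, the presence of any mean.binaryproto the template references, and the use of pycaffe at all are assumptions, not part of this commit.

```python
# Rough pycaffe sketch, not part of this commit: the weights file and the
# label values are made up for illustration.
import numpy as np
import caffe

caffe.set_mode_cpu()
net = caffe.Net('templates/caffe/cifar/cifar.prototxt',   # template as changed below
                'cifar_iter_5000.caffemodel',              # assumed pre-trained weights
                caffe.TEST)

# MemoryData layers are fed directly from memory: float32, C-contiguous, NCHW,
# with the batch matching memory_data_param (batch_size: 100, 3x32x32 for cifar).
data = np.ascontiguousarray(np.random.rand(100, 3, 32, 32).astype(np.float32))
labels = np.ascontiguousarray(np.zeros((100, 1, 1, 1), dtype=np.float32))
net.set_input_arrays(data, labels)

net.forward()
# The TEST-phase Softmax layer ("losst") now yields per-class probabilities
# instead of the single scalar the old Accuracy layer produced.
probs = net.blobs['losst'].data
print(probs.shape)   # (100, num_classes)
```

In stock Caffe, set_input_arrays expects the first layer of the instantiated net to be a MemoryData layer, which is what the TEST-phase layer swaps below provide.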
46 changes: 30 additions & 16 deletions templates/caffe/alexnet/alexnet.prototxt
@@ -18,24 +18,39 @@ layer {
backend: LMDB
}
}
#layer {
# name: "data"
# type: "Data"
# top: "data"
# top: "label"
# include {
# phase: TEST
# }
# transform_param {
# mirror: false
# crop_size: 227
# mean_file: "mean.binaryproto"
# }
# data_param {
# source: "test.lmdb"
# batch_size: 50
# backend: LMDB
# }
#}
layer {
name: "data"
type: "Data"
name: "alexnet"
type: "MemoryData"
top: "data"
top: "label"
memory_data_param {
batch_size: 256
channels: 3
height: 227
width: 227
}
include {
phase: TEST
}
transform_param {
mirror: false
crop_size: 227
mean_file: "mean.binaryproto"
}
data_param {
source: "test.lmdb"
batch_size: 50
backend: LMDB
}
}
layer {
name: "conv1"
@@ -366,11 +381,10 @@ layer {
}
}
layer {
name: "accuracy"
type: "Accuracy"
name: "losst"
type: "Softmax"
bottom: "fc8"
bottom: "label"
top: "accuracy"
top: "losst"
include {
phase: TEST
}
26 changes: 10 additions & 16 deletions templates/caffe/cifar/cifar.prototxt
@@ -18,20 +18,18 @@ layer {
}
layer {
name: "cifar"
type: "Data"
type: "MemoryData"
top: "data"
top: "label"
memory_data_param {
batch_size: 100
channels: 3
height: 32
width: 32
}
include {
phase: TEST
}
transform_param {
mean_file: "mean.binaryproto"
}
data_param {
source: "test_lmdb"
batch_size: 100
backend: LMDB
}
}
layer {
name: "conv1"
@@ -202,14 +200,10 @@ layer {
}
}
layer {
name: "accuracy"
type: "Accuracy"
name: "losst"
type: "Softmax"
bottom: "ip1"
bottom: "label"
top: "accuracy"
include {
phase: TEST
}
top: "losst"
}
layer {
name: "loss"
50 changes: 26 additions & 24 deletions templates/caffe/googlenet/googlenet.prototxt
@@ -22,25 +22,18 @@ layer {
}
}
layer {
name: "data"
type: "Data"
name: "googlenet"
type: "MemoryData"
top: "data"
top: "label"
include {
phase: TEST
}
transform_param {
mirror: false
crop_size: 224
# mean_value: 104
# mean_value: 117
# mean_value: 123
mean_file: "mean.binaryproto"
memory_data_param {
batch_size: 32
channels: 3
height: 224
width: 224
}
data_param {
source: "test.lmdb"
batch_size: 50
backend: LMDB
include {
phase: TEST
}
}
layer {
@@ -2483,16 +2476,25 @@ layer {
phase: TEST
}
}
layer {
name: "loss3/top-5"
type: "Accuracy"
#layer {
# name: "loss3/top-5"
# type: "Accuracy"
# bottom: "loss3/classifier"
# bottom: "label"
# top: "loss3/top-5"
# include {
# phase: TEST
# }
# accuracy_param {
# top_k: 5
# }
#}
layer {
name: "probt"
type: "Softmax"
bottom: "loss3/classifier"
bottom: "label"
top: "loss3/top-5"
top: "probt"
include {
phase: TEST
}
accuracy_param {
top_k: 5
}
}
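With the top-5 Accuracy layer commented out, the GoogLeNet template now only exposes raw probabilities through the "probt" Softmax top, so any top-k bookkeeping has to happen on the caller's side. A small, self-contained NumPy sketch of that computation follows; the blob name in the trailing comment comes from the diff above, everything else is illustrative.

```python
import numpy as np

def top_k_accuracy(probs, labels, k=5):
    """Top-k accuracy over a (N, num_classes) probability array, mirroring what
    the removed Accuracy layer with top_k: 5 computed inside the network."""
    topk = np.argsort(probs, axis=1)[:, -k:]   # column indices of the k largest probabilities
    return float(np.mean([y in row for y, row in zip(labels, topk)]))

# After a forward pass on the TEST net, for example:
#   probs = net.blobs['probt'].data.reshape(net.blobs['probt'].data.shape[0], -1)
#   print(top_k_accuracy(probs, true_labels, k=5))
```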
7 changes: 3 additions & 4 deletions templates/caffe/lregression/lregression.prototxt
@@ -42,11 +42,10 @@ layer {
}
}
layer {
name: "accuracy"
type: "Accuracy"
name: "losst"
type: "Softmax"
bottom: "ip1"
bottom: "label"
top: "accuracy"
top: "losst"
include: { phase: TEST }
}
layer {
11 changes: 6 additions & 5 deletions templates/caffe/mlp/mlp.prototxt
@@ -73,12 +73,13 @@ layer {
}
}
layer {
name: "accuracy"
type: "Accuracy"
name: "losst"
type: "Softmax"
bottom: "ip2"
bottom: "label"
top: "accuracy"
include: { phase: TEST }
top: "losst"
include {
phase: TEST
}
}
layer {
name: "loss"
27 changes: 10 additions & 17 deletions templates/caffe/nin/nin.prototxt
@@ -21,17 +21,13 @@ layer {
layer {
top: "data"
top: "label"
name: "nin"
type: "Data"
data_param {
source: "train.lmdb"
backend: LMDB
batch_size: 89
}
transform_param {
crop_size: 224
mirror: false
mean_file: "mean.binaryproto"
name: "data"
type: "MemoryData"
memory_data_param {
batch_size: 64
channels: 3
height: 224
width: 224
}
include: { phase: TEST }
}
@@ -500,18 +496,15 @@ layer {
}
}
layer {
name: "accuracy"
type: "Accuracy"
bottom: "pool4"
bottom: "label"
top: "accuracy"
include: { phase: TEST }
name: "losst"
type: "Softmax"
top: "losst"
}
layer {
bottom: "pool4"
bottom: "label"
name: "loss"
type: "SoftmaxWithLoss"
include: { phase: TRAIN }
top: "loss"
}
