my_resnet.py
'''
Code Source:
https://towardsdatascience.com/implementing-a-resnet-model-from-scratch-971be7193718
https://github.com/raghakot/keras-resnet/blob/master/resnet.py
'''
from __future__ import division
from keras.layers.normalization import BatchNormalization
from keras.models import Model
from keras.layers.core import (
    Activation,
    Dense
)
from keras.layers import (
    Input,
    Flatten,
    add
)
from keras.layers.convolutional import (
    Conv2D,
    MaxPooling2D,
    AveragePooling2D
)
from keras.regularizers import l2
from keras import backend as K

class ResNet:
    @staticmethod
    def residual_module(data, K, stride, chanDim, red=False,
                        reg=0.0001, bnEps=2e-5, bnMom=0.9):
        # NOTE: here K is the number of output filters and shadows the
        # Keras backend imported as K at module level; the backend is not
        # used inside this function, so the shadowing is harmless
        # initialize the shortcut as the input data, for the skip connection
        shortcut = data

        # first block of the bottleneck: 1x1 CONVs with K/4 filters
        # (pre-activation ordering: bn1 -> act1 -> conv1)
        bn1 = BatchNormalization(axis=chanDim, epsilon=bnEps, momentum=bnMom)(data)
        act1 = Activation("relu")(bn1)
        conv1 = Conv2D(int(K * 0.25), (1, 1), use_bias=False,
                       kernel_regularizer=l2(reg))(act1)

        # second block: 3x3 CONVs with K/4 filters; this CONV applies
        # the stride, so it is the one that can reduce the spatial size
        bn2 = BatchNormalization(axis=chanDim, epsilon=bnEps, momentum=bnMom)(conv1)
        act2 = Activation("relu")(bn2)
        conv2 = Conv2D(int(K * 0.25), (3, 3), strides=stride, padding="same",
                       use_bias=False, kernel_regularizer=l2(reg))(act2)

        # third block: 1x1 CONVs back up to K filters
        bn3 = BatchNormalization(axis=chanDim, epsilon=bnEps, momentum=bnMom)(conv2)
        act3 = Activation("relu")(bn3)
        conv3 = Conv2D(K, (1, 1), use_bias=False, kernel_regularizer=l2(reg))(act3)

        # if we are reducing the spatial dimensions, apply a strided 1x1
        # CONV to the shortcut so its shape matches conv3
        if red:
            shortcut = Conv2D(K, (1, 1), strides=stride, use_bias=False,
                              kernel_regularizer=l2(reg))(act1)

        # add the shortcut and the final CONV together; this sum is the
        # output of the residual module
        resOut = add([conv3, shortcut])
        return resOut

    '''
    Stack residual modules in stages (the network can have n stages).
    Residual modules in the SAME stage share the same filter count.
    Repeat until the volume is ready for average pooling & softmax.
    '''
    @staticmethod
    def build(width, height, depth, classes, stages, filters,
              reg=0.0001, bnEps=2e-5, bnMom=0.9):
        # initialize the input shape and channel dimension,
        # assuming "channels last" ordering
        inputShape = (height, width, depth)
        chanDim = -1

        # if using "channels first" ordering, update the input shape
        # and channel dimension
        if K.image_data_format() == "channels_first":
            inputShape = (depth, height, width)
            chanDim = 1

        # set the input and apply BN
        inputs = Input(shape=inputShape)
        x = BatchNormalization(axis=chanDim, epsilon=bnEps,
                               momentum=bnMom)(inputs)

        # CONV -> BN -> ACT -> POOL to reduce the spatial size
        x = Conv2D(filters[0], (5, 5), use_bias=False,
                   padding="same", kernel_regularizer=l2(reg))(x)
        x = BatchNormalization(axis=chanDim, epsilon=bnEps,
                               momentum=bnMom)(x)
        x = Activation("relu")(x)
        x = MaxPooling2D((3, 3), strides=(2, 2))(x)

        # loop over the stages
        for i in range(0, len(stages)):
            # initialize the stride, then apply a residual module used
            # to reduce the spatial size of the input volume
            stride = (1, 1) if i == 0 else (2, 2)
            x = ResNet.residual_module(x, filters[i + 1], stride,
                                       chanDim, red=True, bnEps=bnEps,
                                       bnMom=bnMom)

            # loop over the remaining modules in the stage
            for j in range(0, stages[i] - 1):
                # apply a residual module that keeps the spatial size
                x = ResNet.residual_module(x, filters[i + 1],
                                           (1, 1), chanDim, bnEps=bnEps,
                                           bnMom=bnMom)

        # apply BN -> ACT -> POOL
        x = BatchNormalization(axis=chanDim, epsilon=bnEps,
                               momentum=bnMom)(x)
        x = Activation("relu")(x)
        x = AveragePooling2D((2, 2))(x)

        # softmax classifier
        x = Flatten()(x)
        x = Dense(classes, kernel_regularizer=l2(reg))(x)
        x = Activation("softmax")(x)

        # create and return the constructed network architecture
        model = Model(inputs, x, name="resnet")
        return model
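

# A minimal usage sketch (an assumption, not part of the original file):
# the input size, class count, stage depths, and filter counts below are
# illustrative values in the style of CIFAR-scale ResNets. Note that
# `filters` needs one more entry than `stages`, since filters[0] is
# consumed by the stem CONV layer before the first stage.
if __name__ == "__main__":
    # 32x32 RGB inputs, 10 classes; three stages of (3, 4, 6) residual
    # modules with (64, 128, 256) filters, after a 64-filter stem
    model = ResNet.build(width=32, height=32, depth=3, classes=10,
                         stages=(3, 4, 6), filters=(64, 64, 128, 256))
    model.summary()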