# densenet.py

from keras.models import Model
from keras.layers import (Input, Dense, Activation, Conv2D, Concatenate,
                          AveragePooling2D, GlobalAveragePooling2D,
                          MaxPooling2D, BatchNormalization)
from keras.regularizers import l2


def H_factory(x, nb_filters, growth_rate, bottleneck=False, weight_decay=1E-4):
    """Builds one composite H layer (BN-ReLU-Conv) used inside each dense block.
    x            - input tensor
    nb_filters   - number of filters for the 3x3 convolution. Supplied from the
                   transition layer if no compression is used.
    growth_rate  - growth rate of the network; sizes the optional bottleneck.
    bottleneck   - if True, a 1x1 conv layer is added before the 3x3 conv; in
                   that case growth_rate has to be supplied as well.
    weight_decay - L2 regularization factor for the conv and BN weights.
    """
    x = BatchNormalization(axis=-1,
                           gamma_regularizer=l2(weight_decay),
                           beta_regularizer=l2(weight_decay))(x)
    x = Activation('relu')(x)
    # if bottleneck is True, add a 1x1 conv layer before the 3x3 conv
    if bottleneck:
        x = Conv2D(filters=4 * growth_rate, kernel_size=(1, 1),
                   kernel_initializer="he_uniform",
                   padding="same", use_bias=False,
                   kernel_regularizer=l2(weight_decay))(x)
    x = Conv2D(filters=nb_filters, kernel_size=(3, 3),
               kernel_initializer="he_uniform",
               padding="same", use_bias=False,
               kernel_regularizer=l2(weight_decay))(x)
    return x


def dense_block(x, nb_layers, growth_rate, nb_filters,
                bottleneck=False, weight_decay=1E-4):
    """Creates a dense block by stacking H_factory layers.
    x            - input tensor (from the previous layer)
    nb_layers    - number of conv layers in the dense block
    growth_rate  - growth rate of the network (see paper)
    nb_filters   - number of feature maps entering the block
    bottleneck   - add a 1x1 conv layer before each 3x3 conv layer.
    weight_decay - L2 regularization factor, passed through to H_factory.
    """
    # Start the list with only the input feature maps; more are added below.
    list_of_features = [x]
    # Generate the conv layers of the dense block. After each layer, append its
    # feature maps to the list and concatenate the list to form the input to
    # the following layer.
    for i in range(nb_layers):
        cb = H_factory(x, growth_rate, growth_rate, bottleneck, weight_decay)
        list_of_features.append(cb)
        x = Concatenate(axis=-1)(list_of_features)
        nb_filters += growth_rate
    # Return the final number of feature maps and the output of the dense block
    return nb_filters, x
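

# Worked example of the filter growth (values taken from the paper's CIFAR
# configuration, not fixed anywhere in this file): a block entered with
# nb_filters=16, growth_rate=12 and nb_layers=12 returns 16 + 12*12 = 160
# feature maps, since each H layer contributes growth_rate new maps.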


def transition_layer(x, nb_filters, compression_factor=1, weight_decay=1E-4):
    """Transition layer: batch norm, ReLU, 1x1 convolution and average pooling.
    x                  - input tensor (previous layer)
    nb_filters         - number of feature maps from the preceding dense block;
                         compression is applied to it if requested
    compression_factor - fraction of feature maps to keep (1 = no compression)
    weight_decay       - L2 regularization factor.
    """
    # apply the compression factor
    nb_filters = int(nb_filters * compression_factor)
    x = BatchNormalization(axis=-1,
                           gamma_regularizer=l2(weight_decay),
                           beta_regularizer=l2(weight_decay))(x)
    x = Activation('relu')(x)
    x = Conv2D(filters=nb_filters, kernel_size=(1, 1),
               kernel_initializer="he_uniform",
               padding="same", use_bias=False,
               kernel_regularizer=l2(weight_decay))(x)
    x = AveragePooling2D(pool_size=(2, 2), strides=(2, 2))(x)
    # Return the compressed filter count so callers can track it correctly
    return nb_filters, x
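

# For example (illustrative values): with compression_factor=0.5, a dense
# block that ends with 160 feature maps is compressed to 80 before the next
# block; with compression_factor=1 the count is unchanged.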


def DenseNet(img_dim, growth_rate, nb_classes, nb_filters, nb_layers,
             init_kernel_size=(3, 3),
             bottleneck=False,
             compression_factor=1,
             weight_decay=1E-4):
    """Generates a DenseNet model (CIFAR-style: no initial max pooling).
    img_dim            - dimensions of input images, tuple
    growth_rate        - growth rate of the network (see paper)
    nb_classes         - number of classes at the final dense layer
    nb_filters         - number of initial filters (usually 16)
    nb_layers          - number of conv layers in each dense block, a list
    init_kernel_size   - kernel size of the initial convolution
    bottleneck         - use 1x1 bottleneck convs inside the dense blocks
    compression_factor - fraction of feature maps kept by transition layers
    weight_decay       - L2 regularization factor.
    """
    model_input = Input(shape=img_dim)
    x = Conv2D(filters=nb_filters, kernel_size=init_kernel_size,
               kernel_initializer="he_uniform",
               padding="same", use_bias=False,
               name="initialConv2D",
               kernel_regularizer=l2(weight_decay))(model_input)
    # for the ImageNet type of DenseNet we would also need max pooling here:
    # x = MaxPooling2D(pool_size=(2, 2), strides=(2, 2))(x)
    # generate dense blocks
    for i, n in enumerate(nb_layers):
        nb_filters, x = dense_block(x, n, growth_rate, nb_filters,
                                    bottleneck, weight_decay)
        # the last dense block is not followed by a transition layer
        if i + 1 < len(nb_layers):
            nb_filters, x = transition_layer(x, nb_filters,
                                             compression_factor, weight_decay)
    # batch norm and ReLU after the last dense block
    x = BatchNormalization(axis=-1,
                           gamma_regularizer=l2(weight_decay),
                           beta_regularizer=l2(weight_decay))(x)
    x = Activation('relu')(x)
    # global average pooling over the spatial dimensions
    x = GlobalAveragePooling2D()(x)
    # FC layer with one unit per class and softmax activation
    x = Dense(nb_classes, activation='softmax')(x)
    # assemble the model from its inputs and outputs
    model = Model(inputs=[model_input], outputs=[x], name="DenseNet")
    return model
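

# Illustrative configuration (DenseNet-40 with k=12 from the paper; these
# values are not pinned anywhere in this file):
# model = DenseNet(img_dim=(32, 32, 3), growth_rate=12, nb_classes=10,
#                  nb_filters=16, nb_layers=[12, 12, 12])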


def DenseNetIN(img_dim, growth_rate, nb_classes, nb_filters, nb_layers,
               init_kernel_size=(7, 7),
               bottleneck=True,
               compression_factor=0.5,
               weight_decay=1E-4):
    """Generates an ImageNet-style DenseNet model (DenseNet-BC: initial max
    pooling, with bottleneck layers and compression enabled by default).
    img_dim            - dimensions of input images, tuple
    growth_rate        - growth rate of the network (see paper)
    nb_classes         - number of classes at the final dense layer
    nb_filters         - number of initial filters
    nb_layers          - number of conv layers in each dense block, a list
    init_kernel_size   - kernel size of the initial convolution
    bottleneck         - use 1x1 bottleneck convs inside the dense blocks
    compression_factor - fraction of feature maps kept by transition layers
    weight_decay       - L2 regularization factor.
    """
    model_input = Input(shape=img_dim)
    x = Conv2D(filters=nb_filters, kernel_size=init_kernel_size,
               kernel_initializer="he_uniform",
               padding="same", use_bias=False,
               name="initialConv2D",
               kernel_regularizer=l2(weight_decay))(model_input)
    # the ImageNet type of DenseNet also max-pools after the initial conv
    x = MaxPooling2D(pool_size=(2, 2), strides=(2, 2))(x)
    # generate dense blocks
    for i, n in enumerate(nb_layers):
        nb_filters, x = dense_block(x, n, growth_rate, nb_filters,
                                    bottleneck, weight_decay)
        # the last dense block is not followed by a transition layer
        if i + 1 < len(nb_layers):
            nb_filters, x = transition_layer(x, nb_filters,
                                             compression_factor, weight_decay)
    # batch norm and ReLU after the last dense block
    x = BatchNormalization(axis=-1,
                           gamma_regularizer=l2(weight_decay),
                           beta_regularizer=l2(weight_decay))(x)
    x = Activation('relu')(x)
    # global average pooling over the spatial dimensions
    x = GlobalAveragePooling2D()(x)
    # FC layer with one unit per class and softmax activation
    x = Dense(nb_classes, activation='softmax')(x)
    # assemble the model from its inputs and outputs
    model = Model(inputs=[model_input], outputs=[x], name="DenseNetIN")
    return model
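

# Minimal smoke test. The configurations below are illustrative sketches
# (taken from the paper's DenseNet-40 and DenseNet-121 layouts), not values
# shipped with this file.
if __name__ == "__main__":
    # CIFAR-style DenseNet-40 (k=12): three dense blocks of 12 layers each
    cifar_model = DenseNet(img_dim=(32, 32, 3), growth_rate=12,
                           nb_classes=10, nb_filters=16,
                           nb_layers=[12, 12, 12])
    cifar_model.compile(optimizer="sgd",
                        loss="categorical_crossentropy",
                        metrics=["accuracy"])
    cifar_model.summary()

    # ImageNet-style DenseNet-121 (k=32): dense blocks of 6, 12, 24, 16 layers
    imagenet_model = DenseNetIN(img_dim=(224, 224, 3), growth_rate=32,
                                nb_classes=1000, nb_filters=64,
                                nb_layers=[6, 12, 24, 16])
    imagenet_model.summary()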