-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathResNetBuilder.py
165 lines (126 loc) · 6.27 KB
/
ResNetBuilder.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
from __future__ import print_function
import tensorflow
import tensorflow.keras
from tensorflow.keras.layers import AveragePooling2D, Input, Flatten
from tensorflow.keras.layers import BatchNormalization, Activation
from tensorflow.keras.models import Model
from localLayers import QuantizedConv2D, QuantizedDense
def resnet_layer(inputs, num_filters=16, kernel_size=3, strides=1, activation='relu', batch_normalization=True, conv_first=False, layersconfig=None):
    """Quantized conv / batch-norm building block shared by both ResNets.

    Parameters
    ----------
    inputs : tensor
        Input tensor (an image input or the previous layer's output).
    num_filters : int
        Number of convolution filters.
    kernel_size : int
        Square convolution kernel size.
    strides : int
        Square convolution stride (2 downsamples).
    activation : str or None
        Activation name, forwarded to QuantizedConv2D; no separate
        Activation layer is added here.
    batch_normalization : bool
        Whether to insert a BatchNormalization layer.
    conv_first : bool
        True for conv->BN ordering (ResNet v1 style), False for
        BN->conv pre-activation ordering (ResNet v2 style).
    layersconfig : dict
        Quantization configuration; must provide 'initializer'.

    Returns
    -------
    tensor
        Output tensor of the stacked layers.
    """
    initializer = layersconfig['initializer']
    # NOTE: the activation is handled inside QuantizedConv2D itself, which is
    # why no explicit Activation layer appears in either ordering below.
    conv = QuantizedConv2D(num_filters, kernel_size, activation, initializer,
                           strides, layersconfig)
    x = inputs
    if conv_first:
        x = conv(x)
        if batch_normalization:
            x = BatchNormalization()(x)
    else:
        if batch_normalization:
            x = BatchNormalization()(x)
        x = conv(x)
    return x
def resnet_v1(input_shape, depth, num_classes=10, initializer='heconstant', layersconfig=None):
    """Build a quantized ResNet v1 classifier (conv-BN ordering).

    Parameters
    ----------
    input_shape : tuple
        Shape of one input image, e.g. (32, 32, 3).
    depth : int
        Total number of conv layers; must be 6n+2 (20, 32, 44, ...).
    num_classes : int
        Number of output classes for the softmax head.
    initializer : str
        Weight initializer name for the final QuantizedDense layer.
        (The conv layers take theirs from layersconfig['initializer'].)
    layersconfig : dict
        Quantization configuration forwarded to every quantized layer.

    Returns
    -------
    Model
        The compiled-ready Keras model.

    Raises
    ------
    ValueError
        If depth is not of the form 6n+2.
    """
    if (depth - 2) % 6 != 0:
        raise ValueError('depth should be 6n+2 (eg 20, 32, 44 in [a])')
    # Start model definition.
    num_filters = 16
    num_res_blocks = (depth - 2) // 6

    inputs = Input(shape=input_shape)
    x = resnet_layer(inputs=inputs, layersconfig=layersconfig)
    # Instantiate the stack of residual units: 3 stacks, doubling filters
    # and downsampling (stride 2) at the start of stacks 1 and 2.
    for stack in range(3):
        for res_block in range(num_res_blocks):
            strides = 1
            if stack > 0 and res_block == 0:  # first layer but not first stack
                strides = 2  # downsample
            y = resnet_layer(inputs=x, num_filters=num_filters, strides=strides,
                             layersconfig=layersconfig)
            y = resnet_layer(inputs=y, num_filters=num_filters, activation=None,
                             layersconfig=layersconfig)
            if stack > 0 and res_block == 0:
                # Linear projection residual shortcut to match changed dims.
                x = resnet_layer(inputs=x, num_filters=num_filters, kernel_size=1,
                                 strides=strides, activation=None,
                                 batch_normalization=False,
                                 layersconfig=layersconfig)
            x = tensorflow.keras.layers.add([x, y])
        num_filters *= 2

    # Classifier on top; v1 does not use BN after the last shortcut connection.
    x = AveragePooling2D(pool_size=8)(x)
    y = Flatten()(x)
    dense = QuantizedDense(num_classes, 'softmax', initializer, layersconfig)
    outputs = dense(y)
    # Instantiate model.
    model = Model(inputs=inputs, outputs=outputs)
    return model
def resnet_v2(input_shape, depth, num_classes=10, initializer='heconstant', layersconfig=None):
    """Build a quantized ResNet v2 classifier (pre-activation BN-conv ordering).

    Parameters
    ----------
    input_shape : tuple
        Shape of one input image, e.g. (32, 32, 3).
    depth : int
        Total number of conv layers; must be 9n+2 (56, 110, ...).
    num_classes : int
        Number of output classes for the softmax head.
    initializer : str
        NOTE(review): this parameter is shadowed below by
        layersconfig['initializer'] and therefore has no effect; kept for
        signature compatibility with resnet_v1.
    layersconfig : dict
        Quantization configuration; must provide 'initializer', 'masktype',
        'trainW', 'trainM', 'alpha', 'p1', 'seed' and 'si0'.

    Returns
    -------
    Model
        The compiled-ready Keras model.

    Raises
    ------
    ValueError
        If depth is not of the form 9n+2.
    """
    if (depth - 2) % 9 != 0:
        raise ValueError('depth should be 9n+2 (eg 56 or 110 in [b])')
    # Start model definition.
    num_filters_in = 16
    num_res_blocks = (depth - 2) // 9

    inputs = Input(shape=input_shape)
    # v2 performs Conv2D with BN-ReLU on input before splitting into 2 paths.
    x = resnet_layer(inputs=inputs, num_filters=num_filters_in, conv_first=True,
                     layersconfig=layersconfig)
    # Instantiate the stack of residual units (bottleneck blocks).
    for stage in range(3):
        for res_block in range(num_res_blocks):
            activation = 'relu'
            batch_normalization = True
            strides = 1
            if stage == 0:
                num_filters_out = num_filters_in * 4
                if res_block == 0:  # first layer and first stage
                    activation = None
                    batch_normalization = False
            else:
                num_filters_out = num_filters_in * 2
                if res_block == 0:  # first layer but not first stage
                    strides = 2  # downsample
            # Bottleneck residual unit: 1x1 reduce, 3x3, 1x1 expand.
            y = resnet_layer(inputs=x, num_filters=num_filters_in, kernel_size=1,
                             strides=strides, activation=activation,
                             batch_normalization=batch_normalization,
                             conv_first=False, layersconfig=layersconfig)
            y = resnet_layer(inputs=y, num_filters=num_filters_in,
                             conv_first=False, layersconfig=layersconfig)
            y = resnet_layer(inputs=y, num_filters=num_filters_out, kernel_size=1,
                             conv_first=False, layersconfig=layersconfig)
            if res_block == 0:
                # Linear projection residual shortcut to match changed dims.
                x = resnet_layer(inputs=x, num_filters=num_filters_out,
                                 kernel_size=1, strides=strides, activation=None,
                                 batch_normalization=False,
                                 layersconfig=layersconfig)
            x = tensorflow.keras.layers.add([x, y])
        num_filters_in = num_filters_out

    # Classifier on top; v2 has BN-ReLU before pooling.
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = AveragePooling2D(pool_size=8)(x)
    y = Flatten()(x)
    # Final quantized dense layer built directly from layersconfig; note that
    # this shadows the 'initializer' parameter (see docstring).
    trainW, trainM = layersconfig['trainW'], layersconfig['trainM']
    initializer = layersconfig['initializer']
    masktype = layersconfig['masktype']
    alpha = layersconfig['alpha']
    si0 = layersconfig['si0']
    dense = QuantizedDense(num_classes, None, initializer,
                           weightmasktype=masktype, trainweights=trainW,
                           trainmask=trainM, abg=alpha, si0=si0)
    outputs = dense(y)
    outputs = Activation('softmax')(outputs)
    # Instantiate model.
    model = Model(inputs=inputs, outputs=outputs)
    return model
def MakeResNet(input_shape, version=1, n=3, layersconfig=None):
    """Construct a quantized ResNet of the requested version and size.

    Parameters
    ----------
    input_shape : tuple
        Shape of one input image, e.g. (32, 32, 3).
    version : int
        1 for ResNet v1 (depth 6n+2) or 2 for ResNet v2 (depth 9n+2).
    n : int
        Network size parameter; determines depth per the version formula.
    layersconfig : dict
        Quantization configuration forwarded to the builders.

    Returns
    -------
    Model
        The constructed Keras model.

    Raises
    ------
    ValueError
        If version is neither 1 nor 2. (Previously an unsupported version
        fell through to resnet_v2 with depth=1, producing a misleading
        "depth should be 9n+2" error.)
    """
    if version == 1:
        return resnet_v1(input_shape=input_shape, depth=n * 6 + 2,
                         layersconfig=layersconfig)
    if version == 2:
        return resnet_v2(input_shape=input_shape, depth=n * 9 + 2,
                         layersconfig=layersconfig)
    raise ValueError('version should be 1 (ResNet v1) or 2 (ResNet v2), got %r' % (version,))