forked from csaw-hackml/CSAW-HackML-2020
-
Notifications
You must be signed in to change notification settings - Fork 1
/
architecture.py
40 lines (34 loc) · 1.37 KB
/
architecture.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
import keras
import keras.backend as K
from keras import initializers
def Net():
    """Build the face-recognition CNN (CSAW-HackML BadNets architecture).

    Input: (55, 47, 3) RGB face crops. Three conv/max-pool stages feed two
    parallel "interpretation" heads (a flatten+Dense, and an extra conv then
    flatten+Dense) whose 160-d outputs are summed, ReLU-activated, dropped
    out, and classified over 1283 identities with softmax.

    Returns:
        keras.Model: uncompiled functional model from 'input' to 'output'.
    """
    # define input
    x = keras.Input(shape=(55, 47, 3), name='input')
    # feature extraction: three conv + 2x2 max-pool stages
    conv_1 = keras.layers.Conv2D(20, (4, 4), activation='relu', name='conv_1')(x)
    pool_1 = keras.layers.MaxPooling2D((2, 2), name='pool_1')(conv_1)
    conv_2 = keras.layers.Conv2D(40, (3, 3), activation='relu', name='conv_2')(pool_1)
    pool_2 = keras.layers.MaxPooling2D((2, 2), name='pool_2')(conv_2)
    conv_3 = keras.layers.Conv2D(60, (3, 3), activation='relu', name='conv_3')(pool_2)
    pool_3 = keras.layers.MaxPooling2D((2, 2), name='pool_3')(conv_3)
    # first interpretation model: straight flatten -> Dense
    flat_1 = keras.layers.Flatten()(pool_3)
    fc_1 = keras.layers.Dense(160, name='fc_1')(flat_1)
    # second interpretation model: one more conv before flatten -> Dense
    conv_4 = keras.layers.Conv2D(80, (2, 2), activation='relu', name='conv_4')(pool_3)
    flat_2 = keras.layers.Flatten()(conv_4)
    fc_2 = keras.layers.Dense(160, name='fc_2')(flat_2)
    # merge interpretation: elementwise sum of the two 160-d heads, then ReLU
    merge = keras.layers.Add()([fc_1, fc_2])
    add_1 = keras.layers.Activation('relu')(merge)
    # BUG FIX: the original instantiated Dropout(0.5) but never called it on a
    # tensor, so it was never part of the graph and the output consumed add_1
    # directly. Wire it in so dropout actually regularizes training. (Dropout
    # has no weights, so saved named weights still load; inference output is
    # unchanged since dropout is the identity outside training.)
    drop = keras.layers.Dropout(0.5)(add_1)
    # output: softmax over 1283 identity classes
    y_hat = keras.layers.Dense(1283, activation='softmax', name='output')(drop)
    model = keras.Model(inputs=x, outputs=y_hat)
    # summarize layers
    #print(model.summary())
    # plot graph
    #plot_model(model, to_file='model_architecture.png')
    return model
# Reset Keras/TensorFlow global graph/session state so repeated imports or
# re-runs don't accumulate stale layers with clashing auto-generated names.
K.clear_session()
# Build the model at import time so `model` is available to importers of this
# module as a module-level attribute.
model = Net()