# extra_dense.py
import json

import keras
from keras.models import Model
from keras.layers import Dense, Dropout, GlobalAveragePooling2D
from keras.applications.mobilenet import MobileNet
# Project-local helpers (model surgery and data generators).
from modelanddata import build_bottleneck_model, depthwise_conv_block, makedatagenerators

print(keras.__version__)
# Load the hyperparameter file (a JSON file).
params_path = "./params.json"
with open(params_path) as json_file:
    params = json.load(json_file)
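# Illustrative shape of params.json, inferred from the lookups below; the
# values here are placeholders, not from the source:
# {
#   "loss": "categorical_crossentropy",
#   "optimizer": "adam",
#   "steps_per_epoch": 100,
#   "epochs": 10,
#   "validation_steps": 20
# }
# makedatagenerators(params) may require additional keys (e.g. dataset paths).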
# Output directory for artifacts and the path to the pretrained NIMA weights.
output_dir = "./"
weightfile = "./NIMA.hdf5"
# Define the base model with the structure of the MobileNet architecture.
base_model = MobileNet((224, 224, 3), alpha=1, include_top=False, pooling='avg', weights=None)
# Load the NIMA weights into the model; we only need the convolutional layers
# of the NIMA architecture.
base_model.load_weights(weightfile, by_name=True)
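# by_name=True matches layers by layer name, so weights are restored only for
# layers whose names also appear in the NIMA checkpoint; everything else keeps
# its fresh initialization.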
# Cut the model at the conv_pw_13_relu layer.
modelcut = build_bottleneck_model(base_model, 'conv_pw_13_relu')
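# build_bottleneck_model comes from modelanddata (not shown here). A minimal
# sketch of the equivalent cut in plain Keras, assuming the helper does nothing
# more than truncate at the named layer:
#   modelcut = Model(base_model.input,
#                    base_model.get_layer('conv_pw_13_relu').output)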
# Add an extra depthwise-separable conv block.
interimoutput = depthwise_conv_block(modelcut.output, 2048, 1, 1, strides=(2, 2), block_id=14)
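# depthwise_conv_block also comes from modelanddata and is assumed to mirror
# _depthwise_conv_block from keras.applications.mobilenet, i.e.
# (inputs, pointwise_conv_filters, alpha, depth_multiplier=1, strides=(1, 1),
# block_id=1): a 3x3 DepthwiseConv2D followed by a 1x1 pointwise Conv2D, each
# with BatchNormalization and ReLU.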
# Global average pooling, as in the NIMA paper.
interimoutput = GlobalAveragePooling2D()(interimoutput)
# Dropout, as in the NIMA paper.
interimoutput = Dropout(0.35)(interimoutput)
# Dense layers for the final output.
interimoutput = Dense(32, activation='relu', name="Dense_1")(interimoutput)
interimoutput = Dense(16, activation='relu', name="Dense_2")(interimoutput)
interimoutput = Dense(8, activation='relu', name="Dense_3")(interimoutput)
# Final output.
finaloutput = Dense(2, activation='softmax', name="Logits")(interimoutput)
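# Note: the original NIMA head is a single 10-unit softmax over score bins; the
# three ReLU Dense layers and the two-class softmax above are a modification.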
# Define the model.
model = Model(base_model.input, finaloutput, name='Modified-NIMA-Architecture')
print("The model summary is:")
model.summary()
# Freeze all layers except the last 9.
for layer in model.layers[:-9]:
    layer.trainable = False
# Check the trainable status of the individual layers.
for layer in model.layers:
    print(layer, layer.trainable)
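# Caveat (assumption about modelanddata): if depthwise_conv_block adds six
# layers, as the keras.applications helper does, [:-9] leaves the depthwise
# half of block 14 frozen at its random initialization; widen the slice if the
# whole new block should train.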
train_generator, validation_generator = makedatagenerators(params)
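# makedatagenerators (from modelanddata, not shown) is assumed to return
# (train_generator, validation_generator), e.g. ImageDataGenerator flows built
# from the dataset paths and batch size stored in params.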
# Compile the model.
model.compile(loss=params['loss'],
              optimizer=params['optimizer'],
              metrics=['accuracy'])
# Train the model.
model.fit_generator(
    train_generator,
    steps_per_epoch=params['steps_per_epoch'],
    epochs=params['epochs'],
    validation_data=validation_generator,
    validation_steps=params['validation_steps'])
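# fit_generator is the Keras 2.x API for generator input; in tf.keras 2.x,
# model.fit accepts generators directly and fit_generator is deprecated.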
# Save the trained model (HDF5) and reload it to verify serialization.
model.save("modified-NIMA.h5")
reloadmodel = keras.models.load_model("modified-NIMA.h5")
print("Summary of the saved model for verification:")
reloadmodel.summary()