train_lrn.py
from alexnet_lrn import AlexNet
import caffe
from caffe.proto import caffe_pb2
import plyvel
import numpy as np
import h5py
from keras import backend as K
# nohup python train_lrn.py &
# ps -ef | grep train_lrn.py
# tail -f nohup.out
# kill PID


def train(db, keys, avg):
    m = len(keys)
    # epochs = 19
    # iterations = 140000
    batch_size = 64
    stream_size = batch_size * 100  # ~6,400 images loaded at a time
    model = AlexNet()
    model.load_weights('alexnet_weights.h5')
    # stream the LevelDB in chunks so the full training set never has to fit in memory
    for i in range(0, m, stream_size):
        X_batch, Y_batch = get_data(db, keys[i:(i + stream_size)], avg)
        model.fit(X_batch, Y_batch, batch_size=batch_size, nb_epoch=1, verbose=2)
    # requires adam optimizer
    # model.fit(X_train, Y_train,
    #           batch_size=64, nb_epoch=4700, verbose=1,
    #           validation_data=(X_test, Y_test))
    # max_iter = #epochs * (training set / training_batch_size)
    return model
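
# Note: AlexNet() in alexnet_lrn.py is assumed to return a model that is already
# compiled; if it is not, a regression setup with the Adam optimizer mentioned
# above would look roughly like this (sketch, not part of the original script):
#
#   from keras.optimizers import Adam
#   model.compile(optimizer=Adam(lr=1e-4), loss='mean_squared_error')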


def get_data(db, keys, avg):
    n = len(keys)
    if K.image_dim_ordering() == 'tf':
        X_train = np.empty((n, 210, 280, 3))
    else:
        X_train = np.empty((n, 3, 210, 280))
    Y_train = np.empty((n, 14))
    for i, key in enumerate(keys):
        datum = caffe_pb2.Datum.FromString(db.get(key))
        img = caffe.io.datum_to_array(datum)
        # img.shape = 3x210x280
        if K.image_dim_ordering() == 'tf':
            img = np.swapaxes(img, 0, 1)
            img = np.swapaxes(img, 1, 2)
        # if 'th', leave as is
        img = img.astype('float32')
        img = img / 255
        img = np.subtract(img, avg)
        X_train[i] = img
        affordances = [j for j in datum.float_data]
        affordances = np.array(affordances)
        affordances = affordances.reshape(1, 14)
        affordances = affordances.astype('float32')
        Y_train[i] = affordances
    return X_train, Y_train


def load_average():
    h5f = h5py.File('deepdriving_average.h5', 'r')
    avg = h5f['average'][:]
    h5f.close()
    return avg


def load_keys():
    keys = []
    with open('keys.txt', 'rb') as f:
        keys = [line.strip() for line in f]
    return keys
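
# keys.txt is assumed to hold one LevelDB key per line, in the byte form that
# plyvel returns. A rough sketch for generating it (hypothetical helper, not
# part of this repo) would be:
#
#   db = plyvel.DB('../TORCS_Training_1F/')
#   with open('keys.txt', 'wb') as f:
#       for key, _ in db.iterator():
#           f.write(key + b'\n')
#   db.close()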


if __name__ == "__main__":
    dbpath = '../TORCS_Training_1F/'
    db = plyvel.DB(dbpath)
    keys = load_keys()
    avg = load_average()
    model = train(db, keys, avg)
    model.save('deepdriving_model_lrn.h5')
    model.save_weights('deepdriving_weights_lrn.h5')
    # with open('deepdriving_model.json', 'w') as f:
    #     f.write(model.to_json())
    db.close()
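
For later inference, a minimal sketch (an assumption, not part of the original script) that mirrors the preprocessing above: rebuild the LRN AlexNet, load the saved weights, scale a 210x280 frame to [0, 1], subtract the dataset average, and predict the 14 affordances. The frame file name is hypothetical.

from alexnet_lrn import AlexNet
import h5py
import numpy as np

model = AlexNet()
model.load_weights('deepdriving_weights_lrn.h5')

h5f = h5py.File('deepdriving_average.h5', 'r')
avg = h5f['average'][:]
h5f.close()

# 'frame.npy' is a hypothetical image array already in the ordering the model expects
# ((210, 280, 3) for 'tf', (3, 210, 280) for 'th')
img = np.load('frame.npy').astype('float32') / 255
img = np.subtract(img, avg)

affordances = model.predict(np.expand_dims(img, axis=0), batch_size=1)  # shape (1, 14)
print(affordances)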