-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathtrain.py
136 lines (101 loc) · 3.21 KB
/
train.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
from alexnet import AlexNet
import caffe
from caffe.proto import caffe_pb2
import plyvel
import numpy as np
import h5py
from keras import backend as K
# nohup python train.py &
# ps -ef | grep train.py
# tail -f nohup.out
# kill UID
def train(db, keys, avg):
    """Train AlexNet on the LevelDB dataset, streaming it in chunks.

    Args:
        db: an open plyvel LevelDB holding Caffe Datum records.
        keys: list of byte-string keys to train on, in order.
        avg: mean image (already scaled to [0, 1]) subtracted per sample.

    Returns:
        The fitted Keras model.
    """
    model = AlexNet()
    batch_size = 64
    stream_size = batch_size * 100  # images materialized in memory per chunk
    total = len(keys)
    start = 0
    # One sequential pass over the dataset; each chunk is decoded from the
    # DB on demand so the whole set never has to fit in RAM at once.
    while start < total:
        X_chunk, Y_chunk = get_data(db, keys[start:start + stream_size], avg)
        model.fit(X_chunk, Y_chunk, batch_size=batch_size, nb_epoch=1, verbose=2)
        start += stream_size
    return model
def get_data(db, keys, avg):
    """Decode a batch of images and affordance labels from the LevelDB.

    Args:
        db: an open plyvel LevelDB mapping key -> serialized caffe Datum.
        keys: keys of the samples to load.
        avg: mean image to subtract, in the same dim ordering and [0, 1]
            scale as the decoded images (see calc_average).

    Returns:
        (X_train, Y_train): images shaped (n, 210, 280, 3) for the 'tf'
        backend ordering or (n, 3, 210, 280) otherwise, and labels shaped
        (n, 14) — one 14-dim affordance vector per image.
    """
    n = len(keys)
    # Hoist the backend check out of the per-image loop — it is invariant.
    tf_ordering = K.image_dim_ordering() == 'tf'
    if tf_ordering:
        X_train = np.empty((n, 210, 280, 3))
    else:
        X_train = np.empty((n, 3, 210, 280))
    Y_train = np.empty((n, 14))
    for i, key in enumerate(keys):
        datum = caffe_pb2.Datum.FromString(db.get(key))
        img = caffe.io.datum_to_array(datum)  # C,H,W = 3 x 210 x 280
        if tf_ordering:
            # C,H,W -> H,W,C in a single transpose (replaces two swapaxes)
            img = np.transpose(img, (1, 2, 0))
        # Scale to [0, 1] then subtract the dataset mean.
        img = img.astype('float32') / 255.0
        X_train[i] = img - avg
        # float_data holds the 14 affordance targets for this frame.
        Y_train[i] = np.fromiter(datum.float_data, dtype='float32', count=14)
    return X_train, Y_train
def calc_average(db, keys):
    """Compute the per-pixel mean image over all samples in `keys`.

    Args:
        db: an open plyvel LevelDB mapping key -> serialized caffe Datum.
        keys: keys of the samples to average over.

    Returns:
        float32 mean image scaled to [0, 1], shaped (210, 280, 3) under
        the 'tf' backend dim ordering, (3, 210, 280) otherwise — matching
        the image layout produced by get_data.
    """
    # Accumulate a running sum and divide once at the end. The original
    # incremental form ((avg*n + img)/(n+1)) rescales the whole array per
    # image and accumulates floating-point rounding error.
    total = np.zeros((3, 210, 280))
    for key in keys:
        datum = caffe_pb2.Datum.FromString(db.get(key))
        total += caffe.io.datum_to_array(datum)
    n = len(keys)
    # Guard the empty case: return the zero image rather than divide by 0.
    avg = total / n if n else total
    if K.image_dim_ordering() == 'tf':
        # C,H,W -> H,W,C to match the 'tf' image layout
        avg = np.transpose(avg, (1, 2, 0))
    # Same dtype/scale pipeline as get_data: float32, then scale to [0, 1].
    avg = avg.astype('float32')
    avg = avg / 255.0
    return avg
def save_average(avg):
    """Persist the mean image to deepdriving_average.h5 under 'average'.

    Args:
        avg: array to store (as produced by calc_average).
    """
    # 'with' guarantees the file is closed even if create_dataset raises;
    # the explicit open/close form leaked the handle on error.
    with h5py.File('deepdriving_average.h5', 'w') as h5f:
        h5f.create_dataset('average', data=avg)
def load_average():
    """Load the mean image previously written by save_average.

    Returns:
        The 'average' dataset from deepdriving_average.h5, read fully
        into memory as an ndarray.
    """
    # 'with' closes the file even if the dataset read raises.
    with h5py.File('deepdriving_average.h5', 'r') as h5f:
        # [:] materializes the dataset so it outlives the closed file.
        return h5f['average'][:]
def find_keys(db):
    """Return every key in the database, in iteration order.

    Args:
        db: an iterable of (key, value) pairs (e.g. an open plyvel DB).

    Returns:
        list of keys, values discarded.
    """
    return [key for key, _ in db]
def save_keys(keys):
    """Write one key per line to keys.txt in binary mode.

    Args:
        keys: iterable of byte-string keys.
    """
    with open('keys.txt', 'wb') as f:
        for key in keys:
            f.write(key + b'\n')
def load_keys():
    """Read keys back from keys.txt, one per line.

    Returns:
        list of byte-string keys with surrounding whitespace stripped.
    """
    with open('keys.txt', 'rb') as f:
        return [line.strip() for line in f]
if __name__ == "__main__":
dbpath = '../TORCS_Training_1F/'
db = plyvel.DB(dbpath)
keys = load_keys()
avg = calc_average(db, keys)
save_average(avg)
model = train(db, keys, avg)
model.save('deepdriving_model.h5')
model.save_weights('deepdriving_weights.h5')
with open('deepdriving_model.json', 'w') as f:
f.write(model.to_json())
db.close()