Merge pull request arabian9ts#16 from arabian9ts/tuning
Tuning
arabian9ts authored Jan 25, 2018
2 parents 8d4a494 + 1c66253 commit 52e3d1f
Showing 6 changed files with 47 additions and 34 deletions.
8 changes: 5 additions & 3 deletions matcher.py
@@ -78,7 +78,7 @@ class label loss is evaled by loss_conf
             actual_labels: answer class labels
             actual_locs: answer box locations
         Returns:
-            postive_list: if pos -> 1 else 0
+            postive_list: if pos -> 1 else -> 0
             negative_list: if neg and label is not classes(not unknown class) 1 else 0
             expanded_gt_labels: gt_label if pos else classes
             expanded_gt_locs: gt_locs if pos else [0, 0, 0, 0]
@@ -92,6 +92,7 @@ class label loss is evaled by loss_conf
         expanded_gt_locs = []
         matches = []
 
+        matched = []
         # generate serializationd matching boxes
         for i in range(len(boxes)):
             for _ in range(self.fmap_shapes[i][1]):
@@ -105,17 +106,18 @@ class label loss is evaled by loss_conf
             near_index = None
             for i in range(len(matches)):
                 jacc = jaccard(center2corner(gt_box), center2corner(self.default_boxes[i]))
-                if 0.3 <= jacc:
+                if 0.5 <= jacc:
                     matches[i] = Box(gt_box, gt_label)
                     pos += 1
+                    matched.append(gt_label)
                 else:
                     if near_jacc < jacc:
                         near_miss = jacc
                         near_index = i
 
             # prevent pos from becoming 0 <=> loss_loc is 0
             # force to match most near box to ground truth box
-            if near_index is not None and matches[near_index] is None:
+            if 0 == len(matched) and near_index is not None and matches[near_index] is None:
                 matches[near_index] = Box(gt_box, gt_label)
                 pos += 1
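Note: taken together, these hunks tighten the matching rule. The positive Jaccard-overlap threshold rises from 0.3 to 0.5, and the nearest-box fallback now fires only when a ground-truth box matched nothing at all (the new matched list tracks that). A minimal sketch of the resulting rule, with hypothetical helper names rather than the repository's exact code, and with the near-miss bookkeeping written consistently (the file itself mixes near_jacc and near_miss):

    def match_gt(gt_box, gt_label, default_boxes, matches, threshold=0.5):
        # matches[i] is None until default box i is claimed by some ground truth
        matched = []
        near_jacc, near_index = 0.0, None
        for i, dbox in enumerate(default_boxes):
            jacc = jaccard(gt_box, dbox)          # IoU helper, as used in matcher.py
            if jacc >= threshold:
                matches[i] = (gt_box, gt_label)   # positive match
                matched.append(gt_label)
            elif jacc > near_jacc:
                near_jacc, near_index = jacc, i   # remember the best near miss
        # fallback: claim the nearest box only if nothing crossed the threshold,
        # so loss_loc never sees zero positives
        if len(matched) == 0 and near_index is not None and matches[near_index] is None:
            matches[near_index] = (gt_box, gt_label)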
2 changes: 1 addition & 1 deletion model/SSD300.py
@@ -41,7 +41,7 @@ def __init__(self, sess):
         loss, loss_conf, loss_loc, self.pos, self.neg, self.gt_labels, self.gt_boxes = self.ssd.loss(len(self.dboxes))
         self.train_set = [loss, loss_conf, loss_loc]
         # optimizer = tf.train.AdamOptimizer(0.05)
-        optimizer = tf.train.AdamOptimizer(learning_rate=0.05, beta1=0.9, beta2=0.999, epsilon=1e-08, use_locking=False, name='Adam')
+        optimizer = tf.train.AdamOptimizer(learning_rate=1e-3, beta1=0.9, beta2=0.999, epsilon=1e-08, use_locking=False, name='Adam')
         self.train_step = optimizer.minimize(loss)
 
         # provides matching method
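Note: the only functional change here is the Adam learning rate, dropped from 0.05 to 1e-3, which is the documented default for tf.train.AdamOptimizer in TF 1.x. Since the other arguments also just restate the defaults, the line could equally be written as:

    optimizer = tf.train.AdamOptimizer(learning_rate=1e-3)  # beta1, beta2, epsilon left at their defaults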
26 changes: 14 additions & 12 deletions model/default_box.py
@@ -18,7 +18,7 @@ def scale(k):
     Args: feature map number
     Returns: scale
     """
-    s_min = 0.2
+    s_min = 0.1
     s_max = 0.9
     m = 6.0
     s_k = s_min + (s_max - s_min) * k / (m - 1.0)
@@ -50,24 +50,26 @@ def generate_boxes(fmap_shapes):
     default_boxes = []
 
     # this loop should be already 6 loops
-    for index, map_shape in zip(range(len(fmap_shapes)), fmap_shapes):
+    for index, map_shape in enumerate(fmap_shapes):
+        s_k = scale(index)
+        s_k1 = scale(index+1)
         height = map_shape[1]
         width = map_shape[2]
         ratios = box_ratios[index]
+        s = 0.0
 
         for y in range(height):
+            center_y = (y + 0.5) / float(height)
             for x in range(width):
-                for ratio in ratios:
-                    s_k = scale(index)
-                    s_k1 = scale(index+1)
+                center_x = (x + 0.5) / float(width)
+                for i, ratio in enumerate(ratios):
+                    s = s_k
 
-                    if 1.0 == ratio:
-                        s_k = np.sqrt(s_k*s_k1)
-
-                    center_x = (x + 0.5) / float(width)
-                    center_y = (y + 0.5) / float(height)
-                    box_width = s_k * np.sqrt(ratio)
-                    box_height = s_k / np.sqrt(ratio)
+                    if 0 == i:
+                        s = np.sqrt(s_k*s_k1)
+
+                    box_width = s * np.sqrt(ratio)
+                    box_height = s / np.sqrt(ratio)
 
                     default_boxes.append([center_x, center_y, box_width, box_height])
 
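Note: besides lowering s_min from 0.2 to 0.1, the second hunk fixes a scale-mutation bug. The old loop overwrote s_k whenever ratio == 1.0, so the enlarged scale sqrt(s_k * s_k1) leaked into every later ratio and cell. The rewrite hoists s_k and s_k1 out of the inner loops and uses a local s, reserving the extra scale for the first (unit-ratio) box, as in the SSD paper. A condensed sketch of one cell under that scheme (hypothetical helper, same scale schedule):

    import numpy as np

    def scale(k, s_min=0.1, s_max=0.9, m=6.0):
        # linear scale schedule over the m feature maps, with k counted from 0
        return s_min + (s_max - s_min) * k / (m - 1.0)

    def cell_boxes(index, center_x, center_y, ratios):
        s_k, s_k1 = scale(index), scale(index + 1)
        boxes = []
        for i, ratio in enumerate(ratios):
            s = np.sqrt(s_k * s_k1) if i == 0 else s_k  # extra scale only for the first box
            boxes.append([center_x, center_y, s * np.sqrt(ratio), s / np.sqrt(ratio)])
        return boxes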
14 changes: 7 additions & 7 deletions model/policy.py
@@ -10,15 +10,15 @@
 classes = 21
 
 # the number of boxes per feature map
-boxes = [3, 6, 6, 6, 6, 6,]
+boxes = [6, 3, 6, 3, 6, 3,]
 
 # default box ratios
 # each length should be matches boxes[index]
 box_ratios = [
-    [1.0, 0.5, 2.0],
-    [1.0, 1.0, 2.0, 3.0, 0.5, 1.0/3.0],
-    [1.0, 1.0, 2.0, 3.0, 0.5, 1.0/3.0],
-    [1.0, 1.0, 2.0, 3.0, 0.5, 1.0/3.0],
-    [1.0, 1.0, 2.0, 3.0, 0.5, 1.0/3.0],
-    [1.0, 1.0, 2.0, 3.0, 0.5, 1.0/3.0],
+    [1.0, 1.0, 2.0, 1.0/2.0, 3.0, 1.0/3.0],
+    [1.0, 2.0, 1.0/2.0],
+    [1.0, 1.0, 2.0, 1.0/2.0, 3.0, 1.0/3.0],
+    [1.0, 2.0, 1.0/2.0],
+    [1.0, 1.0, 2.0, 1.0/2.0, 3.0, 1.0/3.0],
+    [1.0, 2.0, 1.0/2.0],
 ]
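Note: each box_ratios[index] must contain exactly boxes[index] ratios, since the box generator walks the two lists in lockstep; both the old and new values satisfy this. A one-line sanity check (editor's sketch, not part of the commit):

    from model.policy import boxes, box_ratios

    # every feature map must declare as many ratios as it has default boxes
    assert all(len(r) == n for r, n in zip(box_ratios, boxes))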
4 changes: 2 additions & 2 deletions model/ssd.py
@@ -211,7 +211,7 @@ def detect_objects(self, pred_confs, pred_locs):

         detected_locs = []
         detected_labels = []
-        hist = [0 for _ in range(21)]
+        hist = [0 for _ in range(classes)]
         for conf, loc in zip(pred_confs[0], pred_locs[0]):
             hist[np.argmax(conf)] += 1
         print(hist)
@@ -220,7 +220,7 @@ def detect_objects(self, pred_confs, pred_locs):
         possibilities = [np.amax(np.exp(conf)) / (np.sum(np.exp(conf)) + 1e-3) for conf in pred_confs[0]]
         indicies = np.argpartition(possibilities, -200)[-200:]
         top200 = np.asarray(possibilities)[indicies]
-        slicer = indicies[0.1 < top200]
+        slicer = indicies[0.7 < top200]
 
         locations, labels = self._filter(pred_confs[0][slicer], pred_locs[0][slicer])
 
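Note: the detection filter keeps the 200 most confident boxes and then applies a probability threshold, now raised from 0.1 to 0.7, so far fewer low-confidence boxes survive. A stand-alone sketch of that selection, assuming pred_confs holds the raw per-box class scores as in the diff above:

    import numpy as np

    def top_confident(pred_confs, k=200, threshold=0.7):
        # softmax-style max probability per box (same +1e-3 guard as the diff)
        probs = np.array([np.amax(np.exp(c)) / (np.sum(np.exp(c)) + 1e-3) for c in pred_confs])
        top_k = np.argpartition(probs, -k)[-k:]   # indices of the k most confident boxes
        return top_k[probs[top_k] > threshold]    # keep only those above the threshold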
27 changes: 18 additions & 9 deletions train.py
@@ -23,12 +23,14 @@
 import matplotlib.pyplot as plt
 
 from util.util import *
+from tqdm import trange
 from model.SSD300 import *
 
 # ====================== Training Parameters ====================== #
 BATCH_SIZE = 10
-EPOCH = 200
+EPOCH = 100
 EPOCH_LOSSES = []
+SHUFFLED_INDECES = []
 # ============================== END ============================== #
 
 if __name__ == '__main__':
@@ -42,12 +44,17 @@
     BATCH = int(len(keys) / BATCH_SIZE)
 
     def next_batch():
-        global buff
+        global buff, BATCH_SIZE ,SHUFFLED_INDECES
         mini_batch = []
         actual_data = []
-        indicies = np.random.choice(len(keys), BATCH_SIZE)
 
-        for idx in indicies:
+        if 0 == len(SHUFFLED_INDECES):
+            SHUFFLED_INDECES = list(np.random.permutation(len(keys)))
+
+        indices = SHUFFLED_INDECES[:min(BATCH_SIZE, len(SHUFFLED_INDECES))]
+        del SHUFFLED_INDECES[:min(BATCH_SIZE, len(SHUFFLED_INDECES))]
+
+        for idx in indices:
             # make images mini batch
 
             img = load_image('voc2007/'+keys[idx])
@@ -102,33 +109,35 @@ def draw_marker(image_name, save):

         # saver.restore(sess, './checkpoints/params.ckpt')
 
+        SHUFFLED_INDECES = list(np.random.permutation(len(keys)))
+
         print('\nSTART LEARNING')
         print('==================== '+str(datetime.datetime.now())+' ====================')
 
         for _ in range(5):
             next_batch()
 
         for ep in range(EPOCH):
             BATCH_LOSSES = []
-            for ba in range(BATCH):
+            for ba in trange(BATCH):
                 batch, actual = buff.pop(0)
                 threading.Thread(name='load', target=next_batch).start()
                 _, _, batch_loc, batch_conf, batch_loss = ssd.eval(batch, actual, True)
                 BATCH_LOSSES.append(batch_loss)
 
-                print('BATCH: {0} / EPOCH: {1}, LOSS: {2}'.format(ba+1, ep+1, batch_loss))
+                # print('BATCH: {0} / EPOCH: {1}, LOSS: {2}'.format(ba+1, ep+1, batch_loss))
             EPOCH_LOSSES.append(np.mean(BATCH_LOSSES))
             print('\n*** AVERAGE: '+str(EPOCH_LOSSES[-1])+' ***')
 
             saver.save(sess, './checkpoints/params.ckpt')
 
+            """
 
             print('\n*** TEST ***')
            id = np.random.choice(len(keys))
             name = keys[id]
             draw_marker(image_name=name, save=True)
             print('\nSaved Evaled Image')
-
-
+            """
             print('\n========== EPOCH: '+str(ep+1)+' END ==========')
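Note: training now draws mini-batches from a shuffled index queue instead of np.random.choice, so each image is used exactly once per pass before the indices are reshuffled, and trange from tqdm replaces the per-batch print with a progress bar. A minimal stand-alone sketch of the queue idea (keeping the diff's SHUFFLED_INDECES spelling, not the repo's exact code):

    import numpy as np

    SHUFFLED_INDECES = []

    def next_indices(n_items, batch_size):
        # draw batch_size indices without replacement, reshuffling when exhausted
        global SHUFFLED_INDECES
        if len(SHUFFLED_INDECES) == 0:
            SHUFFLED_INDECES = list(np.random.permutation(n_items))
        take = min(batch_size, len(SHUFFLED_INDECES))
        batch = SHUFFLED_INDECES[:take]
        del SHUFFLED_INDECES[:take]
        return batch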
