from __future__ import division
from __future__ import print_function
import os
# disable autotune
os.environ['MXNET_CUDNN_AUTOTUNE_DEFAULT'] = '0'
import argparse
import glob
import logging
logging.basicConfig(level=logging.INFO)
import time
import numpy as np
import mxnet as mx
from tqdm import tqdm
from mxnet import nd
from mxnet import gluon
import gluoncv as gcv
gcv.utils.check_version('0.6.0')
from gluoncv import data as gdata
from gluoncv.data import VOCDetection
from gluoncv.data import batchify
from gluoncv.data.transforms.presets.rcnn import FasterRCNNDefaultValTransform
from gluoncv.utils.metrics.coco_detection import COCODetectionMetric
from gluoncv.utils.metrics.voc_detection import VOC07MApMetric, VOCMApMetric

def parse_args():
    parser = argparse.ArgumentParser(description='Validate Faster-RCNN networks.')
    parser.add_argument('--network', type=str, default='resnet50_v1b',
                        help="Base feature extraction network name")
    parser.add_argument('--dataset', type=str, default='voc',
                        help='Dataset to evaluate on.')
    parser.add_argument('--num-workers', '-j', dest='num_workers', type=int,
                        default=4, help='Number of data workers')
    parser.add_argument('--gpus', type=str, default='0',
                        help='Evaluating with GPUs, you can specify 1,3 for example.')
    parser.add_argument('--pretrained', type=str, default='True',
                        help='Load weights from previously saved parameters.')
    parser.add_argument('--save-prefix', type=str, default='',
                        help='Saving parameter prefix')
    parser.add_argument('--save-json', action='store_true',
                        help='Save coco output json')
    parser.add_argument('--eval-all', action='store_true',
                        help='Eval all models beginning with the save prefix. Use with pretrained.')
    parser.add_argument('--norm-layer', type=str, default=None,
                        help='Type of normalization layer to use. '
                             'If set to None, the backbone normalization layer will be fixed '
                             'and no extra normalization layer will be used. '
                             'Currently supports \'bn\' and None; default is None.')
    parser.add_argument('--use-fpn', action='store_true',
                        help='Whether to use feature pyramid network.')
    args = parser.parse_args()
    return args
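
# Example invocations (a sketch, not part of the original script; the checkpoint
# filename below is hypothetical, and model-zoo weights only exist for Faster R-CNN
# variants GluonCV actually ships):
#   python eval_fastercnn.py --network resnet50_v1b --dataset voc --gpus 0 --use-fpn
#   python eval_fastercnn.py --network resnet50_v1b --dataset voc --gpus 0 --use-fpn \
#       --pretrained ./faster_rcnn_fpn_resnet50_v1b_voc_best.params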

class VOCLike(VOCDetection):
    CLASSES = ['flower']

    def __init__(self, root, splits, transform=None, index_map=None, preload_label=True):
        super(VOCLike, self).__init__(root, splits, transform, index_map, preload_label)

def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        val_dataset = VOCLike(root='D:/data', splits=((2007, 'trainval'),))
        val_metric = VOCMApMetric(iou_thresh=0.5, class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        val_dataset = gdata.COCODetection(splits='instances_val2017', skip_empty=False)
        val_metric = COCODetectionMetric(
            val_dataset, args.save_prefix + '_eval', cleanup=not args.save_json)
    else:
        raise NotImplementedError('Dataset: {} not implemented.'.format(dataset))
    return val_dataset, val_metric
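
# A sketch of the directory layout the 'voc' branch above expects under root='D:/data'
# (an assumption based on the standard VOCdevkit convention used by VOCDetection):
#   D:/data/VOC2007/Annotations/*.xml
#   D:/data/VOC2007/ImageSets/Main/trainval.txt
#   D:/data/VOC2007/JPEGImages/*.jpg
# with every annotated object carrying the single class name 'flower'.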

def get_dataloader(net, val_dataset, batch_size, num_workers):
    """Get dataloader."""
    val_bfn = batchify.Tuple(*[batchify.Append() for _ in range(3)])
    val_loader = mx.gluon.data.DataLoader(
        val_dataset.transform(FasterRCNNDefaultValTransform(net.short, net.max_size)),
        batch_size, False, batchify_fn=val_bfn, last_batch='keep', num_workers=num_workers)
    return val_loader

def split_and_load(batch, ctx_list):
    """Split data to 1 batch each device."""
    num_ctx = len(ctx_list)
    new_batch = []
    for i, data in enumerate(batch):
        new_data = [x.as_in_context(ctx) for x, ctx in zip(data, ctx_list)]
        new_batch.append(new_data)
    return new_batch

def validate(net, val_data, ctx, eval_metric, size):
    """Test on validation dataset."""
    clipper = gcv.nn.bbox.BBoxClipToImage()
    eval_metric.reset()
    net.hybridize(static_alloc=True)
    with tqdm(total=size) as pbar:
        for ib, batch in enumerate(val_data):
            batch = split_and_load(batch, ctx_list=ctx)
            det_bboxes = []
            det_ids = []
            det_scores = []
            gt_bboxes = []
            gt_ids = []
            gt_difficults = []
            for x, y, im_scale in zip(*batch):
                # get prediction results
                ids, scores, bboxes = net(x)
                det_ids.append(ids)
                det_scores.append(scores)
                # clip to image size
                det_bboxes.append(clipper(bboxes, x))
                # rescale to original resolution
                im_scale = im_scale.reshape((-1)).asscalar()
                det_bboxes[-1] *= im_scale
                # split ground truths
                gt_ids.append(y.slice_axis(axis=-1, begin=4, end=5))
                gt_bboxes.append(y.slice_axis(axis=-1, begin=0, end=4))
                gt_bboxes[-1] *= im_scale
                gt_difficults.append(y.slice_axis(axis=-1, begin=5, end=6) if y.shape[-1] > 5 else None)
            # update metric
            for det_bbox, det_id, det_score, gt_bbox, gt_id, gt_diff in zip(
                    det_bboxes, det_ids, det_scores, gt_bboxes, gt_ids, gt_difficults):
                eval_metric.update(det_bbox, det_id, det_score, gt_bbox, gt_id, gt_diff)
            pbar.update(len(ctx))
    return eval_metric.get()

if __name__ == '__main__':
    args = parse_args()

    # contexts
    ctx = [mx.gpu(int(i)) for i in args.gpus.split(',') if i.strip()]
    ctx = ctx if ctx else [mx.cpu()]
    args.batch_size = len(ctx)  # 1 batch per device

    # network
    kwargs = {}
    module_list = []
    if args.use_fpn:
        module_list.append('fpn')
    if args.norm_layer is not None:
        module_list.append(args.norm_layer)
        if args.norm_layer == 'bn':
            kwargs['num_devices'] = len(args.gpus.split(','))
    net_name = '_'.join(('faster_rcnn', *module_list, args.network, args.dataset))
    args.save_prefix += net_name
    if args.pretrained.lower() in ['true', '1', 'yes', 't']:
        net = gcv.model_zoo.get_model(net_name, pretrained=True, **kwargs)
    else:
        net = gcv.model_zoo.get_model(net_name, pretrained=False, **kwargs)
        net.load_parameters(args.pretrained.strip(), cast_dtype=True)
    net.collect_params().reset_ctx(ctx)

    # validation data
    val_dataset, eval_metric = get_dataset(args.dataset, args)
    val_data = get_dataloader(
        net, val_dataset, args.batch_size, args.num_workers)

    # validation
    if not args.eval_all:
        names, values = validate(net, val_data, ctx, eval_metric, len(val_dataset))
        for k, v in zip(names, values):
            print(k, v)
    else:
        saved_models = glob.glob(args.save_prefix + '*.params')
        for epoch, saved_model in enumerate(sorted(saved_models)):
            print('[Epoch {}] Validating from {}'.format(epoch, saved_model))
            net.load_parameters(saved_model)
            net.collect_params().reset_ctx(ctx)
            map_name, mean_ap = validate(net, val_data, ctx, eval_metric, len(val_dataset))
            val_msg = '\n'.join(['{}={}'.format(k, v) for k, v in zip(map_name, mean_ap)])
            print('[Epoch {}] Validation: \n{}'.format(epoch, val_msg))
            current_map = float(mean_ap[-1])
            with open(args.save_prefix + '_best_map.log', 'a') as f:
                f.write('\n{:04d}:\t{:.4f}'.format(epoch, current_map))
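
# Sketch of the --eval-all mode (an assumption: checkpoints saved by the matching
# training script sit in the working directory and share the derived save prefix;
# faster_rcnn_fpn_resnet50_v1b_voc_0000.params is a hypothetical filename):
#   python eval_fastercnn.py --network resnet50_v1b --dataset voc --use-fpn --eval-all
# Each checkpoint's final mAP is appended to '<save_prefix>_best_map.log'
# as '<epoch>:<TAB><mAP>'.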