diff --git a/motmetrics/metrics.py b/motmetrics/metrics.py
index fc093068..75ecb82d 100644
--- a/motmetrics/metrics.py
+++ b/motmetrics/metrics.py
@@ -20,11 +20,14 @@
 import numpy as np
 import pandas as pd
+import sys
+import more_itertools as mit
 
 from motmetrics import math_util
 from motmetrics.lap import linear_sum_assignment
 from motmetrics.mot import MOTAccumulator
+from scipy.optimize import linear_sum_assignment as hungarian
 
 try:
     _getargspec = inspect.getfullargspec
 except AttributeError:
     _getargspec = inspect.getargspec
@@ -568,6 +571,30 @@ def events_to_df_map(df):
     df_map = DataFrameMap(full=df, raw=raw, noraw=noraw, extra=extra)
     return df_map
 
+def get_missing_range(oid_dict):
+    interrupt_dict = {}
+    for k, v in oid_dict.items():
+        consec_group = [list(group) for group in mit.consecutive_groups(v)]
+        if len(consec_group) == 1:
+            continue
+        missing_list = []
+        for l1, l2 in zip(consec_group, consec_group[1:]):
+            missing_list.append((l1[-1], l2[0]))
+        interrupt_dict[k] = missing_list
+    interrupt_dict = {int(k): v for k, v in interrupt_dict.items()}
+    return interrupt_dict
+
+def get_nfrag_matches(interrupt_dict, frame_matches):
+    n_matches = 0
+    for oid, frames in interrupt_dict.items():
+        for (start, stop) in frames:
+            p_before = frame_matches[start].get(oid, None)
+            p_after = frame_matches[stop].get(oid, None)
+            if p_before is None or p_after is None:
+                continue
+            if p_before == p_after:
+                n_matches += 1
+    return n_matches
 
 def extract_counts_from_df_map(df):
     """
@@ -593,6 +620,77 @@
     tps = dists.groupby(['OId', 'HId'])['D'].count().to_dict()
     return ocs, hcs, tps
 
+# def idfrag(df):
+#     oids = df.full['OId'].dropna().unique()
+#     hids = df.full['HId'].dropna().unique()
+#     flat = df.raw.reset_index()
+#     flat = flat[flat['OId'].isin(oids) | flat['HId'].isin(hids)]
+#     gtid_frames = flat.set_index('OId')['FrameId'].groupby('OId').apply(list).to_dict()
+#     interrupt_dict = get_missing_range(gtid_frames)
+#     matches_df = df.full[df.full.Type == 'MATCH'].reset_index()
+#     frame_matches = matches_df.groupby('FrameId').apply(lambda x: dict(zip(x['OId'], x['HId']))).to_dict()
+#     ngt_frag = sum([len(i) for i in list(interrupt_dict.values())])
+#     npred_frag = get_nfrag_matches(interrupt_dict, frame_matches)
+#     return math_util.quiet_divide(npred_frag, ngt_frag)
+
+def ideucl(df):
+    oids = df.full['OId'].dropna().unique()
+    hids = df.full['HId'].dropna().unique()
+    flat = df.raw.reset_index()
+    flat = flat[flat['OId'].isin(oids)].dropna()
+
+    # Compute the total distance travelled by GT
+    id_cent_dict = flat.set_index('OId')['C'].groupby('OId').apply(list).to_dict()
+    id_cent_dist_dict = {int(k): np.sum(np.linalg.norm(np.diff(np.array(v), axis=0), axis=1)) for k, v in id_cent_dict.items()}
+
+    # DF with only MATCH
+    matches_df = df.full[df.full.Type == 'MATCH'].reset_index()
+    matches_df = matches_df[matches_df['OId'].isin(oids)].dropna()
+    ## Important
+    # Object ID -> (Multiple hypothesis -> (Multiple frames centroids))
+    oid_hid_cent_dict = matches_df.set_index(['OId', 'HId'])['C'].groupby(['OId', 'HId']).apply(list).to_dict()
+    oid_hid_dist_dict = {k: np.sum(np.linalg.norm(np.diff(np.array(v), axis=0), axis=1)) for k, v in oid_hid_cent_dict.items()}
+
+    min_oid = min([i[0] for i in oid_hid_dist_dict.keys()])
+    max_oid = max([i[0] for i in oid_hid_dist_dict.keys()])
+    min_hid = min([i[1] for i in oid_hid_dist_dict.keys()])
+    max_hid = max([i[1] for i in oid_hid_dist_dict.keys()])
+    # o_len = int(max_oid) + 1
+    # h_len = int(max_hid) + 1
+    unique_oid = np.unique([i[0] for i in oid_hid_dist_dict.keys()]).tolist()
+    unique_hid = np.unique([i[1] for i in oid_hid_dist_dict.keys()]).tolist()
+    o_len = len(unique_oid)
+    h_len = len(unique_hid)
+    dist_matrix = np.zeros((o_len, h_len))
+    for ((oid, hid), dist) in oid_hid_dist_dict.items():
+        oid_ind = unique_oid.index(oid)
+        hid_ind = unique_hid.index(hid)
+        dist_matrix[oid_ind, hid_ind] = dist
+
+    ## Perform Hungarian for optimal Hyp Traj assignment
+    # scipy's linear_sum_assignment minimises cost, so convert distances to costs
+    # opt_hyp_dist contains GT ID : max dist covered by track
+    opt_hyp_dist = dict.fromkeys(id_cent_dist_dict.keys(), 0.)
+    cost_matrix = np.max(dist_matrix) - dist_matrix
+    # dist_matrix = dist_matrix.tolist()
+    # m = Munkres()
+    # cost_matrix = make_cost_matrix(dist_matrix)
+    rows, cols = hungarian(cost_matrix)
+
+    for (row, col) in zip(rows, cols):
+        value = dist_matrix[row, col]
+        opt_hyp_dist[int(unique_oid[row])] = value
+
+    assert len(opt_hyp_dist.keys()) == len(id_cent_dist_dict.keys())
+    track_scores = [math_util.quiet_divide(opt_hyp_dist[i], id_cent_dist_dict[j]) for (i, j) in zip(list(opt_hyp_dist.keys()), list(id_cent_dist_dict.keys()))]
+    if np.any(np.isnan(np.array(track_scores))):
+        track_scores = np.nan_to_num(np.array(track_scores))
+    return np.average(track_scores)
+
+    #hyp_length = np.sum(list(opt_hyp_dist.values()))
+    #gt_length = np.sum(list(id_cent_dist_dict.values()))
+    #return math_util.quiet_divide(hyp_length, gt_length)
+
 def id_global_assignment(df, ana=None):
     """ID measures: Global min-cost assignment for ID measures."""
@@ -750,11 +848,16 @@
     m.register(idp, formatter='{:.1%}'.format)
     m.register(idr, formatter='{:.1%}'.format)
     m.register(idf1, formatter='{:.1%}'.format)
+    m.register(ideucl, formatter='{:.1%}'.format)
+    # m.register(idfrag, formatter='{:.1%}'.format)
+
     return m
 
 
 motchallenge_metrics = [
+    # 'idfrag',
+    'ideucl',
     'idf1',
     'idp',
     'idr',
diff --git a/motmetrics/mot.py b/motmetrics/mot.py
index a6bf0e76..af141be5 100644
--- a/motmetrics/mot.py
+++ b/motmetrics/mot.py
@@ -20,7 +20,7 @@
 from motmetrics.lap import linear_sum_assignment
 
 _INDEX_FIELDS = ['FrameId', 'Event']
-_EVENT_FIELDS = ['Type', 'OId', 'HId', 'D']
+_EVENT_FIELDS = ['Type', 'OId', 'HId', 'D', 'C']
 
 
 class MOTAccumulator(object):
@@ -126,13 +126,14 @@ def _append_to_indices(self, frameid, eid):
         self._indices['FrameId'].append(frameid)
         self._indices['Event'].append(eid)
 
-    def _append_to_events(self, typestr, oid, hid, distance):
+    def _append_to_events(self, typestr, oid, hid, distance, centroid):
         self._events['Type'].append(typestr)
         self._events['OId'].append(oid)
         self._events['HId'].append(hid)
         self._events['D'].append(distance)
+        self._events['C'].append(centroid)
 
-    def update(self, oids, hids, dists, frameid=None, vf=''):
+    def update(self, oids, hids, dists, frameid=None, vf='', gt_centroids=None):
         """Updates the accumulator with frame specific objects/detections.
 
         This method generates events based on the following algorithm [1]:
@@ -172,7 +173,10 @@ def update(self, oids, hids, dists, frameid=None, vf=''):
         EURASIP Journal on Image and Video Processing 2008.1 (2008): 1-10.
         """
         # pylint: disable=too-many-locals, too-many-statements
-
+        if gt_centroids is None:
+            gt_centroids = np.full(len(oids), np.nan)
+        else:
+            gt_centroids = np.asarray(gt_centroids)
         self.dirty_events = True
         oids = np.asarray(oids)
         oids_masked = np.zeros_like(oids, dtype=np.bool)
@@ -198,7 +202,7 @@
 
         # Add a RAW event simply to ensure the frame is counted.
         self._append_to_indices(frameid, next(eid))
-        self._append_to_events('RAW', np.nan, np.nan, np.nan)
+        self._append_to_events('RAW', np.nan, np.nan, np.nan, np.nan)
 
         # There must be at least one RAW event per object and hypothesis.
         # Record all finite distances as RAW events.
@@ -206,19 +210,20 @@
         valid_dists = dists[valid_i, valid_j]
         for i, j, dist_ij in zip(valid_i, valid_j, valid_dists):
             self._append_to_indices(frameid, next(eid))
-            self._append_to_events('RAW', oids[i], hids[j], dist_ij)
+            self._append_to_events('RAW', oids[i], hids[j], dist_ij, gt_centroids[i])
         # Add a RAW event for objects and hypotheses that were present but did
         # not overlap with anything.
         used_i = np.unique(valid_i)
         used_j = np.unique(valid_j)
         unused_i = np.setdiff1d(np.arange(no), used_i)
         unused_j = np.setdiff1d(np.arange(nh), used_j)
-        for oid in oids[unused_i]:
+        assert len(oids[unused_i]) == len(gt_centroids[unused_i])
+        for oid, ocent in zip(oids[unused_i], gt_centroids[unused_i]):
             self._append_to_indices(frameid, next(eid))
-            self._append_to_events('RAW', oid, np.nan, np.nan)
+            self._append_to_events('RAW', oid, np.nan, np.nan, ocent)
         for hid in hids[unused_j]:
             self._append_to_indices(frameid, next(eid))
-            self._append_to_events('RAW', np.nan, hid, np.nan)
+            self._append_to_events('RAW', np.nan, hid, np.nan, np.nan)
 
         if oids.size * hids.size > 0:
             # 1. Try to re-establish tracks from previous correspondences
@@ -241,7 +246,7 @@
                     self.m[oids[i]] = hids[j]
 
                     self._append_to_indices(frameid, next(eid))
-                    self._append_to_events('MATCH', oids[i], hids[j], dists[i, j])
+                    self._append_to_events('MATCH', oids[i], hids[j], dists[i, j], gt_centroids[i])
 
                     self.last_match[o] = frameid
                     self.hypHistory[h] = frameid
@@ -265,7 +270,7 @@
                     if h not in self.hypHistory:
                         subcat = 'ASCEND'
                         self._append_to_indices(frameid, next(eid))
-                        self._append_to_events(subcat, oids[i], hids[j], dists[i, j])
+                        self._append_to_events(subcat, oids[i], hids[j], dists[i, j], np.nan)
                 # ignore the last condition temporarily
                 is_transfer = (h in self.res_m and
                                self.res_m[h] != o)
@@ -277,9 +282,9 @@
                     if o not in self.last_match:
                         subcat = 'MIGRATE'
                         self._append_to_indices(frameid, next(eid))
-                        self._append_to_events(subcat, oids[i], hids[j], dists[i, j])
+                        self._append_to_events(subcat, oids[i], hids[j], dists[i, j], np.nan)
                     self._append_to_indices(frameid, next(eid))
-                    self._append_to_events(cat2, oids[i], hids[j], dists[i, j])
+                    self._append_to_events(cat2, oids[i], hids[j], dists[i, j], np.nan)
                 if vf != '' and (cat1 != 'MATCH' or cat2 != 'MATCH'):
                     if cat1 == 'SWITCH':
                         vf.write('%s %d %d %d %d %d\n' % (subcat[:2], o, self.last_match[o], self.m[o], frameid, h))
@@ -288,7 +293,7 @@
                 self.hypHistory[h] = frameid
                 self.last_match[o] = frameid
                 self._append_to_indices(frameid, next(eid))
-                self._append_to_events(cat1, oids[i], hids[j], dists[i, j])
+                self._append_to_events(cat1, oids[i], hids[j], dists[i, j], np.nan)
                 oids_masked[i] = True
                 hids_masked[j] = True
                 self.m[o] = h
@@ -297,14 +302,14 @@
 
         # 3. All remaining objects are missed
         for o in oids[~oids_masked]:
             self._append_to_indices(frameid, next(eid))
-            self._append_to_events('MISS', o, np.nan, np.nan)
+            self._append_to_events('MISS', o, np.nan, np.nan, np.nan)
             if vf != '':
                 vf.write('FN %d %d\n' % (frameid, o))
 
         # 4. All remaining hypotheses are false alarms
         for h in hids[~hids_masked]:
             self._append_to_indices(frameid, next(eid))
-            self._append_to_events('FP', np.nan, h, np.nan)
+            self._append_to_events('FP', np.nan, h, np.nan, np.nan)
             if vf != '':
                 vf.write('FP %d %d\n' % (frameid, h))
@@ -337,6 +342,7 @@ def new_event_dataframe():
             ('OId', pd.Series(dtype=float)),  # Object ID or -1 if FP. Using float as missing values will be converted to NaN anyways.
             ('HId', pd.Series(dtype=float)),  # Hypothesis ID or NaN if MISS. Using float as missing values will be converted to NaN anyways.
             ('D', pd.Series(dtype=float)),  # Distance or NaN when FP or MISS
+            ('C', pd.Series(dtype=object))  # GT centroid. Only for RAW for the moment
         ]),
         index=idx
     )
@@ -365,7 +371,9 @@ def new_event_dataframe_with_data(indices, events):
         pd.Series(raw_type, name='Type'),
         pd.Series(events['OId'], dtype=float, name='OId'),
         pd.Series(events['HId'], dtype=float, name='HId'),
-        pd.Series(events['D'], dtype=float, name='D')
+        pd.Series(events['D'], dtype=float, name='D'),
+        pd.Series(events['C'], dtype=object, name='C')
+
     ]
     idx = pd.MultiIndex.from_arrays(
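
Usage sketch (not part of the patch): assuming the patched package is importable as motmetrics, the new gt_centroids argument of MOTAccumulator.update and the registered 'ideucl' metric can be exercised as below; all object/hypothesis IDs, distances and centroid coordinates are made-up illustrative values.

    import numpy as np
    import motmetrics as mm

    acc = mm.MOTAccumulator(auto_id=True)
    # Per frame: GT ids, hypothesis ids, GT-to-hypothesis distance matrix,
    # and one made-up centroid (e.g. [x, y]) per GT object.
    acc.update([1, 2], [1, 2], [[0.1, np.nan], [np.nan, 0.2]],
               gt_centroids=[[10.0, 20.0], [30.0, 40.0]])
    acc.update([1, 2], [1, 2], [[0.1, np.nan], [np.nan, 0.2]],
               gt_centroids=[[12.0, 21.0], [33.0, 42.0]])

    mh = mm.metrics.create()
    summary = mh.compute(acc, metrics=['ideucl', 'idf1'], name='seq')
    print(summary)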