Skip to content

Commit

Permalink
Add script to process pred result to submission format, recover the sort ops for pred bboxes when calculating grounding AP
Browse files Browse the repository at this point in the history
  • Loading branch information
Tai-Wang committed Mar 22, 2024
1 parent 41c91a9 commit 30c4b89
Show file tree
Hide file tree
Showing 3 changed files with 124 additions and 78 deletions.
4 changes: 4 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -136,3 +136,7 @@ demo/data
# logs and checkpoints
work_dirs/
tools/*.sh

# test submission results
*.pkl
*.json
159 changes: 81 additions & 78 deletions tools/cal_results.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,17 @@
# Copyright (c) OpenMMLab. All rights reserved.
# Copyright (c) OpenRobotLab. All rights reserved.
import argparse

import mmengine
from mmengine.logging import print_log
from terminaltables import AsciiTable

from embodiedscan.structures import EulerDepthInstance3DBoxes


def parse_args():
parser = argparse.ArgumentParser(
description='MMDet3D test (and eval) a model')
parser.add_argument('results_file', help='the results json file')
parser.add_argument('results_file', help='the results pkl file')
parser.add_argument('ann_file', help='annoations json file')

parser.add_argument('--iou_thr',
Expand All @@ -20,97 +22,98 @@ def parse_args():
args = parser.parse_args()
return args


def ground_eval(gt_annos, det_annos, iou_thr):
    """Evaluate visual grounding accuracy at several IoU thresholds.

    For each sample, the top-10 predicted boxes (ranked by confidence) are
    compared against the ground-truth box; the sample counts as found at a
    threshold ``t`` when any of those boxes has IoU > ``t`` with the GT.
    Hits are bucketed by the GT flags (view-dep/indep, easy/hard,
    unique/multi) and a result table is printed per threshold.

    Args:
        gt_annos (list[dict]): Per-sample annotations; each item carries an
            'ann_info' dict with 'gt_bboxes_3d' and the boolean flags
            'is_view_dep', 'is_hard' and 'is_unique'.
        det_annos (list[dict]): Per-sample predictions with 'bboxes_3d' and
            'scores_3d'.
        iou_thr (list[float]): IoU thresholds to evaluate at.

    Returns:
        dict: Mapping '<bucket>@<thr>' -> accuracy in [0, 1].
    """
    assert len(det_annos) == len(gt_annos)

    pred = {}
    gt = {}

    object_types = [
        'Easy', 'Hard', 'View-Dep', 'View-Indep', 'Unique', 'Multi', 'Overall'
    ]

    # Initialize counters. gt starts at a tiny epsilon so that an empty
    # bucket never divides by zero when the accuracy is computed below.
    for t in iou_thr:
        for object_type in object_types:
            pred.update({object_type + '@' + str(t): 0})
            gt.update({object_type + '@' + str(t): 1e-14})

    for sample_id in range(len(det_annos)):
        det_anno = det_annos[sample_id]
        gt_anno = gt_annos[sample_id]['ann_info']

        bboxes = det_anno['bboxes_3d']
        gt_bboxes = gt_anno['gt_bboxes_3d']
        bboxes = EulerDepthInstance3DBoxes(bboxes, origin=(0.5, 0.5, 0.5))
        gt_bboxes = EulerDepthInstance3DBoxes(gt_bboxes,
                                              origin=(0.5, 0.5, 0.5))
        # Rebuild scores on the same device/dtype as the box tensor.
        scores = bboxes.tensor.new_tensor(
            det_anno['scores_3d'])  # (num_query, )

        view_dep = gt_anno['is_view_dep']
        hard = gt_anno['is_hard']
        unique = gt_anno['is_unique']

        # Rank predictions by confidence and keep the 10 best: the raw
        # prediction order is not guaranteed to be score-sorted.
        box_index = scores.argsort(dim=-1, descending=True)[:10]
        top_bboxes = bboxes[box_index]

        iou = top_bboxes.overlaps(top_bboxes, gt_bboxes)  # (num_query, 1)

        for t in iou_thr:
            threshold = iou > t
            found = int(threshold.any())
            if view_dep:
                gt['View-Dep@' + str(t)] += 1
                pred['View-Dep@' + str(t)] += found
            else:
                gt['View-Indep@' + str(t)] += 1
                pred['View-Indep@' + str(t)] += found
            if hard:
                gt['Hard@' + str(t)] += 1
                pred['Hard@' + str(t)] += found
            else:
                gt['Easy@' + str(t)] += 1
                pred['Easy@' + str(t)] += found
            if unique:
                gt['Unique@' + str(t)] += 1
                pred['Unique@' + str(t)] += found
            else:
                gt['Multi@' + str(t)] += 1
                pred['Multi@' + str(t)] += found

            gt['Overall@' + str(t)] += 1
            pred['Overall@' + str(t)] += found

    header = ['Type']
    header.extend(object_types)
    ret_dict = {}

    for t in iou_thr:
        table_columns = [['results']]
        for object_type in object_types:
            metric = object_type + '@' + str(t)
            # max(..., 1) promotes the epsilon denominator of an empty
            # bucket to 1, so its accuracy reads as ~0 instead of blowing up.
            value = pred[metric] / max(gt[metric], 1)
            ret_dict[metric] = value
            table_columns.append([f'{value:.4f}'])

        # NOTE(review): one table is printed per threshold; indentation was
        # lost in extraction — confirm upstream that this stays in the loop.
        table_data = [header]
        table_rows = list(zip(*table_columns))
        table_data += table_rows
        table = AsciiTable(table_data)
        table.inner_footing_row_border = True
        print_log('\n' + table.table)

    return ret_dict


def main():
    """Load predictions and annotations, then run the grounding eval."""
    args = parse_args()
    # The results pkl wraps the per-sample predictions under 'results'
    # (see tools/submit_results.py for the submission prototype).
    preds = mmengine.load(args.results_file)['results']
    annotations = mmengine.load(args.ann_file)
    # Predictions and annotations must align one-to-one by index.
    assert len(preds) == len(annotations)
    ground_eval(annotations, preds, args.iou_thr)


if __name__ == '__main__':
    main()
39 changes: 39 additions & 0 deletions tools/submit_results.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
import mmengine

# Edit the settings below before building your submission file.
results_file = './test_results_mini.json'
submit_file = './submission_mini.pkl'

method = 'Baseline'
team = 'EmbodiedScan'
authors = 'EmbodiedScan Team'
# NOTE(review): this address looks redacted by page extraction — restore the
# real contact e-mail before submitting.
email = '[email protected]'
institution = 'Shanghai AI Laboratory'
country = 'China'

# Expected submission layout:
# dict {
#   'method': <str> -- name of the method
#   'team': <str> -- team name, identical to the Google Form
#   'authors': <list> -- list of str, authors
#   'e-mail': <str> -- contact e-mail address
#   'institution / company': <str> -- institution or company
#   'country / region': <str> -- country or region
#   'results': {<frame_token>: single-frame prediction (dict or list), ...}
# }
# NOTE(review): the prototype names the keys 'institution / company' and
# 'country / region' but the payload uses 'institution' and 'country' —
# confirm which spelling the submission server expects.
results = mmengine.load(results_file)

fields = [
    ('method', method),
    ('team', team),
    ('authors', authors),
    ('e-mail', email),
    ('institution', institution),
    ('country', country),
    ('results', results),
]
submit_data = dict(fields)
mmengine.dump(submit_data, submit_file)

0 comments on commit 30c4b89

Please sign in to comment.