forked from experiencor/keras-yolo3
evaluate.py

#! /usr/bin/env python

import argparse
import os
import numpy as np
import json

from voc import parse_voc_annotation
from yolo import create_yolov3_model
from generator import BatchGenerator
from utils.utils import normalize, evaluate
from keras.callbacks import EarlyStopping, ModelCheckpoint
from keras.optimizers import Adam
from keras.models import load_model

def _main_(args):
    config_path = args.conf

    with open(config_path) as config_buffer:
        config = json.loads(config_buffer.read())

    ###############################
    # Create the validation generator
    ###############################
    valid_ints, labels = parse_voc_annotation(
        config['valid']['valid_annot_folder'],
        config['valid']['valid_image_folder'],
        config['valid']['cache_name'],
        config['model']['labels']
    )

    # fall back to the labels found in the annotations if none are listed in the config
    labels = labels.keys() if len(config['model']['labels']) == 0 else config['model']['labels']
    labels = sorted(labels)

    valid_generator = BatchGenerator(
        instances         = valid_ints,
        anchors           = config['model']['anchors'],
        labels            = labels,
        downsample        = 32,   # ratio between the network input size and output size, 32 for YOLOv3
        max_box_per_image = 0,
        batch_size        = config['train']['batch_size'],
        min_net_size      = config['model']['min_input_size'],
        max_net_size      = config['model']['max_input_size'],
        shuffle           = True,
        jitter            = 0.0,  # no augmentation jitter during evaluation
        norm              = normalize
    )

    ###############################
    # Load the model and do evaluation
    ###############################
    # make only the GPUs listed in the config visible to the backend
    os.environ['CUDA_VISIBLE_DEVICES'] = config['train']['gpus']

    infer_model = load_model(config['train']['saved_weights_name'])

    # compute the average precision (AP) for each class
    average_precisions = evaluate(infer_model, valid_generator)

    # print the per-class AP and the mean over all classes (mAP)
    for label, average_precision in average_precisions.items():
        print(labels[label] + ': {:.4f}'.format(average_precision))
    print('mAP: {:.4f}'.format(sum(average_precisions.values()) / len(average_precisions)))

if __name__ == '__main__':
    argparser = argparse.ArgumentParser(description='Evaluate YOLO_v3 model on any dataset')
    argparser.add_argument('-c', '--conf', help='path to configuration file')

    args = argparser.parse_args()
    _main_(args)
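
Usage note: the script is driven entirely by the JSON configuration file passed via -c/--conf. The sketch below is a minimal, assumed example that contains only the keys evaluate.py actually reads; all paths, anchor values, and label names are placeholders for illustration, not values shipped with the repository.

# minimal_config.py -- write an illustrative config for evaluate.py
# (all concrete values below are assumptions; adjust them to your dataset)
import json

config = {
    "model": {
        "min_input_size": 288,
        "max_input_size": 448,
        # flat list of anchor widths/heights; these numbers are placeholders
        "anchors": [10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326],
        "labels": ["raccoon"]     # leave empty ([]) to use the labels found in the annotations
    },
    "valid": {
        "valid_image_folder": "data/valid/images/",   # placeholder path
        "valid_annot_folder": "data/valid/annots/",   # placeholder path (VOC-style XML)
        "cache_name": "valid_dataset.pkl"
    },
    "train": {
        "batch_size": 4,
        "gpus": "0",                                  # becomes CUDA_VISIBLE_DEVICES
        "saved_weights_name": "trained_model.h5"      # placeholder Keras model file
    }
}

with open("config.json", "w") as handle:
    json.dump(config, handle, indent=4)

# then evaluate with:
#   python evaluate.py -c config.json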