From 857f3c8ccda834f0eda0aaa2e5082c798498833b Mon Sep 17 00:00:00 2001
From: Arjun Suresh
Date: Tue, 29 Oct 2024 03:02:55 +0530
Subject: [PATCH] Update mlperf_utils for latest MLPerf inference submission
 checker changes

---
 script/app-mlperf-inference/customize.py          |  2 +-
 script/get-mlperf-inference-utils/mlperf_utils.py | 14 ++++++++++----
 2 files changed, 11 insertions(+), 5 deletions(-)

diff --git a/script/app-mlperf-inference/customize.py b/script/app-mlperf-inference/customize.py
index d0587b80db..64cb42a8ed 100644
--- a/script/app-mlperf-inference/customize.py
+++ b/script/app-mlperf-inference/customize.py
@@ -242,7 +242,7 @@ def postprocess(i):
 
         if os.path.exists(env['CM_MLPERF_USER_CONF']):
             shutil.copy(env['CM_MLPERF_USER_CONF'], 'user.conf')
-        result, valid, power_result = mlperf_utils.get_result_from_log(env['CM_MLPERF_LAST_RELEASE'], model, scenario, output_dir, mode)
+        result, valid, power_result = mlperf_utils.get_result_from_log(env['CM_MLPERF_LAST_RELEASE'], model, scenario, output_dir, mode, env.get('CM_MLPERF_INFERENCE_SOURCE_VERSION'))
         power = None
         power_efficiency = None
         if power_result:
diff --git a/script/get-mlperf-inference-utils/mlperf_utils.py b/script/get-mlperf-inference-utils/mlperf_utils.py
index c4381e552e..e09448caee 100644
--- a/script/get-mlperf-inference-utils/mlperf_utils.py
+++ b/script/get-mlperf-inference-utils/mlperf_utils.py
@@ -4,7 +4,7 @@
 
 from log_parser import MLPerfLog
 
-def get_result_from_log(version, model, scenario, result_path, mode):
+def get_result_from_log(version, model, scenario, result_path, mode, inference_src_version = None):
 
     config = checker.Config(
         version,
@@ -20,7 +20,14 @@ def get_result_from_log(version, model, scenario, result_path, mode):
     valid = {}
     if mode == "performance":
         has_power = os.path.exists(os.path.join(result_path, "..", "power"))
-        result_ = checker.get_performance_metric(config, mlperf_model, result_path, scenario, None, None, has_power)
+        version_tuple = None
+        if inference_src_version:
+            version_tuple = tuple(map(int, inference_src_version.split('.')))
+
+        if version_tuple and version_tuple >= (4,1,22):
+            result_ = checker.get_performance_metric(config, mlperf_model, result_path, scenario)
+        else:
+            result_ = checker.get_performance_metric(config, mlperf_model, result_path, scenario, None, None, has_power)
         mlperf_log = MLPerfLog(os.path.join(result_path, "mlperf_log_detail.txt"))
         if (
             "result_validity" not in mlperf_log.get_keys()
@@ -133,7 +140,7 @@ def get_accuracy_metric(config, model, path):
 
     return is_valid, acc_results, acc_targets, acc_limits
 
-def get_result_string(version, model, scenario, result_path, has_power, sub_res, division="open", system_json=None, model_precision="fp32"):
+def get_result_string(version, model, scenario, result_path, has_power, sub_res, division="open", system_json=None, model_precision="fp32", inference_src_version = None):
 
     config = checker.Config(
         version,
@@ -152,7 +159,6 @@ def get_result_string(version, model, scenario, result_path, has_power, sub_res,
 
     inferred = False
     result = {}
-    inference_src_version = os.environ.get('CM_MLPERF_INFERENCE_SOURCE_VERSION', '')
     version_tuple = None
     if inference_src_version:
         version_tuple = tuple(map(int, inference_src_version.split('.')))