Skip to content

Commit

Permalink
add exit if there are failed tests
Browse files Browse the repository at this point in the history
  • Loading branch information
akashchi committed Sep 19, 2023
1 parent 04c72d5 commit 63f7a6a
Show file tree
Hide file tree
Showing 2 changed files with 49 additions and 34 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/linux.yml
Original file line number Diff line number Diff line change
Expand Up @@ -358,7 +358,7 @@ jobs:
-t=${{ matrix.TEST_TYPE }} \
-w=${{ env.CONFORMANCE_ARTIFACTS_DIR }} \
-f=${{ env.CONFORMANCE_ARTIFACTS_DIR }}/expected_failures_cpu.csv \
${{ github.ref_name == 'master' && '-u' || '' }}
${{ github.ref_name == 'master' && '-u' || '' }}
# Required due to: https://github.com/actions/cache/tree/main/restore#ensuring-proper-restores-and-save-happen-across-the-actions
- name: Rename Expected Failures List
Expand Down
Original file line number Diff line number Diff line change
@@ -1,23 +1,23 @@
# Copyright (C) 2018-2023 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import csv
import os
import urllib.request as ur
from argparse import ArgumentParser
from subprocess import Popen
from shutil import copytree, rmtree
from summarize import create_summary, create_api_summary
from merge_xmls import merge_xml
from run_parallel import TestParallelRunner
from pathlib import Path
from shutil import copytree, rmtree
from subprocess import Popen
from urllib.parse import urlparse

import defusedxml.ElementTree as ET
from urllib.parse import urlparse

import os
import csv
import urllib.request as ur
from merge_xmls import merge_xml
from run_parallel import TestParallelRunner
from summarize import create_summary, create_api_summary
from utils import constants
from utils.conformance_utils import get_logger
from utils import file_utils
from utils.conformance_utils import get_logger

# Module-level logger shared by the whole conformance runner script.
logger = get_logger('conformance_runner')
# Assume the OpenVINO Python API is importable until proven otherwise
# (presumably flipped to False by an import guard elsewhere in the file — TODO confirm).
has_python_api = True
Expand All @@ -34,10 +34,12 @@
# Absolute directory containing this script and the script's own file name.
SCRIPT_DIR_PATH, SCRIPT_NAME = os.path.split(os.path.abspath(__file__))
# Default models list shipped with the script: <script dir>/data/models.lst.
NO_MODEL_CONSTANT = os.path.join(SCRIPT_DIR_PATH, "data", "models.lst")


def get_default_working_dir():
    """Return the fallback working directory: a ``temp`` folder next to this script."""
    script_dir = Path(__file__).parent.resolve()
    return os.path.join(script_dir, "temp")


def parse_arguments():
parser = ArgumentParser()

Expand All @@ -56,27 +58,31 @@ def parse_arguments():
cache_path_help = "Path to the cache file with test_name list sorted by execution time as `.lst` file!"
expected_failures_update_help = "Overwrite expected failures list in case same failures were fixed"

parser.add_argument("-d", "--device", help= device_help, type=str, required=False, default="CPU")
parser.add_argument("-d", "--device", help=device_help, type=str, required=False, default="CPU")
parser.add_argument("-t", "--type", help=type_help, type=str, required=False, default=constants.OP_CONFORMANCE)
parser.add_argument("--gtest_filter", help=gtest_filter_helper, type=str, required=False, default="*")
parser.add_argument("-w", "--working_dir", help=working_dir_help, type=str, required=False, default=get_default_working_dir())
parser.add_argument("-m", "--models_path", help=models_path_help, type=str, required=False, default=NO_MODEL_CONSTANT)
parser.add_argument("-w", "--working_dir", help=working_dir_help, type=str, required=False,
default=get_default_working_dir())
parser.add_argument("-m", "--models_path", help=models_path_help, type=str, required=False,
default=NO_MODEL_CONSTANT)
parser.add_argument("-ov", "--ov_path", help=ov_help, type=str, required=False, default="")
parser.add_argument("-j", "--workers", help=workers_help, type=int, required=False, default=os.cpu_count()-1)
parser.add_argument("-j", "--workers", help=workers_help, type=int, required=False, default=os.cpu_count() - 1)
parser.add_argument("-c", "--ov_config_path", help=ov_config_path_helper, type=str, required=False, default="")
parser.add_argument("-s", "--dump_graph", help=dump_graph_help, type=int, required=False, default=0)
parser.add_argument("-sm", "--special_mode", help=special_mode_help, type=str, required=False, default="")
parser.add_argument("-p", "--parallel_devices", help=parallel_help, type=bool, required=False, default=False)
parser.add_argument("-f", "--expected_failures", help=expected_failures_help, type=str, required=False, default="")
parser.add_argument("-u", "--expected_failures_update", help=expected_failures_update_help, type=bool, required=False, default=False)
parser.add_argument("-u", "--expected_failures_update", help=expected_failures_update_help, type=bool,
required=False, default=False)
parser.add_argument("--cache_path", help=cache_path_help, type=str, required=False, default="")

return parser.parse_args()


class Conformance:
def __init__(self, device:str, model_path:os.path, ov_path:os.path, type:str, workers:int,
gtest_filter:str, working_dir:os.path, ov_config_path:os.path, special_mode:str,
cache_path:str, parallel_devices:bool, expected_failures_file: str,
def __init__(self, device: str, model_path: os.path, ov_path: os.path, type: str, workers: int,
gtest_filter: str, working_dir: os.path, ov_config_path: os.path, special_mode: str,
cache_path: str, parallel_devices: bool, expected_failures_file: str,
expected_failures_update: bool):
self._device = device
self._model_path = model_path
Expand Down Expand Up @@ -107,7 +113,8 @@ def __init__(self, device:str, model_path:os.path, ov_path:os.path, type:str, wo
logger.error(f'Incorrect value to set API scope: {special_mode}. Please check to get possible values')
exit(-1)
else:
logger.error(f"Incorrect conformance type: {type}. Please use '{constants.OP_CONFORMANCE}' or '{constants.API_CONFORMANCE}'")
logger.error(
f"Incorrect conformance type: {type}. Please use '{constants.OP_CONFORMANCE}' or '{constants.API_CONFORMANCE}'")
exit(-1)
self._type = type
self._workers = workers
Expand All @@ -124,6 +131,8 @@ def __init__(self, device:str, model_path:os.path, ov_path:os.path, type:str, wo
logger.warning(f"Expected failures testlist `{self._expected_failures_file}` does not exist!")
self._expected_failures_update = expected_failures_update

self.is_successful_run = False

def __download_models(self, url_to_download, path_to_save):
_, file_name = os.path.split(urlparse(url_to_download).path)
download_path = os.path.join(path_to_save, file_name)
Expand All @@ -138,11 +147,10 @@ def __download_models(self, url_to_download, path_to_save):
logger.error(f"{download_path} is not a file. Exit!")
exit(-1)
if file_utils.is_archieve(download_path):
logger.info(f"The file {download_path} is archieve. Should be unzip to {path_to_save}")
logger.info(f"The file {download_path} is archived. Should be unzipped to {path_to_save}")
return file_utils.unzip_archieve(download_path, path_to_save)
return download_path


def __dump_subgraph(self):
subgraph_dumper_path = os.path.join(self._ov_path, f'{SUBGRAPH_DUMPER_BIN_NAME}{constants.OS_BIN_FILE_EXT}')
if not os.path.isfile(subgraph_dumper_path):
Expand All @@ -153,7 +161,9 @@ def __dump_subgraph(self):
logger.info(f"Remove directory {conformance_ir_path}")
rmtree(conformance_ir_path)
os.mkdir(conformance_ir_path)
self._model_path = file_utils.prepare_filelist(self._model_path, ["*.onnx", "*.pdmodel", "*.__model__", "*.pb", "*.xml", "*.tflite"])
self._model_path = file_utils.prepare_filelist(self._model_path,
["*.onnx", "*.pdmodel", "*.__model__", "*.pb", "*.xml",
"*.tflite"])
logger.info(f"Stating model dumping from {self._model_path}")
cmd = f'{subgraph_dumper_path} --input_folders="{self._model_path}" --output_folder="{conformance_ir_path}"'
process = Popen(cmd, shell=True)
Expand All @@ -172,11 +182,12 @@ def __dump_subgraph(self):
save_rel_weights(Path(self._model_path), op_rel_weight)
logger.info(f"All conformance IRs in {self._model_path} were renamed based on hash")
else:
logger.warning("The OV Python was not built or Environment was not updated to requirments. Skip the step to rename Conformance IR based on a hash")

logger.warning(
"The OV Python was not built or Environment was not updated to requirements. "
"Skip the step to rename Conformance IR based on a hash")

@staticmethod
def __get_failed_test_from_csv(csv_file:str):
def __get_failed_test_from_csv(csv_file: str):
failures = set()
with open(csv_file, "r") as failures_file:
for row in csv.reader(failures_file, delimiter=','):
Expand Down Expand Up @@ -212,7 +223,7 @@ def __run_conformance(self):
conformance_path = os.path.join(self._ov_path, f'{API_CONFORMANCE_BIN_NAME}{constants.OS_BIN_FILE_EXT}')

if not os.path.isfile(conformance_path):
logger.error(f"{conformance_path} is not exist!")
logger.error(f"{conformance_path} does not exist!")
exit(-1)

logs_dir = os.path.join(self._working_dir, f'{self._device}_logs')
Expand Down Expand Up @@ -241,21 +252,23 @@ def __run_conformance(self):
is_parallel_devices=self._is_parallel_over_devices,
excluded_tests=self._expected_failures if not self._expected_failures_update else set())
conformance.run()
conformance.postprocess_logs()
self.is_successful_run = conformance.postprocess_logs()

if os.path.isfile(self._expected_failures_file):
self.__check_expected_failures()

final_report_name = f'report_{self._type.lower()}'
merge_xml([parallel_report_dir], report_dir, final_report_name, self._type, True)

logger.info(f"Conformance is successful. XML reportwas saved to {report_dir}")
return (os.path.join(report_dir, final_report_name + ".xml"), report_dir)
logger.info(f"Conformance is successful. XML report was saved to {report_dir}")
return os.path.join(report_dir, final_report_name + ".xml"), report_dir

def __summarize(self, xml_report_path:os.path, report_dir: os.path):
def __summarize(self, xml_report_path: os.path, report_dir: os.path):
if self._type == constants.OP_CONFORMANCE:
summary_root = ET.parse(xml_report_path).getroot()
rel_weights_path = os.path.join(self._model_path, constants.REL_WEIGHTS_FILENAME.replace(constants.REL_WEIGHTS_REPLACE_STR, self._special_mode))
rel_weights_path = os.path.join(self._model_path,
constants.REL_WEIGHTS_FILENAME.replace(constants.REL_WEIGHTS_REPLACE_STR,
self._special_mode))
create_summary(summary_root, report_dir, [], "", "", True, True, rel_weights_path)
else:
create_api_summary([xml_report_path], report_dir, [], "", "")
Expand Down Expand Up @@ -303,14 +316,15 @@ def run(self, dump_models: bool):
if dump_models:
self.__dump_subgraph()
if not os.path.exists(self._model_path):
logger.error(f"The model direstory {self._model_path} does not exist!")
logger.error(f"The model directory {self._model_path} does not exist!")
exit(-1)
if not os.path.exists(self._model_path):
logger.error(f"Directory {self._model_path} does not exist")
exit(-1)
xml_report, report_dir = self.__run_conformance()
self.__summarize(xml_report, report_dir)


if __name__ == "__main__":
args = parse_arguments()
conformance = Conformance(args.device, args.models_path,
Expand All @@ -321,4 +335,5 @@ def run(self, dump_models: bool):
args.parallel_devices, args.expected_failures,
args.expected_failures_update)
conformance.run(args.dump_graph)

if not conformance.is_successful_run:
exit(-1)

0 comments on commit 63f7a6a

Please sign in to comment.