From 3aca403b8d4fd9de18952d6f90c13c9e2b03eaef Mon Sep 17 00:00:00 2001
From: yiliu30
Date: Tue, 11 Jun 2024 17:34:13 +0800
Subject: [PATCH 1/2] fixed the empty options

Signed-off-by: yiliu30

---
 neural_compressor/common/base_config.py | 1 +
 test/3x/common/test_common.py           | 5 +++++
 2 files changed, 6 insertions(+)

diff --git a/neural_compressor/common/base_config.py b/neural_compressor/common/base_config.py
index 35b0f532738..92bbe6e5fad 100644
--- a/neural_compressor/common/base_config.py
+++ b/neural_compressor/common/base_config.py
@@ -377,6 +377,7 @@ def expand(self) -> List[BaseConfig]:
         if len(tuning_param_list) == 0:
             config_list = [config]
         else:
+            tuning_param_list = list(filter(lambda x: len(x.options) > 0, tuning_param_list))
             tuning_param_name_lst = [tuning_param.name for tuning_param in tuning_param_list]
             for params_values in product(*[tuning_param.options for tuning_param in tuning_param_list]):
                 tuning_param_pair = dict(zip(tuning_param_name_lst, params_values))
diff --git a/test/3x/common/test_common.py b/test/3x/common/test_common.py
index 4af0e1a276d..90a5db3c315 100644
--- a/test/3x/common/test_common.py
+++ b/test/3x/common/test_common.py
@@ -277,6 +277,11 @@ def test_config_expand_complex_tunable_type(self):
         for i in range(len(configs_list)):
             self.assertEqual(configs_list[i].target_op_type_list, target_op_type_list_options[i])
 
+    def test_config_expand_with_empty_options(self):
+        configs = FakeAlgoConfig(weight_dtype=["int", "float32"], weight_bits=[])
+        configs_list = configs.expand()
+        self.assertEqual(len(configs_list), 2)
+
     def test_mixed_two_algos(self):
         model = FakeModel()
         OP1_NAME = "OP1_NAME"

From 0ae801388c641dbbbc754da7dcf1bb2f475cb8e1 Mon Sep 17 00:00:00 2001
From: yiliu30
Date: Tue, 11 Jun 2024 17:35:27 +0800
Subject: [PATCH 2/2] --amend

Signed-off-by: yiliu30

---
 neural_compressor/common/base_config.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/neural_compressor/common/base_config.py b/neural_compressor/common/base_config.py
index 92bbe6e5fad..3f65a2ea9c0 100644
--- a/neural_compressor/common/base_config.py
+++ b/neural_compressor/common/base_config.py
@@ -377,6 +377,8 @@ def expand(self) -> List[BaseConfig]:
         if len(tuning_param_list) == 0:
             config_list = [config]
         else:
+            # A `TuningParam` instance with no options makes the product of all options empty.
+            # Filter out `TuningParam` instances with no options before expanding.
             tuning_param_list = list(filter(lambda x: len(x.options) > 0, tuning_param_list))
             tuning_param_name_lst = [tuning_param.name for tuning_param in tuning_param_list]
             for params_values in product(*[tuning_param.options for tuning_param in tuning_param_list]):
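
For context, here is a minimal standalone sketch (not part of the patch; the names `options_per_param` and `non_empty` are illustrative, not taken from neural_compressor) of why `itertools.product` yields no combinations when any tuning parameter has an empty options list, which is what the filter added above guards against:

    from itertools import product

    # Illustrative stand-in for the options collected per tuning parameter.
    options_per_param = {
        "weight_dtype": ["int", "float32"],
        "weight_bits": [],  # empty options, as in the new test case
    }

    # product() over any empty iterable produces zero combinations,
    # so the config would silently expand to an empty list.
    assert list(product(*options_per_param.values())) == []

    # Dropping parameters with no options (as the fix does) restores the
    # expansion over the remaining parameters.
    non_empty = {k: v for k, v in options_per_param.items() if len(v) > 0}
    combos = list(product(*non_empty.values()))
    assert combos == [("int",), ("float32",)]
    print(len(combos))  # 2, matching test_config_expand_with_empty_options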