From 44ce8fdf790eb56e9f93ac30486a7da7fff9f1cd Mon Sep 17 00:00:00 2001 From: Sunyanan Choochotkaew Date: Wed, 21 Aug 2024 17:39:44 +0900 Subject: [PATCH 1/2] feat: get type in /best-models API Signed-off-by: Sunyanan Choochotkaew --- src/kepler_model/server/model_server.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/kepler_model/server/model_server.py b/src/kepler_model/server/model_server.py index 46add84a..6c975b1f 100644 --- a/src/kepler_model/server/model_server.py +++ b/src/kepler_model/server/model_server.py @@ -189,6 +189,7 @@ def get_available_models(): fg = request.args.get("fg") ot = request.args.get("ot") energy_source = request.args.get("source") + node_type = request.args.get("type") filter = request.args.get("filter") try: @@ -205,6 +206,11 @@ def get_available_models(): if energy_source is None or "rapl" in energy_source: energy_source = "rapl-sysfs" + if node_type is None: + node_type = any_node_type + else: + node_type = int(node_type) + if filter is None: filters = dict() else: @@ -212,11 +218,13 @@ def get_available_models(): model_names = dict() for output_type in output_types: + logger.debug(f"Searching output type {output_type}") model_names[output_type.name] = dict() for fg in valid_fgs: + logger.debug(f"Searching feature group {fg}") valid_groupath = get_model_group_path(model_toppath, output_type, fg, energy_source, pipeline_name=pipelineName[energy_source]) if os.path.exists(valid_groupath): - best_candidate, _ = select_best_model(None, valid_groupath, filters, energy_source) + best_candidate, _ = select_best_model(None, valid_groupath, filters, energy_source, node_type=node_type) if best_candidate is None: continue model_names[output_type.name][fg.name] = best_candidate["model_name"] From d414cf1231dc45f62d508c785d1851cba96f85d0 Mon Sep 17 00:00:00 2001 From: Sunyanan Choochotkaew Date: Wed, 21 Aug 2024 17:49:55 +0900 Subject: [PATCH 2/2] fix: pass source, node_type in list_all_available function 
Signed-off-by: Sunyanan Choochotkaew --- src/kepler_model/estimate/model_server_connector.py | 10 ++++++++-- tests/estimator_model_request_test.py | 2 +- tests/weight_model_request_test.py | 4 ++-- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/src/kepler_model/estimate/model_server_connector.py b/src/kepler_model/estimate/model_server_connector.py index a50ecae2..473a1143 100644 --- a/src/kepler_model/estimate/model_server_connector.py +++ b/src/kepler_model/estimate/model_server_connector.py @@ -68,11 +68,17 @@ def make_request(power_request): return unpack(power_request.energy_source, output_type, response) -def list_all_models(): +def list_all_models(energy_source=None, node_type=None): if not is_model_server_enabled(): return dict() try: - response = requests.get(get_model_server_list_endpoint()) + endpoint = get_model_server_list_endpoint() + params = {} + if energy_source: + params["source"] = energy_source + if node_type: + params["type"] = node_type + response = requests.get(endpoint, params=params) except Exception as err: print(f"cannot list model: {err}") return dict() diff --git a/tests/estimator_model_request_test.py b/tests/estimator_model_request_test.py index 80e82329..91cab045 100644 --- a/tests/estimator_model_request_test.py +++ b/tests/estimator_model_request_test.py @@ -40,7 +40,7 @@ def test_model_request(): energy_source = test_energy_source # test getting model from server os.environ["MODEL_SERVER_ENABLE"] = "true" - available_models = list_all_models() + available_models = list_all_models(energy_source=energy_source) assert len(available_models) > 0, "must have more than one available models" print("Available Models:", available_models) for output_type_name, valid_fgs in available_models.items(): diff --git a/tests/weight_model_request_test.py b/tests/weight_model_request_test.py index 7765e19f..6ca49685 100644 --- a/tests/weight_model_request_test.py +++ b/tests/weight_model_request_test.py @@ -29,11 +29,11 @@ 
os.environ["MODEL_SERVER_ENABLE"] = "true" energy_source = test_energy_source - available_models = list_all_models() + available_models = list_all_models(energy_source=energy_source) while len(available_models) == 0: time.sleep(1) print("wait for kepler model server response") - available_models = list_all_models() + available_models = list_all_models(energy_source=energy_source) for output_type_name, valid_fgs in available_models.items(): output_type = ModelOutputType[output_type_name]