From 0255749ad835a9bb57a334c0350c90fa0a533a8f Mon Sep 17 00:00:00 2001
From: Tyler Gu
Date: Thu, 5 Sep 2024 17:04:46 -0500
Subject: [PATCH] Remove deadcode

Signed-off-by: Tyler Gu
---
 acto/__main__.py    |  4 +--
 acto/engine.py      | 85 +++++++--------------------------------------
 acto/input/input.py |  2 --
 3 files changed, 14 insertions(+), 77 deletions(-)

diff --git a/acto/__main__.py b/acto/__main__.py
index 0ca73a0166..64abe7ca85 100644
--- a/acto/__main__.py
+++ b/acto/__main__.py
@@ -137,7 +137,6 @@
         workdir_path=args.workdir_path,
         operator_config=config,
         cluster_runtime="KIND",
-        preload_images_=None,
         context_file=context_cache,
         helper_crd=args.helper_crd,
         num_workers=args.num_workers,
@@ -147,13 +146,12 @@
         is_reproduce=False,
         input_model=DeterministicInputModel,
         apply_testcase_f=apply_testcase_f,
-        delta_from=None,
         focus_fields=config.focus_fields,
     )
     generation_time = datetime.now()
     logger.info("Acto initialization finished in %s", generation_time - start_time)
     if not args.learn:
-        acto.run(modes=["normal"])
+        acto.run()
         normal_finish_time = datetime.now()
         logger.info("Acto normal run finished in %s", normal_finish_time - start_time)
         logger.info("Start post processing steps")
diff --git a/acto/engine.py b/acto/engine.py
index 5086cae2a6..09ee5c5936 100644
--- a/acto/engine.py
+++ b/acto/engine.py
@@ -751,7 +751,6 @@ def __init__(
         workdir_path: str,
         operator_config: OperatorConfig,
         cluster_runtime: str,
-        preload_images_: Optional[list],
         context_file: str,
         helper_crd: Optional[str],
         num_workers: int,
@@ -761,7 +760,6 @@ def __init__(
         is_reproduce: bool,
         input_model: type[DeterministicInputModel],
         apply_testcase_f: Callable,
-        delta_from: Optional[str] = None,
         mount: Optional[list] = None,
         focus_fields: Optional[list] = None,
         acto_namespace: int = 0,
@@ -820,10 +818,6 @@ def __init__(
             analysis_only=analysis_only,
         )
 
-        # Add additional preload images from arguments
-        if preload_images_ is not None:
-            self.context["preload_images"].update(preload_images_)
-
         self.input_model: DeterministicInputModel = input_model(
             crd=self.context["crd"]["body"],
             seed_input=self.seed,
@@ -835,7 +829,7 @@ def __init__(
             custom_module_path=operator_config.custom_module,
         )
 
-        self.sequence_base = 20 if delta_from else 0
+        self.sequence_base = 0
 
         if operator_config.custom_oracle is not None:
             module = importlib.import_module(operator_config.custom_oracle)
@@ -846,11 +840,8 @@ def __init__(
             self.custom_on_init = None
 
         # Generate test cases
-        testplan_path = None
-        if delta_from is not None:
-            testplan_path = os.path.join(delta_from, "test_plan.json")
         self.test_plan = self.input_model.generate_test_plan(
-            testplan_path, focus_fields=focus_fields
+            focus_fields=focus_fields
         )
         with open(
            os.path.join(self.workdir_path, "test_plan.json"),
@@ -949,7 +940,7 @@ def __learn(self, context_file, helper_crd, analysis_only=False):
             )
             if not deployed:
                 raise RuntimeError(
-                    f"Failed to deploy operator due to max retry exceed"
+                    "Failed to deploy operator due to max retry exceed"
                 )
 
             apiclient = kubernetes_client(learn_kubeconfig, learn_context_name)
@@ -1037,9 +1028,7 @@ def __learn(self, context_file, helper_crd, analysis_only=False):
                 sort_keys=True,
             )
 
-    def run(
-        self, modes: list = ["normal", "overspecified", "copiedover"]
-    ) -> list[OracleResults]:
+    def run(self) -> list[OracleResults]:
         """Run the test cases"""
         logger = get_thread_logger(with_prefix=True)
 
@@ -1087,63 +1076,19 @@ def run(
             )
             runners.append(runner)
 
-        if "normal" in modes:
-            threads = []
-            for runner in runners:
-                t = threading.Thread(
-                    target=runner.run, args=[errors, InputModel.NORMAL]
-                )
-                t.start()
-                threads.append(t)
+        threads = []
+        for runner in runners:
+            t = threading.Thread(
+                target=runner.run, args=[errors, InputModel.NORMAL]
+            )
+            t.start()
+            threads.append(t)
 
-            for t in threads:
-                t.join()
+        for t in threads:
+            t.join()
 
         normal_time = time.time()
 
-        if "overspecified" in modes:
-            threads = []
-            for runner in runners:
-                t = threading.Thread(
-                    target=runner.run, args=([errors, InputModel.OVERSPECIFIED])
-                )
-                t.start()
-                threads.append(t)
-
-            for t in threads:
-                t.join()
-
-        overspecified_time = time.time()
-
-        if "copiedover" in modes:
-            threads = []
-            for runner in runners:
-                t = threading.Thread(
-                    target=runner.run, args=([errors, InputModel.COPIED_OVER])
-                )
-                t.start()
-                threads.append(t)
-
-            for t in threads:
-                t.join()
-
-        additional_semantic_time = time.time()
-
-        if InputModel.ADDITIONAL_SEMANTIC in modes:
-            threads = []
-            for runner in runners:
-                t = threading.Thread(
-                    target=runner.run,
-                    args=([errors, InputModel.ADDITIONAL_SEMANTIC]),
-                )
-                t.start()
-                threads.append(t)
-
-            for t in threads:
-                t.join()
-
-        end_time = time.time()
-
         num_total_failed = 0
         for runner in runners:
             for testcases in runner.discarded_testcases.values():
@@ -1151,10 +1096,6 @@ def run(
 
         testrun_info = {
            "normal_duration": normal_time - start_time,
-            "overspecified_duration": overspecified_time - normal_time,
-            "copied_over_duration": additional_semantic_time
-            - overspecified_time,
-            "additional_semantic_duration": end_time - additional_semantic_time,
             "num_workers": self.num_workers,
             "num_total_testcases": self.input_model.metadata,
             "num_total_failed": num_total_failed,
diff --git a/acto/input/input.py b/acto/input/input.py
index f7271f28f0..c01253ee80 100644
--- a/acto/input/input.py
+++ b/acto/input/input.py
@@ -67,7 +67,6 @@ def set_worker_id(self, worker_id: int):
     @abc.abstractmethod
     def generate_test_plan(
         self,
-        delta_from: Optional[str] = None,
         focus_fields: Optional[list] = None,
     ) -> dict:
         """Generate test plan based on CRD"""
@@ -281,7 +280,6 @@ def set_worker_id(self, worker_id: int):
 
     def generate_test_plan(
         self,
-        delta_from: Optional[str] = None,
         focus_fields: Optional[list] = None,
     ) -> dict:
         """Generate test plan based on CRD"""
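
Note (not part of the patch): a self-contained sketch of the thread fan-out/join
pattern that Acto.run() keeps after this change. _FakeRunner and the plain
`errors` list below are stand-ins for TrialRunner and the engine's shared error
container, which are outside this diff.

import threading

class _FakeRunner:
    """Stand-in for TrialRunner; the real runner executes its slice of the test plan."""

    def __init__(self, worker_id: int) -> None:
        self.worker_id = worker_id

    def run(self, errors: list, mode: str) -> None:
        # Record which mode this worker ran; the real runner appends oracle results.
        errors.append(f"worker {self.worker_id} finished mode {mode}")

errors: list = []
runners = [_FakeRunner(i) for i in range(4)]

# One thread per runner, all driving the single remaining (NORMAL) mode.
threads = []
for runner in runners:
    t = threading.Thread(target=runner.run, args=[errors, "NORMAL"])
    t.start()
    threads.append(t)

# Join barrier before durations and failure counts are tallied.
for t in threads:
    t.join()

print(errors)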