
Commit
Remove deadcode
Signed-off-by: Tyler Gu <[email protected]>
tylergu committed Sep 5, 2024
1 parent f7562e6 commit 0255749
Showing 3 changed files with 14 additions and 77 deletions.
4 changes: 1 addition & 3 deletions acto/__main__.py
@@ -137,7 +137,6 @@
workdir_path=args.workdir_path,
operator_config=config,
cluster_runtime="KIND",
preload_images_=None,
context_file=context_cache,
helper_crd=args.helper_crd,
num_workers=args.num_workers,
@@ -147,13 +146,12 @@
is_reproduce=False,
input_model=DeterministicInputModel,
apply_testcase_f=apply_testcase_f,
delta_from=None,
focus_fields=config.focus_fields,
)
generation_time = datetime.now()
logger.info("Acto initialization finished in %s", generation_time - start_time)
if not args.learn:
acto.run(modes=["normal"])
acto.run()
normal_finish_time = datetime.now()
logger.info("Acto normal run finished in %s", normal_finish_time - start_time)
logger.info("Start post processing steps")
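For orientation, a hedged sketch of what the call site in acto/__main__.py looks like once this commit is applied, assuming the keyword arguments shown in the hunks above are passed to the Acto engine defined in acto/engine.py; names and arguments not visible in the diff are illustrative only, and the collapsed region of the hunk is elided:

    # Sketch of the trimmed call site; only arguments visible in the diff above
    # come from the source, everything else is an assumption.
    acto = Acto(
        workdir_path=args.workdir_path,
        operator_config=config,
        cluster_runtime="KIND",
        context_file=context_cache,            # preload_images_=None removed
        helper_crd=args.helper_crd,
        num_workers=args.num_workers,
        # ... arguments hidden by the collapsed diff region elided ...
        is_reproduce=False,
        input_model=DeterministicInputModel,
        apply_testcase_f=apply_testcase_f,     # delta_from=None removed
        focus_fields=config.focus_fields,
    )

    if not args.learn:
        acto.run()                             # modes=["normal"] argument dropped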
85 changes: 13 additions & 72 deletions acto/engine.py
@@ -751,7 +751,6 @@ def __init__(
workdir_path: str,
operator_config: OperatorConfig,
cluster_runtime: str,
preload_images_: Optional[list],
context_file: str,
helper_crd: Optional[str],
num_workers: int,
@@ -761,7 +760,6 @@ def __init__(
is_reproduce: bool,
input_model: type[DeterministicInputModel],
apply_testcase_f: Callable,
delta_from: Optional[str] = None,
mount: Optional[list] = None,
focus_fields: Optional[list] = None,
acto_namespace: int = 0,
@@ -820,10 +818,6 @@ def __init__(
analysis_only=analysis_only,
)

# Add additional preload images from arguments
if preload_images_ is not None:
self.context["preload_images"].update(preload_images_)

self.input_model: DeterministicInputModel = input_model(
crd=self.context["crd"]["body"],
seed_input=self.seed,
@@ -835,7 +829,7 @@ def __init__(
custom_module_path=operator_config.custom_module,
)

self.sequence_base = 20 if delta_from else 0
self.sequence_base = 0

if operator_config.custom_oracle is not None:
module = importlib.import_module(operator_config.custom_oracle)
@@ -846,11 +840,8 @@ def __init__(
self.custom_on_init = None

# Generate test cases
testplan_path = None
if delta_from is not None:
testplan_path = os.path.join(delta_from, "test_plan.json")
self.test_plan = self.input_model.generate_test_plan(
testplan_path, focus_fields=focus_fields
focus_fields=focus_fields
)
with open(
os.path.join(self.workdir_path, "test_plan.json"),
@@ -949,7 +940,7 @@ def __learn(self, context_file, helper_crd, analysis_only=False):
)
if not deployed:
raise RuntimeError(
f"Failed to deploy operator due to max retry exceed"
"Failed to deploy operator due to max retry exceed"
)

apiclient = kubernetes_client(learn_kubeconfig, learn_context_name)
@@ -1037,9 +1028,7 @@ def __learn(self, context_file, helper_crd, analysis_only=False):
sort_keys=True,
)

def run(
self, modes: list = ["normal", "overspecified", "copiedover"]
) -> list[OracleResults]:
def run(self) -> list[OracleResults]:
"""Run the test cases"""
logger = get_thread_logger(with_prefix=True)

@@ -1087,74 +1076,26 @@ def run(
)
runners.append(runner)

if "normal" in modes:
threads = []
for runner in runners:
t = threading.Thread(
target=runner.run, args=[errors, InputModel.NORMAL]
)
t.start()
threads.append(t)
threads = []
for runner in runners:
t = threading.Thread(
target=runner.run, args=[errors, InputModel.NORMAL]
)
t.start()
threads.append(t)

for t in threads:
t.join()
for t in threads:
t.join()

normal_time = time.time()

if "overspecified" in modes:
threads = []
for runner in runners:
t = threading.Thread(
target=runner.run, args=([errors, InputModel.OVERSPECIFIED])
)
t.start()
threads.append(t)

for t in threads:
t.join()

overspecified_time = time.time()

if "copiedover" in modes:
threads = []
for runner in runners:
t = threading.Thread(
target=runner.run, args=([errors, InputModel.COPIED_OVER])
)
t.start()
threads.append(t)

for t in threads:
t.join()

additional_semantic_time = time.time()

if InputModel.ADDITIONAL_SEMANTIC in modes:
threads = []
for runner in runners:
t = threading.Thread(
target=runner.run,
args=([errors, InputModel.ADDITIONAL_SEMANTIC]),
)
t.start()
threads.append(t)

for t in threads:
t.join()

end_time = time.time()

num_total_failed = 0
for runner in runners:
for testcases in runner.discarded_testcases.values():
num_total_failed += len(testcases)

testrun_info = {
"normal_duration": normal_time - start_time,
"overspecified_duration": overspecified_time - normal_time,
"copied_over_duration": additional_semantic_time
- overspecified_time,
"additional_semantic_duration": end_time - additional_semantic_time,
"num_workers": self.num_workers,
"num_total_testcases": self.input_model.metadata,
"num_total_failed": num_total_failed,
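After this change Acto.run() keeps only the normal-mode fan-out: one thread per runner is started and then every thread is joined. A small self-contained sketch of that pattern, using threading the same way the added lines above do; the fan_out_and_join name and the EchoRunner class are stand-ins for illustration, not Acto's real types:

    import threading

    def fan_out_and_join(runners, errors, mode):
        """Start one thread per runner, then wait for all of them,
        mirroring the simplified Acto.run() loop."""
        threads = []
        for runner in runners:
            t = threading.Thread(target=runner.run, args=[errors, mode])
            t.start()
            threads.append(t)
        for t in threads:
            t.join()

    # Minimal stand-in runner so the sketch runs on its own.
    class EchoRunner:
        def __init__(self, name):
            self.name = name

        def run(self, errors, mode):
            print(f"{self.name} finished mode {mode}")

    fan_out_and_join(
        [EchoRunner("runner-0"), EchoRunner("runner-1")], errors=[], mode="NORMAL"
    )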
2 changes: 0 additions & 2 deletions acto/input/input.py
@@ -67,7 +67,6 @@ def set_worker_id(self, worker_id: int):
@abc.abstractmethod
def generate_test_plan(
self,
delta_from: Optional[str] = None,
focus_fields: Optional[list] = None,
) -> dict:
"""Generate test plan based on CRD"""
Expand Down Expand Up @@ -281,7 +280,6 @@ def set_worker_id(self, worker_id: int):

def generate_test_plan(
self,
delta_from: Optional[str] = None,
focus_fields: Optional[list] = None,
) -> dict:
"""Generate test plan based on CRD"""
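With delta_from removed, generate_test_plan is driven only by focus_fields. A hedged sketch of the resulting abstract signature and the engine-side call, assembled from the hunks above; the enclosing class name and the imports are assumptions, not taken from the diff:

    import abc
    from typing import Optional

    class InputModel(abc.ABC):  # enclosing class name assumed for illustration
        @abc.abstractmethod
        def generate_test_plan(
            self,
            focus_fields: Optional[list] = None,
        ) -> dict:
            """Generate test plan based on CRD"""

    # Engine side, per the @@ -846,11 +840,8 @@ hunk in acto/engine.py:
    # self.test_plan = self.input_model.generate_test_plan(
    #     focus_fields=focus_fields
    # )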
