Add evaluation scripts for experiments (#354)
* Add evaluation scripts for livecell experiments
anwai98 authored Jan 25, 2024
1 parent 7cbab83 commit e6b6e29
Showing 2 changed files with 224 additions and 0 deletions.
95 changes: 95 additions & 0 deletions finetuning/livecell/experiments/run_experiment_evaluation.py
@@ -0,0 +1,95 @@
import os
import re
import itertools
import subprocess


ROOT = "/scratch/usr/nimanwai"
EXPERIMENT_ROOT = "/scratch/projects/nim00007/sam/experiments/new_models/test/"

CMD = "python submit_experiment_evaluation.py "


def run_eval_process(cmd):
    """Run the evaluation command in a subprocess; terminate it if it does not finish within the timeout."""
    proc = subprocess.Popen(cmd)
    try:
        outs, errs = proc.communicate(timeout=60)
    except subprocess.TimeoutExpired:
        proc.terminate()
        outs, errs = proc.communicate()


def for_vit_t():
checkpoint = os.path.join(
ROOT, "experiments", "test", "micro-sam", "vit_t", "checkpoints", "vit_t", "livecell_sam", "best.pt"
)
experiment_folder = os.path.join(EXPERIMENT_ROOT, "vit_t")

cmd = CMD + "-m vit_t " + f"-c {checkpoint} " + f"-e {experiment_folder}"
print(f"Running the command: {cmd} \n")

_cmd = re.split(r"\s", cmd)

run_eval_process(_cmd)


def for_n_objects(max_objects=45):
ckpt_root = os.path.join(ROOT, "experiments", "micro-sam", "n_objects_per_batch")
exp_root = os.path.join(EXPERIMENT_ROOT, "n_objects_per_batch")
for i in range(1, max_objects+1):
checkpoint = os.path.join(ckpt_root, f"{i}", "checkpoints", "vit_b", "livecell_sam", "best.pt")
experiment_folder = os.path.join(exp_root, f"{i}")

cmd = CMD + "-m vit_b " + f"-c {checkpoint} " + f"-e {experiment_folder}"
print(f"Running the command: {cmd} \n")

_cmd = re.split(r"\s", cmd)

run_eval_process(_cmd)


def for_freezing_backbones():
ckpt_root = os.path.join(ROOT, "experiments", "micro-sam", "partial-finetuning")
exp_root = os.path.join(EXPERIMENT_ROOT, "partial-finetuning")

    # let's get all the combinations needed for the freezing-backbone experiments
backbone_combinations = ["image_encoder", "prompt_encoder", "mask_decoder"]

all_combinations = []
for i in range(len(backbone_combinations)):
_one_set = itertools.combinations(backbone_combinations, r=i)
for _per_combination in _one_set:
if len(_per_combination) == 0:
all_combinations.append(None)
else:
all_combinations.append(list(_per_combination))

for _setup in all_combinations:
if isinstance(_setup, list):
checkpoint = os.path.join(ckpt_root, "freeze-")
experiment_folder = os.path.join(exp_root, "freeze-")
for _name in _setup:
checkpoint += f"{_name}-"
experiment_folder += f"{_name}-"
checkpoint = checkpoint[:-1]
experiment_folder = experiment_folder[:-1]
else:
checkpoint = os.path.join(ckpt_root, f"freeze-{_setup}")
experiment_folder = os.path.join(exp_root, f"freeze-{_setup}")

checkpoint = os.path.join(checkpoint, "checkpoints", "vit_b", "livecell_sam", "best.pt")

cmd = CMD + "-m vit_b " + f"-c {checkpoint} " + f"-e {experiment_folder}"
print(f"Running the command: {cmd} \n")

_cmd = re.split(r"\s", cmd)

run_eval_process(_cmd)


def main():
    # NOTE: call the function for the set of experiments to evaluate
    # (for_vit_t, for_n_objects or for_freezing_backbones).
    for_freezing_backbones()


if __name__ == "__main__":
main()
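
For reference, a minimal sketch (not part of the commit) of the freeze setups that the itertools.combinations loop in for_freezing_backbones enumerates. It reproduces the loop above; note that r only runs up to 2, so the setup with all three parts frozen is never generated.

import itertools

backbone_combinations = ["image_encoder", "prompt_encoder", "mask_decoder"]

all_combinations = []
for i in range(len(backbone_combinations)):
    for _per_combination in itertools.combinations(backbone_combinations, r=i):
        all_combinations.append(list(_per_combination) if _per_combination else None)

print(all_combinations)
# [None, ['image_encoder'], ['prompt_encoder'], ['mask_decoder'],
#  ['image_encoder', 'prompt_encoder'], ['image_encoder', 'mask_decoder'],
#  ['prompt_encoder', 'mask_decoder']]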
129 changes: 129 additions & 0 deletions finetuning/livecell/experiments/submit_experiment_evaluation.py
@@ -0,0 +1,129 @@
import os
import re
import shutil
import argparse
import subprocess
from glob import glob
from datetime import datetime


def write_batch_script(env_name, out_path, inference_setup, checkpoint, model_type, experiment_folder, delay=None):
"""Writing scripts with different fold-trainings for micro-sam evaluation
"""
batch_script = f"""#!/bin/bash
#SBATCH -c 8
#SBATCH --mem 64G
#SBATCH -t 2-00:00:00
#SBATCH -p grete:shared
#SBATCH -G A100:1
#SBATCH -A gzz0001
#SBATCH --job-name={inference_setup}
source ~/.bashrc
mamba activate {env_name} \n"""

if delay is not None:
batch_script += f"sleep {delay} \n"

# python script
python_script = f"python ../evaluation/{inference_setup}.py "

_op = out_path[:-3] + f"_{inference_setup}.sh"

# add the finetuned checkpoint
python_script += f"-c {checkpoint} "

# name of the model configuration
python_script += f"-m {model_type} "

# experiment folder
python_script += f"-e {experiment_folder} "

# let's add the python script to the bash script
batch_script += python_script

with open(_op, "w") as f:
f.write(batch_script)

    # for iterative prompting, we run the evaluation twice:
    # once starting with a point prompt (script above) and once starting with a box prompt (script below)
if inference_setup == "iterative_prompting":
batch_script += "--box "

new_path = out_path[:-3] + f"_{inference_setup}_box.sh"
with open(new_path, "w") as f:
f.write(batch_script)


def get_batch_script_names(tmp_folder):
tmp_folder = os.path.expanduser(tmp_folder)
os.makedirs(tmp_folder, exist_ok=True)

script_name = "livecell-inference"

dt = datetime.now().strftime("%Y_%m_%d_%H_%M_%S_%f")
tmp_name = script_name + dt
batch_script = os.path.join(tmp_folder, f"{tmp_name}.sh")

return batch_script


def submit_slurm(model_type, checkpoint, experiment_folder):
"""Submit python script that needs gpus with given inputs on a slurm node.
"""
tmp_folder = "./gpu_jobs"

# parameters to run the inference scripts
environment_name = "sam"
make_delay = "1m" # wait for precomputing the embeddings and later run inference scripts

assert os.path.exists(checkpoint), checkpoint

# now let's run the experiments
all_setups = ["precompute_embeddings", "evaluate_amg", "evaluate_instance_segmentation", "iterative_prompting"]
for current_setup in all_setups:
write_batch_script(
env_name=environment_name,
out_path=get_batch_script_names(tmp_folder),
inference_setup=current_setup,
checkpoint=checkpoint,
model_type=model_type,
experiment_folder=experiment_folder,
delay=None if current_setup == "precompute_embeddings" else make_delay
)

# the logic below automates the process of first running the precomputation of embeddings, and only then inference.
job_id = []
for i, my_script in enumerate(sorted(glob(tmp_folder + "/*"))):
cmd = ["sbatch", my_script]

if i > 0:
cmd.insert(1, f"--dependency=afterany:{job_id[0]}")

cmd_out = subprocess.run(cmd, capture_output=True, text=True)
print(cmd_out.stdout if len(cmd_out.stdout) > 1 else cmd_out.stderr)

if i == 0:
job_id.append(re.findall(r'\d+', cmd_out.stdout)[0])


def main(args):
submit_slurm(
model_type=args.model_type,
checkpoint=args.checkpoint,
experiment_folder=args.experiment_folder
)


if __name__ == "__main__":
try:
shutil.rmtree("./gpu_jobs")
except FileNotFoundError:
pass

parser = argparse.ArgumentParser()
parser.add_argument("-m", "--model_type", type=str, required=True)
parser.add_argument("-c", "--checkpoint", type=str, required=True)
parser.add_argument("-e", "--experiment_folder", type=str, required=True)
args = parser.parse_args()

main(args)
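
For reference, a minimal usage sketch of this submission script; the paths below are placeholders and not taken from the commit. It mirrors how run_experiment_evaluation.py builds and runs the command.

import subprocess

cmd = [
    "python", "submit_experiment_evaluation.py",
    "-m", "vit_b",
    "-c", "/path/to/checkpoints/vit_b/livecell_sam/best.pt",  # hypothetical checkpoint path
    "-e", "/path/to/experiments/vit_b",  # hypothetical experiment folder
]
subprocess.run(cmd)

This writes a batch script per evaluation setup (plus a box-prompt variant for iterative prompting) into ./gpu_jobs and submits them with sbatch, chaining the inference jobs behind the embedding precomputation via --dependency.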
