From 53f65f756dbbcccf7fbbcb9ae8e4f8c664090a50 Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Wed, 15 Nov 2023 08:10:31 -0800 Subject: [PATCH 01/10] chore: ensure correct action is being used --- ot_analyze.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ot_analyze.py b/ot_analyze.py index 802c5b1..28d456c 100644 --- a/ot_analyze.py +++ b/ot_analyze.py @@ -110,6 +110,7 @@ def find_pd_protocols(directory: Path) -> List[Path]: def main(): repo_relative_path = Path(os.getenv("GITHUB_WORKSPACE"), os.getenv("INPUT_BASE_DIRECTORY")) + print("Hello World") print(f"Analyzing all protocol files in {repo_relative_path}") python_files = find_python_protocols(repo_relative_path) pd_files = find_pd_protocols(repo_relative_path) From d31d19e5f0a2b77a968c8b2cbd81910f900d0ccc Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Wed, 15 Nov 2023 08:25:47 -0800 Subject: [PATCH 02/10] chore: for development, build from Dockerfile instead of image --- action.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/action.yml b/action.yml index d54503f..04e1789 100644 --- a/action.yml +++ b/action.yml @@ -9,7 +9,8 @@ inputs: default: 'protocols' runs: using: 'docker' - image: 'docker://ghcr.io/y3rsh/ot-analyze:main' + # image: 'docker://ghcr.io/y3rsh/ot-analyze:main' + image: 'Dockerfile' # For development branding: icon: 'check-square' color: 'white' From 857729ab9a071e118fc6bfdd55a0a544a6fc23b6 Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Wed, 15 Nov 2023 09:07:50 -0800 Subject: [PATCH 03/10] feat: add output type chore: cleanup a bit chore: nicer output --- action.yml | 9 +++++++++ ot_analyze.py | 22 +++++++++++++++++++++- 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/action.yml b/action.yml index 04e1789..47baacb 100644 --- a/action.yml +++ b/action.yml @@ -7,6 +7,15 @@ inputs: description: 'relative path in the repository of the root folder in which to search for to search for protocols' required: false default: 'protocols' + OUTPUT_TYPE: + type: choice + description: 'type of output the action generates' + required: false + default: 'none' + options: + - 'none' + - 'zip' + - 'markdown' runs: using: 'docker' # image: 'docker://ghcr.io/y3rsh/ot-analyze:main' diff --git a/ot_analyze.py b/ot_analyze.py index 28d456c..4a86fb9 100644 --- a/ot_analyze.py +++ b/ot_analyze.py @@ -3,12 +3,19 @@ import subprocess import time from concurrent.futures import ThreadPoolExecutor, as_completed +from enum import Enum, auto from pathlib import Path from typing import List from write_failed_analysis import write_failed_analysis +class OutputType(Enum): + NONE = auto() + ZIP = auto() + MARKDOWN = auto() + + def generate_analysis_path(protocol_file: Path) -> Path: """ Takes a Path to a protocol file and returns a Path to the analysis file that @@ -107,10 +114,23 @@ def find_pd_protocols(directory: Path) -> List[Path]: filtered_json_files = [file for file in json_files if has_designer_application(file)] return filtered_json_files +def get_output_type() -> OutputType: + """Get the output type from the environment variable OUTPUT_TYPE""" + if os.getenv("OUTPUT_TYPE") == "markdown": + output_type = OutputType.MARKDOWN + elif os.getenv("OUTPUT_TYPE") == "zip": + output_type = OutputType.ZIP + elif os.getenv("OUTPUT_TYPE") == "none": + output_type = OutputType.NONE + else: + print(f'Invalid OUTPUT_TYPE: {os.getenv("OUTPUT_TYPE")}. 
Defaulting to "none"') + output_type = OutputType.NONE + return output_type + def main(): repo_relative_path = Path(os.getenv("GITHUB_WORKSPACE"), os.getenv("INPUT_BASE_DIRECTORY")) - print("Hello World") + print(f"Using output type: {get_output_type().name.capitalize()}") print(f"Analyzing all protocol files in {repo_relative_path}") python_files = find_python_protocols(repo_relative_path) pd_files = find_pd_protocols(repo_relative_path) From 27c02fdaa9218884f0047b31e034e13ee9b1e267 Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Wed, 15 Nov 2023 12:35:44 -0800 Subject: [PATCH 04/10] feat: add zip method chore: it has been a while since I wrote code fix: fix zipfile method that was resaving the zipfile chore: no idea how this passed linting chore: use shutil.make_archive chore: rename as the file gets moved chore: this is getting annoying chore: ugh chore: needs to be an absolute path chore: an absolute path --- ot_analyze.py | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/ot_analyze.py b/ot_analyze.py index 4a86fb9..c0ba72e 100644 --- a/ot_analyze.py +++ b/ot_analyze.py @@ -1,6 +1,8 @@ import json import os +import shutil import subprocess +import tempfile import time from concurrent.futures import ThreadPoolExecutor, as_completed from enum import Enum, auto @@ -10,6 +12,9 @@ from write_failed_analysis import write_failed_analysis +ZIP_FILE_BASENAME = "protocols_and_analyses" + + class OutputType(Enum): NONE = auto() ZIP = auto() @@ -128,15 +133,31 @@ def get_output_type() -> OutputType: return output_type + +def create_zip(directory_path: Path): + absolute_directory_path = directory_path.absolute() + try: + archive_name = shutil.make_archive(ZIP_FILE_BASENAME, 'zip', absolute_directory_path, absolute_directory_path) + print(f"Zipfile created and saved to: {absolute_directory_path / archive_name}") + + except Exception as e: + print(f"Error: {e}") + + def main(): repo_relative_path = Path(os.getenv("GITHUB_WORKSPACE"), os.getenv("INPUT_BASE_DIRECTORY")) - print(f"Using output type: {get_output_type().name.capitalize()}") + output_type = get_output_type() + print(f"Using output type: {output_type.name.lower()}") print(f"Analyzing all protocol files in {repo_relative_path}") python_files = find_python_protocols(repo_relative_path) pd_files = find_pd_protocols(repo_relative_path) all_protocol_files = python_files + pd_files run_analyze_in_parallel(all_protocol_files) + if output_type == OutputType.ZIP: + create_zip(repo_relative_path) + + if __name__ == "__main__": main() From a9d302df514b5ca94b6d143c3ac2aa16283dfbd6 Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Thu, 16 Nov 2023 07:56:36 -0800 Subject: [PATCH 05/10] chore: rework paths to be grouped together chore: remove prints and fix imports --- ot_analyze.py | 80 +++++++++++++++++++++++++------------------ tests/test_analyze.py | 32 ++++++++++------- 2 files changed, 65 insertions(+), 47 deletions(-) diff --git a/ot_analyze.py b/ot_analyze.py index c0ba72e..a6dc029 100644 --- a/ot_analyze.py +++ b/ot_analyze.py @@ -2,16 +2,15 @@ import os import shutil import subprocess -import tempfile import time from concurrent.futures import ThreadPoolExecutor, as_completed +from dataclasses import dataclass from enum import Enum, auto from pathlib import Path from typing import List from write_failed_analysis import write_failed_analysis - ZIP_FILE_BASENAME = "protocols_and_analyses" @@ -21,6 +20,12 @@ class OutputType(Enum): MARKDOWN = auto() +@dataclass +class ProtocolPaths: + protocol_file: Path + 
analysis_file: Path + + def generate_analysis_path(protocol_file: Path) -> Path: """ Takes a Path to a protocol file and returns a Path to the analysis file that @@ -32,14 +37,13 @@ def generate_analysis_path(protocol_file: Path) -> Path: return Path(protocol_file.parent, f"{protocol_file.stem}_analysis.json") -def analyze(protocol_file: Path): +def analyze(protocol_path: ProtocolPaths) -> float: start_time = time.time() # Start timing - analysis_file = generate_analysis_path(protocol_file) - custom_labware_directory = Path(protocol_file.parent, "custom_labware") + custom_labware_directory = Path(protocol_path.protocol_file.parent, "custom_labware") custom_labware = [] # PD protocols contain their own custom labware - if custom_labware_directory.is_dir() and protocol_file.suffix == ".py": + if custom_labware_directory.is_dir() and protocol_path.protocol_file.suffix == ".py": custom_labware = [ os.path.join(custom_labware_directory, file) for file in os.listdir(custom_labware_directory) if file.endswith(".json") ] @@ -51,23 +55,23 @@ def analyze(protocol_file: Path): "opentrons.cli", "analyze", "--json-output", - analysis_file, - protocol_file, + protocol_path.analysis_file, + protocol_path.protocol_file, ] + custom_labware try: subprocess.run(command, capture_output=True, text=True, check=True) except Exception as e: - print(f"Error in analysis of {protocol_file}") - write_failed_analysis(analysis_file, e) + print(f"Error in analysis of {protocol_path.protocol_file}") + write_failed_analysis(protocol_path.analysis_file, e) end_time = time.time() return end_time - start_time end_time = time.time() elapsed_time = end_time - start_time - print(f"Successful analysis of {protocol_file} completed in {elapsed_time:.2f} seconds") + print(f"Successful analysis of {protocol_path.protocol_file} completed in {elapsed_time:.2f} seconds") return elapsed_time -def run_analyze_in_parallel(protocol_files: List[Path]): +def run_analyze_in_parallel(protocol_files: List[ProtocolPaths]): start_time = time.time() with ThreadPoolExecutor() as executor: futures = [executor.submit(analyze, file) for file in protocol_files] @@ -86,16 +90,7 @@ def run_analyze_in_parallel(protocol_files: List[Path]): ) -def find_python_protocols(directory: Path) -> List[Path]: - # Check if the provided path is a valid directory - - if not directory.is_dir(): - raise NotADirectoryError(f"The path {directory} is not a valid directory.") - # Recursively find all .py files - python_files = list(directory.rglob("*.py")) - # TODO: shallow test that they are valid protocol files - return python_files def has_designer_application(json_file_path): @@ -109,15 +104,34 @@ def has_designer_application(json_file_path): return False -def find_pd_protocols(directory: Path) -> List[Path]: - # Check if the provided path is a valid directory - if not directory.is_dir(): - raise NotADirectoryError(f"The path {directory} is not a valid directory.") - # Recursively find all .json files - json_files = list(directory.rglob("*.json")) - filtered_json_files = [file for file in json_files if has_designer_application(file)] - return filtered_json_files + +def find_protocol_paths(repo_relative_path: Path) -> List[ProtocolPaths]: + def find_pd_protocols(directory: Path) -> List[Path]: + # Check if the provided path is a valid directory + if not directory.is_dir(): + raise NotADirectoryError(f"The path {directory} is not a valid directory.") + + # Recursively find all .json files + json_files = list(directory.rglob("*.json")) + filtered_json_files = [file for file 
in json_files if has_designer_application(file)] + + return filtered_json_files + + def find_python_protocols(directory: Path) -> List[Path]: + # Check if the provided path is a valid directory + if not directory.is_dir(): + raise NotADirectoryError(f"The path {directory} is not a valid directory.") + + # Recursively find all .py files + python_files = list(directory.rglob("*.py")) + # TODO: shallow test that they are valid protocol files + return python_files + return [ + ProtocolPaths(protocol_file, generate_analysis_path(protocol_file)) + for protocol_file + in find_python_protocols(repo_relative_path) + find_pd_protocols(repo_relative_path) + ] def get_output_type() -> OutputType: """Get the output type from the environment variable OUTPUT_TYPE""" @@ -149,15 +163,13 @@ def main(): output_type = get_output_type() print(f"Using output type: {output_type.name.lower()}") print(f"Analyzing all protocol files in {repo_relative_path}") - python_files = find_python_protocols(repo_relative_path) - pd_files = find_pd_protocols(repo_relative_path) - all_protocol_files = python_files + pd_files - run_analyze_in_parallel(all_protocol_files) + protocol_paths = find_protocol_paths(repo_relative_path) + run_analyze_in_parallel(protocol_paths) if output_type == OutputType.ZIP: create_zip(repo_relative_path) - if __name__ == "__main__": main() + diff --git a/tests/test_analyze.py b/tests/test_analyze.py index 404f660..02ece22 100644 --- a/tests/test_analyze.py +++ b/tests/test_analyze.py @@ -1,7 +1,7 @@ import json from pathlib import Path -from ot_analyze import analyze, generate_analysis_path +from ot_analyze import analyze, generate_analysis_path, ProtocolPaths import test_data.data as td @@ -27,30 +27,36 @@ def check_errors_in_analysis(analysis: Path): def test_analyze_ot2_positive(): - analyze(td.POSITIVE_OT2) - check_no_errors_in_analysis(generate_analysis_path(td.POSITIVE_OT2)) + protocol_path = ProtocolPaths(td.POSITIVE_OT2, generate_analysis_path(td.POSITIVE_OT2)) + analyze(protocol_path) + check_no_errors_in_analysis(protocol_path.analysis_file) def test_analyze_flex_positive(): - analyze(td.POSITIVE_FLEX) - check_no_errors_in_analysis(generate_analysis_path(td.POSITIVE_FLEX)) + protocol_path = ProtocolPaths(td.POSITIVE_FLEX, generate_analysis_path(td.POSITIVE_FLEX)) + analyze(protocol_path) + check_no_errors_in_analysis(protocol_path.analysis_file) def test_analyze_ot2_negative(): - analyze(td.ERROR) - check_errors_in_analysis(generate_analysis_path(td.ERROR)) + protocol_path = ProtocolPaths(td.ERROR, generate_analysis_path(td.ERROR)) + analyze(protocol_path) + check_errors_in_analysis(protocol_path.analysis_file) def test_analyze_flex_negative(): - analyze(td.FLEX_ERROR) - check_errors_in_analysis(generate_analysis_path(td.FLEX_ERROR)) + protocol_path = ProtocolPaths(td.FLEX_ERROR, generate_analysis_path(td.FLEX_ERROR)) + analyze(protocol_path) + check_errors_in_analysis(protocol_path.analysis_file) def test_analyze_json_positive(): - analyze(td.JSON_POSITIVE) - check_no_errors_in_analysis(generate_analysis_path(td.JSON_POSITIVE)) + protocol_path = ProtocolPaths(td.JSON_POSITIVE, generate_analysis_path(td.JSON_POSITIVE)) + analyze(protocol_path) + check_no_errors_in_analysis(protocol_path.analysis_file) def test_analyze_json_error(): - analyze(td.JSON_ERROR) - check_errors_in_analysis(generate_analysis_path(td.JSON_ERROR)) + protocol_path = ProtocolPaths(td.JSON_ERROR, generate_analysis_path(td.JSON_ERROR)) + analyze(protocol_path) + check_errors_in_analysis(protocol_path.analysis_file) From 
169f9dc4380a0611c0f75dab80776530da9f8a0c Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Thu, 16 Nov 2023 11:07:02 -0800 Subject: [PATCH 06/10] feat: Add temp env vars to be able to execute locally --- ot_analyze.py | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/ot_analyze.py b/ot_analyze.py index a6dc029..f97d338 100644 --- a/ot_analyze.py +++ b/ot_analyze.py @@ -171,5 +171,27 @@ def main(): if __name__ == "__main__": - main() + import contextlib + + @contextlib.contextmanager + def set_env(**environ: Dict[str, str]) -> Iterator[None]: + old_environ = dict(os.environ) + os.environ.update(environ) + try: + yield + finally: + os.environ.clear() + os.environ.update(old_environ) + + environ_vars_to_add = {} + if os.getenv("GITHUB_WORKSPACE") is None: + environ_vars_to_add["GITHUB_WORKSPACE"] = str(Path(__file__).parent.absolute()) + + if os.getenv("INPUT_BASE_DIRECTORY") is None: + environ_vars_to_add["INPUT_BASE_DIRECTORY"] = "../ot-analyze-test/protocols" + if len(environ_vars_to_add) > 0: + print(f"Running with the following temporary environment variables: {environ_vars_to_add}") + + with set_env(**environ_vars_to_add): + main() From 04fdb350767da4f85b9a6e7378da34f97b538411 Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Thu, 16 Nov 2023 11:09:39 -0800 Subject: [PATCH 07/10] chore: remove output type paradigm --- ot_analyze.py | 38 ++++++++------------------------------ 1 file changed, 8 insertions(+), 30 deletions(-) diff --git a/ot_analyze.py b/ot_analyze.py index f97d338..0590b30 100644 --- a/ot_analyze.py +++ b/ot_analyze.py @@ -7,17 +7,17 @@ from dataclasses import dataclass from enum import Enum, auto from pathlib import Path -from typing import List +from typing import Any, Dict, Iterator, List from write_failed_analysis import write_failed_analysis -ZIP_FILE_BASENAME = "protocols_and_analyses" +FILE_BASENAME = "protocols_and_analyses" -class OutputType(Enum): - NONE = auto() - ZIP = auto() - MARKDOWN = auto() +class ProtocolType(Enum): + PROTOCOL_DESIGNER = auto() + PYTHON = auto() + @dataclass @@ -90,9 +90,6 @@ def run_analyze_in_parallel(protocol_files: List[ProtocolPaths]): ) - - - def has_designer_application(json_file_path): try: with open(json_file_path, "r", encoding="utf-8") as file: @@ -133,25 +130,10 @@ def find_python_protocols(directory: Path) -> List[Path]: in find_python_protocols(repo_relative_path) + find_pd_protocols(repo_relative_path) ] -def get_output_type() -> OutputType: - """Get the output type from the environment variable OUTPUT_TYPE""" - if os.getenv("OUTPUT_TYPE") == "markdown": - output_type = OutputType.MARKDOWN - elif os.getenv("OUTPUT_TYPE") == "zip": - output_type = OutputType.ZIP - elif os.getenv("OUTPUT_TYPE") == "none": - output_type = OutputType.NONE - else: - print(f'Invalid OUTPUT_TYPE: {os.getenv("OUTPUT_TYPE")}. 
Defaulting to "none"') - output_type = OutputType.NONE - return output_type - - - def create_zip(directory_path: Path): absolute_directory_path = directory_path.absolute() try: - archive_name = shutil.make_archive(ZIP_FILE_BASENAME, 'zip', absolute_directory_path, absolute_directory_path) + archive_name = shutil.make_archive(FILE_BASENAME, 'zip', absolute_directory_path, absolute_directory_path) print(f"Zipfile created and saved to: {absolute_directory_path / archive_name}") except Exception as e: @@ -160,14 +142,10 @@ def create_zip(directory_path: Path): def main(): repo_relative_path = Path(os.getenv("GITHUB_WORKSPACE"), os.getenv("INPUT_BASE_DIRECTORY")) - output_type = get_output_type() - print(f"Using output type: {output_type.name.lower()}") print(f"Analyzing all protocol files in {repo_relative_path}") protocol_paths = find_protocol_paths(repo_relative_path) run_analyze_in_parallel(protocol_paths) - - if output_type == OutputType.ZIP: - create_zip(repo_relative_path) + create_zip(repo_relative_path) if __name__ == "__main__": From d73a22eb98461baa17561f2eb92592a714774cba Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Thu, 16 Nov 2023 11:10:03 -0800 Subject: [PATCH 08/10] feat: Add markdown output --- ot_analyze.py | 110 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 110 insertions(+) diff --git a/ot_analyze.py b/ot_analyze.py index 0590b30..ff254ad 100644 --- a/ot_analyze.py +++ b/ot_analyze.py @@ -1,5 +1,6 @@ import json import os +import pprint import shutil import subprocess import time @@ -13,17 +14,91 @@ FILE_BASENAME = "protocols_and_analyses" +SUCCESS_COLOR = "#7beb73" +FAILURE_COLOR = "#eb7373" + +MARKDOWN_TEMPLATE = """ +# OT-Analyze Test Results + +## Result Breakdown + +Below is a list of protocols and their analysis results. + +{results} + +""" + +ANALYSIS_ERROR_TEMPLATE = """ + +Analysis Error: {analysis_error} + +""" + +RESULTS_TEMPLATE = """ +
+<details>
+<summary style="color: {summary_color}">{protocol_file_name} [ {pass_or_fail} ]</summary>
+
+Protocol Type: {protocol_type}
+
+Analysis Execution Time: {execution_time:.2f} seconds
+
+{analysis_error}
+
+</details>
+""" + class ProtocolType(Enum): PROTOCOL_DESIGNER = auto() PYTHON = auto() +class AnalysisResult(Enum): + PASS = auto() + FAIL = auto() @dataclass class ProtocolPaths: protocol_file: Path analysis_file: Path + analysis_execution_time: float | None = None + + @property + def _analysis_file_content(self) -> Dict[str, Any]: + with open(self.analysis_file.absolute(), "r") as file: + return json.load(file) + + @property + def _analysis_success(self) -> bool: + return self._analysis_file_content["errors"] == [] + + @property + def analysis_error(self) -> str: + return self._analysis_file_content["errors"] + + @property + def analysis_result(self) -> AnalysisResult: + return ( + AnalysisResult.PASS + if self._analysis_success + else AnalysisResult.FAIL + ) + + @property + def protocol_file_name(self) -> str: + return self.protocol_file.name + + @property + def protocol_type(self) -> str: + return ( + ProtocolType.PYTHON + if self.protocol_file.suffix == ".py" + else ProtocolType.PROTOCOL_DESIGNER + ).name.title() + + def set_analysis_execution_time(self, analysis_execution_time: float) -> None: + self.analysis_execution_time = analysis_execution_time def generate_analysis_path(protocol_file: Path) -> Path: @@ -64,9 +139,11 @@ def analyze(protocol_path: ProtocolPaths) -> float: print(f"Error in analysis of {protocol_path.protocol_file}") write_failed_analysis(protocol_path.analysis_file, e) end_time = time.time() + protocol_path.set_analysis_execution_time(end_time) return end_time - start_time end_time = time.time() elapsed_time = end_time - start_time + protocol_path.set_analysis_execution_time(elapsed_time) print(f"Successful analysis of {protocol_path.protocol_file} completed in {elapsed_time:.2f} seconds") return elapsed_time @@ -139,6 +216,38 @@ def create_zip(directory_path: Path): except Exception as e: print(f"Error: {e}") +def create_markdown(protocol_paths: List[ProtocolPaths]) -> None: + def generate_result(protocol_path: ProtocolPaths) -> str: + if protocol_path.analysis_result == AnalysisResult.PASS: + summary_color = SUCCESS_COLOR + analysis_error = "" + else: + summary_color = FAILURE_COLOR + analysis_error = ANALYSIS_ERROR_TEMPLATE.format( + analysis_error="\n".join( + error["detail"] + for error + in protocol_path.analysis_error + ) + ) + + return RESULTS_TEMPLATE.format( + protocol_file_name=protocol_path.protocol_file_name, + protocol_type=protocol_path.protocol_type, + summary_color=summary_color, + pass_or_fail=protocol_path.analysis_result.name.upper(), + analysis_error=analysis_error, + execution_time=protocol_path.analysis_execution_time, + ) + markdown_content = MARKDOWN_TEMPLATE.format( + results="\n".join([generate_result(protocol_path) for protocol_path in protocol_paths]), + ) + markdown_file_name = f"{FILE_BASENAME}.md" + absolute_directory_path = Path.cwd() + with open(FILE_BASENAME + ".md", "w") as file: + file.write(markdown_content) + print(f"Markdown file created and saved to: {absolute_directory_path / markdown_file_name}") + def main(): repo_relative_path = Path(os.getenv("GITHUB_WORKSPACE"), os.getenv("INPUT_BASE_DIRECTORY")) @@ -146,6 +255,7 @@ def main(): protocol_paths = find_protocol_paths(repo_relative_path) run_analyze_in_parallel(protocol_paths) create_zip(repo_relative_path) + create_markdown(protocol_paths) if __name__ == "__main__": From 08b08c0aff39c49bdc58ab5ce3e32d72aa64a5b5 Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Thu, 16 Nov 2023 11:30:05 -0800 Subject: [PATCH 09/10] chore: cleanup chore: Don't check in generated files chore: remove 
OUTPUT_TYPE env var chore: rename uploaded files chore: sort inputs --- .gitignore | 4 ++++ action.yml | 10 +--------- ot_analyze.py | 9 +++++---- tests/test_analyze.py | 2 +- 4 files changed, 11 insertions(+), 14 deletions(-) diff --git a/.gitignore b/.gitignore index 6cf8934..cca30d0 100644 --- a/.gitignore +++ b/.gitignore @@ -163,3 +163,7 @@ tests/test_data/**/*analysis.json test-results.xml report.html result/ + +# Generated Report Files +protocols_and_analyses.md +protocols_and_analyses.zip \ No newline at end of file diff --git a/action.yml b/action.yml index 47baacb..e3b0356 100644 --- a/action.yml +++ b/action.yml @@ -7,15 +7,7 @@ inputs: description: 'relative path in the repository of the root folder in which to search for to search for protocols' required: false default: 'protocols' - OUTPUT_TYPE: - type: choice - description: 'type of output the action generates' - required: false - default: 'none' - options: - - 'none' - - 'zip' - - 'markdown' + type: string runs: using: 'docker' # image: 'docker://ghcr.io/y3rsh/ot-analyze:main' diff --git a/ot_analyze.py b/ot_analyze.py index ff254ad..6ba7eb2 100644 --- a/ot_analyze.py +++ b/ot_analyze.py @@ -12,7 +12,8 @@ from write_failed_analysis import write_failed_analysis -FILE_BASENAME = "protocols_and_analyses" +ZIP_FILE_BASENAME = "protocols_and_analyses" +MARKDOWN_FILE_BASENAME = "summary" SUCCESS_COLOR = "#7beb73" FAILURE_COLOR = "#eb7373" @@ -210,7 +211,7 @@ def find_python_protocols(directory: Path) -> List[Path]: def create_zip(directory_path: Path): absolute_directory_path = directory_path.absolute() try: - archive_name = shutil.make_archive(FILE_BASENAME, 'zip', absolute_directory_path, absolute_directory_path) + archive_name = shutil.make_archive(ZIP_FILE_BASENAME, 'zip', absolute_directory_path, absolute_directory_path) print(f"Zipfile created and saved to: {absolute_directory_path / archive_name}") except Exception as e: @@ -242,9 +243,9 @@ def generate_result(protocol_path: ProtocolPaths) -> str: markdown_content = MARKDOWN_TEMPLATE.format( results="\n".join([generate_result(protocol_path) for protocol_path in protocol_paths]), ) - markdown_file_name = f"{FILE_BASENAME}.md" + markdown_file_name = f"{MARKDOWN_FILE_BASENAME}.md" absolute_directory_path = Path.cwd() - with open(FILE_BASENAME + ".md", "w") as file: + with open(markdown_file_name, "w") as file: file.write(markdown_content) print(f"Markdown file created and saved to: {absolute_directory_path / markdown_file_name}") diff --git a/tests/test_analyze.py b/tests/test_analyze.py index 02ece22..32dc623 100644 --- a/tests/test_analyze.py +++ b/tests/test_analyze.py @@ -1,7 +1,7 @@ import json from pathlib import Path -from ot_analyze import analyze, generate_analysis_path, ProtocolPaths +from ot_analyze import ProtocolPaths, analyze, generate_analysis_path import test_data.data as td From 03ad2b23b7e629cdc3c6ca7033d09211fc79cddb Mon Sep 17 00:00:00 2001 From: Josh McVey Date: Tue, 28 Nov 2023 07:21:44 -0600 Subject: [PATCH 10/10] Update action.yml --- action.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/action.yml b/action.yml index e3b0356..4fb7ad3 100644 --- a/action.yml +++ b/action.yml @@ -10,8 +10,7 @@ inputs: type: string runs: using: 'docker' - # image: 'docker://ghcr.io/y3rsh/ot-analyze:main' - image: 'Dockerfile' # For development + image: 'docker://ghcr.io/y3rsh/ot-analyze:main' branding: icon: 'check-square' color: 'white'
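
Usage sketch: the patches above define a Docker container action whose only input is BASE_DIRECTORY (default 'protocols'), read by ot_analyze.py as INPUT_BASE_DIRECTORY alongside GITHUB_WORKSPACE. A minimal workflow for invoking it might look like the following; the repository reference y3rsh/ot-analyze@main is an assumption inferred from the ghcr.io/y3rsh/ot-analyze image name, and the workflow file name is hypothetical.

# .github/workflows/analyze-protocols.yml (hypothetical)
name: analyze-protocols
on: [push]
jobs:
  analyze:
    runs-on: ubuntu-latest
    steps:
      # Check out the repository so the action can find protocol files.
      - uses: actions/checkout@v4
      - name: Analyze Opentrons protocols
        # Assumed action location; point this at wherever the action actually lives.
        uses: y3rsh/ot-analyze@main
        with:
          BASE_DIRECTORY: protocols  # optional; defaults to 'protocols'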